diff --git a/README.md b/README.md
index 7ac2151c465841c9ab2b59eb35abf7bac843de6b..b799fdee99e91db860c7b9248096b18ccfe4941f 100644
--- a/README.md
+++ b/README.md
@@ -50,16 +50,7 @@
 More details can be found in our paper.
 
 If you use LibMoE, please cite it using this BibTeX:
 
 ```
-@misc{nguyen2024libmoelibrarycomprehensivebenchmarking,
-      title={LIBMoE: A Library for comprehensive benchmarking Mixture of Experts in Large Language Models},
-      author={Nam V. Nguyen and Thong T. Doan and Luong Tran and Van Nguyen and Quang Pham},
-      year={2024},
-      eprint={2411.00918},
-      archivePrefix={arXiv},
-      primaryClass={cs.CL},
-      url={https://arxiv.org/abs/2411.00918},
-}
 ```
 
 ---
diff --git a/sft/hyperrouter/added_tokens.json b/sft/hyperrouter/added_tokens.json
deleted file mode 100644
index c9d3d3a1b74d87e381e471f7b33784015d2dc0ea..0000000000000000000000000000000000000000
--- a/sft/hyperrouter/added_tokens.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "<|assistant|>": 32001,
-  "<|endoftext|>": 32000,
-  "<|end|>": 32007,
-  "<|placeholder1|>": 32002,
-  "<|placeholder2|>": 32003,
-  "<|placeholder3|>": 32004,
-  "<|placeholder4|>": 32005,
-  "<|placeholder5|>": 32008,
-  "<|placeholder6|>": 32009,
-  "<|system|>": 32006,
-  "<|user|>": 32010
-}
diff --git a/sft/hyperrouter/config.json b/sft/hyperrouter/config.json
deleted file mode 100644
index f98eaa0e7fcfc35974a9041ccabf7b0b1dda4dbb..0000000000000000000000000000000000000000
--- a/sft/hyperrouter/config.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
-  "_name_or_path": "/cm/archive/namnv78/checkpoints/phi3mini-clip/pft",
-  "architectures": [
-    "LlavaPhiForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "microsoft/Phi-3-mini-4k-instruct--configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "microsoft/Phi-3-mini-4k-instruct--modeling_phi3.Phi3ForCausalLM"
-  },
-  "balance_loss_coef": 0.1,
-  "bos_token_id": 1,
-  "clip_smoe": true,
-  "dropout": false,
-  "embd_pdrop": 0.0,
-  "eos_token_id": 32000,
-  "freeze_mm_mlp_adapter": false,
-  "hidden_act": "silu",
-  "hidden_size": 3072,
-  "image_aspect_ratio": "pad",
-  "initializer_range": 0.02,
-  "intermediate_size": 8192,
-  "local_rank": 0,
-  "max_position_embeddings": 4096,
-  "mlp_smoe": true,
-  "mm_hidden_size": 1024,
-  "mm_patch_merge_type": "flat",
-  "mm_projector_lr": null,
-  "mm_projector_type": "moe",
-  "mm_use_im_patch_token": false,
-  "mm_use_im_start_end": false,
-  "mm_vision_select_feature": "patch",
-  "mm_vision_select_layer": -2,
-  "mm_vision_tower": "openai/clip-vit-large-patch14-336",
-  "model_type": "llava_phi",
-  "moe_name": "hyperrouter",
-  "num_attention_heads": 32,
-  "num_experts": 4,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "num_layers": 3,
-  "num_selected": 2,
-  "original_max_position_embeddings": 4096,
-  "pad_token_id": 32000,
-  "resid_pdrop": 0.0,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
-  "rope_theta": 10000.0,
-  "router_z_loss_coef": 0.01,
-  "scales": [
-    1,
-    3
-  ],
-  "sliding_window": 2047,
-  "tie_word_embeddings": false,
-  "tokenizer_model_max_length": 2048,
-  "tokenizer_padding_side": "right",
-  "topk_max": 2,
-  "topk_min": 1,
-  "torch_dtype": "bfloat16",
-  "training": true,
-  "transformers_version": "4.43.0",
-  "tune_mm_mlp_adapter": false,
-  "use_cache": false,
-  "use_mm_proj": true,
-  "vocab_size": 32064
-}
diff --git a/sft/hyperrouter/generation_config.json b/sft/hyperrouter/generation_config.json
deleted file mode 100644
index f79d092444f37c54d37a669a57923ca3276d762c..0000000000000000000000000000000000000000
--- a/sft/hyperrouter/generation_config.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 1,
-  "do_sample": true,
-  "eos_token_id": [
-    32000,
-    32001,
-    32007
-  ],
-  "pad_token_id": 32000,
-  "transformers_version": "4.43.0"
-}
diff --git a/sft/hyperrouter/latest b/sft/hyperrouter/latest
deleted file mode 100644
index 15b842fabe685a86c9c52effdd8958a64045bed5..0000000000000000000000000000000000000000
--- a/sft/hyperrouter/latest
+++ /dev/null
@@ -1 +0,0 @@
-global_step8316
\ No newline at end of file
diff --git a/sft/hyperrouter/model-00001-of-00002.safetensors b/sft/hyperrouter/model-00001-of-00002.safetensors
deleted file mode 100644
index 1c2bbda65f602a9356086b99e55f3836c6b6a10a..0000000000000000000000000000000000000000
--- a/sft/hyperrouter/model-00001-of-00002.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:35401ef988325f89ebbc7250b52aab44330e011ea64a401164bca010fffecde1
-size 4972489328
diff --git a/sft/hyperrouter/model-00002-of-00002.safetensors b/sft/hyperrouter/model-00002-of-00002.safetensors
deleted file mode 100644
index 6de577d03f386c06bc92bd19ba9c207714bb415d..0000000000000000000000000000000000000000
--- a/sft/hyperrouter/model-00002-of-00002.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c005ba0be526ad9350df17fc40314d317ba396d24d50e6df047c64ed9e509e8d
-size 4685134224
diff --git a/sft/hyperrouter/model.safetensors.index.json b/sft/hyperrouter/model.safetensors.index.json
deleted file mode 100644
index aaf7fa0ba3b7b308cc8c6401d2359bf16720b6a4..0000000000000000000000000000000000000000
--- a/sft/hyperrouter/model.safetensors.index.json
+++ /dev/null
@@ -1,1020 +0,0 @@
-{
-  "metadata": {
-    "total_size": 9657478344
-  },
-  "weight_map": {
-    "lm_head.weight": "model-00002-of-00002.safetensors",
-    "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
-    "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.0.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.0.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.1.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.1.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.10.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.10.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.11.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.11.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.12.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.12.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.13.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.13.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.14.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.14.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.15.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.15.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.16.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.16.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.17.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.17.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.18.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.18.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.19.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.19.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.2.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.2.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.20.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.20.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.21.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.21.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.22.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.22.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.23.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.23.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.24.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.24.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.25.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.25.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.26.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.26.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.27.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.27.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.28.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.28.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.29.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.29.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.3.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.3.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.30.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.30.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.31.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-    "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.31.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors",
-    "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.4.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.4.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.5.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.5.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.6.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.6.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.7.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.7.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.8.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.8.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.9.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-    "model.layers.9.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.0.0.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.0.0.weight": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.0.2.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.0.2.weight": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.1.0.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.1.0.weight": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.1.2.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.1.2.weight": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.2.0.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.2.0.weight": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.2.2.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.2.2.weight": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.3.0.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.3.0.weight": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.3.2.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.experts.3.2.weight": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.mm_projector.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.norm.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.embeddings.class_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.hyper_embedding": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors",
-    "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors",
"model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.3.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc2.weight": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc2.bias": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc1.weight": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.hyper_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.hypernet.0.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.hypernet.0.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.hypernet.2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.hypernet.2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.pre_layrnorm.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.pre_layrnorm.weight": "model-00002-of-00002.safetensors" - } -} diff --git a/sft/hyperrouter/rng_state_0.pth b/sft/hyperrouter/rng_state_0.pth deleted file mode 100644 index 71ea030e2b6ccf2942e534710e59240994fbf63d..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/rng_state_0.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid 
sha256:98d7182be6bef60e0a2c9cafee3f25cd25e5efbb81e449bb83476786049e3afd -size 15024 diff --git a/sft/hyperrouter/rng_state_1.pth b/sft/hyperrouter/rng_state_1.pth deleted file mode 100644 index 9d2963e51043b85c2837399b5ae8212b62ea2cf9..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/rng_state_1.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ca439560b72fdfdeca80538c1d7fd13a79cb40a4113abdd40bed2ee18c276f6e -size 15024 diff --git a/sft/hyperrouter/rng_state_2.pth b/sft/hyperrouter/rng_state_2.pth deleted file mode 100644 index 51d113c2fd99f3ab9ae0a827bc55e4424d99d271..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/rng_state_2.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:37d8a08a60f1e45bbe669ccc291b732178afde092185a2275107087813030b6c -size 15024 diff --git a/sft/hyperrouter/rng_state_3.pth b/sft/hyperrouter/rng_state_3.pth deleted file mode 100644 index 2e37d90e8d2dbd6c0377326df7ded780972f9ced..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/rng_state_3.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5990ef8a1c2a5a5daffd1a6f0a3bfedabc1eacf2b1a98ac77694877c0faf73e4 -size 15024 diff --git a/sft/hyperrouter/special_tokens_map.json b/sft/hyperrouter/special_tokens_map.json deleted file mode 100644 index 3e4d5a5bc1cb51753cc9ae0305ece0da60052b10..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/special_tokens_map.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "bos_token": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - }, - "eos_token": { - "content": "<|endoftext|>", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - }, - "pad_token": "", - "unk_token": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false - } -} diff --git a/sft/hyperrouter/tokenizer.model b/sft/hyperrouter/tokenizer.model deleted file mode 100644 index 6c00c742ce03c627d6cd5b795984876fa49fa899..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/tokenizer.model +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347 -size 499723 diff --git a/sft/hyperrouter/tokenizer_config.json b/sft/hyperrouter/tokenizer_config.json deleted file mode 100644 index 3bd56c6314b14d6a33a69cd1802e04dbc1e47840..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/tokenizer_config.json +++ /dev/null @@ -1,132 +0,0 @@ -{ - "add_bos_token": true, - "add_eos_token": false, - "add_prefix_space": true, - "added_tokens_decoder": { - "0": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "1": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "2": { - "content": "", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": false - }, - "32000": { - "content": "<|endoftext|>", - "lstrip": false, - "normalized": false, - "rstrip": false, - "single_word": false, - "special": true - }, - "32001": { - "content": "<|assistant|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32002": { - "content": "<|placeholder1|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - 
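The `tokenizer_config.json` removed here carries the Phi-3-style special tokens and, further down, a `chat_template` that wraps each turn in `<|user|>` … `<|end|>` … `<|assistant|>` markers. A short sketch of rendering a prompt through such a template, assuming a tokenizer directory that still contains this config (hypothetical, as the files are deleted):

```python
from transformers import AutoTokenizer

# Hypothetical path: a checkpoint that still ships the tokenizer files above.
tok = AutoTokenizer.from_pretrained("sft/hyperrouter")

messages = [{"role": "user", "content": "Describe the image."}]
prompt = tok.apply_chat_template(messages, tokenize=False)
print(prompt)
# Per the chat_template in this config, this renders roughly as:
#   <bos><|user|>\nDescribe the image.<|end|>\n<|assistant|>\n
```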
"single_word": false, - "special": true - }, - "32003": { - "content": "<|placeholder2|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32004": { - "content": "<|placeholder3|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32005": { - "content": "<|placeholder4|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32006": { - "content": "<|system|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32007": { - "content": "<|end|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32008": { - "content": "<|placeholder5|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32009": { - "content": "<|placeholder6|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32010": { - "content": "<|user|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - } - }, - "bos_token": "", - "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') %}{{'<|user|>' + '\n' + message['content'] + '<|end|>' + '\n' + '<|assistant|>' + '\n'}}{% elif (message['role'] == 'assistant') %}{{message['content'] + '<|end|>' + '\n'}}{% endif %}{% endfor %}", - "clean_up_tokenization_spaces": false, - "eos_token": "<|endoftext|>", - "legacy": false, - "model_max_length": 2048, - "pad_token": "", - "padding_side": "right", - "sp_model_kwargs": {}, - "spaces_between_special_tokens": false, - "tokenizer_class": "LlamaTokenizer", - "unk_token": "", - "use_default_system_prompt": false -} diff --git a/sft/hyperrouter/trainer_state.json b/sft/hyperrouter/trainer_state.json deleted file mode 100644 index 5e6a0553a3ed4fca5663b64b1730cac8864cb43e..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/trainer_state.json +++ /dev/null @@ -1,74877 +0,0 @@ -{ - "best_metric": null, - "best_model_checkpoint": null, - "epoch": 0.9999398785546805, - "eval_steps": 500, - "global_step": 8316, - "is_hyper_param_search": false, - "is_local_process_zero": true, - "is_world_process_zero": true, - "log_history": [ - { - "epoch": 0.00012024289063909097, - "flos": 18263040018000.0, - "grad_norm": 13.817966121833583, - "learning_rate": 0.0, - "loss": 1.4267, - "num_input_tokens_seen": 20095, - "step": 1 - }, - { - "epoch": 0.00024048578127818193, - "flos": 22506996816480.0, - "grad_norm": 18.108283812304055, - "learning_rate": 5.021476677069823e-07, - "loss": 1.3813, - "num_input_tokens_seen": 38475, - "step": 2 - }, - { - "epoch": 0.0003607286719172729, - "flos": 13893881084160.0, - "grad_norm": 17.568114336986252, - "learning_rate": 7.958852231401551e-07, - "loss": 1.2402, - "num_input_tokens_seen": 56760, - "step": 3 - }, - { - "epoch": 0.00048097156255636386, - "flos": 14108309722320.0, - "grad_norm": 11.07942172748333, - "learning_rate": 1.0042953354139647e-06, - "loss": 1.3203, - "num_input_tokens_seen": 75345, - "step": 4 - }, - { - "epoch": 0.0006012144531954548, - "flos": 10167680360400.0, - "grad_norm": 14.930126461201223, - "learning_rate": 1.1659507774310057e-06, - "loss": 1.3867, - "num_input_tokens_seen": 92950, - "step": 5 - }, - { - "epoch": 0.0007214573438345458, - "flos": 16944982125000.0, - "grad_norm": 
16.141306594091233, - "learning_rate": 1.2980328908471373e-06, - "loss": 1.2601, - "num_input_tokens_seen": 112915, - "step": 6 - }, - { - "epoch": 0.0008417002344736367, - "flos": 49893916281720.0, - "grad_norm": 3.0801871116577098, - "learning_rate": 1.4097067265369432e-06, - "loss": 0.7714, - "num_input_tokens_seen": 177630, - "step": 7 - }, - { - "epoch": 0.0009619431251127277, - "flos": 15561860892120.0, - "grad_norm": 23.371684935799586, - "learning_rate": 1.506443003120947e-06, - "loss": 1.2059, - "num_input_tokens_seen": 192850, - "step": 8 - }, - { - "epoch": 0.0010821860157518186, - "flos": 17079926077680.0, - "grad_norm": 10.773063935351715, - "learning_rate": 1.5917704462803102e-06, - "loss": 1.3108, - "num_input_tokens_seen": 209115, - "step": 9 - }, - { - "epoch": 0.0012024289063909096, - "flos": 12400537836000.0, - "grad_norm": 11.279973092554343, - "learning_rate": 1.6680984451379884e-06, - "loss": 1.3537, - "num_input_tokens_seen": 221905, - "step": 10 - }, - { - "epoch": 0.0013226717970300007, - "flos": 15567812964600.0, - "grad_norm": 8.092702201271033, - "learning_rate": 1.7371455188905097e-06, - "loss": 1.138, - "num_input_tokens_seen": 241555, - "step": 11 - }, - { - "epoch": 0.0014429146876690916, - "flos": 19971392888400.0, - "grad_norm": 33.78233043484974, - "learning_rate": 1.8001805585541196e-06, - "loss": 1.1182, - "num_input_tokens_seen": 262405, - "step": 12 - }, - { - "epoch": 0.0015631575783081825, - "flos": 13920407639280.0, - "grad_norm": 6.876950906169764, - "learning_rate": 1.8581671739548328e-06, - "loss": 1.1964, - "num_input_tokens_seen": 279860, - "step": 13 - }, - { - "epoch": 0.0016834004689472734, - "flos": 35439300493320.0, - "grad_norm": 5.773836898926085, - "learning_rate": 1.9118543942439254e-06, - "loss": 1.0317, - "num_input_tokens_seen": 301765, - "step": 14 - }, - { - "epoch": 0.0018036433595863645, - "flos": 25693295089680.0, - "grad_norm": 5.786681227306793, - "learning_rate": 1.961836000571161e-06, - "loss": 1.0432, - "num_input_tokens_seen": 323140, - "step": 15 - }, - { - "epoch": 0.0019238862502254555, - "flos": 44049128081160.0, - "grad_norm": 2.5706490061589915, - "learning_rate": 2.0085906708279293e-06, - "loss": 0.6258, - "num_input_tokens_seen": 378230, - "step": 16 - }, - { - "epoch": 0.0020441291408645466, - "flos": 15217427255040.0, - "grad_norm": 5.213731740052087, - "learning_rate": 2.0525099325728135e-06, - "loss": 1.0917, - "num_input_tokens_seen": 396130, - "step": 17 - }, - { - "epoch": 0.0021643720315036373, - "flos": 47075380544760.0, - "grad_norm": 2.249954541065026, - "learning_rate": 2.0939181139872922e-06, - "loss": 0.7098, - "num_input_tokens_seen": 457565, - "step": 18 - }, - { - "epoch": 0.0022846149221427284, - "flos": 22966116523200.0, - "grad_norm": 5.084587046791643, - "learning_rate": 2.1330868934640175e-06, - "loss": 0.9383, - "num_input_tokens_seen": 477960, - "step": 19 - }, - { - "epoch": 0.002404857812781819, - "flos": 37621768786680.0, - "grad_norm": 2.0895544119215606, - "learning_rate": 2.170246112844971e-06, - "loss": 0.7579, - "num_input_tokens_seen": 532020, - "step": 20 - }, - { - "epoch": 0.0025251007034209102, - "flos": 11517594786600.0, - "grad_norm": 7.5789698808179535, - "learning_rate": 2.2055919496770983e-06, - "loss": 0.966, - "num_input_tokens_seen": 549880, - "step": 21 - }, - { - "epoch": 0.0026453435940600014, - "flos": 27825966757320.0, - "grad_norm": 5.763089415884376, - "learning_rate": 2.2392931865974923e-06, - "loss": 0.8608, - "num_input_tokens_seen": 572290, - "step": 22 - }, 
- { - "epoch": 0.002765586484699092, - "flos": 15459617186760.0, - "grad_norm": 7.636480974559204, - "learning_rate": 2.271496085962064e-06, - "loss": 0.9816, - "num_input_tokens_seen": 589705, - "step": 23 - }, - { - "epoch": 0.002885829375338183, - "flos": 15108344998320.0, - "grad_norm": 5.005354874841114, - "learning_rate": 2.3023282262611022e-06, - "loss": 0.9934, - "num_input_tokens_seen": 608200, - "step": 24 - }, - { - "epoch": 0.003006072265977274, - "flos": 25585099311840.0, - "grad_norm": 5.104600641394964, - "learning_rate": 2.3319015548620114e-06, - "loss": 0.8753, - "num_input_tokens_seen": 629060, - "step": 25 - }, - { - "epoch": 0.003126315156616365, - "flos": 17886550437600.0, - "grad_norm": 3.186201841006131, - "learning_rate": 2.3603148416618152e-06, - "loss": 0.8974, - "num_input_tokens_seen": 648295, - "step": 26 - }, - { - "epoch": 0.003246558047255456, - "flos": 17295146214840.0, - "grad_norm": 11.13022653628503, - "learning_rate": 2.3876556694204647e-06, - "loss": 0.9761, - "num_input_tokens_seen": 668170, - "step": 27 - }, - { - "epoch": 0.003366800937894547, - "flos": 13001123447160.0, - "grad_norm": 5.1525362454564245, - "learning_rate": 2.414002061950908e-06, - "loss": 0.8651, - "num_input_tokens_seen": 686765, - "step": 28 - }, - { - "epoch": 0.003487043828533638, - "flos": 17890159673040.0, - "grad_norm": 4.789056326782828, - "learning_rate": 2.4394238264681557e-06, - "loss": 0.9775, - "num_input_tokens_seen": 706220, - "step": 29 - }, - { - "epoch": 0.003607286719172729, - "flos": 19158689816160.0, - "grad_norm": 3.0290220272438297, - "learning_rate": 2.4639836682781433e-06, - "loss": 0.967, - "num_input_tokens_seen": 726070, - "step": 30 - }, - { - "epoch": 0.00372752960981182, - "flos": 15053170670760.0, - "grad_norm": 3.383246343336847, - "learning_rate": 2.487738122623307e-06, - "loss": 0.9538, - "num_input_tokens_seen": 744700, - "step": 31 - }, - { - "epoch": 0.003847772500450911, - "flos": 16593899896320.0, - "grad_norm": 5.524775276964839, - "learning_rate": 2.510738338534912e-06, - "loss": 0.9442, - "num_input_tokens_seen": 763145, - "step": 32 - }, - { - "epoch": 0.003968015391090002, - "flos": 13110522303480.0, - "grad_norm": 5.514101331297901, - "learning_rate": 2.5330307420306648e-06, - "loss": 1.0015, - "num_input_tokens_seen": 779715, - "step": 33 - }, - { - "epoch": 0.004088258281729093, - "flos": 20019443724960.0, - "grad_norm": 5.9010215909324035, - "learning_rate": 2.554657600279796e-06, - "loss": 0.8433, - "num_input_tokens_seen": 800710, - "step": 34 - }, - { - "epoch": 0.004208501172368184, - "flos": 17026113128400.0, - "grad_norm": 4.295900546466009, - "learning_rate": 2.5756575039679493e-06, - "loss": 1.0193, - "num_input_tokens_seen": 819980, - "step": 35 - }, - { - "epoch": 0.0043287440630072746, - "flos": 12625414260960.0, - "grad_norm": 7.186673058747025, - "learning_rate": 2.5960657816942747e-06, - "loss": 0.9287, - "num_input_tokens_seen": 838615, - "step": 36 - }, - { - "epoch": 0.004448986953646365, - "flos": 39108748382880.0, - "grad_norm": 1.061511477461804, - "learning_rate": 2.6159148575788668e-06, - "loss": 0.6319, - "num_input_tokens_seen": 896370, - "step": 37 - }, - { - "epoch": 0.004569229844285457, - "flos": 9762816842400.0, - "grad_norm": 5.4406360921905526, - "learning_rate": 2.635234561171e-06, - "loss": 0.9627, - "num_input_tokens_seen": 914485, - "step": 38 - }, - { - "epoch": 0.0046894727349245475, - "flos": 11808373774200.0, - "grad_norm": 5.571640391392913, - "learning_rate": 2.6540523970949877e-06, - "loss": 
0.9251, - "num_input_tokens_seen": 929050, - "step": 39 - }, - { - "epoch": 0.004809715625563638, - "flos": 17511189510960.0, - "grad_norm": 4.942158686227952, - "learning_rate": 2.6723937805519533e-06, - "loss": 0.9125, - "num_input_tokens_seen": 946270, - "step": 40 - }, - { - "epoch": 0.00492995851620273, - "flos": 15185993406120.0, - "grad_norm": 4.11023288373305, - "learning_rate": 2.690282243737839e-06, - "loss": 0.9144, - "num_input_tokens_seen": 964925, - "step": 41 - }, - { - "epoch": 0.0050502014068418205, - "flos": 14865015308040.0, - "grad_norm": 4.578626585684053, - "learning_rate": 2.7077396173840807e-06, - "loss": 0.9691, - "num_input_tokens_seen": 982930, - "step": 42 - }, - { - "epoch": 0.005170444297480911, - "flos": 19051127237520.0, - "grad_norm": 8.79427970153042, - "learning_rate": 2.7247861909342594e-06, - "loss": 0.898, - "num_input_tokens_seen": 1003575, - "step": 43 - }, - { - "epoch": 0.005290687188120003, - "flos": 15319765940280.0, - "grad_norm": 4.446974619582612, - "learning_rate": 2.7414408543044743e-06, - "loss": 0.8088, - "num_input_tokens_seen": 1018935, - "step": 44 - }, - { - "epoch": 0.005410930078759093, - "flos": 11544216321600.0, - "grad_norm": 9.348319374854356, - "learning_rate": 2.7577212237113157e-06, - "loss": 0.7757, - "num_input_tokens_seen": 1035695, - "step": 45 - }, - { - "epoch": 0.005531172969398184, - "flos": 15432172492800.0, - "grad_norm": 4.741288277400874, - "learning_rate": 2.7736437536690466e-06, - "loss": 1.0219, - "num_input_tokens_seen": 1055045, - "step": 46 - }, - { - "epoch": 0.005651415860037276, - "flos": 15239014856400.0, - "grad_norm": 4.133535166909097, - "learning_rate": 2.789223836941131e-06, - "loss": 1.0517, - "num_input_tokens_seen": 1074900, - "step": 47 - }, - { - "epoch": 0.005771658750676366, - "flos": 9626448191520.0, - "grad_norm": 5.947806959148138, - "learning_rate": 2.8044758939680847e-06, - "loss": 1.0403, - "num_input_tokens_seen": 1090690, - "step": 48 - }, - { - "epoch": 0.005891901641315457, - "flos": 17889526473840.0, - "grad_norm": 11.095980739629047, - "learning_rate": 2.8194134530738863e-06, - "loss": 0.9874, - "num_input_tokens_seen": 1109180, - "step": 49 - }, - { - "epoch": 0.006012144531954548, - "flos": 16891137515760.0, - "grad_norm": 8.98428498596212, - "learning_rate": 2.834049222568994e-06, - "loss": 0.8812, - "num_input_tokens_seen": 1126250, - "step": 50 - }, - { - "epoch": 0.006132387422593639, - "flos": 16485989058120.0, - "grad_norm": 12.226682307719713, - "learning_rate": 2.848395155712969e-06, - "loss": 0.909, - "num_input_tokens_seen": 1146190, - "step": 51 - }, - { - "epoch": 0.00625263031323273, - "flos": 20258815920240.0, - "grad_norm": 8.391225100746984, - "learning_rate": 2.8624625093687977e-06, - "loss": 0.9724, - "num_input_tokens_seen": 1163045, - "step": 52 - }, - { - "epoch": 0.006372873203871821, - "flos": 16916366012520.0, - "grad_norm": 8.988484539068976, - "learning_rate": 2.876261897070029e-06, - "loss": 0.8662, - "num_input_tokens_seen": 1182895, - "step": 53 - }, - { - "epoch": 0.006493116094510912, - "flos": 16728020690040.0, - "grad_norm": 4.492974413074048, - "learning_rate": 2.889803337127447e-06, - "loss": 0.9103, - "num_input_tokens_seen": 1201215, - "step": 54 - }, - { - "epoch": 0.006613358985150003, - "flos": 16887180020760.0, - "grad_norm": 7.953218290909745, - "learning_rate": 2.903096296321516e-06, - "loss": 0.8334, - "num_input_tokens_seen": 1219080, - "step": 55 - }, - { - "epoch": 0.006733601875789094, - "flos": 19452191560320.0, - "grad_norm": 
7.936220801896522, - "learning_rate": 2.9161497296578907e-06, - "loss": 0.9143, - "num_input_tokens_seen": 1238190, - "step": 56 - }, - { - "epoch": 0.006853844766428185, - "flos": 11301329870760.0, - "grad_norm": 4.395616402478639, - "learning_rate": 2.928972116604173e-06, - "loss": 0.8485, - "num_input_tokens_seen": 1254185, - "step": 57 - }, - { - "epoch": 0.006974087657067276, - "flos": 17755215720360.0, - "grad_norm": 8.873661853757119, - "learning_rate": 2.9415714941751377e-06, - "loss": 0.9943, - "num_input_tokens_seen": 1275125, - "step": 58 - }, - { - "epoch": 0.007094330547706367, - "flos": 18886079154240.0, - "grad_norm": 4.186188221905894, - "learning_rate": 2.9539554871897396e-06, - "loss": 0.9224, - "num_input_tokens_seen": 1295015, - "step": 59 - }, - { - "epoch": 0.007214573438345458, - "flos": 15589875465360.0, - "grad_norm": 3.373425575648723, - "learning_rate": 2.9661313359851253e-06, - "loss": 0.958, - "num_input_tokens_seen": 1312620, - "step": 60 - }, - { - "epoch": 0.007334816328984549, - "flos": 18268338336120.0, - "grad_norm": 3.5857938849917925, - "learning_rate": 2.978105921839922e-06, - "loss": 0.9307, - "num_input_tokens_seen": 1332885, - "step": 61 - }, - { - "epoch": 0.00745505921962364, - "flos": 13511586626280.0, - "grad_norm": 5.812450449742594, - "learning_rate": 2.9898857903302893e-06, - "loss": 0.7234, - "num_input_tokens_seen": 1351555, - "step": 62 - }, - { - "epoch": 0.007575302110262731, - "flos": 13487719507800.0, - "grad_norm": 5.276221595801576, - "learning_rate": 3.001477172817253e-06, - "loss": 0.872, - "num_input_tokens_seen": 1369165, - "step": 63 - }, - { - "epoch": 0.007695545000901822, - "flos": 18294991531080.0, - "grad_norm": 6.243728848441557, - "learning_rate": 3.012886006241894e-06, - "loss": 0.9586, - "num_input_tokens_seen": 1388270, - "step": 64 - }, - { - "epoch": 0.007815787891540913, - "flos": 15594466159560.0, - "grad_norm": 3.660141919412314, - "learning_rate": 3.0241179513858383e-06, - "loss": 0.8854, - "num_input_tokens_seen": 1407300, - "step": 65 - }, - { - "epoch": 0.007936030782180003, - "flos": 15780088725480.0, - "grad_norm": 3.7789390374843888, - "learning_rate": 3.035178409737647e-06, - "loss": 0.8737, - "num_input_tokens_seen": 1424470, - "step": 66 - }, - { - "epoch": 0.008056273672819095, - "flos": 14703924719760.0, - "grad_norm": 5.94667901714964, - "learning_rate": 3.046072539090907e-06, - "loss": 0.8728, - "num_input_tokens_seen": 1442155, - "step": 67 - }, - { - "epoch": 0.008176516563458186, - "flos": 13380948428160.0, - "grad_norm": 5.134053676810564, - "learning_rate": 3.056805267986779e-06, - "loss": 1.0399, - "num_input_tokens_seen": 1459345, - "step": 68 - }, - { - "epoch": 0.008296759454097276, - "flos": 15999994536720.0, - "grad_norm": 5.50090938332737, - "learning_rate": 3.0673813091022194e-06, - "loss": 0.9399, - "num_input_tokens_seen": 1478285, - "step": 69 - }, - { - "epoch": 0.008417002344736368, - "flos": 46746871867560.0, - "grad_norm": 1.2141129236538757, - "learning_rate": 3.0778051716749317e-06, - "loss": 0.6547, - "num_input_tokens_seen": 1541550, - "step": 70 - }, - { - "epoch": 0.008537245235375458, - "flos": 16809879872520.0, - "grad_norm": 4.141205829692235, - "learning_rate": 3.0880811730470094e-06, - "loss": 0.8977, - "num_input_tokens_seen": 1560725, - "step": 71 - }, - { - "epoch": 0.008657488126014549, - "flos": 45692707042680.0, - "grad_norm": 0.9566476792822703, - "learning_rate": 3.098213449401257e-06, - "loss": 0.6079, - "num_input_tokens_seen": 1627375, - "step": 72 - }, - { - 
"epoch": 0.00877773101665364, - "flos": 22234354494600.0, - "grad_norm": 4.712959222654002, - "learning_rate": 3.1082059657570015e-06, - "loss": 0.9707, - "num_input_tokens_seen": 1646330, - "step": 73 - }, - { - "epoch": 0.00889797390729273, - "flos": 17215186629960.0, - "grad_norm": 10.965363504113474, - "learning_rate": 3.1180625252858496e-06, - "loss": 0.9408, - "num_input_tokens_seen": 1664480, - "step": 74 - }, - { - "epoch": 0.009018216797931822, - "flos": 16890820916160.0, - "grad_norm": 3.9283860100920722, - "learning_rate": 3.1277867780021663e-06, - "loss": 0.7866, - "num_input_tokens_seen": 1680835, - "step": 75 - }, - { - "epoch": 0.009138459688570914, - "flos": 11593375256760.0, - "grad_norm": 2.8915168005138425, - "learning_rate": 3.1373822288779824e-06, - "loss": 0.9457, - "num_input_tokens_seen": 1697135, - "step": 76 - }, - { - "epoch": 0.009258702579210003, - "flos": 19807231284000.0, - "grad_norm": 3.3892857591915284, - "learning_rate": 3.1468522454274533e-06, - "loss": 0.7749, - "num_input_tokens_seen": 1717210, - "step": 77 - }, - { - "epoch": 0.009378945469849095, - "flos": 19722807644760.0, - "grad_norm": 3.1498169255416593, - "learning_rate": 3.15620006480197e-06, - "loss": 0.9012, - "num_input_tokens_seen": 1736200, - "step": 78 - }, - { - "epoch": 0.009499188360488187, - "flos": 26228480206200.0, - "grad_norm": 5.08425711487492, - "learning_rate": 3.1654288004333087e-06, - "loss": 0.7407, - "num_input_tokens_seen": 1754585, - "step": 79 - }, - { - "epoch": 0.009619431251127276, - "flos": 15726434076000.0, - "grad_norm": 3.9785422322840036, - "learning_rate": 3.1745414482589353e-06, - "loss": 0.7381, - "num_input_tokens_seen": 1773515, - "step": 80 - }, - { - "epoch": 0.009739674141766368, - "flos": 12705975385080.0, - "grad_norm": 3.1308173392764487, - "learning_rate": 3.1835408925606204e-06, - "loss": 0.8432, - "num_input_tokens_seen": 1791375, - "step": 81 - }, - { - "epoch": 0.00985991703240546, - "flos": 20185061687520.0, - "grad_norm": 5.157053566942035, - "learning_rate": 3.1924299114448214e-06, - "loss": 0.8754, - "num_input_tokens_seen": 1811575, - "step": 82 - }, - { - "epoch": 0.00998015992304455, - "flos": 10032008228640.0, - "grad_norm": 4.003129450179879, - "learning_rate": 3.2012111819909055e-06, - "loss": 0.8268, - "num_input_tokens_seen": 1828625, - "step": 83 - }, - { - "epoch": 0.010100402813683641, - "flos": 14755838071440.0, - "grad_norm": 3.313172038301558, - "learning_rate": 3.2098872850910627e-06, - "loss": 0.9393, - "num_input_tokens_seen": 1845020, - "step": 84 - }, - { - "epoch": 0.010220645704322733, - "flos": 12543523418520.0, - "grad_norm": 3.353935344293502, - "learning_rate": 3.2184607100038194e-06, - "loss": 0.8808, - "num_input_tokens_seen": 1863380, - "step": 85 - }, - { - "epoch": 0.010340888594961822, - "flos": 15702693597360.0, - "grad_norm": 3.1380323649889688, - "learning_rate": 3.2269338586412414e-06, - "loss": 0.9127, - "num_input_tokens_seen": 1880685, - "step": 86 - }, - { - "epoch": 0.010461131485600914, - "flos": 16836786347160.0, - "grad_norm": 15.044603788382533, - "learning_rate": 3.2353090496083106e-06, - "loss": 0.9546, - "num_input_tokens_seen": 1898240, - "step": 87 - }, - { - "epoch": 0.010581374376240005, - "flos": 24640016743680.0, - "grad_norm": 3.476606938796039, - "learning_rate": 3.2435885220114572e-06, - "loss": 0.8107, - "num_input_tokens_seen": 1919310, - "step": 88 - }, - { - "epoch": 0.010701617266879095, - "flos": 15919401752640.0, - "grad_norm": 3.221154361203631, - "learning_rate": 
3.2517744390519113e-06, - "loss": 0.9245, - "num_input_tokens_seen": 1937895, - "step": 89 - }, - { - "epoch": 0.010821860157518187, - "flos": 13918666341480.0, - "grad_norm": 3.57502098877621, - "learning_rate": 3.259868891418298e-06, - "loss": 0.7398, - "num_input_tokens_seen": 1955380, - "step": 90 - }, - { - "epoch": 0.010942103048157278, - "flos": 18645915459960.0, - "grad_norm": 2.850145463410185, - "learning_rate": 3.2678739004917757e-06, - "loss": 0.8238, - "num_input_tokens_seen": 1974835, - "step": 91 - }, - { - "epoch": 0.011062345938796368, - "flos": 20160972949320.0, - "grad_norm": 2.6337257861385353, - "learning_rate": 3.275791421376029e-06, - "loss": 0.9096, - "num_input_tokens_seen": 1998000, - "step": 92 - }, - { - "epoch": 0.01118258882943546, - "flos": 11707016547720.0, - "grad_norm": 3.419591690585627, - "learning_rate": 3.2836233457634622e-06, - "loss": 0.9398, - "num_input_tokens_seen": 2015855, - "step": 93 - }, - { - "epoch": 0.011302831720074551, - "flos": 15109421436960.0, - "grad_norm": 2.9625103077125696, - "learning_rate": 3.2913715046481135e-06, - "loss": 0.8301, - "num_input_tokens_seen": 2035320, - "step": 94 - }, - { - "epoch": 0.011423074610713641, - "flos": 9486565285080.0, - "grad_norm": 3.6233140379091586, - "learning_rate": 3.299037670895023e-06, - "loss": 0.8683, - "num_input_tokens_seen": 2051655, - "step": 95 - }, - { - "epoch": 0.011543317501352733, - "flos": 22263445506480.0, - "grad_norm": 2.706996548693077, - "learning_rate": 3.3066235616750667e-06, - "loss": 0.7987, - "num_input_tokens_seen": 2072610, - "step": 96 - }, - { - "epoch": 0.011663560391991824, - "flos": 11301139911000.0, - "grad_norm": 3.3290104650743753, - "learning_rate": 3.3141308407736276e-06, - "loss": 0.9006, - "num_input_tokens_seen": 2088965, - "step": 97 - }, - { - "epoch": 0.011783803282630914, - "flos": 14542137612360.0, - "grad_norm": 3.1030764788227265, - "learning_rate": 3.321561120780869e-06, - "loss": 0.8482, - "num_input_tokens_seen": 2107395, - "step": 98 - }, - { - "epoch": 0.011904046173270006, - "flos": 16345979511840.0, - "grad_norm": 3.6309801956767105, - "learning_rate": 3.3289159651708192e-06, - "loss": 1.0016, - "num_input_tokens_seen": 2124690, - "step": 99 - }, - { - "epoch": 0.012024289063909096, - "flos": 13948643832240.0, - "grad_norm": 3.5272053198196534, - "learning_rate": 3.3361968902759768e-06, - "loss": 0.9633, - "num_input_tokens_seen": 2144090, - "step": 100 - }, - { - "epoch": 0.012144531954548187, - "flos": 10923151207680.0, - "grad_norm": 4.604635407535338, - "learning_rate": 3.343405367163663e-06, - "loss": 0.9351, - "num_input_tokens_seen": 2160740, - "step": 101 - }, - { - "epoch": 0.012264774845187279, - "flos": 11004472170840.0, - "grad_norm": 3.5824568068249074, - "learning_rate": 3.350542823419951e-06, - "loss": 0.8038, - "num_input_tokens_seen": 2177060, - "step": 102 - }, - { - "epoch": 0.012385017735826368, - "flos": 10135803272040.0, - "grad_norm": 4.713867086343742, - "learning_rate": 3.3576106448465615e-06, - "loss": 0.8571, - "num_input_tokens_seen": 2190160, - "step": 103 - }, - { - "epoch": 0.01250526062646546, - "flos": 17295969373800.0, - "grad_norm": 2.677911286523508, - "learning_rate": 3.3646101770757797e-06, - "loss": 0.8653, - "num_input_tokens_seen": 2208670, - "step": 104 - }, - { - "epoch": 0.012625503517104552, - "flos": 25448888960760.0, - "grad_norm": 2.798810406433796, - "learning_rate": 3.371542727108104e-06, - "loss": 0.8352, - "num_input_tokens_seen": 2230565, - "step": 105 - }, - { - "epoch": 
0.012745746407743641, - "flos": 13002548145360.0, - "grad_norm": 4.574011298735708, - "learning_rate": 3.3784095647770114e-06, - "loss": 0.884, - "num_input_tokens_seen": 2248930, - "step": 106 - }, - { - "epoch": 0.012865989298382733, - "flos": 15054911968560.0, - "grad_norm": 3.487873732801168, - "learning_rate": 3.3852119241449547e-06, - "loss": 0.8663, - "num_input_tokens_seen": 2267770, - "step": 107 - }, - { - "epoch": 0.012986232189021825, - "flos": 17536639627440.0, - "grad_norm": 3.242975125528243, - "learning_rate": 3.3919510048344295e-06, - "loss": 0.945, - "num_input_tokens_seen": 2285500, - "step": 108 - }, - { - "epoch": 0.013106475079660914, - "flos": 17370546765480.0, - "grad_norm": 4.2261033011437945, - "learning_rate": 3.3986279732976907e-06, - "loss": 0.8398, - "num_input_tokens_seen": 2303695, - "step": 109 - }, - { - "epoch": 0.013226717970300006, - "flos": 15429924635640.0, - "grad_norm": 4.043200382102523, - "learning_rate": 3.4052439640284983e-06, - "loss": 0.9371, - "num_input_tokens_seen": 2322330, - "step": 110 - }, - { - "epoch": 0.013346960860939098, - "flos": 18214145467320.0, - "grad_norm": 4.027525134585357, - "learning_rate": 3.4118000807190217e-06, - "loss": 0.8031, - "num_input_tokens_seen": 2342930, - "step": 111 - }, - { - "epoch": 0.013467203751578187, - "flos": 20669409891000.0, - "grad_norm": 3.6971547089477514, - "learning_rate": 3.4182973973648723e-06, - "loss": 0.7544, - "num_input_tokens_seen": 2363860, - "step": 112 - }, - { - "epoch": 0.013587446642217279, - "flos": 13812401821200.0, - "grad_norm": 3.9076008406481315, - "learning_rate": 3.424736959321014e-06, - "loss": 0.9226, - "num_input_tokens_seen": 2381385, - "step": 113 - }, - { - "epoch": 0.01370768953285637, - "flos": 17566427158440.0, - "grad_norm": 3.03921014186877, - "learning_rate": 3.431119784311155e-06, - "loss": 0.8678, - "num_input_tokens_seen": 2400780, - "step": 114 - }, - { - "epoch": 0.01382793242349546, - "flos": 28825685433720.0, - "grad_norm": 14.510905407052375, - "learning_rate": 3.43744686339307e-06, - "loss": 0.7627, - "num_input_tokens_seen": 2422820, - "step": 115 - }, - { - "epoch": 0.013948175314134552, - "flos": 30417283232280.0, - "grad_norm": 4.7220946325879805, - "learning_rate": 3.44371916188212e-06, - "loss": 0.8903, - "num_input_tokens_seen": 2443295, - "step": 116 - }, - { - "epoch": 0.014068418204773643, - "flos": 16429991571600.0, - "grad_norm": 3.158379770975929, - "learning_rate": 3.449937620235143e-06, - "loss": 0.8523, - "num_input_tokens_seen": 2463610, - "step": 117 - }, - { - "epoch": 0.014188661095412733, - "flos": 17430501747000.0, - "grad_norm": 4.019049345884337, - "learning_rate": 3.456103154896722e-06, - "loss": 0.8646, - "num_input_tokens_seen": 2484605, - "step": 118 - }, - { - "epoch": 0.014308903986051825, - "flos": 17323414067760.0, - "grad_norm": 2.7940144321994596, - "learning_rate": 3.462216659109757e-06, - "loss": 0.9095, - "num_input_tokens_seen": 2504505, - "step": 119 - }, - { - "epoch": 0.014429146876690916, - "flos": 15108883217640.0, - "grad_norm": 6.324216940433651, - "learning_rate": 3.4682790036921077e-06, - "loss": 0.8315, - "num_input_tokens_seen": 2522885, - "step": 120 - }, - { - "epoch": 0.014549389767330006, - "flos": 14784010944480.0, - "grad_norm": 3.398480013877398, - "learning_rate": 3.4742910377810193e-06, - "loss": 0.8223, - "num_input_tokens_seen": 2540065, - "step": 121 - }, - { - "epoch": 0.014669632657969098, - "flos": 13137428778120.0, - "grad_norm": 5.4478643450099655, - "learning_rate": 
3.4802535895469042e-06, - "loss": 0.867, - "num_input_tokens_seen": 2558535, - "step": 122 - }, - { - "epoch": 0.01478987554860819, - "flos": 16642805551800.0, - "grad_norm": 3.017696321512022, - "learning_rate": 3.4861674668779934e-06, - "loss": 0.8838, - "num_input_tokens_seen": 2576485, - "step": 123 - }, - { - "epoch": 0.01491011843924728, - "flos": 12540832321920.0, - "grad_norm": 3.2389351215701625, - "learning_rate": 3.492033458037272e-06, - "loss": 0.8353, - "num_input_tokens_seen": 2594775, - "step": 124 - }, - { - "epoch": 0.01503036132988637, - "flos": 12892706049600.0, - "grad_norm": 4.679628196684444, - "learning_rate": 3.497852332293018e-06, - "loss": 0.8498, - "num_input_tokens_seen": 2610070, - "step": 125 - }, - { - "epoch": 0.015150604220525462, - "flos": 13784070648360.0, - "grad_norm": 7.34467222226358, - "learning_rate": 3.5036248405242356e-06, - "loss": 0.9649, - "num_input_tokens_seen": 2628545, - "step": 126 - }, - { - "epoch": 0.015270847111164552, - "flos": 28986554402280.0, - "grad_norm": 3.6089088832549816, - "learning_rate": 3.509351715802146e-06, - "loss": 0.8139, - "num_input_tokens_seen": 2649150, - "step": 127 - }, - { - "epoch": 0.015391090001803644, - "flos": 32200202389560.0, - "grad_norm": 6.493564060401882, - "learning_rate": 3.5150336739488763e-06, - "loss": 0.7712, - "num_input_tokens_seen": 2671155, - "step": 128 - }, - { - "epoch": 0.015511332892442733, - "flos": 13812496801080.0, - "grad_norm": 8.66026504991985, - "learning_rate": 3.5206714140744143e-06, - "loss": 0.8159, - "num_input_tokens_seen": 2690930, - "step": 129 - }, - { - "epoch": 0.015631575783081827, - "flos": 17970910756920.0, - "grad_norm": 4.0911663725896075, - "learning_rate": 3.5262656190928208e-06, - "loss": 0.8493, - "num_input_tokens_seen": 2708950, - "step": 130 - }, - { - "epoch": 0.015751818673720917, - "flos": 45944770390560.0, - "grad_norm": 0.9399841774461044, - "learning_rate": 3.5318169562186737e-06, - "loss": 0.7087, - "num_input_tokens_seen": 2777515, - "step": 131 - }, - { - "epoch": 0.015872061564360006, - "flos": 17484884575560.0, - "grad_norm": 3.691314276301299, - "learning_rate": 3.5373260774446292e-06, - "loss": 0.8141, - "num_input_tokens_seen": 2797685, - "step": 132 - }, - { - "epoch": 0.0159923044549991, - "flos": 17106990852120.0, - "grad_norm": 3.5402008314331384, - "learning_rate": 3.542793620000961e-06, - "loss": 0.8935, - "num_input_tokens_seen": 2816880, - "step": 133 - }, - { - "epoch": 0.01611254734563819, - "flos": 13029486279960.0, - "grad_norm": 4.677219153841471, - "learning_rate": 3.5482202067978894e-06, - "loss": 0.8464, - "num_input_tokens_seen": 2833810, - "step": 134 - }, - { - "epoch": 0.01623279023627728, - "flos": 15320399139480.0, - "grad_norm": 4.266674094392351, - "learning_rate": 3.553606446851471e-06, - "loss": 0.7391, - "num_input_tokens_seen": 2850270, - "step": 135 - }, - { - "epoch": 0.016353033126916373, - "flos": 11463623537520.0, - "grad_norm": 3.655207558725398, - "learning_rate": 3.5589529356937613e-06, - "loss": 0.8232, - "num_input_tokens_seen": 2868385, - "step": 136 - }, - { - "epoch": 0.016473276017555463, - "flos": 13731840697080.0, - "grad_norm": 2.5369245243055034, - "learning_rate": 3.5642602557679627e-06, - "loss": 0.7556, - "num_input_tokens_seen": 2886555, - "step": 137 - }, - { - "epoch": 0.016593518908194552, - "flos": 17835713524560.0, - "grad_norm": 4.654622657634122, - "learning_rate": 3.569528976809202e-06, - "loss": 0.8306, - "num_input_tokens_seen": 2903490, - "step": 138 - }, - { - "epoch": 
0.016713761798833646, - "flos": 16350791825760.0, - "grad_norm": 5.422779722337261, - "learning_rate": 3.5747596562115522e-06, - "loss": 0.8679, - "num_input_tokens_seen": 2923825, - "step": 139 - }, - { - "epoch": 0.016834004689472735, - "flos": 13002484825440.0, - "grad_norm": 4.424777422980722, - "learning_rate": 3.5799528393819138e-06, - "loss": 0.8939, - "num_input_tokens_seen": 2942625, - "step": 140 - }, - { - "epoch": 0.016954247580111825, - "flos": 14998629542400.0, - "grad_norm": 10.642236707968749, - "learning_rate": 3.585109060081286e-06, - "loss": 0.8597, - "num_input_tokens_seen": 2962145, - "step": 141 - }, - { - "epoch": 0.017074490470750915, - "flos": 16160135326200.0, - "grad_norm": 3.8955489410118886, - "learning_rate": 3.590228840753992e-06, - "loss": 0.7718, - "num_input_tokens_seen": 2982295, - "step": 142 - }, - { - "epoch": 0.01719473336139001, - "flos": 11652158819760.0, - "grad_norm": 21.423409238001927, - "learning_rate": 3.5953126928453423e-06, - "loss": 0.8573, - "num_input_tokens_seen": 2999565, - "step": 143 - }, - { - "epoch": 0.017314976252029098, - "flos": 16458765983880.0, - "grad_norm": 3.285644349840703, - "learning_rate": 3.600361117108239e-06, - "loss": 0.7898, - "num_input_tokens_seen": 3019085, - "step": 144 - }, - { - "epoch": 0.017435219142668188, - "flos": 16108032014760.0, - "grad_norm": 3.520134640199411, - "learning_rate": 3.6053746038991616e-06, - "loss": 0.9494, - "num_input_tokens_seen": 3037890, - "step": 145 - }, - { - "epoch": 0.01755546203330728, - "flos": 53278591593360.0, - "grad_norm": 0.9713907857065575, - "learning_rate": 3.6103536334639843e-06, - "loss": 0.6136, - "num_input_tokens_seen": 3090875, - "step": 146 - }, - { - "epoch": 0.01767570492394637, - "flos": 18564151257360.0, - "grad_norm": 2.988292579380076, - "learning_rate": 3.615298676214041e-06, - "loss": 0.8369, - "num_input_tokens_seen": 3110875, - "step": 147 - }, - { - "epoch": 0.01779594781458546, - "flos": 14946969470400.0, - "grad_norm": 3.225647434037604, - "learning_rate": 3.6202101929928317e-06, - "loss": 0.8797, - "num_input_tokens_seen": 3129185, - "step": 148 - }, - { - "epoch": 0.017916190705224554, - "flos": 11841992160360.0, - "grad_norm": 2.9853881394451287, - "learning_rate": 3.6250886353337413e-06, - "loss": 0.8718, - "num_input_tokens_seen": 3146435, - "step": 149 - }, - { - "epoch": 0.018036433595863644, - "flos": 17079261218520.0, - "grad_norm": 4.929706845494926, - "learning_rate": 3.6299344457091488e-06, - "loss": 0.8562, - "num_input_tokens_seen": 3167015, - "step": 150 - }, - { - "epoch": 0.018156676486502734, - "flos": 13569515370360.0, - "grad_norm": 3.3389518751598963, - "learning_rate": 3.634748057771256e-06, - "loss": 0.9073, - "num_input_tokens_seen": 3182675, - "step": 151 - }, - { - "epoch": 0.018276919377141827, - "flos": 18647086878480.0, - "grad_norm": 3.94498785212862, - "learning_rate": 3.639529896584965e-06, - "loss": 0.8454, - "num_input_tokens_seen": 3203770, - "step": 152 - }, - { - "epoch": 0.018397162267780917, - "flos": 14649478571280.0, - "grad_norm": 6.89100448705239, - "learning_rate": 3.6442803788531233e-06, - "loss": 0.8609, - "num_input_tokens_seen": 3221450, - "step": 153 - }, - { - "epoch": 0.018517405158420007, - "flos": 20213551160160.0, - "grad_norm": 5.30421789503668, - "learning_rate": 3.6489999131344357e-06, - "loss": 0.9378, - "num_input_tokens_seen": 3243945, - "step": 154 - }, - { - "epoch": 0.0186376480490591, - "flos": 14136419275440.0, - "grad_norm": 4.6808846141380185, - "learning_rate": 
3.653688900054313e-06, - "loss": 0.8932, - "num_input_tokens_seen": 3262195, - "step": 155 - }, - { - "epoch": 0.01875789093969819, - "flos": 19563933253680.0, - "grad_norm": 4.5950417291948495, - "learning_rate": 3.6583477325089526e-06, - "loss": 0.7488, - "num_input_tokens_seen": 3282455, - "step": 156 - }, - { - "epoch": 0.01887813383033728, - "flos": 17836030124160.0, - "grad_norm": 3.970115847110452, - "learning_rate": 3.6629767958628916e-06, - "loss": 1.0233, - "num_input_tokens_seen": 3299550, - "step": 157 - }, - { - "epoch": 0.018998376720976373, - "flos": 10653294962280.0, - "grad_norm": 4.3476834497689145, - "learning_rate": 3.667576468140291e-06, - "loss": 0.8365, - "num_input_tokens_seen": 3317085, - "step": 158 - }, - { - "epoch": 0.019118619611615463, - "flos": 21499648089720.0, - "grad_norm": 3.523624660486108, - "learning_rate": 3.672147120210184e-06, - "loss": 0.8687, - "num_input_tokens_seen": 3333405, - "step": 159 - }, - { - "epoch": 0.019238862502254553, - "flos": 15270733644960.0, - "grad_norm": 3.9997799335978366, - "learning_rate": 3.6766891159659177e-06, - "loss": 0.8586, - "num_input_tokens_seen": 3351535, - "step": 160 - }, - { - "epoch": 0.019359105392893646, - "flos": 15621435954120.0, - "grad_norm": 10.175053206920929, - "learning_rate": 3.6812028124990075e-06, - "loss": 0.8592, - "num_input_tokens_seen": 3368525, - "step": 161 - }, - { - "epoch": 0.019479348283532736, - "flos": 11863389801960.0, - "grad_norm": 4.382431040872512, - "learning_rate": 3.6856885602676016e-06, - "loss": 0.8058, - "num_input_tokens_seen": 3384280, - "step": 162 - }, - { - "epoch": 0.019599591174171826, - "flos": 16162224883560.0, - "grad_norm": 3.77697726881213, - "learning_rate": 3.6901467032597733e-06, - "loss": 0.9314, - "num_input_tokens_seen": 3402485, - "step": 163 - }, - { - "epoch": 0.01971983406481092, - "flos": 14325619416840.0, - "grad_norm": 3.0422523840365825, - "learning_rate": 3.694577579151804e-06, - "loss": 0.8507, - "num_input_tokens_seen": 3420615, - "step": 164 - }, - { - "epoch": 0.01984007695545001, - "flos": 13948010633040.0, - "grad_norm": 4.325415689414194, - "learning_rate": 3.6989815194616703e-06, - "loss": 0.7339, - "num_input_tokens_seen": 3437530, - "step": 165 - }, - { - "epoch": 0.0199603198460891, - "flos": 15242117532480.0, - "grad_norm": 12.692833291577315, - "learning_rate": 3.703358849697888e-06, - "loss": 0.7898, - "num_input_tokens_seen": 3457160, - "step": 166 - }, - { - "epoch": 0.020080562736728192, - "flos": 15998823118200.0, - "grad_norm": 2.4375412766311797, - "learning_rate": 3.7077098895038803e-06, - "loss": 0.8195, - "num_input_tokens_seen": 3476250, - "step": 167 - }, - { - "epoch": 0.020200805627367282, - "flos": 15864227425080.0, - "grad_norm": 3.9530750618294626, - "learning_rate": 3.712034952798045e-06, - "loss": 0.9567, - "num_input_tokens_seen": 3494085, - "step": 168 - }, - { - "epoch": 0.02032104851800637, - "flos": 24636660787920.0, - "grad_norm": 3.668835749753182, - "learning_rate": 3.7163343479096656e-06, - "loss": 0.8156, - "num_input_tokens_seen": 3515380, - "step": 169 - }, - { - "epoch": 0.020441291408645465, - "flos": 23263702402200.0, - "grad_norm": 3.0124203746608598, - "learning_rate": 3.720608377710802e-06, - "loss": 0.8085, - "num_input_tokens_seen": 3535190, - "step": 170 - }, - { - "epoch": 0.020561534299284555, - "flos": 15270701985000.0, - "grad_norm": 3.61718473166572, - "learning_rate": 3.7248573397443277e-06, - "loss": 0.8468, - "num_input_tokens_seen": 3553835, - "step": 171 - }, - { - "epoch": 
0.020681777189923645, - "flos": 15352117928040.0, - "grad_norm": 6.106735088403738, - "learning_rate": 3.729081526348224e-06, - "loss": 0.9664, - "num_input_tokens_seen": 3572085, - "step": 172 - }, - { - "epoch": 0.020802020080562738, - "flos": 20725723977120.0, - "grad_norm": 3.008506318364507, - "learning_rate": 3.7332812247762777e-06, - "loss": 0.8251, - "num_input_tokens_seen": 3593105, - "step": 173 - }, - { - "epoch": 0.020922262971201828, - "flos": 14378419247400.0, - "grad_norm": 2.92885209443298, - "learning_rate": 3.737456717315293e-06, - "loss": 0.9382, - "num_input_tokens_seen": 3611790, - "step": 174 - }, - { - "epoch": 0.021042505861840918, - "flos": 11406897871920.0, - "grad_norm": 2.4415218722621295, - "learning_rate": 3.7416082813989552e-06, - "loss": 0.8989, - "num_input_tokens_seen": 3628505, - "step": 175 - }, - { - "epoch": 0.02116274875248001, - "flos": 15459648846720.0, - "grad_norm": 7.25044038684704, - "learning_rate": 3.745736189718439e-06, - "loss": 0.8936, - "num_input_tokens_seen": 3647480, - "step": 176 - }, - { - "epoch": 0.0212829916431191, - "flos": 18104208391680.0, - "grad_norm": 3.3418346873317377, - "learning_rate": 3.749840710329894e-06, - "loss": 0.6951, - "num_input_tokens_seen": 3667905, - "step": 177 - }, - { - "epoch": 0.02140323453375819, - "flos": 12130681590600.0, - "grad_norm": 5.581285623092852, - "learning_rate": 3.7539221067588938e-06, - "loss": 0.9607, - "num_input_tokens_seen": 3681600, - "step": 178 - }, - { - "epoch": 0.021523477424397284, - "flos": 14835512716680.0, - "grad_norm": 11.586723727958947, - "learning_rate": 3.757980638101964e-06, - "loss": 0.9063, - "num_input_tokens_seen": 3694815, - "step": 179 - }, - { - "epoch": 0.021643720315036374, - "flos": 19131846661440.0, - "grad_norm": 4.234239414198027, - "learning_rate": 3.7620165591252806e-06, - "loss": 0.8824, - "num_input_tokens_seen": 3712635, - "step": 180 - }, - { - "epoch": 0.021763963205675464, - "flos": 18157261501920.0, - "grad_norm": 25.464788323014538, - "learning_rate": 3.766030120360636e-06, - "loss": 0.9203, - "num_input_tokens_seen": 3731985, - "step": 181 - }, - { - "epoch": 0.021884206096314557, - "flos": 18917101423680.0, - "grad_norm": 8.002763747612299, - "learning_rate": 3.7700215681987578e-06, - "loss": 0.8687, - "num_input_tokens_seen": 3751475, - "step": 182 - }, - { - "epoch": 0.022004448986953647, - "flos": 14675530227000.0, - "grad_norm": 3.5275899097817818, - "learning_rate": 3.7739911449800767e-06, - "loss": 0.8102, - "num_input_tokens_seen": 3771250, - "step": 183 - }, - { - "epoch": 0.022124691877592736, - "flos": 14969506870560.0, - "grad_norm": 3.7002353902636216, - "learning_rate": 3.7779390890830114e-06, - "loss": 0.7917, - "num_input_tokens_seen": 3789620, - "step": 184 - }, - { - "epoch": 0.02224493476823183, - "flos": 17269031239200.0, - "grad_norm": 4.094090571323645, - "learning_rate": 3.7818656350098723e-06, - "loss": 0.8415, - "num_input_tokens_seen": 3810290, - "step": 185 - }, - { - "epoch": 0.02236517765887092, - "flos": 12327670082160.0, - "grad_norm": 6.46014982539891, - "learning_rate": 3.7857710134704447e-06, - "loss": 0.7493, - "num_input_tokens_seen": 3828945, - "step": 186 - }, - { - "epoch": 0.02248542054951001, - "flos": 32012173666680.0, - "grad_norm": 3.9089427974377084, - "learning_rate": 3.7896554514633234e-06, - "loss": 0.7712, - "num_input_tokens_seen": 3852950, - "step": 187 - }, - { - "epoch": 0.022605663440149103, - "flos": 17106769232400.0, - "grad_norm": 4.513436919187675, - "learning_rate": 
3.7935191723550955e-06, - "loss": 0.8429, - "num_input_tokens_seen": 3871625, - "step": 188 - }, - { - "epoch": 0.022725906330788193, - "flos": 21289398566280.0, - "grad_norm": 4.465374078234581, - "learning_rate": 3.797362395957408e-06, - "loss": 0.8766, - "num_input_tokens_seen": 3891910, - "step": 189 - }, - { - "epoch": 0.022846149221427282, - "flos": 17941598125320.0, - "grad_norm": 4.258998544671696, - "learning_rate": 3.8011853386020055e-06, - "loss": 0.7666, - "num_input_tokens_seen": 3912535, - "step": 190 - }, - { - "epoch": 0.022966392112066376, - "flos": 11301646470360.0, - "grad_norm": 4.710135775996305, - "learning_rate": 3.804988213213804e-06, - "loss": 0.8712, - "num_input_tokens_seen": 3930495, - "step": 191 - }, - { - "epoch": 0.023086635002705466, - "flos": 41002332794880.0, - "grad_norm": 1.033305979694802, - "learning_rate": 3.808771229382049e-06, - "loss": 0.6454, - "num_input_tokens_seen": 3989680, - "step": 192 - }, - { - "epoch": 0.023206877893344555, - "flos": 14105776925520.0, - "grad_norm": 20.39269475845992, - "learning_rate": 3.8125345934296324e-06, - "loss": 0.8414, - "num_input_tokens_seen": 4007710, - "step": 193 - }, - { - "epoch": 0.02332712078398365, - "flos": 16888288119360.0, - "grad_norm": 4.3272338390424325, - "learning_rate": 3.81627850848061e-06, - "loss": 0.8518, - "num_input_tokens_seen": 4028025, - "step": 194 - }, - { - "epoch": 0.02344736367462274, - "flos": 17890033033200.0, - "grad_norm": 6.654878143639405, - "learning_rate": 3.820003174525994e-06, - "loss": 0.8272, - "num_input_tokens_seen": 4047170, - "step": 195 - }, - { - "epoch": 0.02356760656526183, - "flos": 15783634641000.0, - "grad_norm": 14.866011267842662, - "learning_rate": 3.823708788487851e-06, - "loss": 0.8233, - "num_input_tokens_seen": 4063890, - "step": 196 - }, - { - "epoch": 0.02368784945590092, - "flos": 18458329976520.0, - "grad_norm": 3.465962157433028, - "learning_rate": 3.827395544281781e-06, - "loss": 0.8254, - "num_input_tokens_seen": 4085950, - "step": 197 - }, - { - "epoch": 0.02380809234654001, - "flos": 20211081683280.0, - "grad_norm": 6.831950846439402, - "learning_rate": 3.831063632877802e-06, - "loss": 0.7803, - "num_input_tokens_seen": 4105990, - "step": 198 - }, - { - "epoch": 0.0239283352371791, - "flos": 13326628919520.0, - "grad_norm": 13.73681119071865, - "learning_rate": 3.834713242359712e-06, - "loss": 0.7421, - "num_input_tokens_seen": 4123540, - "step": 199 - }, - { - "epoch": 0.02404857812781819, - "flos": 15646632790920.0, - "grad_norm": 4.032956966988739, - "learning_rate": 3.838344557982959e-06, - "loss": 0.8588, - "num_input_tokens_seen": 4144640, - "step": 200 - }, - { - "epoch": 0.024168821018457284, - "flos": 12138406620840.0, - "grad_norm": 6.402213560203922, - "learning_rate": 3.841957762231063e-06, - "loss": 0.8328, - "num_input_tokens_seen": 4161890, - "step": 201 - }, - { - "epoch": 0.024289063909096374, - "flos": 16702190654040.0, - "grad_norm": 3.010191454707514, - "learning_rate": 3.8455530348706454e-06, - "loss": 0.8668, - "num_input_tokens_seen": 4181210, - "step": 202 - }, - { - "epoch": 0.024409306799735464, - "flos": 12948260296680.0, - "grad_norm": 2.880885526894011, - "learning_rate": 3.849130553005099e-06, - "loss": 0.7622, - "num_input_tokens_seen": 4199145, - "step": 203 - }, - { - "epoch": 0.024529549690374557, - "flos": 15810572775600.0, - "grad_norm": 3.4276130676540393, - "learning_rate": 3.852690491126933e-06, - "loss": 0.8214, - "num_input_tokens_seen": 4218915, - "step": 204 - }, - { - "epoch": 0.024649792581013647, 
- "flos": 18722360789280.0, - "grad_norm": 5.187774834763557, - "learning_rate": 3.856233021168845e-06, - "loss": 0.8875, - "num_input_tokens_seen": 4238330, - "step": 205 - }, - { - "epoch": 0.024770035471652737, - "flos": 25342149541080.0, - "grad_norm": 5.733480191382933, - "learning_rate": 3.859758312553544e-06, - "loss": 0.909, - "num_input_tokens_seen": 4260270, - "step": 206 - }, - { - "epoch": 0.02489027836229183, - "flos": 15728396993520.0, - "grad_norm": 8.340873765433598, - "learning_rate": 3.8632665322423735e-06, - "loss": 0.9094, - "num_input_tokens_seen": 4279645, - "step": 207 - }, - { - "epoch": 0.02501052125293092, - "flos": 16996388917320.0, - "grad_norm": 32.16222270515422, - "learning_rate": 3.866757844782762e-06, - "loss": 0.8417, - "num_input_tokens_seen": 4299870, - "step": 208 - }, - { - "epoch": 0.02513076414357001, - "flos": 19341716265360.0, - "grad_norm": 5.94519123048955, - "learning_rate": 3.870232412354527e-06, - "loss": 0.9011, - "num_input_tokens_seen": 4316435, - "step": 209 - }, - { - "epoch": 0.025251007034209103, - "flos": 9871107600120.0, - "grad_norm": 3.392298553490418, - "learning_rate": 3.873690394815086e-06, - "loss": 0.9128, - "num_input_tokens_seen": 4332735, - "step": 210 - }, - { - "epoch": 0.025371249924848193, - "flos": 10950627561600.0, - "grad_norm": 6.470321095913905, - "learning_rate": 3.877131949743587e-06, - "loss": 0.8981, - "num_input_tokens_seen": 4349320, - "step": 211 - }, - { - "epoch": 0.025491492815487283, - "flos": 18724292046840.0, - "grad_norm": 4.261177399235895, - "learning_rate": 3.880557232483993e-06, - "loss": 0.7714, - "num_input_tokens_seen": 4368480, - "step": 212 - }, - { - "epoch": 0.025611735706126376, - "flos": 15081216903960.0, - "grad_norm": 33.99973072618554, - "learning_rate": 3.883966396187164e-06, - "loss": 0.8727, - "num_input_tokens_seen": 4387470, - "step": 213 - }, - { - "epoch": 0.025731978596765466, - "flos": 13920502619160.0, - "grad_norm": 2.9494239179980695, - "learning_rate": 3.887359591851937e-06, - "loss": 0.893, - "num_input_tokens_seen": 4404795, - "step": 214 - }, - { - "epoch": 0.025852221487404556, - "flos": 16215879533040.0, - "grad_norm": 4.66285063624262, - "learning_rate": 3.890736968365265e-06, - "loss": 0.9213, - "num_input_tokens_seen": 4424830, - "step": 215 - }, - { - "epoch": 0.02597246437804365, - "flos": 19455990755520.0, - "grad_norm": 22.001822656209928, - "learning_rate": 3.894098672541412e-06, - "loss": 0.845, - "num_input_tokens_seen": 4445455, - "step": 216 - }, - { - "epoch": 0.02609270726868274, - "flos": 23992393414680.0, - "grad_norm": 3.2469754450427324, - "learning_rate": 3.89744484916025e-06, - "loss": 0.7406, - "num_input_tokens_seen": 4466285, - "step": 217 - }, - { - "epoch": 0.02621295015932183, - "flos": 19234217006640.0, - "grad_norm": 3.636687543921564, - "learning_rate": 3.900775641004673e-06, - "loss": 0.8627, - "num_input_tokens_seen": 4485320, - "step": 218 - }, - { - "epoch": 0.026333193049960922, - "flos": 31577490957720.0, - "grad_norm": 6.491333098486613, - "learning_rate": 3.904091188897156e-06, - "loss": 0.7191, - "num_input_tokens_seen": 4504175, - "step": 219 - }, - { - "epoch": 0.026453435940600012, - "flos": 13107039707880.0, - "grad_norm": 3.9206249174911316, - "learning_rate": 3.90739163173548e-06, - "loss": 0.8025, - "num_input_tokens_seen": 4521730, - "step": 220 - }, - { - "epoch": 0.026573678831239102, - "flos": 13862098975680.0, - "grad_norm": 6.290399520630657, - "learning_rate": 3.910677106527646e-06, - "loss": 0.868, - 
"num_input_tokens_seen": 4538640, - "step": 221 - }, - { - "epoch": 0.026693921721878195, - "flos": 21453718470480.0, - "grad_norm": 4.1256556134416575, - "learning_rate": 3.913947748426004e-06, - "loss": 0.829, - "num_input_tokens_seen": 4555180, - "step": 222 - }, - { - "epoch": 0.026814164612517285, - "flos": 10267644548640.0, - "grad_norm": 22.238999980151704, - "learning_rate": 3.9172036907606136e-06, - "loss": 0.748, - "num_input_tokens_seen": 4568380, - "step": 223 - }, - { - "epoch": 0.026934407503156375, - "flos": 17213350352280.0, - "grad_norm": 8.100102233241556, - "learning_rate": 3.920445065071855e-06, - "loss": 0.9329, - "num_input_tokens_seen": 4589265, - "step": 224 - }, - { - "epoch": 0.027054650393795468, - "flos": 21238403353440.0, - "grad_norm": 10.000373833344463, - "learning_rate": 3.923672001142322e-06, - "loss": 0.7597, - "num_input_tokens_seen": 4609295, - "step": 225 - }, - { - "epoch": 0.027174893284434558, - "flos": 23074692220560.0, - "grad_norm": 7.858694968561047, - "learning_rate": 3.926884627027996e-06, - "loss": 0.8391, - "num_input_tokens_seen": 4632785, - "step": 226 - }, - { - "epoch": 0.027295136175073648, - "flos": 16134970149360.0, - "grad_norm": 2.831703072310705, - "learning_rate": 3.930083069088744e-06, - "loss": 0.7694, - "num_input_tokens_seen": 4652505, - "step": 227 - }, - { - "epoch": 0.02741537906571274, - "flos": 44814761775600.0, - "grad_norm": 1.0205057682582694, - "learning_rate": 3.933267452018137e-06, - "loss": 0.6265, - "num_input_tokens_seen": 4712020, - "step": 228 - }, - { - "epoch": 0.02753562195635183, - "flos": 18023172368160.0, - "grad_norm": 2.8999973428648254, - "learning_rate": 3.936437898872622e-06, - "loss": 0.8288, - "num_input_tokens_seen": 4731715, - "step": 229 - }, - { - "epoch": 0.02765586484699092, - "flos": 25045133541360.0, - "grad_norm": 12.282690531570676, - "learning_rate": 3.9395945311000525e-06, - "loss": 0.784, - "num_input_tokens_seen": 4753060, - "step": 230 - }, - { - "epoch": 0.027776107737630014, - "flos": 10788270574920.0, - "grad_norm": 3.799082173381805, - "learning_rate": 3.942737468567608e-06, - "loss": 0.8986, - "num_input_tokens_seen": 4770795, - "step": 231 - }, - { - "epoch": 0.027896350628269104, - "flos": 35277101806440.0, - "grad_norm": 6.2837394981076455, - "learning_rate": 3.9458668295891026e-06, - "loss": 0.8501, - "num_input_tokens_seen": 4792800, - "step": 232 - }, - { - "epoch": 0.028016593518908194, - "flos": 15860491549800.0, - "grad_norm": 4.587677139895199, - "learning_rate": 3.948982730951712e-06, - "loss": 0.8539, - "num_input_tokens_seen": 4810615, - "step": 233 - }, - { - "epoch": 0.028136836409547287, - "flos": 13459230035160.0, - "grad_norm": 2.945438129366458, - "learning_rate": 3.9520852879421254e-06, - "loss": 0.807, - "num_input_tokens_seen": 4827680, - "step": 234 - }, - { - "epoch": 0.028257079300186377, - "flos": 23182191479280.0, - "grad_norm": 2.938526866884137, - "learning_rate": 3.955174614372137e-06, - "loss": 0.7992, - "num_input_tokens_seen": 4847165, - "step": 235 - }, - { - "epoch": 0.028377322190825467, - "flos": 17214553430760.0, - "grad_norm": 3.173806490310878, - "learning_rate": 3.9582508226037045e-06, - "loss": 0.8347, - "num_input_tokens_seen": 4867025, - "step": 236 - }, - { - "epoch": 0.02849756508146456, - "flos": 14968620391680.0, - "grad_norm": 3.351688836124144, - "learning_rate": 3.9613140235734636e-06, - "loss": 0.9091, - "num_input_tokens_seen": 4883725, - "step": 237 - }, - { - "epoch": 0.02861780797210365, - "flos": 10383881956320.0, - 
"grad_norm": 7.9579739581015385, - "learning_rate": 3.96436432681674e-06, - "loss": 0.8029, - "num_input_tokens_seen": 4901435, - "step": 238 - }, - { - "epoch": 0.02873805086274274, - "flos": 18912669029280.0, - "grad_norm": 3.198206703047425, - "learning_rate": 3.967401840491044e-06, - "loss": 0.8818, - "num_input_tokens_seen": 4920435, - "step": 239 - }, - { - "epoch": 0.028858293753381833, - "flos": 12618480729720.0, - "grad_norm": 2.7225222670920455, - "learning_rate": 3.97042667139909e-06, - "loss": 0.8653, - "num_input_tokens_seen": 4937480, - "step": 240 - }, - { - "epoch": 0.028978536644020923, - "flos": 16862932982760.0, - "grad_norm": 2.62186062505371, - "learning_rate": 3.973438925011327e-06, - "loss": 0.8649, - "num_input_tokens_seen": 4955485, - "step": 241 - }, - { - "epoch": 0.029098779534660012, - "flos": 20779030367040.0, - "grad_norm": 3.3239515042491368, - "learning_rate": 3.976438705488002e-06, - "loss": 0.9123, - "num_input_tokens_seen": 4974865, - "step": 242 - }, - { - "epoch": 0.029219022425299106, - "flos": 10086549357000.0, - "grad_norm": 2.8760294522407537, - "learning_rate": 3.9794261157007744e-06, - "loss": 0.9177, - "num_input_tokens_seen": 4992340, - "step": 243 - }, - { - "epoch": 0.029339265315938196, - "flos": 14186433029520.0, - "grad_norm": 2.677344451717199, - "learning_rate": 3.982401257253887e-06, - "loss": 0.8182, - "num_input_tokens_seen": 5010400, - "step": 244 - }, - { - "epoch": 0.029459508206577285, - "flos": 11409715608360.0, - "grad_norm": 41.92681321082757, - "learning_rate": 3.985364230504893e-06, - "loss": 0.8819, - "num_input_tokens_seen": 5028005, - "step": 245 - }, - { - "epoch": 0.02957975109721638, - "flos": 20698659202680.0, - "grad_norm": 2.7020175848387953, - "learning_rate": 3.988315134584976e-06, - "loss": 0.8248, - "num_input_tokens_seen": 5047405, - "step": 246 - }, - { - "epoch": 0.02969999398785547, - "flos": 17808458790360.0, - "grad_norm": 2.3090089789241177, - "learning_rate": 3.991254067418851e-06, - "loss": 0.7936, - "num_input_tokens_seen": 5067665, - "step": 247 - }, - { - "epoch": 0.02982023687849456, - "flos": 25773507954240.0, - "grad_norm": 4.437955376268776, - "learning_rate": 3.994181125744254e-06, - "loss": 0.8211, - "num_input_tokens_seen": 5089190, - "step": 248 - }, - { - "epoch": 0.02994047976913365, - "flos": 19186007870280.0, - "grad_norm": 5.082189441655439, - "learning_rate": 3.99709640513106e-06, - "loss": 0.7378, - "num_input_tokens_seen": 5109790, - "step": 249 - }, - { - "epoch": 0.03006072265977274, - "flos": 18776711957880.0, - "grad_norm": 3.074869050750008, - "learning_rate": 4e-06, - "loss": 0.8389, - "num_input_tokens_seen": 5129345, - "step": 250 - }, - { - "epoch": 0.03018096555041183, - "flos": 16188656458800.0, - "grad_norm": 8.217389012194468, - "learning_rate": 3.999999848300794e-06, - "loss": 0.8805, - "num_input_tokens_seen": 5148050, - "step": 251 - }, - { - "epoch": 0.030301208441050925, - "flos": 22154109970080.0, - "grad_norm": 1.9718402647897704, - "learning_rate": 3.999999393203203e-06, - "loss": 0.8776, - "num_input_tokens_seen": 5170180, - "step": 252 - }, - { - "epoch": 0.030421451331690014, - "flos": 17294323055880.0, - "grad_norm": 2.5577961617648817, - "learning_rate": 3.999998634707293e-06, - "loss": 0.8426, - "num_input_tokens_seen": 5189450, - "step": 253 - }, - { - "epoch": 0.030541694222329104, - "flos": 20482077687240.0, - "grad_norm": 3.618689338557774, - "learning_rate": 3.999997572813182e-06, - "loss": 0.961, - "num_input_tokens_seen": 5206980, - "step": 254 - }, - 
{ - "epoch": 0.030661937112968194, - "flos": 13569895289880.0, - "grad_norm": 3.0540265004745106, - "learning_rate": 3.999996207521028e-06, - "loss": 0.864, - "num_input_tokens_seen": 5225410, - "step": 255 - }, - { - "epoch": 0.030782180003607287, - "flos": 9411164734440.0, - "grad_norm": 20.414939257148106, - "learning_rate": 3.999994538831039e-06, - "loss": 0.8008, - "num_input_tokens_seen": 5241715, - "step": 256 - }, - { - "epoch": 0.030902422894246377, - "flos": 17083471993200.0, - "grad_norm": 3.7829989337319656, - "learning_rate": 3.99999256674347e-06, - "loss": 0.8289, - "num_input_tokens_seen": 5261585, - "step": 257 - }, - { - "epoch": 0.031022665784885467, - "flos": 40589395987080.0, - "grad_norm": 0.8942019926904305, - "learning_rate": 3.999990291258618e-06, - "loss": 0.5527, - "num_input_tokens_seen": 5319995, - "step": 258 - }, - { - "epoch": 0.03114290867552456, - "flos": 14109607780680.0, - "grad_norm": 2.9059479259070518, - "learning_rate": 3.999987712376829e-06, - "loss": 0.8535, - "num_input_tokens_seen": 5338035, - "step": 259 - }, - { - "epoch": 0.031263151566163654, - "flos": 15324514934280.0, - "grad_norm": 2.8035816785991203, - "learning_rate": 3.999984830098494e-06, - "loss": 0.7955, - "num_input_tokens_seen": 5357335, - "step": 260 - }, - { - "epoch": 0.03138339445680274, - "flos": 10760889200880.0, - "grad_norm": 7.129473971401167, - "learning_rate": 3.999981644424051e-06, - "loss": 0.9483, - "num_input_tokens_seen": 5371855, - "step": 261 - }, - { - "epoch": 0.03150363734744183, - "flos": 8439049051800.0, - "grad_norm": 3.003857199239886, - "learning_rate": 3.999978155353982e-06, - "loss": 0.8583, - "num_input_tokens_seen": 5388720, - "step": 262 - }, - { - "epoch": 0.03162388023808092, - "flos": 24773662638000.0, - "grad_norm": 2.622504183144956, - "learning_rate": 3.9999743628888186e-06, - "loss": 0.7909, - "num_input_tokens_seen": 5410230, - "step": 263 - }, - { - "epoch": 0.03174412312872001, - "flos": 15214261259040.0, - "grad_norm": 2.7668018315707226, - "learning_rate": 3.999970267029133e-06, - "loss": 0.8588, - "num_input_tokens_seen": 5428910, - "step": 264 - }, - { - "epoch": 0.0318643660193591, - "flos": 17372762962680.0, - "grad_norm": 2.936468064877417, - "learning_rate": 3.999965867775548e-06, - "loss": 0.7891, - "num_input_tokens_seen": 5449025, - "step": 265 - }, - { - "epoch": 0.0319846089099982, - "flos": 10113297531840.0, - "grad_norm": 3.3726323617986536, - "learning_rate": 3.9999611651287315e-06, - "loss": 0.8473, - "num_input_tokens_seen": 5466900, - "step": 266 - }, - { - "epoch": 0.03210485180063729, - "flos": 10731006690000.0, - "grad_norm": 10.731201413488401, - "learning_rate": 3.999956159089396e-06, - "loss": 0.7871, - "num_input_tokens_seen": 5484070, - "step": 267 - }, - { - "epoch": 0.03222509469127638, - "flos": 21210768699720.0, - "grad_norm": 4.431924675504504, - "learning_rate": 3.999950849658302e-06, - "loss": 0.7761, - "num_input_tokens_seen": 5502710, - "step": 268 - }, - { - "epoch": 0.03234533758191547, - "flos": 12354639876720.0, - "grad_norm": 4.8889826250241555, - "learning_rate": 3.999945236836254e-06, - "loss": 0.8316, - "num_input_tokens_seen": 5521395, - "step": 269 - }, - { - "epoch": 0.03246558047255456, - "flos": 13866309750360.0, - "grad_norm": 3.053766370269581, - "learning_rate": 3.999939320624103e-06, - "loss": 0.9184, - "num_input_tokens_seen": 5536265, - "step": 270 - }, - { - "epoch": 0.03258582336319365, - "flos": 17373111222240.0, - "grad_norm": 6.029000767145758, - "learning_rate": 
3.999933101022749e-06, - "loss": 0.8857, - "num_input_tokens_seen": 5556390, - "step": 271 - }, - { - "epoch": 0.032706066253832745, - "flos": 20291104588080.0, - "grad_norm": 2.562809809122951, - "learning_rate": 3.999926578033132e-06, - "loss": 0.8539, - "num_input_tokens_seen": 5575925, - "step": 272 - }, - { - "epoch": 0.032826309144471835, - "flos": 33578574628440.0, - "grad_norm": 2.719724984570907, - "learning_rate": 3.999919751656244e-06, - "loss": 0.6314, - "num_input_tokens_seen": 5602545, - "step": 273 - }, - { - "epoch": 0.032946552035110925, - "flos": 18916056645000.0, - "grad_norm": 3.1808602984348715, - "learning_rate": 3.9999126218931195e-06, - "loss": 0.764, - "num_input_tokens_seen": 5620300, - "step": 274 - }, - { - "epoch": 0.033066794925750015, - "flos": 11004440510880.0, - "grad_norm": 3.617452722964166, - "learning_rate": 3.99990518874484e-06, - "loss": 0.8904, - "num_input_tokens_seen": 5636460, - "step": 275 - }, - { - "epoch": 0.033187037816389105, - "flos": 16668318988200.0, - "grad_norm": 3.169077273591039, - "learning_rate": 3.999897452212534e-06, - "loss": 0.9113, - "num_input_tokens_seen": 5653510, - "step": 276 - }, - { - "epoch": 0.033307280707028195, - "flos": 17079989397600.0, - "grad_norm": 2.313267451602348, - "learning_rate": 3.999889412297374e-06, - "loss": 0.9829, - "num_input_tokens_seen": 5672655, - "step": 277 - }, - { - "epoch": 0.03342752359766729, - "flos": 21156860770560.0, - "grad_norm": 3.1873402274984826, - "learning_rate": 3.999881069000581e-06, - "loss": 0.7748, - "num_input_tokens_seen": 5692105, - "step": 278 - }, - { - "epoch": 0.03354776648830638, - "flos": 14158133516640.0, - "grad_norm": 2.924069972410415, - "learning_rate": 3.99987242232342e-06, - "loss": 0.8616, - "num_input_tokens_seen": 5706830, - "step": 279 - }, - { - "epoch": 0.03366800937894547, - "flos": 13029771219600.0, - "grad_norm": 2.628388080288801, - "learning_rate": 3.9998634722672026e-06, - "loss": 0.7867, - "num_input_tokens_seen": 5726605, - "step": 280 - }, - { - "epoch": 0.03378825226958456, - "flos": 26418725126280.0, - "grad_norm": 2.0372591777374303, - "learning_rate": 3.999854218833286e-06, - "loss": 0.7728, - "num_input_tokens_seen": 5747145, - "step": 281 - }, - { - "epoch": 0.03390849516022365, - "flos": 18834672361920.0, - "grad_norm": 2.5193617652842595, - "learning_rate": 3.999844662023075e-06, - "loss": 0.8081, - "num_input_tokens_seen": 5766740, - "step": 282 - }, - { - "epoch": 0.03402873805086274, - "flos": 15564615308640.0, - "grad_norm": 1.9462465548622643, - "learning_rate": 3.999834801838018e-06, - "loss": 0.9084, - "num_input_tokens_seen": 5785440, - "step": 283 - }, - { - "epoch": 0.03414898094150183, - "flos": 16620743051040.0, - "grad_norm": 4.65276436621914, - "learning_rate": 3.9998246382796115e-06, - "loss": 0.7357, - "num_input_tokens_seen": 5804740, - "step": 284 - }, - { - "epoch": 0.03426922383214093, - "flos": 13785558666480.0, - "grad_norm": 3.9334931346522577, - "learning_rate": 3.999814171349399e-06, - "loss": 0.8912, - "num_input_tokens_seen": 5822320, - "step": 285 - }, - { - "epoch": 0.03438946672278002, - "flos": 25532077861560.0, - "grad_norm": 2.325005918925965, - "learning_rate": 3.9998034010489655e-06, - "loss": 0.7275, - "num_input_tokens_seen": 5845730, - "step": 286 - }, - { - "epoch": 0.03450970961341911, - "flos": 16216132812720.0, - "grad_norm": 3.159568175495352, - "learning_rate": 3.999792327379946e-06, - "loss": 0.7549, - "num_input_tokens_seen": 5864825, - "step": 287 - }, - { - "epoch": 0.034629952504058197, - 
"flos": 15891640459080.0, - "grad_norm": 2.5296105621117384, - "learning_rate": 3.999780950344021e-06, - "loss": 0.9601, - "num_input_tokens_seen": 5882735, - "step": 288 - }, - { - "epoch": 0.034750195394697286, - "flos": 14650080110520.0, - "grad_norm": 2.3970485977510902, - "learning_rate": 3.999769269942916e-06, - "loss": 0.8034, - "num_input_tokens_seen": 5902495, - "step": 289 - }, - { - "epoch": 0.034870438285336376, - "flos": 20509902300720.0, - "grad_norm": 2.24707731521888, - "learning_rate": 3.999757286178402e-06, - "loss": 0.806, - "num_input_tokens_seen": 5924650, - "step": 290 - }, - { - "epoch": 0.03499068117597547, - "flos": 16107557115360.0, - "grad_norm": 2.104343432061367, - "learning_rate": 3.999744999052299e-06, - "loss": 0.8942, - "num_input_tokens_seen": 5945760, - "step": 291 - }, - { - "epoch": 0.03511092406661456, - "flos": 42178397668920.0, - "grad_norm": 0.9622534448488158, - "learning_rate": 3.9997324085664675e-06, - "loss": 0.6571, - "num_input_tokens_seen": 6005710, - "step": 292 - }, - { - "epoch": 0.03523116695725365, - "flos": 16781200440120.0, - "grad_norm": 2.6475382040435447, - "learning_rate": 3.999719514722821e-06, - "loss": 0.9122, - "num_input_tokens_seen": 6025560, - "step": 293 - }, - { - "epoch": 0.03535140984789274, - "flos": 27123928939800.0, - "grad_norm": 3.0616995874862822, - "learning_rate": 3.999706317523314e-06, - "loss": 0.747, - "num_input_tokens_seen": 6043840, - "step": 294 - }, - { - "epoch": 0.03547165273853183, - "flos": 14946937810440.0, - "grad_norm": 2.672730872063935, - "learning_rate": 3.999692816969948e-06, - "loss": 0.8407, - "num_input_tokens_seen": 6063095, - "step": 295 - }, - { - "epoch": 0.03559189562917092, - "flos": 51512420554920.0, - "grad_norm": 0.9458578759144756, - "learning_rate": 3.999679013064772e-06, - "loss": 0.7015, - "num_input_tokens_seen": 6129560, - "step": 296 - }, - { - "epoch": 0.03571213851981002, - "flos": 15836909370960.0, - "grad_norm": 4.026125941149152, - "learning_rate": 3.99966490580988e-06, - "loss": 0.8487, - "num_input_tokens_seen": 6146640, - "step": 297 - }, - { - "epoch": 0.03583238141044911, - "flos": 32335842861360.0, - "grad_norm": 7.367363289508643, - "learning_rate": 3.999650495207411e-06, - "loss": 0.654, - "num_input_tokens_seen": 6172385, - "step": 298 - }, - { - "epoch": 0.0359526243010882, - "flos": 13807621167240.0, - "grad_norm": 3.6305584877765713, - "learning_rate": 3.999635781259553e-06, - "loss": 0.894, - "num_input_tokens_seen": 6187370, - "step": 299 - }, - { - "epoch": 0.03607286719172729, - "flos": 45456179752440.0, - "grad_norm": 0.9302988046106397, - "learning_rate": 3.999620763968535e-06, - "loss": 0.5524, - "num_input_tokens_seen": 6245965, - "step": 300 - }, - { - "epoch": 0.03619311008236638, - "flos": 20401136643600.0, - "grad_norm": 2.057269080959554, - "learning_rate": 3.999605443336638e-06, - "loss": 0.8677, - "num_input_tokens_seen": 6267815, - "step": 301 - }, - { - "epoch": 0.03631335297300547, - "flos": 9893011801080.0, - "grad_norm": 2.9757424974709896, - "learning_rate": 3.999589819366185e-06, - "loss": 0.8891, - "num_input_tokens_seen": 6281325, - "step": 302 - }, - { - "epoch": 0.036433595863644565, - "flos": 20261855276400.0, - "grad_norm": 2.4982691495190665, - "learning_rate": 3.999573892059547e-06, - "loss": 0.8226, - "num_input_tokens_seen": 6300175, - "step": 303 - }, - { - "epoch": 0.036553838754283655, - "flos": 17998133831160.0, - "grad_norm": 2.7999853814969606, - "learning_rate": 3.999557661419138e-06, - "loss": 0.8016, - 
"num_input_tokens_seen": 6320045, - "step": 304 - }, - { - "epoch": 0.036674081644922744, - "flos": 17133928986720.0, - "grad_norm": 2.224983592455085, - "learning_rate": 3.9995411274474225e-06, - "loss": 0.7912, - "num_input_tokens_seen": 6339045, - "step": 305 - }, - { - "epoch": 0.036794324535561834, - "flos": 20157237074040.0, - "grad_norm": 3.262334744883011, - "learning_rate": 3.999524290146908e-06, - "loss": 0.8194, - "num_input_tokens_seen": 6358970, - "step": 306 - }, - { - "epoch": 0.036914567426200924, - "flos": 14217391979040.0, - "grad_norm": 2.781784006351968, - "learning_rate": 3.9995071495201485e-06, - "loss": 0.8896, - "num_input_tokens_seen": 6375795, - "step": 307 - }, - { - "epoch": 0.037034810316840014, - "flos": 16323505431600.0, - "grad_norm": 3.3225322887726048, - "learning_rate": 3.999489705569744e-06, - "loss": 0.9472, - "num_input_tokens_seen": 6393215, - "step": 308 - }, - { - "epoch": 0.03715505320747911, - "flos": 13569800310000.0, - "grad_norm": 4.057253927594796, - "learning_rate": 3.999471958298341e-06, - "loss": 0.8499, - "num_input_tokens_seen": 6411845, - "step": 309 - }, - { - "epoch": 0.0372752960981182, - "flos": 26422524321480.0, - "grad_norm": 4.434397727902687, - "learning_rate": 3.999453907708631e-06, - "loss": 0.7586, - "num_input_tokens_seen": 6433970, - "step": 310 - }, - { - "epoch": 0.03739553898875729, - "flos": 15216984015600.0, - "grad_norm": 2.4282594267996886, - "learning_rate": 3.999435553803353e-06, - "loss": 0.8129, - "num_input_tokens_seen": 6453090, - "step": 311 - }, - { - "epoch": 0.03751578187939638, - "flos": 14809651020720.0, - "grad_norm": 3.642676434455871, - "learning_rate": 3.999416896585292e-06, - "loss": 0.8361, - "num_input_tokens_seen": 6469840, - "step": 312 - }, - { - "epoch": 0.03763602477003547, - "flos": 15108946537560.0, - "grad_norm": 3.616960753219757, - "learning_rate": 3.9993979360572775e-06, - "loss": 0.8467, - "num_input_tokens_seen": 6489700, - "step": 313 - }, - { - "epoch": 0.03775626766067456, - "flos": 12165946294680.0, - "grad_norm": 3.5178251036802566, - "learning_rate": 3.999378672222185e-06, - "loss": 0.8049, - "num_input_tokens_seen": 6507205, - "step": 314 - }, - { - "epoch": 0.03787651055131366, - "flos": 15459458886960.0, - "grad_norm": 2.357926789503742, - "learning_rate": 3.9993591050829385e-06, - "loss": 0.824, - "num_input_tokens_seen": 6524790, - "step": 315 - }, - { - "epoch": 0.037996753441952746, - "flos": 16108063674720.0, - "grad_norm": 2.5220471026796396, - "learning_rate": 3.999339234642506e-06, - "loss": 0.7916, - "num_input_tokens_seen": 6544260, - "step": 316 - }, - { - "epoch": 0.038116996332591836, - "flos": 20319942320280.0, - "grad_norm": 3.79945304849427, - "learning_rate": 3.9993190609038994e-06, - "loss": 0.8287, - "num_input_tokens_seen": 6562745, - "step": 317 - }, - { - "epoch": 0.038237239223230926, - "flos": 15486270381720.0, - "grad_norm": 2.830790407044167, - "learning_rate": 3.999298583870182e-06, - "loss": 0.8335, - "num_input_tokens_seen": 6582050, - "step": 318 - }, - { - "epoch": 0.038357482113870016, - "flos": 18726729863760.0, - "grad_norm": 2.7882460139028997, - "learning_rate": 3.999277803544458e-06, - "loss": 0.7592, - "num_input_tokens_seen": 6601925, - "step": 319 - }, - { - "epoch": 0.038477725004509106, - "flos": 43649900036040.0, - "grad_norm": 0.9438276372642573, - "learning_rate": 3.999256719929882e-06, - "loss": 0.6623, - "num_input_tokens_seen": 6662920, - "step": 320 - }, - { - "epoch": 0.0385979678951482, - "flos": 49635738070200.0, - 
"grad_norm": 1.255009341262157, - "learning_rate": 3.999235333029651e-06, - "loss": 0.7441, - "num_input_tokens_seen": 6716580, - "step": 321 - }, - { - "epoch": 0.03871821078578729, - "flos": 16647174626280.0, - "grad_norm": 2.9061768225204196, - "learning_rate": 3.999213642847009e-06, - "loss": 0.8141, - "num_input_tokens_seen": 6736885, - "step": 322 - }, - { - "epoch": 0.03883845367642638, - "flos": 19261725020520.0, - "grad_norm": 2.1544046660334857, - "learning_rate": 3.999191649385247e-06, - "loss": 0.9105, - "num_input_tokens_seen": 6757780, - "step": 323 - }, - { - "epoch": 0.03895869656706547, - "flos": 41973467018760.0, - "grad_norm": 0.8670651117512483, - "learning_rate": 3.999169352647702e-06, - "loss": 0.6279, - "num_input_tokens_seen": 6818680, - "step": 324 - }, - { - "epoch": 0.03907893945770456, - "flos": 18213670567920.0, - "grad_norm": 4.741341982129522, - "learning_rate": 3.999146752637755e-06, - "loss": 0.8063, - "num_input_tokens_seen": 6839445, - "step": 325 - }, - { - "epoch": 0.03919918234834365, - "flos": 13407063403800.0, - "grad_norm": 2.885942119766115, - "learning_rate": 3.999123849358836e-06, - "loss": 0.8915, - "num_input_tokens_seen": 6856830, - "step": 326 - }, - { - "epoch": 0.03931942523898275, - "flos": 18481215636240.0, - "grad_norm": 2.3250126869101626, - "learning_rate": 3.999100642814418e-06, - "loss": 0.7334, - "num_input_tokens_seen": 6876990, - "step": 327 - }, - { - "epoch": 0.03943966812962184, - "flos": 17025511589160.0, - "grad_norm": 3.4197284811555813, - "learning_rate": 3.999077133008022e-06, - "loss": 0.8795, - "num_input_tokens_seen": 6895295, - "step": 328 - }, - { - "epoch": 0.03955991102026093, - "flos": 21400032161040.0, - "grad_norm": 2.8641875577388016, - "learning_rate": 3.9990533199432145e-06, - "loss": 0.8937, - "num_input_tokens_seen": 6916510, - "step": 329 - }, - { - "epoch": 0.03968015391090002, - "flos": 12840064518840.0, - "grad_norm": 2.9814993700061665, - "learning_rate": 3.999029203623608e-06, - "loss": 0.7498, - "num_input_tokens_seen": 6933950, - "step": 330 - }, - { - "epoch": 0.03980039680153911, - "flos": 15946149927480.0, - "grad_norm": 2.2320796674231036, - "learning_rate": 3.99900478405286e-06, - "loss": 0.8549, - "num_input_tokens_seen": 6952980, - "step": 331 - }, - { - "epoch": 0.0399206396921782, - "flos": 11058601719720.0, - "grad_norm": 2.688839955405701, - "learning_rate": 3.998980061234676e-06, - "loss": 0.7976, - "num_input_tokens_seen": 6970615, - "step": 332 - }, - { - "epoch": 0.040040882582817294, - "flos": 10486252301520.0, - "grad_norm": 7.009411615555835, - "learning_rate": 3.9989550351728055e-06, - "loss": 0.7495, - "num_input_tokens_seen": 6987265, - "step": 333 - }, - { - "epoch": 0.040161125473456384, - "flos": 14082258066600.0, - "grad_norm": 2.8751223994198067, - "learning_rate": 3.998929705871046e-06, - "loss": 0.8232, - "num_input_tokens_seen": 7004340, - "step": 334 - }, - { - "epoch": 0.040281368364095474, - "flos": 13001851626240.0, - "grad_norm": 3.6134627013138827, - "learning_rate": 3.99890407333324e-06, - "loss": 0.8777, - "num_input_tokens_seen": 7022590, - "step": 335 - }, - { - "epoch": 0.040401611254734564, - "flos": 14299124521680.0, - "grad_norm": 4.0064054699991445, - "learning_rate": 3.998878137563275e-06, - "loss": 0.8525, - "num_input_tokens_seen": 7041860, - "step": 336 - }, - { - "epoch": 0.040521854145373654, - "flos": 16134970149360.0, - "grad_norm": 2.955615250104931, - "learning_rate": 3.998851898565085e-06, - "loss": 0.8285, - "num_input_tokens_seen": 7061385, - 
"step": 337 - }, - { - "epoch": 0.04064209703601274, - "flos": 16593931556280.0, - "grad_norm": 2.798799758203029, - "learning_rate": 3.998825356342653e-06, - "loss": 0.8296, - "num_input_tokens_seen": 7081280, - "step": 338 - }, - { - "epoch": 0.04076233992665183, - "flos": 28366534067040.0, - "grad_norm": 3.132019551446333, - "learning_rate": 3.998798510900003e-06, - "loss": 0.7103, - "num_input_tokens_seen": 7103800, - "step": 339 - }, - { - "epoch": 0.04088258281729093, - "flos": 18970027894080.0, - "grad_norm": 3.5833860142110368, - "learning_rate": 3.998771362241207e-06, - "loss": 0.8302, - "num_input_tokens_seen": 7123925, - "step": 340 - }, - { - "epoch": 0.04100282570793002, - "flos": 14458632111960.0, - "grad_norm": 3.191806950154671, - "learning_rate": 3.998743910370385e-06, - "loss": 0.8631, - "num_input_tokens_seen": 7142505, - "step": 341 - }, - { - "epoch": 0.04112306859856911, - "flos": 16512895532760.0, - "grad_norm": 3.532527726180004, - "learning_rate": 3.998716155291702e-06, - "loss": 0.7229, - "num_input_tokens_seen": 7160065, - "step": 342 - }, - { - "epoch": 0.0412433114892082, - "flos": 18344150466240.0, - "grad_norm": 2.227326165126184, - "learning_rate": 3.998688097009366e-06, - "loss": 0.8911, - "num_input_tokens_seen": 7180550, - "step": 343 - }, - { - "epoch": 0.04136355437984729, - "flos": 18456367059000.0, - "grad_norm": 3.0494231694805727, - "learning_rate": 3.998659735527636e-06, - "loss": 0.795, - "num_input_tokens_seen": 7199360, - "step": 344 - }, - { - "epoch": 0.04148379727048638, - "flos": 16810164812160.0, - "grad_norm": 2.182072254400686, - "learning_rate": 3.998631070850813e-06, - "loss": 0.762, - "num_input_tokens_seen": 7219700, - "step": 345 - }, - { - "epoch": 0.041604040161125476, - "flos": 10221018410280.0, - "grad_norm": 3.371242267966111, - "learning_rate": 3.9986021029832455e-06, - "loss": 0.8241, - "num_input_tokens_seen": 7236735, - "step": 346 - }, - { - "epoch": 0.041724283051764566, - "flos": 8761705127760.0, - "grad_norm": 4.441958699219444, - "learning_rate": 3.9985728319293285e-06, - "loss": 0.9077, - "num_input_tokens_seen": 7250430, - "step": 347 - }, - { - "epoch": 0.041844525942403656, - "flos": 8979078142200.0, - "grad_norm": 3.1724349406179497, - "learning_rate": 3.998543257693501e-06, - "loss": 0.8394, - "num_input_tokens_seen": 7266905, - "step": 348 - }, - { - "epoch": 0.041964768833042745, - "flos": 17404101831720.0, - "grad_norm": 2.2346626576741992, - "learning_rate": 3.998513380280251e-06, - "loss": 0.8709, - "num_input_tokens_seen": 7286905, - "step": 349 - }, - { - "epoch": 0.042085011723681835, - "flos": 8601785958000.0, - "grad_norm": 2.8870116047852235, - "learning_rate": 3.99848319969411e-06, - "loss": 0.9399, - "num_input_tokens_seen": 7304225, - "step": 350 - }, - { - "epoch": 0.042205254614320925, - "flos": 12301111867080.0, - "grad_norm": 2.9539222246700088, - "learning_rate": 3.9984527159396564e-06, - "loss": 0.7701, - "num_input_tokens_seen": 7322585, - "step": 351 - }, - { - "epoch": 0.04232549750496002, - "flos": 18402459129840.0, - "grad_norm": 6.043488571998217, - "learning_rate": 3.9984219290215154e-06, - "loss": 0.8239, - "num_input_tokens_seen": 7342480, - "step": 352 - }, - { - "epoch": 0.04244574039559911, - "flos": 19591124667960.0, - "grad_norm": 2.9407238415637242, - "learning_rate": 3.998390838944356e-06, - "loss": 0.8849, - "num_input_tokens_seen": 7363705, - "step": 353 - }, - { - "epoch": 0.0425659832862382, - "flos": 15297766759440.0, - "grad_norm": 2.63691215653517, - "learning_rate": 
3.998359445712895e-06, - "loss": 0.8723, - "num_input_tokens_seen": 7382530, - "step": 354 - }, - { - "epoch": 0.04268622617687729, - "flos": 17079736117920.0, - "grad_norm": 3.3820637850078783, - "learning_rate": 3.9983277493318955e-06, - "loss": 0.7897, - "num_input_tokens_seen": 7401545, - "step": 355 - }, - { - "epoch": 0.04280646906751638, - "flos": 19050367398480.0, - "grad_norm": 1.9513070152305578, - "learning_rate": 3.998295749806165e-06, - "loss": 0.799, - "num_input_tokens_seen": 7422490, - "step": 356 - }, - { - "epoch": 0.04292671195815547, - "flos": 19726005300720.0, - "grad_norm": 2.919925957616293, - "learning_rate": 3.998263447140558e-06, - "loss": 0.8247, - "num_input_tokens_seen": 7442410, - "step": 357 - }, - { - "epoch": 0.04304695484879457, - "flos": 29014125736080.0, - "grad_norm": 2.7419632605352766, - "learning_rate": 3.998230841339976e-06, - "loss": 0.8102, - "num_input_tokens_seen": 7464140, - "step": 358 - }, - { - "epoch": 0.04316719773943366, - "flos": 14352747511200.0, - "grad_norm": 2.749749519038043, - "learning_rate": 3.998197932409363e-06, - "loss": 0.8357, - "num_input_tokens_seen": 7481870, - "step": 359 - }, - { - "epoch": 0.04328744063007275, - "flos": 16428915132960.0, - "grad_norm": 2.369325886878722, - "learning_rate": 3.9981647203537125e-06, - "loss": 0.8509, - "num_input_tokens_seen": 7499090, - "step": 360 - }, - { - "epoch": 0.04340768352071184, - "flos": 15563317250280.0, - "grad_norm": 2.9287251023436216, - "learning_rate": 3.998131205178063e-06, - "loss": 0.9439, - "num_input_tokens_seen": 7517280, - "step": 361 - }, - { - "epoch": 0.04352792641135093, - "flos": 8385995941560.0, - "grad_norm": 3.0932128131616365, - "learning_rate": 3.998097386887498e-06, - "loss": 0.7438, - "num_input_tokens_seen": 7534075, - "step": 362 - }, - { - "epoch": 0.04364816930199002, - "flos": 17296222653480.0, - "grad_norm": 2.1088264034519866, - "learning_rate": 3.998063265487148e-06, - "loss": 0.8298, - "num_input_tokens_seen": 7554845, - "step": 363 - }, - { - "epoch": 0.043768412192629114, - "flos": 10492267693920.0, - "grad_norm": 2.2745619677045466, - "learning_rate": 3.99802884098219e-06, - "loss": 0.8035, - "num_input_tokens_seen": 7572675, - "step": 364 - }, - { - "epoch": 0.043888655083268203, - "flos": 19401892866600.0, - "grad_norm": 3.290296744324203, - "learning_rate": 3.997994113377845e-06, - "loss": 0.8084, - "num_input_tokens_seen": 7591295, - "step": 365 - }, - { - "epoch": 0.04400889797390729, - "flos": 19969081711320.0, - "grad_norm": 2.4285069384947993, - "learning_rate": 3.9979590826793815e-06, - "loss": 0.8267, - "num_input_tokens_seen": 7612205, - "step": 366 - }, - { - "epoch": 0.04412914086454638, - "flos": 14702816621160.0, - "grad_norm": 2.3919933118184464, - "learning_rate": 3.997923748892113e-06, - "loss": 0.7869, - "num_input_tokens_seen": 7631245, - "step": 367 - }, - { - "epoch": 0.04424938375518547, - "flos": 16542524763960.0, - "grad_norm": 2.5264469306667303, - "learning_rate": 3.9978881120214015e-06, - "loss": 0.8808, - "num_input_tokens_seen": 7652485, - "step": 368 - }, - { - "epoch": 0.04436962664582456, - "flos": 17672501718960.0, - "grad_norm": 2.3596956148893398, - "learning_rate": 3.997852172072652e-06, - "loss": 0.796, - "num_input_tokens_seen": 7673420, - "step": 369 - }, - { - "epoch": 0.04448986953646366, - "flos": 13299279205440.0, - "grad_norm": 4.951264522370361, - "learning_rate": 3.9978159290513155e-06, - "loss": 0.8749, - "num_input_tokens_seen": 7691220, - "step": 370 - }, - { - "epoch": 0.04461011242710275, 
- "flos": 22102481558040.0, - "grad_norm": 2.1368526437710686, - "learning_rate": 3.997779382962892e-06, - "loss": 0.8006, - "num_input_tokens_seen": 7713825, - "step": 371 - }, - { - "epoch": 0.04473035531774184, - "flos": 21831992113440.0, - "grad_norm": 2.699586790749613, - "learning_rate": 3.997742533812924e-06, - "loss": 0.7248, - "num_input_tokens_seen": 7736810, - "step": 372 - }, - { - "epoch": 0.04485059820838093, - "flos": 9546172007040.0, - "grad_norm": 5.873557705066504, - "learning_rate": 3.997705381607001e-06, - "loss": 0.9, - "num_input_tokens_seen": 7753345, - "step": 373 - }, - { - "epoch": 0.04497084109902002, - "flos": 50211886683600.0, - "grad_norm": 0.9888860789240038, - "learning_rate": 3.997667926350761e-06, - "loss": 0.6274, - "num_input_tokens_seen": 7811395, - "step": 374 - }, - { - "epoch": 0.04509108398965911, - "flos": 47436055741320.0, - "grad_norm": 0.854726286292233, - "learning_rate": 3.997630168049886e-06, - "loss": 0.5973, - "num_input_tokens_seen": 7869480, - "step": 375 - }, - { - "epoch": 0.045211326880298205, - "flos": 16294984299000.0, - "grad_norm": 2.7209214056539466, - "learning_rate": 3.997592106710101e-06, - "loss": 0.7695, - "num_input_tokens_seen": 7888660, - "step": 376 - }, - { - "epoch": 0.045331569770937295, - "flos": 23613106653000.0, - "grad_norm": 4.09698544840036, - "learning_rate": 3.997553742337182e-06, - "loss": 0.6556, - "num_input_tokens_seen": 7907805, - "step": 377 - }, - { - "epoch": 0.045451812661576385, - "flos": 16215562933440.0, - "grad_norm": 1.9163818631523601, - "learning_rate": 3.997515074936949e-06, - "loss": 0.9042, - "num_input_tokens_seen": 7928400, - "step": 378 - }, - { - "epoch": 0.045572055552215475, - "flos": 12085005251040.0, - "grad_norm": 2.486863431517755, - "learning_rate": 3.997476104515268e-06, - "loss": 0.8378, - "num_input_tokens_seen": 7946310, - "step": 379 - }, - { - "epoch": 0.045692298442854565, - "flos": 12840887677800.0, - "grad_norm": 2.108969077153549, - "learning_rate": 3.9974368310780485e-06, - "loss": 0.7632, - "num_input_tokens_seen": 7963205, - "step": 380 - }, - { - "epoch": 0.045812541333493655, - "flos": 19618189442400.0, - "grad_norm": 3.146625309634657, - "learning_rate": 3.997397254631251e-06, - "loss": 0.7247, - "num_input_tokens_seen": 7983545, - "step": 381 - }, - { - "epoch": 0.04593278422413275, - "flos": 44406985541280.0, - "grad_norm": 0.8747688737519179, - "learning_rate": 3.997357375180878e-06, - "loss": 0.6462, - "num_input_tokens_seen": 8047545, - "step": 382 - }, - { - "epoch": 0.04605302711477184, - "flos": 15946118267520.0, - "grad_norm": 2.401609429500908, - "learning_rate": 3.997317192732979e-06, - "loss": 0.7322, - "num_input_tokens_seen": 8066045, - "step": 383 - }, - { - "epoch": 0.04617327000541093, - "flos": 14214067683240.0, - "grad_norm": 1.9762514495361725, - "learning_rate": 3.99727670729365e-06, - "loss": 0.8107, - "num_input_tokens_seen": 8084325, - "step": 384 - }, - { - "epoch": 0.04629351289605002, - "flos": 18619420564800.0, - "grad_norm": 2.0796402304627546, - "learning_rate": 3.997235918869033e-06, - "loss": 0.7745, - "num_input_tokens_seen": 8105080, - "step": 385 - }, - { - "epoch": 0.04641375578668911, - "flos": 15027657234360.0, - "grad_norm": 2.890337460624884, - "learning_rate": 3.997194827465315e-06, - "loss": 0.8243, - "num_input_tokens_seen": 8123395, - "step": 386 - }, - { - "epoch": 0.0465339986773282, - "flos": 9573711680880.0, - "grad_norm": 4.662901971992874, - "learning_rate": 3.997153433088728e-06, - "loss": 0.9081, - 
"num_input_tokens_seen": 8140240, - "step": 387 - }, - { - "epoch": 0.0466542415679673, - "flos": 18726666543840.0, - "grad_norm": 2.4022117516454813, - "learning_rate": 3.997111735745554e-06, - "loss": 0.7949, - "num_input_tokens_seen": 8162930, - "step": 388 - }, - { - "epoch": 0.04677448445860639, - "flos": 16269850782120.0, - "grad_norm": 2.714747672816571, - "learning_rate": 3.997069735442118e-06, - "loss": 0.8196, - "num_input_tokens_seen": 8182345, - "step": 389 - }, - { - "epoch": 0.04689472734924548, - "flos": 20644497993840.0, - "grad_norm": 1.9415044932385876, - "learning_rate": 3.997027432184792e-06, - "loss": 0.7897, - "num_input_tokens_seen": 8206725, - "step": 390 - }, - { - "epoch": 0.04701497023988457, - "flos": 16944950465040.0, - "grad_norm": 2.732910897788335, - "learning_rate": 3.99698482597999e-06, - "loss": 0.8736, - "num_input_tokens_seen": 8224125, - "step": 391 - }, - { - "epoch": 0.04713521313052366, - "flos": 47796857577720.0, - "grad_norm": 1.1912933296096921, - "learning_rate": 3.99694191683418e-06, - "loss": 0.6526, - "num_input_tokens_seen": 8284645, - "step": 392 - }, - { - "epoch": 0.047255456021162746, - "flos": 13705535761680.0, - "grad_norm": 3.306510837123572, - "learning_rate": 3.996898704753867e-06, - "loss": 0.8118, - "num_input_tokens_seen": 8302315, - "step": 393 - }, - { - "epoch": 0.04737569891180184, - "flos": 16134400270080.0, - "grad_norm": 4.373631726606673, - "learning_rate": 3.996855189745609e-06, - "loss": 0.874, - "num_input_tokens_seen": 8321300, - "step": 394 - }, - { - "epoch": 0.04749594180244093, - "flos": 21317254839720.0, - "grad_norm": 2.5086656111668457, - "learning_rate": 3.996811371816007e-06, - "loss": 0.9184, - "num_input_tokens_seen": 8343445, - "step": 395 - }, - { - "epoch": 0.04761618469308002, - "flos": 25798039931880.0, - "grad_norm": 3.090072449757607, - "learning_rate": 3.996767250971707e-06, - "loss": 0.7621, - "num_input_tokens_seen": 8365905, - "step": 396 - }, - { - "epoch": 0.04773642758371911, - "flos": 18781714231560.0, - "grad_norm": 2.2291346633847993, - "learning_rate": 3.996722827219403e-06, - "loss": 0.845, - "num_input_tokens_seen": 8387240, - "step": 397 - }, - { - "epoch": 0.0478566704743582, - "flos": 15081881763120.0, - "grad_norm": 3.462369532141191, - "learning_rate": 3.996678100565833e-06, - "loss": 0.7992, - "num_input_tokens_seen": 8406015, - "step": 398 - }, - { - "epoch": 0.04797691336499729, - "flos": 13751750320560.0, - "grad_norm": 3.1393066979644213, - "learning_rate": 3.996633071017783e-06, - "loss": 0.8601, - "num_input_tokens_seen": 8422365, - "step": 399 - }, - { - "epoch": 0.04809715625563638, - "flos": 15427486818720.0, - "grad_norm": 2.6679267489030143, - "learning_rate": 3.996587738582084e-06, - "loss": 0.8043, - "num_input_tokens_seen": 8438885, - "step": 400 - }, - { - "epoch": 0.04821739914627548, - "flos": 17430944986440.0, - "grad_norm": 3.31959852732667, - "learning_rate": 3.9965421032656115e-06, - "loss": 0.8361, - "num_input_tokens_seen": 8458535, - "step": 401 - }, - { - "epoch": 0.04833764203691457, - "flos": 16243134267240.0, - "grad_norm": 4.1258719707527245, - "learning_rate": 3.99649616507529e-06, - "loss": 0.9225, - "num_input_tokens_seen": 8477350, - "step": 402 - }, - { - "epoch": 0.04845788492755366, - "flos": 48591166213200.0, - "grad_norm": 0.9387970217472649, - "learning_rate": 3.996449924018088e-06, - "loss": 0.6702, - "num_input_tokens_seen": 8537530, - "step": 403 - }, - { - "epoch": 0.04857812781819275, - "flos": 14082606326160.0, - "grad_norm": 
2.511596202085503, - "learning_rate": 3.99640338010102e-06, - "loss": 0.7866, - "num_input_tokens_seen": 8556355, - "step": 404 - }, - { - "epoch": 0.04869837070883184, - "flos": 17621126586600.0, - "grad_norm": 2.2682459962368675, - "learning_rate": 3.996356533331146e-06, - "loss": 0.7672, - "num_input_tokens_seen": 8577945, - "step": 405 - }, - { - "epoch": 0.04881861359947093, - "flos": 18452662843680.0, - "grad_norm": 4.67158390018574, - "learning_rate": 3.996309383715573e-06, - "loss": 0.5883, - "num_input_tokens_seen": 8596445, - "step": 406 - }, - { - "epoch": 0.048938856490110025, - "flos": 11922869884080.0, - "grad_norm": 2.6839331977194507, - "learning_rate": 3.996261931261454e-06, - "loss": 0.7097, - "num_input_tokens_seen": 8614745, - "step": 407 - }, - { - "epoch": 0.049059099380749115, - "flos": 21937021895280.0, - "grad_norm": 2.431698963124244, - "learning_rate": 3.996214175975987e-06, - "loss": 0.854, - "num_input_tokens_seen": 8634985, - "step": 408 - }, - { - "epoch": 0.049179342271388204, - "flos": 26395522866960.0, - "grad_norm": 2.8186461368211333, - "learning_rate": 3.996166117866417e-06, - "loss": 0.7863, - "num_input_tokens_seen": 8656640, - "step": 409 - }, - { - "epoch": 0.049299585162027294, - "flos": 10626451807560.0, - "grad_norm": 3.177461111819697, - "learning_rate": 3.996117756940035e-06, - "loss": 0.8612, - "num_input_tokens_seen": 8673045, - "step": 410 - }, - { - "epoch": 0.049419828052666384, - "flos": 14293995608160.0, - "grad_norm": 2.4866226872841035, - "learning_rate": 3.996069093204175e-06, - "loss": 0.9627, - "num_input_tokens_seen": 8688725, - "step": 411 - }, - { - "epoch": 0.049540070943305474, - "flos": 9922039493040.0, - "grad_norm": 2.9493553685761444, - "learning_rate": 3.996020126666221e-06, - "loss": 0.8616, - "num_input_tokens_seen": 8705425, - "step": 412 - }, - { - "epoch": 0.04966031383394457, - "flos": 15969858746160.0, - "grad_norm": 4.699266441401282, - "learning_rate": 3.995970857333601e-06, - "loss": 0.8152, - "num_input_tokens_seen": 8725555, - "step": 413 - }, - { - "epoch": 0.04978055672458366, - "flos": 20992572526320.0, - "grad_norm": 2.678394773198341, - "learning_rate": 3.995921285213789e-06, - "loss": 0.7858, - "num_input_tokens_seen": 8745535, - "step": 414 - }, - { - "epoch": 0.04990079961522275, - "flos": 14487311544360.0, - "grad_norm": 3.803903988602288, - "learning_rate": 3.995871410314305e-06, - "loss": 0.7855, - "num_input_tokens_seen": 8763815, - "step": 415 - }, - { - "epoch": 0.05002104250586184, - "flos": 46246345424520.0, - "grad_norm": 1.012090666173632, - "learning_rate": 3.995821232642714e-06, - "loss": 0.651, - "num_input_tokens_seen": 8821940, - "step": 416 - }, - { - "epoch": 0.05014128539650093, - "flos": 20482141007160.0, - "grad_norm": 3.3177487517110125, - "learning_rate": 3.995770752206629e-06, - "loss": 0.808, - "num_input_tokens_seen": 8842735, - "step": 417 - }, - { - "epoch": 0.05026152828714002, - "flos": 12915496729440.0, - "grad_norm": 3.5335525162808805, - "learning_rate": 3.995719969013709e-06, - "loss": 0.9634, - "num_input_tokens_seen": 8859635, - "step": 418 - }, - { - "epoch": 0.05038177117777912, - "flos": 13972764230400.0, - "grad_norm": 4.049181189004957, - "learning_rate": 3.995668883071655e-06, - "loss": 0.8338, - "num_input_tokens_seen": 8875580, - "step": 419 - }, - { - "epoch": 0.050502014068418206, - "flos": 15108629937960.0, - "grad_norm": 2.7952460121704292, - "learning_rate": 3.995617494388219e-06, - "loss": 0.8907, - "num_input_tokens_seen": 8893420, - "step": 420 - }, - { 
- "epoch": 0.050622256959057296, - "flos": 15621815873640.0, - "grad_norm": 3.9618021305361775, - "learning_rate": 3.995565802971196e-06, - "loss": 0.7845, - "num_input_tokens_seen": 8913115, - "step": 421 - }, - { - "epoch": 0.050742499849696386, - "flos": 20293542405000.0, - "grad_norm": 3.5149074245975824, - "learning_rate": 3.995513808828427e-06, - "loss": 0.6658, - "num_input_tokens_seen": 8935630, - "step": 422 - }, - { - "epoch": 0.050862742740335476, - "flos": 14515452757440.0, - "grad_norm": 5.050166423812499, - "learning_rate": 3.9954615119678e-06, - "loss": 0.7478, - "num_input_tokens_seen": 8953905, - "step": 423 - }, - { - "epoch": 0.050982985630974566, - "flos": 16156811030400.0, - "grad_norm": 3.1843624754894306, - "learning_rate": 3.995408912397248e-06, - "loss": 0.7901, - "num_input_tokens_seen": 8971520, - "step": 424 - }, - { - "epoch": 0.05110322852161366, - "flos": 15162886126680.0, - "grad_norm": 4.751702656006952, - "learning_rate": 3.99535601012475e-06, - "loss": 0.9187, - "num_input_tokens_seen": 8986570, - "step": 425 - }, - { - "epoch": 0.05122347141225275, - "flos": 20939867675640.0, - "grad_norm": 3.0599223597032834, - "learning_rate": 3.995302805158333e-06, - "loss": 0.7455, - "num_input_tokens_seen": 9008945, - "step": 426 - }, - { - "epoch": 0.05134371430289184, - "flos": 14409758116440.0, - "grad_norm": 2.743635615786454, - "learning_rate": 3.9952492975060665e-06, - "loss": 0.8242, - "num_input_tokens_seen": 9028735, - "step": 427 - }, - { - "epoch": 0.05146395719353093, - "flos": 25315021446720.0, - "grad_norm": 3.722487935186937, - "learning_rate": 3.995195487176067e-06, - "loss": 0.8517, - "num_input_tokens_seen": 9048685, - "step": 428 - }, - { - "epoch": 0.05158420008417002, - "flos": 15916932275760.0, - "grad_norm": 2.498977452011935, - "learning_rate": 3.995141374176499e-06, - "loss": 0.8476, - "num_input_tokens_seen": 9066800, - "step": 429 - }, - { - "epoch": 0.05170444297480911, - "flos": 53513599205520.0, - "grad_norm": 0.9332536532813118, - "learning_rate": 3.995086958515572e-06, - "loss": 0.6692, - "num_input_tokens_seen": 9124540, - "step": 430 - }, - { - "epoch": 0.05182468586544821, - "flos": 46011690563280.0, - "grad_norm": 0.9932890356723935, - "learning_rate": 3.995032240201538e-06, - "loss": 0.6383, - "num_input_tokens_seen": 9186655, - "step": 431 - }, - { - "epoch": 0.0519449287560873, - "flos": 30325518973680.0, - "grad_norm": 0.9945950172280326, - "learning_rate": 3.9949772192427e-06, - "loss": 0.6466, - "num_input_tokens_seen": 9233000, - "step": 432 - }, - { - "epoch": 0.05206517164672639, - "flos": 12760421533560.0, - "grad_norm": 3.6345111116225537, - "learning_rate": 3.994921895647405e-06, - "loss": 0.7817, - "num_input_tokens_seen": 9250890, - "step": 433 - }, - { - "epoch": 0.05218541453736548, - "flos": 47183228063040.0, - "grad_norm": 0.8095526054847891, - "learning_rate": 3.994866269424043e-06, - "loss": 0.5651, - "num_input_tokens_seen": 9306980, - "step": 434 - }, - { - "epoch": 0.05230565742800457, - "flos": 14109196201200.0, - "grad_norm": 3.4892123048827113, - "learning_rate": 3.9948103405810545e-06, - "loss": 0.7675, - "num_input_tokens_seen": 9325650, - "step": 435 - }, - { - "epoch": 0.05242590031864366, - "flos": 18535440165000.0, - "grad_norm": 3.1579314936623546, - "learning_rate": 3.994754109126923e-06, - "loss": 0.8467, - "num_input_tokens_seen": 9346865, - "step": 436 - }, - { - "epoch": 0.052546143209282754, - "flos": 19211236367040.0, - "grad_norm": 2.683514856236607, - "learning_rate": 
3.994697575070181e-06, - "loss": 0.9164, - "num_input_tokens_seen": 9366045, - "step": 437 - }, - { - "epoch": 0.052666386099921844, - "flos": 16210877259360.0, - "grad_norm": 2.2778514472812637, - "learning_rate": 3.994640738419402e-06, - "loss": 0.8967, - "num_input_tokens_seen": 9385140, - "step": 438 - }, - { - "epoch": 0.052786628990560934, - "flos": 17487765631920.0, - "grad_norm": 4.1237765723161806, - "learning_rate": 3.9945835991832075e-06, - "loss": 0.7935, - "num_input_tokens_seen": 9406745, - "step": 439 - }, - { - "epoch": 0.052906871881200024, - "flos": 18022064269560.0, - "grad_norm": 4.380486018767794, - "learning_rate": 3.994526157370268e-06, - "loss": 0.9126, - "num_input_tokens_seen": 9425080, - "step": 440 - }, - { - "epoch": 0.053027114771839114, - "flos": 41603899864800.0, - "grad_norm": 0.8614963012655764, - "learning_rate": 3.994468412989296e-06, - "loss": 0.6055, - "num_input_tokens_seen": 9486210, - "step": 441 - }, - { - "epoch": 0.053147357662478203, - "flos": 12624527782080.0, - "grad_norm": 6.885821062717479, - "learning_rate": 3.994410366049052e-06, - "loss": 0.9335, - "num_input_tokens_seen": 9503790, - "step": 442 - }, - { - "epoch": 0.0532676005531173, - "flos": 12516363664200.0, - "grad_norm": 4.05693283093903, - "learning_rate": 3.994352016558341e-06, - "loss": 0.8227, - "num_input_tokens_seen": 9520815, - "step": 443 - }, - { - "epoch": 0.05338784344375639, - "flos": 20401928142600.0, - "grad_norm": 3.5534447142826604, - "learning_rate": 3.994293364526014e-06, - "loss": 0.7272, - "num_input_tokens_seen": 9541420, - "step": 444 - }, - { - "epoch": 0.05350808633439548, - "flos": 15730391571000.0, - "grad_norm": 2.415993816058348, - "learning_rate": 3.99423440996097e-06, - "loss": 0.8183, - "num_input_tokens_seen": 9560680, - "step": 445 - }, - { - "epoch": 0.05362832922503457, - "flos": 14676923265240.0, - "grad_norm": 19.01873337628669, - "learning_rate": 3.994175152872152e-06, - "loss": 0.7943, - "num_input_tokens_seen": 9579485, - "step": 446 - }, - { - "epoch": 0.05374857211567366, - "flos": 19155967059600.0, - "grad_norm": 3.0606849118320887, - "learning_rate": 3.994115593268548e-06, - "loss": 0.7787, - "num_input_tokens_seen": 9598985, - "step": 447 - }, - { - "epoch": 0.05386881500631275, - "flos": 20154704277240.0, - "grad_norm": 7.811103879160433, - "learning_rate": 3.994055731159195e-06, - "loss": 0.8079, - "num_input_tokens_seen": 9616175, - "step": 448 - }, - { - "epoch": 0.053989057896951846, - "flos": 17267479901160.0, - "grad_norm": 3.549589353639062, - "learning_rate": 3.993995566553172e-06, - "loss": 0.8613, - "num_input_tokens_seen": 9634860, - "step": 449 - }, - { - "epoch": 0.054109300787590936, - "flos": 18484254992400.0, - "grad_norm": 3.2616175835733117, - "learning_rate": 3.993935099459607e-06, - "loss": 0.7547, - "num_input_tokens_seen": 9656195, - "step": 450 - }, - { - "epoch": 0.054229543678230026, - "flos": 17457408221640.0, - "grad_norm": 2.8374350721014063, - "learning_rate": 3.993874329887673e-06, - "loss": 0.7311, - "num_input_tokens_seen": 9674570, - "step": 451 - }, - { - "epoch": 0.054349786568869116, - "flos": 11891024455680.0, - "grad_norm": 4.125865267544811, - "learning_rate": 3.993813257846589e-06, - "loss": 0.8439, - "num_input_tokens_seen": 9691045, - "step": 452 - }, - { - "epoch": 0.054470029459508205, - "flos": 13623296659680.0, - "grad_norm": 3.2668250402349157, - "learning_rate": 3.993751883345619e-06, - "loss": 0.9178, - "num_input_tokens_seen": 9709125, - "step": 453 - }, - { - "epoch": 
0.054590272350147295, - "flos": 12975451710960.0, - "grad_norm": 4.7397551229455175, - "learning_rate": 3.993690206394073e-06, - "loss": 0.8571, - "num_input_tokens_seen": 9725145, - "step": 454 - }, - { - "epoch": 0.054710515240786385, - "flos": 12976021590240.0, - "grad_norm": 2.8393419916516995, - "learning_rate": 3.993628227001307e-06, - "loss": 0.8706, - "num_input_tokens_seen": 9743065, - "step": 455 - }, - { - "epoch": 0.05483075813142548, - "flos": 35491942024080.0, - "grad_norm": 2.4942984415309217, - "learning_rate": 3.993565945176726e-06, - "loss": 0.7083, - "num_input_tokens_seen": 9763810, - "step": 456 - }, - { - "epoch": 0.05495100102206457, - "flos": 14379274066320.0, - "grad_norm": 2.38320549397522, - "learning_rate": 3.993503360929776e-06, - "loss": 0.8094, - "num_input_tokens_seen": 9782415, - "step": 457 - }, - { - "epoch": 0.05507124391270366, - "flos": 19321426722360.0, - "grad_norm": 2.0732117128221277, - "learning_rate": 3.99344047426995e-06, - "loss": 0.8033, - "num_input_tokens_seen": 9803395, - "step": 458 - }, - { - "epoch": 0.05519148680334275, - "flos": 16539612047640.0, - "grad_norm": 4.005297252658449, - "learning_rate": 3.993377285206789e-06, - "loss": 0.899, - "num_input_tokens_seen": 9822900, - "step": 459 - }, - { - "epoch": 0.05531172969398184, - "flos": 29824739250960.0, - "grad_norm": 2.3240624126779514, - "learning_rate": 3.99331379374988e-06, - "loss": 0.8525, - "num_input_tokens_seen": 9846225, - "step": 460 - }, - { - "epoch": 0.05543197258462093, - "flos": 17188470115080.0, - "grad_norm": 2.70683008192373, - "learning_rate": 3.993249999908852e-06, - "loss": 0.7857, - "num_input_tokens_seen": 9866095, - "step": 461 - }, - { - "epoch": 0.05555221547526003, - "flos": 13596611804760.0, - "grad_norm": 2.478341218961045, - "learning_rate": 3.993185903693384e-06, - "loss": 0.8555, - "num_input_tokens_seen": 9882615, - "step": 462 - }, - { - "epoch": 0.05567245836589912, - "flos": 17269252858920.0, - "grad_norm": 3.8526690210970114, - "learning_rate": 3.9931215051131995e-06, - "loss": 0.818, - "num_input_tokens_seen": 9902980, - "step": 463 - }, - { - "epoch": 0.05579270125653821, - "flos": 20347640293920.0, - "grad_norm": 6.21868099001832, - "learning_rate": 3.993056804178068e-06, - "loss": 0.7909, - "num_input_tokens_seen": 9924245, - "step": 464 - }, - { - "epoch": 0.0559129441471773, - "flos": 19805141726640.0, - "grad_norm": 12.1851378670439, - "learning_rate": 3.992991800897803e-06, - "loss": 0.8302, - "num_input_tokens_seen": 9943770, - "step": 465 - }, - { - "epoch": 0.05603318703781639, - "flos": 11083988516280.0, - "grad_norm": 4.354779846723337, - "learning_rate": 3.9929264952822665e-06, - "loss": 0.8782, - "num_input_tokens_seen": 9961025, - "step": 466 - }, - { - "epoch": 0.05615342992845548, - "flos": 16290900164160.0, - "grad_norm": 2.4798864927773057, - "learning_rate": 3.992860887341366e-06, - "loss": 0.8695, - "num_input_tokens_seen": 9978915, - "step": 467 - }, - { - "epoch": 0.056273672819094574, - "flos": 17267384921280.0, - "grad_norm": 2.604030595403379, - "learning_rate": 3.992794977085052e-06, - "loss": 0.7924, - "num_input_tokens_seen": 9996635, - "step": 468 - }, - { - "epoch": 0.056393915709733664, - "flos": 14509025785560.0, - "grad_norm": 3.1935955928496873, - "learning_rate": 3.992728764523326e-06, - "loss": 0.8391, - "num_input_tokens_seen": 10015300, - "step": 469 - }, - { - "epoch": 0.05651415860037275, - "flos": 16215847873080.0, - "grad_norm": 2.54331037391842, - "learning_rate": 3.99266224966623e-06, - "loss": 0.7859, 
- "num_input_tokens_seen": 10035935, - "step": 470 - }, - { - "epoch": 0.05663440149101184, - "flos": 14217455298960.0, - "grad_norm": 2.30578700848216, - "learning_rate": 3.992595432523855e-06, - "loss": 0.8631, - "num_input_tokens_seen": 10052945, - "step": 471 - }, - { - "epoch": 0.05675464438165093, - "flos": 16590037381200.0, - "grad_norm": 2.308205818849968, - "learning_rate": 3.992528313106338e-06, - "loss": 0.847, - "num_input_tokens_seen": 10070865, - "step": 472 - }, - { - "epoch": 0.05687488727229002, - "flos": 12319945051920.0, - "grad_norm": 3.3112229098647945, - "learning_rate": 3.9924608914238595e-06, - "loss": 0.7984, - "num_input_tokens_seen": 10085580, - "step": 473 - }, - { - "epoch": 0.05699513016292912, - "flos": 21400158800880.0, - "grad_norm": 3.368219699770615, - "learning_rate": 3.992393167486648e-06, - "loss": 0.823, - "num_input_tokens_seen": 10104450, - "step": 474 - }, - { - "epoch": 0.05711537305356821, - "flos": 13812370161240.0, - "grad_norm": 4.641211886815804, - "learning_rate": 3.992325141304977e-06, - "loss": 0.7888, - "num_input_tokens_seen": 10122125, - "step": 475 - }, - { - "epoch": 0.0572356159442073, - "flos": 19616321504760.0, - "grad_norm": 2.930640470008753, - "learning_rate": 3.992256812889166e-06, - "loss": 0.8491, - "num_input_tokens_seen": 10137950, - "step": 476 - }, - { - "epoch": 0.05735585883484639, - "flos": 25801459207560.0, - "grad_norm": 3.3245520956909203, - "learning_rate": 3.992188182249582e-06, - "loss": 0.7484, - "num_input_tokens_seen": 10159565, - "step": 477 - }, - { - "epoch": 0.05747610172548548, - "flos": 13677901107960.0, - "grad_norm": 2.469572871725434, - "learning_rate": 3.992119249396633e-06, - "loss": 0.8915, - "num_input_tokens_seen": 10177970, - "step": 478 - }, - { - "epoch": 0.05759634461612457, - "flos": 20508920841960.0, - "grad_norm": 2.954159187196231, - "learning_rate": 3.992050014340778e-06, - "loss": 0.8083, - "num_input_tokens_seen": 10198045, - "step": 479 - }, - { - "epoch": 0.057716587506763666, - "flos": 51097457509680.0, - "grad_norm": 1.126726997206704, - "learning_rate": 3.99198047709252e-06, - "loss": 0.5662, - "num_input_tokens_seen": 10259285, - "step": 480 - }, - { - "epoch": 0.057836830397402755, - "flos": 18321581406120.0, - "grad_norm": 2.416380426840529, - "learning_rate": 3.991910637662408e-06, - "loss": 0.7651, - "num_input_tokens_seen": 10279295, - "step": 481 - }, - { - "epoch": 0.057957073288041845, - "flos": 18754427837400.0, - "grad_norm": 2.519235330399457, - "learning_rate": 3.9918404960610355e-06, - "loss": 0.7905, - "num_input_tokens_seen": 10298045, - "step": 482 - }, - { - "epoch": 0.058077316178680935, - "flos": 15189855921240.0, - "grad_norm": 2.7367358669711637, - "learning_rate": 3.991770052299043e-06, - "loss": 0.7546, - "num_input_tokens_seen": 10315995, - "step": 483 - }, - { - "epoch": 0.058197559069320025, - "flos": 13812623440920.0, - "grad_norm": 3.9909860852371, - "learning_rate": 3.991699306387118e-06, - "loss": 0.8597, - "num_input_tokens_seen": 10334185, - "step": 484 - }, - { - "epoch": 0.058317801959959115, - "flos": 18213638907960.0, - "grad_norm": 2.476318828737293, - "learning_rate": 3.991628258335991e-06, - "loss": 0.7796, - "num_input_tokens_seen": 10356110, - "step": 485 - }, - { - "epoch": 0.05843804485059821, - "flos": 17025574909080.0, - "grad_norm": 15.096901258612798, - "learning_rate": 3.991556908156442e-06, - "loss": 0.8534, - "num_input_tokens_seen": 10372355, - "step": 486 - }, - { - "epoch": 0.0585582877412373, - "flos": 16946343503280.0, - 
"grad_norm": 2.175654354538506, - "learning_rate": 3.9914852558592914e-06, - "loss": 0.8571, - "num_input_tokens_seen": 10393125, - "step": 487 - }, - { - "epoch": 0.05867853063187639, - "flos": 17208443058480.0, - "grad_norm": 11.17275592025153, - "learning_rate": 3.991413301455413e-06, - "loss": 0.7702, - "num_input_tokens_seen": 10409295, - "step": 488 - }, - { - "epoch": 0.05879877352251548, - "flos": 21641810513280.0, - "grad_norm": 3.8747275472042517, - "learning_rate": 3.991341044955719e-06, - "loss": 0.7576, - "num_input_tokens_seen": 10428770, - "step": 489 - }, - { - "epoch": 0.05891901641315457, - "flos": 14731116134040.0, - "grad_norm": 2.29438085793766, - "learning_rate": 3.991268486371172e-06, - "loss": 0.8091, - "num_input_tokens_seen": 10447045, - "step": 490 - }, - { - "epoch": 0.05903925930379366, - "flos": 18051440221080.0, - "grad_norm": 3.6338143884749665, - "learning_rate": 3.991195625712779e-06, - "loss": 0.8511, - "num_input_tokens_seen": 10463730, - "step": 491 - }, - { - "epoch": 0.05915950219443276, - "flos": 15539703411480.0, - "grad_norm": 3.8102271387272766, - "learning_rate": 3.991122462991592e-06, - "loss": 0.7962, - "num_input_tokens_seen": 10482970, - "step": 492 - }, - { - "epoch": 0.05927974508507185, - "flos": 7141586196600.0, - "grad_norm": 4.18430878613185, - "learning_rate": 3.991048998218712e-06, - "loss": 0.7828, - "num_input_tokens_seen": 10495995, - "step": 493 - }, - { - "epoch": 0.05939998797571094, - "flos": 13326375639840.0, - "grad_norm": 3.174890447213359, - "learning_rate": 3.990975231405281e-06, - "loss": 0.7416, - "num_input_tokens_seen": 10514165, - "step": 494 - }, - { - "epoch": 0.05952023086635003, - "flos": 20724900818160.0, - "grad_norm": 2.8307887225771755, - "learning_rate": 3.990901162562491e-06, - "loss": 0.7853, - "num_input_tokens_seen": 10534575, - "step": 495 - }, - { - "epoch": 0.05964047375698912, - "flos": 10841925224400.0, - "grad_norm": 7.164125864462145, - "learning_rate": 3.9908267917015765e-06, - "loss": 0.8819, - "num_input_tokens_seen": 10552355, - "step": 496 - }, - { - "epoch": 0.059760716647628206, - "flos": 16972078559400.0, - "grad_norm": 3.425730027410672, - "learning_rate": 3.990752118833821e-06, - "loss": 0.9153, - "num_input_tokens_seen": 10569515, - "step": 497 - }, - { - "epoch": 0.0598809595382673, - "flos": 16647364586040.0, - "grad_norm": 2.2441174644556603, - "learning_rate": 3.990677143970553e-06, - "loss": 0.7699, - "num_input_tokens_seen": 10590045, - "step": 498 - }, - { - "epoch": 0.06000120242890639, - "flos": 16188783098640.0, - "grad_norm": 3.32278142352906, - "learning_rate": 3.990601867123144e-06, - "loss": 0.7906, - "num_input_tokens_seen": 10609490, - "step": 499 - }, - { - "epoch": 0.06012144531954548, - "flos": 14001538642680.0, - "grad_norm": 3.7904985046665556, - "learning_rate": 3.990526288303014e-06, - "loss": 0.8416, - "num_input_tokens_seen": 10628000, - "step": 500 - }, - { - "epoch": 0.06024168821018457, - "flos": 16674271060680.0, - "grad_norm": 2.3949841417970243, - "learning_rate": 3.9904504075216295e-06, - "loss": 0.8868, - "num_input_tokens_seen": 10648480, - "step": 501 - }, - { - "epoch": 0.06036193110082366, - "flos": 13704744262680.0, - "grad_norm": 3.363500855971004, - "learning_rate": 3.990374224790501e-06, - "loss": 0.931, - "num_input_tokens_seen": 10666405, - "step": 502 - }, - { - "epoch": 0.06048217399146275, - "flos": 12543460098600.0, - "grad_norm": 2.6893211227080527, - "learning_rate": 3.990297740121185e-06, - "loss": 0.6969, - "num_input_tokens_seen": 
10684060, - "step": 503 - }, - { - "epoch": 0.06060241688210185, - "flos": 17592573794040.0, - "grad_norm": 2.3532242590650276, - "learning_rate": 3.990220953525284e-06, - "loss": 0.7646, - "num_input_tokens_seen": 10700890, - "step": 504 - }, - { - "epoch": 0.06072265977274094, - "flos": 10626420147600.0, - "grad_norm": 3.258282305097567, - "learning_rate": 3.9901438650144465e-06, - "loss": 0.7233, - "num_input_tokens_seen": 10716860, - "step": 505 - }, - { - "epoch": 0.06084290266338003, - "flos": 15027657234360.0, - "grad_norm": 2.805780479387285, - "learning_rate": 3.990066474600367e-06, - "loss": 0.8959, - "num_input_tokens_seen": 10734550, - "step": 506 - }, - { - "epoch": 0.06096314555401912, - "flos": 16323410451720.0, - "grad_norm": 2.3920053722933656, - "learning_rate": 3.989988782294786e-06, - "loss": 0.6689, - "num_input_tokens_seen": 10754360, - "step": 507 - }, - { - "epoch": 0.06108338844465821, - "flos": 13971086252520.0, - "grad_norm": 1.9787690281871813, - "learning_rate": 3.989910788109489e-06, - "loss": 0.9315, - "num_input_tokens_seen": 10770730, - "step": 508 - }, - { - "epoch": 0.0612036313352973, - "flos": 24667619737440.0, - "grad_norm": 3.185097929977564, - "learning_rate": 3.989832492056307e-06, - "loss": 0.7355, - "num_input_tokens_seen": 10791475, - "step": 509 - }, - { - "epoch": 0.06132387422593639, - "flos": 20157996913080.0, - "grad_norm": 3.1525565717251207, - "learning_rate": 3.989753894147119e-06, - "loss": 0.7802, - "num_input_tokens_seen": 10811320, - "step": 510 - }, - { - "epoch": 0.061444117116575485, - "flos": 18969806274360.0, - "grad_norm": 2.23792992409365, - "learning_rate": 3.989674994393846e-06, - "loss": 0.7816, - "num_input_tokens_seen": 10830515, - "step": 511 - }, - { - "epoch": 0.061564360007214575, - "flos": 20913942659760.0, - "grad_norm": 3.0350470603046933, - "learning_rate": 3.98959579280846e-06, - "loss": 0.9283, - "num_input_tokens_seen": 10848635, - "step": 512 - }, - { - "epoch": 0.061684602897853665, - "flos": 8763889665000.0, - "grad_norm": 3.063234824947875, - "learning_rate": 3.989516289402973e-06, - "loss": 0.8126, - "num_input_tokens_seen": 10863985, - "step": 513 - }, - { - "epoch": 0.061804845788492754, - "flos": 14268038932320.0, - "grad_norm": 3.3109439577955104, - "learning_rate": 3.989436484189447e-06, - "loss": 0.8001, - "num_input_tokens_seen": 10881650, - "step": 514 - }, - { - "epoch": 0.061925088679131844, - "flos": 11166639197760.0, - "grad_norm": 3.663554532561146, - "learning_rate": 3.9893563771799885e-06, - "loss": 0.7849, - "num_input_tokens_seen": 10897845, - "step": 515 - }, - { - "epoch": 0.062045331569770934, - "flos": 18995383030680.0, - "grad_norm": 4.576972061121961, - "learning_rate": 3.989275968386749e-06, - "loss": 0.8542, - "num_input_tokens_seen": 10915475, - "step": 516 - }, - { - "epoch": 0.06216557446041003, - "flos": 20616863340120.0, - "grad_norm": 3.0428275588533573, - "learning_rate": 3.989195257821926e-06, - "loss": 0.7499, - "num_input_tokens_seen": 10933680, - "step": 517 - }, - { - "epoch": 0.06228581735104912, - "flos": 17188723394760.0, - "grad_norm": 2.6716028264494107, - "learning_rate": 3.989114245497765e-06, - "loss": 0.8259, - "num_input_tokens_seen": 10953200, - "step": 518 - }, - { - "epoch": 0.06240606024168821, - "flos": 11058665039640.0, - "grad_norm": 2.9968480400328654, - "learning_rate": 3.989032931426554e-06, - "loss": 0.9327, - "num_input_tokens_seen": 10970075, - "step": 519 - }, - { - "epoch": 0.06252630313232731, - "flos": 15082166702760.0, - "grad_norm": 
2.6971584025291766, - "learning_rate": 3.9889513156206295e-06, - "loss": 0.8528, - "num_input_tokens_seen": 10989235, - "step": 520 - }, - { - "epoch": 0.06264654602296639, - "flos": 15190425800520.0, - "grad_norm": 6.410737043310743, - "learning_rate": 3.988869398092371e-06, - "loss": 0.6872, - "num_input_tokens_seen": 11008865, - "step": 521 - }, - { - "epoch": 0.06276678891360549, - "flos": 21723289776240.0, - "grad_norm": 2.429087672140574, - "learning_rate": 3.988787178854206e-06, - "loss": 0.76, - "num_input_tokens_seen": 11028120, - "step": 522 - }, - { - "epoch": 0.06288703180424457, - "flos": 16187959939680.0, - "grad_norm": 2.578679897818141, - "learning_rate": 3.988704657918608e-06, - "loss": 0.8567, - "num_input_tokens_seen": 11046900, - "step": 523 - }, - { - "epoch": 0.06300727469488367, - "flos": 10897004572080.0, - "grad_norm": 3.0715635434780664, - "learning_rate": 3.988621835298094e-06, - "loss": 0.7982, - "num_input_tokens_seen": 11063835, - "step": 524 - }, - { - "epoch": 0.06312751758552275, - "flos": 17971005736800.0, - "grad_norm": 2.3523330186531313, - "learning_rate": 3.988538711005229e-06, - "loss": 0.9084, - "num_input_tokens_seen": 11083010, - "step": 525 - }, - { - "epoch": 0.06324776047616185, - "flos": 15729790031760.0, - "grad_norm": 2.541829828330407, - "learning_rate": 3.988455285052622e-06, - "loss": 0.8772, - "num_input_tokens_seen": 11098910, - "step": 526 - }, - { - "epoch": 0.06336800336680094, - "flos": 15863562565920.0, - "grad_norm": 2.7082475404111093, - "learning_rate": 3.98837155745293e-06, - "loss": 0.8236, - "num_input_tokens_seen": 11116670, - "step": 527 - }, - { - "epoch": 0.06348824625744003, - "flos": 14245216592520.0, - "grad_norm": 3.803462338434245, - "learning_rate": 3.988287528218854e-06, - "loss": 0.7542, - "num_input_tokens_seen": 11135175, - "step": 528 - }, - { - "epoch": 0.06360848914807912, - "flos": 11269516102320.0, - "grad_norm": 2.76984458625072, - "learning_rate": 3.98820319736314e-06, - "loss": 0.8844, - "num_input_tokens_seen": 11151510, - "step": 529 - }, - { - "epoch": 0.0637287320387182, - "flos": 15053423950440.0, - "grad_norm": 2.1402055092647543, - "learning_rate": 3.988118564898582e-06, - "loss": 0.8417, - "num_input_tokens_seen": 11170770, - "step": 530 - }, - { - "epoch": 0.0638489749293573, - "flos": 12698187034920.0, - "grad_norm": 3.684265269683031, - "learning_rate": 3.988033630838019e-06, - "loss": 0.8838, - "num_input_tokens_seen": 11184530, - "step": 531 - }, - { - "epoch": 0.0639692178199964, - "flos": 17432717944200.0, - "grad_norm": 3.2832873604229773, - "learning_rate": 3.987948395194334e-06, - "loss": 0.8625, - "num_input_tokens_seen": 11206630, - "step": 532 - }, - { - "epoch": 0.06408946071063548, - "flos": 13487339588280.0, - "grad_norm": 2.80142855645824, - "learning_rate": 3.987862857980458e-06, - "loss": 0.767, - "num_input_tokens_seen": 11222295, - "step": 533 - }, - { - "epoch": 0.06420970360127458, - "flos": 19915015482360.0, - "grad_norm": 2.468614502097693, - "learning_rate": 3.987777019209368e-06, - "loss": 0.748, - "num_input_tokens_seen": 11242530, - "step": 534 - }, - { - "epoch": 0.06432994649191366, - "flos": 17322907508400.0, - "grad_norm": 4.352596796965361, - "learning_rate": 3.987690878894084e-06, - "loss": 0.8003, - "num_input_tokens_seen": 11261965, - "step": 535 - }, - { - "epoch": 0.06445018938255276, - "flos": 17133612387120.0, - "grad_norm": 3.233021770947938, - "learning_rate": 3.987604437047673e-06, - "loss": 0.8309, - "num_input_tokens_seen": 11281485, - "step": 536 - }, 
- { - "epoch": 0.06457043227319184, - "flos": 14353095770760.0, - "grad_norm": 2.8004256572067305, - "learning_rate": 3.987517693683251e-06, - "loss": 0.7685, - "num_input_tokens_seen": 11299780, - "step": 537 - }, - { - "epoch": 0.06469067516383094, - "flos": 12378000435840.0, - "grad_norm": 2.5698344705019482, - "learning_rate": 3.9874306488139745e-06, - "loss": 0.9484, - "num_input_tokens_seen": 11314760, - "step": 538 - }, - { - "epoch": 0.06481091805447003, - "flos": 17054286001440.0, - "grad_norm": 3.871949625720067, - "learning_rate": 3.987343302453049e-06, - "loss": 0.8474, - "num_input_tokens_seen": 11335755, - "step": 539 - }, - { - "epoch": 0.06493116094510912, - "flos": 21403134837120.0, - "grad_norm": 2.1024256638892553, - "learning_rate": 3.987255654613724e-06, - "loss": 0.8099, - "num_input_tokens_seen": 11359240, - "step": 540 - }, - { - "epoch": 0.06505140383574821, - "flos": 14514629598480.0, - "grad_norm": 2.695544335166443, - "learning_rate": 3.987167705309296e-06, - "loss": 0.6942, - "num_input_tokens_seen": 11378235, - "step": 541 - }, - { - "epoch": 0.0651716467263873, - "flos": 13077695416320.0, - "grad_norm": 2.5610885454545866, - "learning_rate": 3.987079454553108e-06, - "loss": 0.936, - "num_input_tokens_seen": 11395905, - "step": 542 - }, - { - "epoch": 0.0652918896170264, - "flos": 15237210238680.0, - "grad_norm": 2.02450380250626, - "learning_rate": 3.986990902358546e-06, - "loss": 0.9043, - "num_input_tokens_seen": 11412565, - "step": 543 - }, - { - "epoch": 0.06541213250766549, - "flos": 16000374456240.0, - "grad_norm": 2.5904286272193033, - "learning_rate": 3.986902048739045e-06, - "loss": 0.9119, - "num_input_tokens_seen": 11432230, - "step": 544 - }, - { - "epoch": 0.06553237539830457, - "flos": 16916429332440.0, - "grad_norm": 4.041455266608437, - "learning_rate": 3.986812893708082e-06, - "loss": 0.7827, - "num_input_tokens_seen": 11448140, - "step": 545 - }, - { - "epoch": 0.06565261828894367, - "flos": 13076872257360.0, - "grad_norm": 6.975953352115998, - "learning_rate": 3.9867234372791826e-06, - "loss": 0.8018, - "num_input_tokens_seen": 11465815, - "step": 546 - }, - { - "epoch": 0.06577286117958275, - "flos": 16674017781000.0, - "grad_norm": 2.1593285297522136, - "learning_rate": 3.986633679465918e-06, - "loss": 0.8606, - "num_input_tokens_seen": 11485690, - "step": 547 - }, - { - "epoch": 0.06589310407022185, - "flos": 17349940622880.0, - "grad_norm": 2.9998472492748585, - "learning_rate": 3.986543620281904e-06, - "loss": 0.8018, - "num_input_tokens_seen": 11505060, - "step": 548 - }, - { - "epoch": 0.06601334696086093, - "flos": 19694096552400.0, - "grad_norm": 1.7215947277897723, - "learning_rate": 3.986453259740802e-06, - "loss": 0.8998, - "num_input_tokens_seen": 11522950, - "step": 549 - }, - { - "epoch": 0.06613358985150003, - "flos": 9114053754840.0, - "grad_norm": 3.6241711951571274, - "learning_rate": 3.986362597856319e-06, - "loss": 0.7678, - "num_input_tokens_seen": 11539170, - "step": 550 - }, - { - "epoch": 0.06625383274213913, - "flos": 13380410208840.0, - "grad_norm": 2.9788802927378457, - "learning_rate": 3.986271634642211e-06, - "loss": 0.7934, - "num_input_tokens_seen": 11555870, - "step": 551 - }, - { - "epoch": 0.06637407563277821, - "flos": 11191867694520.0, - "grad_norm": 2.9685448826652343, - "learning_rate": 3.986180370112274e-06, - "loss": 0.8053, - "num_input_tokens_seen": 11572110, - "step": 552 - }, - { - "epoch": 0.0664943185234173, - "flos": 17593555252800.0, - "grad_norm": 2.531170219765356, - "learning_rate": 
3.986088804280354e-06, - "loss": 0.734, - "num_input_tokens_seen": 11591560, - "step": 553 - }, - { - "epoch": 0.06661456141405639, - "flos": 15322805296440.0, - "grad_norm": 3.316016436021885, - "learning_rate": 3.985996937160342e-06, - "loss": 0.9204, - "num_input_tokens_seen": 11610470, - "step": 554 - }, - { - "epoch": 0.06673480430469549, - "flos": 38461817081640.0, - "grad_norm": 2.577361392596482, - "learning_rate": 3.985904768766173e-06, - "loss": 0.6794, - "num_input_tokens_seen": 11632965, - "step": 555 - }, - { - "epoch": 0.06685504719533458, - "flos": 11814959045880.0, - "grad_norm": 3.669809482836926, - "learning_rate": 3.98581229911183e-06, - "loss": 0.7396, - "num_input_tokens_seen": 11651605, - "step": 556 - }, - { - "epoch": 0.06697529008597367, - "flos": 16458322744440.0, - "grad_norm": 2.207297412048131, - "learning_rate": 3.985719528211341e-06, - "loss": 0.8997, - "num_input_tokens_seen": 11670695, - "step": 557 - }, - { - "epoch": 0.06709553297661276, - "flos": 46210225901520.0, - "grad_norm": 0.9252762523741491, - "learning_rate": 3.985626456078777e-06, - "loss": 0.6653, - "num_input_tokens_seen": 11735070, - "step": 558 - }, - { - "epoch": 0.06721577586725185, - "flos": 8113986818880.0, - "grad_norm": 2.8924249766914567, - "learning_rate": 3.985533082728259e-06, - "loss": 0.8498, - "num_input_tokens_seen": 11750445, - "step": 559 - }, - { - "epoch": 0.06733601875789094, - "flos": 18997377608160.0, - "grad_norm": 2.096672864742043, - "learning_rate": 3.985439408173951e-06, - "loss": 0.7295, - "num_input_tokens_seen": 11770390, - "step": 560 - }, - { - "epoch": 0.06745626164853002, - "flos": 15216635756040.0, - "grad_norm": 2.8217683453011264, - "learning_rate": 3.9853454324300634e-06, - "loss": 0.6876, - "num_input_tokens_seen": 11789320, - "step": 561 - }, - { - "epoch": 0.06757650453916912, - "flos": 14488229683200.0, - "grad_norm": 3.509910856457166, - "learning_rate": 3.985251155510852e-06, - "loss": 0.7584, - "num_input_tokens_seen": 11808070, - "step": 562 - }, - { - "epoch": 0.06769674742980822, - "flos": 18861895436160.0, - "grad_norm": 2.8439357822641806, - "learning_rate": 3.98515657743062e-06, - "loss": 0.7985, - "num_input_tokens_seen": 11827255, - "step": 563 - }, - { - "epoch": 0.0678169903204473, - "flos": 9488813142240.0, - "grad_norm": 2.454822706047718, - "learning_rate": 3.985061698203711e-06, - "loss": 0.7584, - "num_input_tokens_seen": 11844090, - "step": 564 - }, - { - "epoch": 0.0679372332110864, - "flos": 52262889128520.0, - "grad_norm": 0.8626638422712917, - "learning_rate": 3.984966517844523e-06, - "loss": 0.661, - "num_input_tokens_seen": 11899055, - "step": 565 - }, - { - "epoch": 0.06805747610172548, - "flos": 20724742518360.0, - "grad_norm": 3.51540472400194, - "learning_rate": 3.984871036367492e-06, - "loss": 0.8111, - "num_input_tokens_seen": 11918800, - "step": 566 - }, - { - "epoch": 0.06817771899236458, - "flos": 14703703100040.0, - "grad_norm": 2.277164825163232, - "learning_rate": 3.984775253787102e-06, - "loss": 0.8246, - "num_input_tokens_seen": 11936810, - "step": 567 - }, - { - "epoch": 0.06829796188300366, - "flos": 13082919309720.0, - "grad_norm": 3.723563020793957, - "learning_rate": 3.984679170117885e-06, - "loss": 0.8624, - "num_input_tokens_seen": 11952735, - "step": 568 - }, - { - "epoch": 0.06841820477364276, - "flos": 10545320804160.0, - "grad_norm": 2.843554311873215, - "learning_rate": 3.984582785374415e-06, - "loss": 0.7674, - "num_input_tokens_seen": 11969895, - "step": 569 - }, - { - "epoch": 0.06853844766428185, 
- "flos": 16048203673080.0, - "grad_norm": 2.1233940788135466, - "learning_rate": 3.9844860995713155e-06, - "loss": 0.7965, - "num_input_tokens_seen": 11989155, - "step": 570 - }, - { - "epoch": 0.06865869055492094, - "flos": 12246792358440.0, - "grad_norm": 3.694821992543022, - "learning_rate": 3.9843891127232524e-06, - "loss": 0.7984, - "num_input_tokens_seen": 12006410, - "step": 571 - }, - { - "epoch": 0.06877893344556003, - "flos": 14567967648360.0, - "grad_norm": 3.441458070605161, - "learning_rate": 3.984291824844938e-06, - "loss": 0.6512, - "num_input_tokens_seen": 12021225, - "step": 572 - }, - { - "epoch": 0.06889917633619912, - "flos": 16863851121600.0, - "grad_norm": 2.7876282324611634, - "learning_rate": 3.984194235951132e-06, - "loss": 0.8253, - "num_input_tokens_seen": 12037090, - "step": 573 - }, - { - "epoch": 0.06901941922683821, - "flos": 15325496393040.0, - "grad_norm": 3.7824624379795977, - "learning_rate": 3.9840963460566375e-06, - "loss": 0.8206, - "num_input_tokens_seen": 12055590, - "step": 574 - }, - { - "epoch": 0.06913966211747731, - "flos": 18182236719000.0, - "grad_norm": 2.1788932419665943, - "learning_rate": 3.983998155176305e-06, - "loss": 0.8823, - "num_input_tokens_seen": 12075670, - "step": 575 - }, - { - "epoch": 0.06925990500811639, - "flos": 43015542230280.0, - "grad_norm": 0.8029901324481784, - "learning_rate": 3.9838996633250305e-06, - "loss": 0.5855, - "num_input_tokens_seen": 12135905, - "step": 576 - }, - { - "epoch": 0.06938014789875549, - "flos": 9248902727640.0, - "grad_norm": 2.64066373356508, - "learning_rate": 3.983800870517753e-06, - "loss": 0.8728, - "num_input_tokens_seen": 12152415, - "step": 577 - }, - { - "epoch": 0.06950039078939457, - "flos": 16701620774760.0, - "grad_norm": 4.1838549791516755, - "learning_rate": 3.983701776769463e-06, - "loss": 0.7771, - "num_input_tokens_seen": 12169545, - "step": 578 - }, - { - "epoch": 0.06962063368003367, - "flos": 16050736469880.0, - "grad_norm": 2.864551298944934, - "learning_rate": 3.9836023820951885e-06, - "loss": 0.8439, - "num_input_tokens_seen": 12188480, - "step": 579 - }, - { - "epoch": 0.06974087657067275, - "flos": 15137246050440.0, - "grad_norm": 2.377992150584902, - "learning_rate": 3.983502686510011e-06, - "loss": 0.6752, - "num_input_tokens_seen": 12209030, - "step": 580 - }, - { - "epoch": 0.06986111946131185, - "flos": 16566993421680.0, - "grad_norm": 2.2263423342354574, - "learning_rate": 3.9834026900290525e-06, - "loss": 0.7108, - "num_input_tokens_seen": 12228145, - "step": 581 - }, - { - "epoch": 0.06998136235195095, - "flos": 19753133395080.0, - "grad_norm": 3.703173544521278, - "learning_rate": 3.983302392667482e-06, - "loss": 0.9787, - "num_input_tokens_seen": 12248710, - "step": 582 - }, - { - "epoch": 0.07010160524259003, - "flos": 16459177563360.0, - "grad_norm": 2.317987786636893, - "learning_rate": 3.983201794440517e-06, - "loss": 0.9263, - "num_input_tokens_seen": 12268005, - "step": 583 - }, - { - "epoch": 0.07022184813322913, - "flos": 13380441868800.0, - "grad_norm": 2.5772252217743796, - "learning_rate": 3.9831008953634165e-06, - "loss": 0.6581, - "num_input_tokens_seen": 12287015, - "step": 584 - }, - { - "epoch": 0.07034209102386821, - "flos": 18073914301320.0, - "grad_norm": 2.5555026652442203, - "learning_rate": 3.9829996954514864e-06, - "loss": 0.7992, - "num_input_tokens_seen": 12305875, - "step": 585 - }, - { - "epoch": 0.0704623339145073, - "flos": 19052013716400.0, - "grad_norm": 2.3400219577182946, - "learning_rate": 3.982898194720079e-06, - "loss": 
0.8328, - "num_input_tokens_seen": 12326325, - "step": 586 - }, - { - "epoch": 0.0705825768051464, - "flos": 18565259355960.0, - "grad_norm": 2.3527210714669327, - "learning_rate": 3.982796393184592e-06, - "loss": 0.7916, - "num_input_tokens_seen": 12345125, - "step": 587 - }, - { - "epoch": 0.07070281969578548, - "flos": 48696734214360.0, - "grad_norm": 0.8072840085230369, - "learning_rate": 3.98269429086047e-06, - "loss": 0.6439, - "num_input_tokens_seen": 12402685, - "step": 588 - }, - { - "epoch": 0.07082306258642458, - "flos": 17317525315200.0, - "grad_norm": 2.9017014543645305, - "learning_rate": 3.982591887763199e-06, - "loss": 0.8378, - "num_input_tokens_seen": 12419865, - "step": 589 - }, - { - "epoch": 0.07094330547706366, - "flos": 10085124658800.0, - "grad_norm": 3.1610521100584346, - "learning_rate": 3.982489183908316e-06, - "loss": 0.7979, - "num_input_tokens_seen": 12436005, - "step": 590 - }, - { - "epoch": 0.07106354836770276, - "flos": 18052231720080.0, - "grad_norm": 2.1077923524203306, - "learning_rate": 3.982386179311399e-06, - "loss": 0.8297, - "num_input_tokens_seen": 12456245, - "step": 591 - }, - { - "epoch": 0.07118379125834184, - "flos": 11815085685720.0, - "grad_norm": 3.4868906288596713, - "learning_rate": 3.982282873988075e-06, - "loss": 0.8517, - "num_input_tokens_seen": 12473840, - "step": 592 - }, - { - "epoch": 0.07130403414898094, - "flos": 14407383619440.0, - "grad_norm": 2.005840238277446, - "learning_rate": 3.982179267954016e-06, - "loss": 0.8646, - "num_input_tokens_seen": 12493990, - "step": 593 - }, - { - "epoch": 0.07142427703962004, - "flos": 15999867896880.0, - "grad_norm": 3.600418273150799, - "learning_rate": 3.982075361224937e-06, - "loss": 0.9478, - "num_input_tokens_seen": 12512075, - "step": 594 - }, - { - "epoch": 0.07154451993025912, - "flos": 13353915313680.0, - "grad_norm": 3.0339298004196658, - "learning_rate": 3.981971153816602e-06, - "loss": 0.8796, - "num_input_tokens_seen": 12529400, - "step": 595 - }, - { - "epoch": 0.07166476282089822, - "flos": 16213125116520.0, - "grad_norm": 1.9512733532365227, - "learning_rate": 3.981866645744819e-06, - "loss": 0.9486, - "num_input_tokens_seen": 12549835, - "step": 596 - }, - { - "epoch": 0.0717850057115373, - "flos": 10274926339440.0, - "grad_norm": 3.1417114504211745, - "learning_rate": 3.9817618370254416e-06, - "loss": 0.7907, - "num_input_tokens_seen": 12566210, - "step": 597 - }, - { - "epoch": 0.0719052486021764, - "flos": 22694044080600.0, - "grad_norm": 3.0761188611503303, - "learning_rate": 3.9816567276743684e-06, - "loss": 0.8571, - "num_input_tokens_seen": 12585795, - "step": 598 - }, - { - "epoch": 0.0720254914928155, - "flos": 15566831505840.0, - "grad_norm": 2.1648320231125227, - "learning_rate": 3.9815513177075466e-06, - "loss": 0.7485, - "num_input_tokens_seen": 12604300, - "step": 599 - }, - { - "epoch": 0.07214573438345458, - "flos": 20401231623480.0, - "grad_norm": 2.408934841695226, - "learning_rate": 3.9814456071409646e-06, - "loss": 0.697, - "num_input_tokens_seen": 12624555, - "step": 600 - }, - { - "epoch": 0.07226597727409367, - "flos": 18672663634800.0, - "grad_norm": 3.2659466598742917, - "learning_rate": 3.981339595990659e-06, - "loss": 0.8366, - "num_input_tokens_seen": 12642805, - "step": 601 - }, - { - "epoch": 0.07238622016473276, - "flos": 17214996670200.0, - "grad_norm": 2.5457794079685825, - "learning_rate": 3.981233284272713e-06, - "loss": 0.804, - "num_input_tokens_seen": 12662270, - "step": 602 - }, - { - "epoch": 0.07250646305537185, - "flos": 
18651329313120.0, - "grad_norm": 1.7814771273180778, - "learning_rate": 3.981126672003253e-06, - "loss": 0.8876, - "num_input_tokens_seen": 12684665, - "step": 603 - }, - { - "epoch": 0.07262670594601094, - "flos": 19908683490360.0, - "grad_norm": 3.010751056689012, - "learning_rate": 3.981019759198451e-06, - "loss": 0.7676, - "num_input_tokens_seen": 12703335, - "step": 604 - }, - { - "epoch": 0.07274694883665003, - "flos": 19537153418880.0, - "grad_norm": 3.4307674714880356, - "learning_rate": 3.980912545874528e-06, - "loss": 0.8076, - "num_input_tokens_seen": 12723220, - "step": 605 - }, - { - "epoch": 0.07286719172728913, - "flos": 21913313076600.0, - "grad_norm": 3.0075225293971686, - "learning_rate": 3.980805032047746e-06, - "loss": 0.8393, - "num_input_tokens_seen": 12744410, - "step": 606 - }, - { - "epoch": 0.07298743461792821, - "flos": 12676757733360.0, - "grad_norm": 2.3069147242678794, - "learning_rate": 3.980697217734415e-06, - "loss": 0.7849, - "num_input_tokens_seen": 12761870, - "step": 607 - }, - { - "epoch": 0.07310767750856731, - "flos": 14242842095520.0, - "grad_norm": 1.8959400719857364, - "learning_rate": 3.980589102950891e-06, - "loss": 0.8949, - "num_input_tokens_seen": 12779755, - "step": 608 - }, - { - "epoch": 0.07322792039920639, - "flos": 21399905521200.0, - "grad_norm": 3.5012221246254565, - "learning_rate": 3.9804806877135755e-06, - "loss": 0.7641, - "num_input_tokens_seen": 12797520, - "step": 609 - }, - { - "epoch": 0.07334816328984549, - "flos": 17188406795160.0, - "grad_norm": 3.2080758887373517, - "learning_rate": 3.980371972038915e-06, - "loss": 0.8514, - "num_input_tokens_seen": 12817730, - "step": 610 - }, - { - "epoch": 0.07346840618048459, - "flos": 16806365616960.0, - "grad_norm": 2.301727980953271, - "learning_rate": 3.980262955943399e-06, - "loss": 0.8289, - "num_input_tokens_seen": 12837115, - "step": 611 - }, - { - "epoch": 0.07358864907112367, - "flos": 12892737709560.0, - "grad_norm": 3.3551131782828003, - "learning_rate": 3.980153639443569e-06, - "loss": 0.8672, - "num_input_tokens_seen": 12852820, - "step": 612 - }, - { - "epoch": 0.07370889196176277, - "flos": 17646513383160.0, - "grad_norm": 2.8665557913814514, - "learning_rate": 3.980044022556005e-06, - "loss": 0.7932, - "num_input_tokens_seen": 12872225, - "step": 613 - }, - { - "epoch": 0.07382913485240185, - "flos": 18970566113400.0, - "grad_norm": 3.6438423315163737, - "learning_rate": 3.9799341052973375e-06, - "loss": 0.713, - "num_input_tokens_seen": 12891780, - "step": 614 - }, - { - "epoch": 0.07394937774304094, - "flos": 12301016887200.0, - "grad_norm": 2.8864243787027797, - "learning_rate": 3.979823887684241e-06, - "loss": 0.7172, - "num_input_tokens_seen": 12910440, - "step": 615 - }, - { - "epoch": 0.07406962063368003, - "flos": 15134998193280.0, - "grad_norm": 3.784779438360139, - "learning_rate": 3.979713369733434e-06, - "loss": 0.8485, - "num_input_tokens_seen": 12928025, - "step": 616 - }, - { - "epoch": 0.07418986352431912, - "flos": 15673096026120.0, - "grad_norm": 3.2851643783341773, - "learning_rate": 3.979602551461683e-06, - "loss": 0.8267, - "num_input_tokens_seen": 12948525, - "step": 617 - }, - { - "epoch": 0.07431010641495822, - "flos": 8709665136240.0, - "grad_norm": 3.299997017598972, - "learning_rate": 3.979491432885799e-06, - "loss": 0.9018, - "num_input_tokens_seen": 12964510, - "step": 618 - }, - { - "epoch": 0.0744303493055973, - "flos": 15322868616360.0, - "grad_norm": 2.434061412666559, - "learning_rate": 3.97938001402264e-06, - "loss": 0.8154, - 
"num_input_tokens_seen": 12983355, - "step": 619 - }, - { - "epoch": 0.0745505921962364, - "flos": 11841580580880.0, - "grad_norm": 4.417459716613202, - "learning_rate": 3.979268294889105e-06, - "loss": 0.7913, - "num_input_tokens_seen": 12998625, - "step": 620 - }, - { - "epoch": 0.07467083508687548, - "flos": 37514708276040.0, - "grad_norm": 3.333784422551915, - "learning_rate": 3.979156275502143e-06, - "loss": 0.7428, - "num_input_tokens_seen": 13022005, - "step": 621 - }, - { - "epoch": 0.07479107797751458, - "flos": 12786251569560.0, - "grad_norm": 3.1159379409760652, - "learning_rate": 3.979043955878749e-06, - "loss": 0.8984, - "num_input_tokens_seen": 13039570, - "step": 622 - }, - { - "epoch": 0.07491132086815366, - "flos": 17184987519480.0, - "grad_norm": 2.899212592202434, - "learning_rate": 3.978931336035959e-06, - "loss": 0.8119, - "num_input_tokens_seen": 13058100, - "step": 623 - }, - { - "epoch": 0.07503156375879276, - "flos": 14731179453960.0, - "grad_norm": 3.147964934293116, - "learning_rate": 3.9788184159908595e-06, - "loss": 0.8084, - "num_input_tokens_seen": 13074950, - "step": 624 - }, - { - "epoch": 0.07515180664943186, - "flos": 10998741718080.0, - "grad_norm": 3.0533214744048833, - "learning_rate": 3.97870519576058e-06, - "loss": 0.8018, - "num_input_tokens_seen": 13091095, - "step": 625 - }, - { - "epoch": 0.07527204954007094, - "flos": 15567433045080.0, - "grad_norm": 2.939918496435919, - "learning_rate": 3.978591675362295e-06, - "loss": 0.7875, - "num_input_tokens_seen": 13109530, - "step": 626 - }, - { - "epoch": 0.07539229243071004, - "flos": 15594434499600.0, - "grad_norm": 2.1367208700500795, - "learning_rate": 3.978477854813226e-06, - "loss": 0.86, - "num_input_tokens_seen": 13128590, - "step": 627 - }, - { - "epoch": 0.07551253532134912, - "flos": 9465547563000.0, - "grad_norm": 2.8571082964577994, - "learning_rate": 3.97836373413064e-06, - "loss": 0.8059, - "num_input_tokens_seen": 13146365, - "step": 628 - }, - { - "epoch": 0.07563277821198822, - "flos": 14028983336640.0, - "grad_norm": 2.639471309927228, - "learning_rate": 3.978249313331848e-06, - "loss": 0.7394, - "num_input_tokens_seen": 13164315, - "step": 629 - }, - { - "epoch": 0.07575302110262731, - "flos": 14272059747240.0, - "grad_norm": 4.070546793280856, - "learning_rate": 3.978134592434208e-06, - "loss": 0.592, - "num_input_tokens_seen": 13181785, - "step": 630 - }, - { - "epoch": 0.0758732639932664, - "flos": 50115816670440.0, - "grad_norm": 0.9858666879703285, - "learning_rate": 3.978019571455123e-06, - "loss": 0.6381, - "num_input_tokens_seen": 13233450, - "step": 631 - }, - { - "epoch": 0.07599350688390549, - "flos": 13866404730240.0, - "grad_norm": 2.785941378451144, - "learning_rate": 3.977904250412042e-06, - "loss": 0.8284, - "num_input_tokens_seen": 13252125, - "step": 632 - }, - { - "epoch": 0.07611374977454458, - "flos": 15405614277720.0, - "grad_norm": 2.6386832597714243, - "learning_rate": 3.97778862932246e-06, - "loss": 0.8417, - "num_input_tokens_seen": 13269010, - "step": 633 - }, - { - "epoch": 0.07623399266518367, - "flos": 13515069221880.0, - "grad_norm": 2.829624179108441, - "learning_rate": 3.9776727082039144e-06, - "loss": 0.9273, - "num_input_tokens_seen": 13285700, - "step": 634 - }, - { - "epoch": 0.07635423555582276, - "flos": 32870209310280.0, - "grad_norm": 0.8115480054310843, - "learning_rate": 3.977556487073991e-06, - "loss": 0.5722, - "num_input_tokens_seen": 13339975, - "step": 635 - }, - { - "epoch": 0.07647447844646185, - "flos": 15917692114800.0, - 
"grad_norm": 2.197852786131691, - "learning_rate": 3.97743996595032e-06, - "loss": 0.7913, - "num_input_tokens_seen": 13359735, - "step": 636 - }, - { - "epoch": 0.07659472133710095, - "flos": 17431958105160.0, - "grad_norm": 2.338172073441031, - "learning_rate": 3.9773231448505804e-06, - "loss": 0.8007, - "num_input_tokens_seen": 13381245, - "step": 637 - }, - { - "epoch": 0.07671496422774003, - "flos": 15702313677840.0, - "grad_norm": 3.1649637328084306, - "learning_rate": 3.977206023792491e-06, - "loss": 0.7499, - "num_input_tokens_seen": 13400855, - "step": 638 - }, - { - "epoch": 0.07683520711837913, - "flos": 12379963353360.0, - "grad_norm": 3.072372405566475, - "learning_rate": 3.97708860279382e-06, - "loss": 0.8005, - "num_input_tokens_seen": 13418685, - "step": 639 - }, - { - "epoch": 0.07695545000901821, - "flos": 17187456996360.0, - "grad_norm": 2.5980002268731632, - "learning_rate": 3.97697088187238e-06, - "loss": 0.774, - "num_input_tokens_seen": 13438920, - "step": 640 - }, - { - "epoch": 0.07707569289965731, - "flos": 12862950178560.0, - "grad_norm": 3.3501485763169425, - "learning_rate": 3.976852861046029e-06, - "loss": 0.8967, - "num_input_tokens_seen": 13455255, - "step": 641 - }, - { - "epoch": 0.0771959357902964, - "flos": 18888865230720.0, - "grad_norm": 2.6312873565849064, - "learning_rate": 3.97673454033267e-06, - "loss": 0.786, - "num_input_tokens_seen": 13477075, - "step": 642 - }, - { - "epoch": 0.07731617868093549, - "flos": 14487691463880.0, - "grad_norm": 2.54361192858089, - "learning_rate": 3.976615919750254e-06, - "loss": 0.8023, - "num_input_tokens_seen": 13494495, - "step": 643 - }, - { - "epoch": 0.07743642157157458, - "flos": 15594846079080.0, - "grad_norm": 2.476589744249544, - "learning_rate": 3.976496999316775e-06, - "loss": 0.845, - "num_input_tokens_seen": 13512970, - "step": 644 - }, - { - "epoch": 0.07755666446221367, - "flos": 14590884968040.0, - "grad_norm": 3.8459480017818324, - "learning_rate": 3.976377779050271e-06, - "loss": 0.827, - "num_input_tokens_seen": 13530820, - "step": 645 - }, - { - "epoch": 0.07767690735285276, - "flos": 17295937713840.0, - "grad_norm": 3.1907870230041797, - "learning_rate": 3.976258258968831e-06, - "loss": 0.8153, - "num_input_tokens_seen": 13549085, - "step": 646 - }, - { - "epoch": 0.07779715024349185, - "flos": 16269407542680.0, - "grad_norm": 2.7085686703674225, - "learning_rate": 3.976138439090583e-06, - "loss": 0.7129, - "num_input_tokens_seen": 13566885, - "step": 647 - }, - { - "epoch": 0.07791739313413094, - "flos": 15320684079120.0, - "grad_norm": 2.5380229054132077, - "learning_rate": 3.976018319433706e-06, - "loss": 0.8278, - "num_input_tokens_seen": 13584150, - "step": 648 - }, - { - "epoch": 0.07803763602477004, - "flos": 14104795466760.0, - "grad_norm": 3.2555386225665424, - "learning_rate": 3.9758979000164205e-06, - "loss": 0.9082, - "num_input_tokens_seen": 13600690, - "step": 649 - }, - { - "epoch": 0.07815787891540912, - "flos": 16620648071160.0, - "grad_norm": 2.379764896769785, - "learning_rate": 3.975777180856995e-06, - "loss": 0.6927, - "num_input_tokens_seen": 13619530, - "step": 650 - }, - { - "epoch": 0.07827812180604822, - "flos": 16620933010800.0, - "grad_norm": 4.879330203570325, - "learning_rate": 3.975656161973742e-06, - "loss": 0.8338, - "num_input_tokens_seen": 13638335, - "step": 651 - }, - { - "epoch": 0.0783983646966873, - "flos": 15891513819240.0, - "grad_norm": 3.552263242280343, - "learning_rate": 3.9755348433850194e-06, - "loss": 0.865, - "num_input_tokens_seen": 13653395, - 
"step": 652 - }, - { - "epoch": 0.0785186075873264, - "flos": 44694471893040.0, - "grad_norm": 1.0591542354327437, - "learning_rate": 3.975413225109232e-06, - "loss": 0.7082, - "num_input_tokens_seen": 13713665, - "step": 653 - }, - { - "epoch": 0.0786388504779655, - "flos": 17484314696280.0, - "grad_norm": 3.66960404965244, - "learning_rate": 3.975291307164829e-06, - "loss": 0.9068, - "num_input_tokens_seen": 13732030, - "step": 654 - }, - { - "epoch": 0.07875909336860458, - "flos": 11031631925160.0, - "grad_norm": 2.5872769761835044, - "learning_rate": 3.975169089570306e-06, - "loss": 0.8448, - "num_input_tokens_seen": 13750125, - "step": 655 - }, - { - "epoch": 0.07887933625924368, - "flos": 16269945762000.0, - "grad_norm": 3.0658127909002624, - "learning_rate": 3.975046572344202e-06, - "loss": 0.9051, - "num_input_tokens_seen": 13766305, - "step": 656 - }, - { - "epoch": 0.07899957914988276, - "flos": 15190140860880.0, - "grad_norm": 3.1956407895995356, - "learning_rate": 3.974923755505103e-06, - "loss": 0.6992, - "num_input_tokens_seen": 13785255, - "step": 657 - }, - { - "epoch": 0.07911982204052186, - "flos": 16836976306920.0, - "grad_norm": 2.0761722195069683, - "learning_rate": 3.974800639071641e-06, - "loss": 0.8981, - "num_input_tokens_seen": 13805695, - "step": 658 - }, - { - "epoch": 0.07924006493116094, - "flos": 16916999211720.0, - "grad_norm": 2.399112767891306, - "learning_rate": 3.974677223062492e-06, - "loss": 0.9916, - "num_input_tokens_seen": 13822630, - "step": 659 - }, - { - "epoch": 0.07936030782180004, - "flos": 12003684287880.0, - "grad_norm": 6.338665206904732, - "learning_rate": 3.974553507496378e-06, - "loss": 0.7161, - "num_input_tokens_seen": 13840925, - "step": 660 - }, - { - "epoch": 0.07948055071243913, - "flos": 17377480296720.0, - "grad_norm": 3.3709443767601592, - "learning_rate": 3.974429492392068e-06, - "loss": 0.8723, - "num_input_tokens_seen": 13860670, - "step": 661 - }, - { - "epoch": 0.07960079360307822, - "flos": 14298744602160.0, - "grad_norm": 4.319963934359295, - "learning_rate": 3.974305177768373e-06, - "loss": 0.8855, - "num_input_tokens_seen": 13878600, - "step": 662 - }, - { - "epoch": 0.07972103649371731, - "flos": 17214616750680.0, - "grad_norm": 3.1483410235974594, - "learning_rate": 3.974180563644152e-06, - "loss": 0.838, - "num_input_tokens_seen": 13896885, - "step": 663 - }, - { - "epoch": 0.0798412793843564, - "flos": 12192092930280.0, - "grad_norm": 3.509313996923602, - "learning_rate": 3.97405565003831e-06, - "loss": 0.875, - "num_input_tokens_seen": 13912690, - "step": 664 - }, - { - "epoch": 0.07996152227499549, - "flos": 13299659124960.0, - "grad_norm": 3.5196912519558072, - "learning_rate": 3.973930436969794e-06, - "loss": 0.7667, - "num_input_tokens_seen": 13930865, - "step": 665 - }, - { - "epoch": 0.08008176516563459, - "flos": 15054753668760.0, - "grad_norm": 2.8989553477337826, - "learning_rate": 3.973804924457602e-06, - "loss": 0.8415, - "num_input_tokens_seen": 13948665, - "step": 666 - }, - { - "epoch": 0.08020200805627367, - "flos": 23372942958720.0, - "grad_norm": 1.9551547680445778, - "learning_rate": 3.973679112520771e-06, - "loss": 0.8458, - "num_input_tokens_seen": 13970100, - "step": 667 - }, - { - "epoch": 0.08032225094691277, - "flos": 12973868712960.0, - "grad_norm": 2.6373859832851734, - "learning_rate": 3.973553001178389e-06, - "loss": 0.9793, - "num_input_tokens_seen": 13987325, - "step": 668 - }, - { - "epoch": 0.08044249383755185, - "flos": 17620525047360.0, - "grad_norm": 3.550133372205785, - 
"learning_rate": 3.973426590449585e-06, - "loss": 0.7423, - "num_input_tokens_seen": 14005000, - "step": 669 - }, - { - "epoch": 0.08056273672819095, - "flos": 13299912404640.0, - "grad_norm": 2.9961774632820886, - "learning_rate": 3.9732998803535364e-06, - "loss": 0.7418, - "num_input_tokens_seen": 14022780, - "step": 670 - }, - { - "epoch": 0.08068297961883003, - "flos": 14374525072320.0, - "grad_norm": 3.1795607782929527, - "learning_rate": 3.973172870909465e-06, - "loss": 0.8495, - "num_input_tokens_seen": 14037265, - "step": 671 - }, - { - "epoch": 0.08080322250946913, - "flos": 16944760505280.0, - "grad_norm": 5.7760091585208215, - "learning_rate": 3.973045562136638e-06, - "loss": 0.7929, - "num_input_tokens_seen": 14053800, - "step": 672 - }, - { - "epoch": 0.08092346540010822, - "flos": 15919876652040.0, - "grad_norm": 2.5399284881377104, - "learning_rate": 3.972917954054368e-06, - "loss": 0.8969, - "num_input_tokens_seen": 14072075, - "step": 673 - }, - { - "epoch": 0.08104370829074731, - "flos": 15378581163240.0, - "grad_norm": 2.7815507019019043, - "learning_rate": 3.972790046682013e-06, - "loss": 0.7988, - "num_input_tokens_seen": 14090470, - "step": 674 - }, - { - "epoch": 0.0811639511813864, - "flos": 14675941806480.0, - "grad_norm": 2.5794889598949577, - "learning_rate": 3.972661840038977e-06, - "loss": 0.7729, - "num_input_tokens_seen": 14110480, - "step": 675 - }, - { - "epoch": 0.08128419407202549, - "flos": 12273445553400.0, - "grad_norm": 4.05667776899822, - "learning_rate": 3.972533334144707e-06, - "loss": 0.8084, - "num_input_tokens_seen": 14127125, - "step": 676 - }, - { - "epoch": 0.08140443696266458, - "flos": 17404038511800.0, - "grad_norm": 2.9850447137956535, - "learning_rate": 3.972404529018699e-06, - "loss": 0.7683, - "num_input_tokens_seen": 14146705, - "step": 677 - }, - { - "epoch": 0.08152467985330367, - "flos": 17863601457960.0, - "grad_norm": 2.1335661398499024, - "learning_rate": 3.972275424680493e-06, - "loss": 0.8378, - "num_input_tokens_seen": 14166535, - "step": 678 - }, - { - "epoch": 0.08164492274394276, - "flos": 14109576120720.0, - "grad_norm": 3.5290189463668113, - "learning_rate": 3.972146021149673e-06, - "loss": 0.8916, - "num_input_tokens_seen": 14184530, - "step": 679 - }, - { - "epoch": 0.08176516563458186, - "flos": 10788745474320.0, - "grad_norm": 2.6778317801736007, - "learning_rate": 3.972016318445868e-06, - "loss": 0.7746, - "num_input_tokens_seen": 14202250, - "step": 680 - }, - { - "epoch": 0.08188540852522094, - "flos": 16540498526520.0, - "grad_norm": 2.9897074568409256, - "learning_rate": 3.971886316588757e-06, - "loss": 0.8934, - "num_input_tokens_seen": 14222475, - "step": 681 - }, - { - "epoch": 0.08200565141586004, - "flos": 14217613598760.0, - "grad_norm": 3.3781065595407593, - "learning_rate": 3.9717560155980595e-06, - "loss": 0.7167, - "num_input_tokens_seen": 14237845, - "step": 682 - }, - { - "epoch": 0.08212589430649912, - "flos": 15054563709000.0, - "grad_norm": 2.994038380409352, - "learning_rate": 3.971625415493542e-06, - "loss": 0.8978, - "num_input_tokens_seen": 14255885, - "step": 683 - }, - { - "epoch": 0.08224613719713822, - "flos": 19019915008320.0, - "grad_norm": 2.4225824122784774, - "learning_rate": 3.971494516295017e-06, - "loss": 0.8618, - "num_input_tokens_seen": 14275055, - "step": 684 - }, - { - "epoch": 0.08236638008777732, - "flos": 17403563612400.0, - "grad_norm": 2.7901311798430593, - "learning_rate": 3.971363318022341e-06, - "loss": 0.8255, - "num_input_tokens_seen": 14296115, - "step": 685 - }, - 
{ - "epoch": 0.0824866229784164, - "flos": 28526833156320.0, - "grad_norm": 2.49271207926865, - "learning_rate": 3.971231820695417e-06, - "loss": 0.6748, - "num_input_tokens_seen": 14319450, - "step": 686 - }, - { - "epoch": 0.0826068658690555, - "flos": 16913801555760.0, - "grad_norm": 2.2979302090902607, - "learning_rate": 3.971100024334193e-06, - "loss": 0.8037, - "num_input_tokens_seen": 14336690, - "step": 687 - }, - { - "epoch": 0.08272710875969458, - "flos": 15454678233000.0, - "grad_norm": 2.537824029387811, - "learning_rate": 3.970967928958663e-06, - "loss": 0.8537, - "num_input_tokens_seen": 14353525, - "step": 688 - }, - { - "epoch": 0.08284735165033368, - "flos": 13921135818360.0, - "grad_norm": 1.87943493548654, - "learning_rate": 3.970835534588865e-06, - "loss": 0.8221, - "num_input_tokens_seen": 14370740, - "step": 689 - }, - { - "epoch": 0.08296759454097276, - "flos": 12192599489640.0, - "grad_norm": 2.318874234472735, - "learning_rate": 3.970702841244883e-06, - "loss": 0.8424, - "num_input_tokens_seen": 14388780, - "step": 690 - }, - { - "epoch": 0.08308783743161186, - "flos": 13138093637280.0, - "grad_norm": 2.805519596872397, - "learning_rate": 3.970569848946847e-06, - "loss": 0.801, - "num_input_tokens_seen": 14408315, - "step": 691 - }, - { - "epoch": 0.08320808032225095, - "flos": 10972405122720.0, - "grad_norm": 2.7870885023526415, - "learning_rate": 3.970436557714932e-06, - "loss": 0.7886, - "num_input_tokens_seen": 14424555, - "step": 692 - }, - { - "epoch": 0.08332832321289003, - "flos": 16378204859760.0, - "grad_norm": 2.456821936104918, - "learning_rate": 3.970302967569358e-06, - "loss": 0.8382, - "num_input_tokens_seen": 14442865, - "step": 693 - }, - { - "epoch": 0.08344856610352913, - "flos": 18105633089880.0, - "grad_norm": 3.235405504379783, - "learning_rate": 3.9701690785303896e-06, - "loss": 0.6671, - "num_input_tokens_seen": 14461780, - "step": 694 - }, - { - "epoch": 0.08356880899416821, - "flos": 18588208335600.0, - "grad_norm": 2.6448502003123515, - "learning_rate": 3.970034890618339e-06, - "loss": 0.8614, - "num_input_tokens_seen": 14481190, - "step": 695 - }, - { - "epoch": 0.08368905188480731, - "flos": 17836093444080.0, - "grad_norm": 2.7546910243193032, - "learning_rate": 3.969900403853562e-06, - "loss": 0.8578, - "num_input_tokens_seen": 14499950, - "step": 696 - }, - { - "epoch": 0.08380929477544641, - "flos": 13161485856360.0, - "grad_norm": 2.8790102646152143, - "learning_rate": 3.96976561825646e-06, - "loss": 0.7673, - "num_input_tokens_seen": 14516760, - "step": 697 - }, - { - "epoch": 0.08392953766608549, - "flos": 19266727294200.0, - "grad_norm": 2.5206983125535185, - "learning_rate": 3.969630533847479e-06, - "loss": 0.86, - "num_input_tokens_seen": 14535440, - "step": 698 - }, - { - "epoch": 0.08404978055672459, - "flos": 16458860963760.0, - "grad_norm": 2.827095729619854, - "learning_rate": 3.969495150647113e-06, - "loss": 0.8257, - "num_input_tokens_seen": 14553330, - "step": 699 - }, - { - "epoch": 0.08417002344736367, - "flos": 18186669113400.0, - "grad_norm": 2.6485166284673975, - "learning_rate": 3.969359468675899e-06, - "loss": 0.7519, - "num_input_tokens_seen": 14573180, - "step": 700 - }, - { - "epoch": 0.08429026633800277, - "flos": 12354228297240.0, - "grad_norm": 3.0400032999623106, - "learning_rate": 3.969223487954418e-06, - "loss": 0.8763, - "num_input_tokens_seen": 14590360, - "step": 701 - }, - { - "epoch": 0.08441050922864185, - "flos": 17457756481200.0, - "grad_norm": 2.2605627923895812, - "learning_rate": 
3.969087208503301e-06, - "loss": 0.8147, - "num_input_tokens_seen": 14610160, - "step": 702 - }, - { - "epoch": 0.08453075211928095, - "flos": 18699791729160.0, - "grad_norm": 4.638112219528275, - "learning_rate": 3.968950630343219e-06, - "loss": 0.8221, - "num_input_tokens_seen": 14626865, - "step": 703 - }, - { - "epoch": 0.08465099500992004, - "flos": 14267912292480.0, - "grad_norm": 2.527755680150662, - "learning_rate": 3.968813753494892e-06, - "loss": 0.917, - "num_input_tokens_seen": 14644745, - "step": 704 - }, - { - "epoch": 0.08477123790055913, - "flos": 21535071093600.0, - "grad_norm": 2.894662377891837, - "learning_rate": 3.968676577979084e-06, - "loss": 0.7428, - "num_input_tokens_seen": 14664015, - "step": 705 - }, - { - "epoch": 0.08489148079119822, - "flos": 13596516824880.0, - "grad_norm": 2.6135870465235818, - "learning_rate": 3.968539103816605e-06, - "loss": 0.7672, - "num_input_tokens_seen": 14681535, - "step": 706 - }, - { - "epoch": 0.0850117236818373, - "flos": 17182739662320.0, - "grad_norm": 2.4562634843839573, - "learning_rate": 3.9684013310283085e-06, - "loss": 0.8787, - "num_input_tokens_seen": 14699940, - "step": 707 - }, - { - "epoch": 0.0851319665724764, - "flos": 29878362240480.0, - "grad_norm": 2.24512409011689, - "learning_rate": 3.9682632596350956e-06, - "loss": 0.6159, - "num_input_tokens_seen": 14720825, - "step": 708 - }, - { - "epoch": 0.0852522094631155, - "flos": 11563872665400.0, - "grad_norm": 2.053612605564674, - "learning_rate": 3.968124889657911e-06, - "loss": 0.7683, - "num_input_tokens_seen": 14735645, - "step": 709 - }, - { - "epoch": 0.08537245235375458, - "flos": 10592706781560.0, - "grad_norm": 2.8112278074318917, - "learning_rate": 3.967986221117746e-06, - "loss": 0.8959, - "num_input_tokens_seen": 14751305, - "step": 710 - }, - { - "epoch": 0.08549269524439368, - "flos": 19401924526560.0, - "grad_norm": 2.7308188720109134, - "learning_rate": 3.967847254035635e-06, - "loss": 0.8557, - "num_input_tokens_seen": 14770410, - "step": 711 - }, - { - "epoch": 0.08561293813503276, - "flos": 9870917640360.0, - "grad_norm": 3.54112187103604, - "learning_rate": 3.967707988432661e-06, - "loss": 0.85, - "num_input_tokens_seen": 14787835, - "step": 712 - }, - { - "epoch": 0.08573318102567186, - "flos": 19753196715000.0, - "grad_norm": 2.740633880357263, - "learning_rate": 3.967568424329949e-06, - "loss": 0.8609, - "num_input_tokens_seen": 14807980, - "step": 713 - }, - { - "epoch": 0.08585342391631094, - "flos": 49627732591680.0, - "grad_norm": 0.8034630505576782, - "learning_rate": 3.967428561748671e-06, - "loss": 0.5872, - "num_input_tokens_seen": 14875670, - "step": 714 - }, - { - "epoch": 0.08597366680695004, - "flos": 16432682668200.0, - "grad_norm": 2.151886214626759, - "learning_rate": 3.967288400710045e-06, - "loss": 0.849, - "num_input_tokens_seen": 14894855, - "step": 715 - }, - { - "epoch": 0.08609390969758914, - "flos": 17242156424520.0, - "grad_norm": 2.6874229952403623, - "learning_rate": 3.9671479412353335e-06, - "loss": 0.8674, - "num_input_tokens_seen": 14913040, - "step": 716 - }, - { - "epoch": 0.08621415258822822, - "flos": 18970502793480.0, - "grad_norm": 3.3518211425005178, - "learning_rate": 3.967007183345843e-06, - "loss": 0.7261, - "num_input_tokens_seen": 14932615, - "step": 717 - }, - { - "epoch": 0.08633439547886732, - "flos": 9870347761080.0, - "grad_norm": 3.164760484355782, - "learning_rate": 3.966866127062927e-06, - "loss": 0.8764, - "num_input_tokens_seen": 14949460, - "step": 718 - }, - { - "epoch": 0.0864546383695064, 
- "flos": 42569113676160.0, - "grad_norm": 0.9082173007966258, - "learning_rate": 3.966724772407982e-06, - "loss": 0.6747, - "num_input_tokens_seen": 15006695, - "step": 719 - }, - { - "epoch": 0.0865748812601455, - "flos": 14649161971680.0, - "grad_norm": 14.23467032401518, - "learning_rate": 3.966583119402454e-06, - "loss": 0.8786, - "num_input_tokens_seen": 15023180, - "step": 720 - }, - { - "epoch": 0.08669512415078459, - "flos": 25909781625240.0, - "grad_norm": 1.975575196731492, - "learning_rate": 3.9664411680678305e-06, - "loss": 0.8044, - "num_input_tokens_seen": 15044655, - "step": 721 - }, - { - "epoch": 0.08681536704142367, - "flos": 48407348265000.0, - "grad_norm": 0.8881396701682985, - "learning_rate": 3.966298918425644e-06, - "loss": 0.6339, - "num_input_tokens_seen": 15101865, - "step": 722 - }, - { - "epoch": 0.08693560993206277, - "flos": 25367283057960.0, - "grad_norm": 2.6090880824434124, - "learning_rate": 3.966156370497476e-06, - "loss": 0.8165, - "num_input_tokens_seen": 15125195, - "step": 723 - }, - { - "epoch": 0.08705585282270185, - "flos": 16945362044520.0, - "grad_norm": 2.8917409031671224, - "learning_rate": 3.96601352430495e-06, - "loss": 0.868, - "num_input_tokens_seen": 15144685, - "step": 724 - }, - { - "epoch": 0.08717609571334095, - "flos": 21643393511280.0, - "grad_norm": 2.031345775987213, - "learning_rate": 3.965870379869735e-06, - "loss": 0.8157, - "num_input_tokens_seen": 15166450, - "step": 725 - }, - { - "epoch": 0.08729633860398003, - "flos": 15108439978200.0, - "grad_norm": 2.5675687203872717, - "learning_rate": 3.965726937213547e-06, - "loss": 0.8451, - "num_input_tokens_seen": 15184805, - "step": 726 - }, - { - "epoch": 0.08741658149461913, - "flos": 13407854902800.0, - "grad_norm": 2.5124245893254424, - "learning_rate": 3.965583196358144e-06, - "loss": 0.7938, - "num_input_tokens_seen": 15203560, - "step": 727 - }, - { - "epoch": 0.08753682438525823, - "flos": 13673753653200.0, - "grad_norm": 3.327918104447901, - "learning_rate": 3.965439157325335e-06, - "loss": 0.7243, - "num_input_tokens_seen": 15220645, - "step": 728 - }, - { - "epoch": 0.08765706727589731, - "flos": 20368436396280.0, - "grad_norm": 2.72068875343925, - "learning_rate": 3.965294820136968e-06, - "loss": 0.7358, - "num_input_tokens_seen": 15242165, - "step": 729 - }, - { - "epoch": 0.08777731016653641, - "flos": 17862714979080.0, - "grad_norm": 3.470636902769326, - "learning_rate": 3.965150184814938e-06, - "loss": 0.8424, - "num_input_tokens_seen": 15261370, - "step": 730 - }, - { - "epoch": 0.08789755305717549, - "flos": 16081283839920.0, - "grad_norm": 2.5184244253101054, - "learning_rate": 3.965005251381189e-06, - "loss": 0.7357, - "num_input_tokens_seen": 15279025, - "step": 731 - }, - { - "epoch": 0.08801779594781459, - "flos": 43009083598440.0, - "grad_norm": 1.0369091235725991, - "learning_rate": 3.964860019857705e-06, - "loss": 0.659, - "num_input_tokens_seen": 15343660, - "step": 732 - }, - { - "epoch": 0.08813803883845367, - "flos": 17052861303240.0, - "grad_norm": 10.296939091345491, - "learning_rate": 3.964714490266518e-06, - "loss": 0.8209, - "num_input_tokens_seen": 15364025, - "step": 733 - }, - { - "epoch": 0.08825828172909277, - "flos": 46756428684120.0, - "grad_norm": 0.8681848306821497, - "learning_rate": 3.964568662629706e-06, - "loss": 0.6519, - "num_input_tokens_seen": 15425050, - "step": 734 - }, - { - "epoch": 0.08837852461973186, - "flos": 19587990331920.0, - "grad_norm": 2.575672161926645, - "learning_rate": 3.9644225369693895e-06, - "loss": 0.8232, - 
"num_input_tokens_seen": 15445070, - "step": 735 - }, - { - "epoch": 0.08849876751037095, - "flos": 19990511012880.0, - "grad_norm": 2.3540338349353664, - "learning_rate": 3.964276113307735e-06, - "loss": 0.8563, - "num_input_tokens_seen": 15464755, - "step": 736 - }, - { - "epoch": 0.08861901040101004, - "flos": 14487786443760.0, - "grad_norm": 2.7967624748226605, - "learning_rate": 3.9641293916669574e-06, - "loss": 0.7803, - "num_input_tokens_seen": 15483435, - "step": 737 - }, - { - "epoch": 0.08873925329164913, - "flos": 17322052689480.0, - "grad_norm": 1.8590407511865208, - "learning_rate": 3.9639823720693115e-06, - "loss": 0.8098, - "num_input_tokens_seen": 15505010, - "step": 738 - }, - { - "epoch": 0.08885949618228822, - "flos": 52979997087000.0, - "grad_norm": 0.9014870345928282, - "learning_rate": 3.963835054537102e-06, - "loss": 0.6451, - "num_input_tokens_seen": 15573695, - "step": 739 - }, - { - "epoch": 0.08897973907292732, - "flos": 16350823485720.0, - "grad_norm": 3.0591767633601075, - "learning_rate": 3.963687439092676e-06, - "loss": 0.5972, - "num_input_tokens_seen": 15594100, - "step": 740 - }, - { - "epoch": 0.0890999819635664, - "flos": 15540969809880.0, - "grad_norm": 2.32418848867511, - "learning_rate": 3.963539525758427e-06, - "loss": 0.7902, - "num_input_tokens_seen": 15613380, - "step": 741 - }, - { - "epoch": 0.0892202248542055, - "flos": 18588809874840.0, - "grad_norm": 5.644221039269876, - "learning_rate": 3.9633913145567925e-06, - "loss": 0.6583, - "num_input_tokens_seen": 15633590, - "step": 742 - }, - { - "epoch": 0.08934046774484458, - "flos": 17913013672800.0, - "grad_norm": 2.1685769267652426, - "learning_rate": 3.9632428055102575e-06, - "loss": 0.8051, - "num_input_tokens_seen": 15653320, - "step": 743 - }, - { - "epoch": 0.08946071063548368, - "flos": 26287390409040.0, - "grad_norm": 2.1671863690671054, - "learning_rate": 3.9630939986413495e-06, - "loss": 0.6501, - "num_input_tokens_seen": 15674840, - "step": 744 - }, - { - "epoch": 0.08958095352612276, - "flos": 10438106485080.0, - "grad_norm": 2.8774474962947814, - "learning_rate": 3.962944893972643e-06, - "loss": 0.769, - "num_input_tokens_seen": 15693010, - "step": 745 - }, - { - "epoch": 0.08970119641676186, - "flos": 13025053885560.0, - "grad_norm": 2.7713442357705094, - "learning_rate": 3.962795491526756e-06, - "loss": 0.8961, - "num_input_tokens_seen": 15709890, - "step": 746 - }, - { - "epoch": 0.08982143930740095, - "flos": 15214894458240.0, - "grad_norm": 2.323535260912477, - "learning_rate": 3.962645791326354e-06, - "loss": 0.8694, - "num_input_tokens_seen": 15728865, - "step": 747 - }, - { - "epoch": 0.08994168219804004, - "flos": 17647114922400.0, - "grad_norm": 2.200781258666689, - "learning_rate": 3.962495793394146e-06, - "loss": 0.8163, - "num_input_tokens_seen": 15747775, - "step": 748 - }, - { - "epoch": 0.09006192508867913, - "flos": 42141776077920.0, - "grad_norm": 0.6837422236919171, - "learning_rate": 3.9623454977528864e-06, - "loss": 0.6044, - "num_input_tokens_seen": 15806150, - "step": 749 - }, - { - "epoch": 0.09018216797931822, - "flos": 14975585582880.0, - "grad_norm": 2.9525583520819247, - "learning_rate": 3.962194904425375e-06, - "loss": 0.8398, - "num_input_tokens_seen": 15826500, - "step": 750 - }, - { - "epoch": 0.09030241086995731, - "flos": 16566898441800.0, - "grad_norm": 2.323132891206678, - "learning_rate": 3.9620440134344566e-06, - "loss": 0.6629, - "num_input_tokens_seen": 15844375, - "step": 751 - }, - { - "epoch": 0.09042265376059641, - "flos": 
15999487977360.0, - "grad_norm": 4.460772456174123, - "learning_rate": 3.9618928248030215e-06, - "loss": 0.8138, - "num_input_tokens_seen": 15863605, - "step": 752 - }, - { - "epoch": 0.0905428966512355, - "flos": 17808743730000.0, - "grad_norm": 3.029742780538167, - "learning_rate": 3.961741338554005e-06, - "loss": 0.8201, - "num_input_tokens_seen": 15881665, - "step": 753 - }, - { - "epoch": 0.09066313954187459, - "flos": 26340791778840.0, - "grad_norm": 3.7940123013310547, - "learning_rate": 3.9615895547103865e-06, - "loss": 0.7425, - "num_input_tokens_seen": 15905030, - "step": 754 - }, - { - "epoch": 0.09078338243251367, - "flos": 21399937181160.0, - "grad_norm": 2.6704633229330934, - "learning_rate": 3.961437473295193e-06, - "loss": 0.7653, - "num_input_tokens_seen": 15924895, - "step": 755 - }, - { - "epoch": 0.09090362532315277, - "flos": 16026236152200.0, - "grad_norm": 2.870319038761605, - "learning_rate": 3.961285094331495e-06, - "loss": 0.7001, - "num_input_tokens_seen": 15942530, - "step": 756 - }, - { - "epoch": 0.09102386821379185, - "flos": 20049896115120.0, - "grad_norm": 2.590503453255159, - "learning_rate": 3.961132417842406e-06, - "loss": 0.8405, - "num_input_tokens_seen": 15962035, - "step": 757 - }, - { - "epoch": 0.09114411110443095, - "flos": 15216509116200.0, - "grad_norm": 3.0425603590081836, - "learning_rate": 3.960979443851089e-06, - "loss": 0.7412, - "num_input_tokens_seen": 15978780, - "step": 758 - }, - { - "epoch": 0.09126435399507005, - "flos": 19163660429880.0, - "grad_norm": 1.9644146369141766, - "learning_rate": 3.96082617238075e-06, - "loss": 0.7788, - "num_input_tokens_seen": 16001125, - "step": 759 - }, - { - "epoch": 0.09138459688570913, - "flos": 17862525019320.0, - "grad_norm": 2.7237965017255785, - "learning_rate": 3.960672603454639e-06, - "loss": 0.769, - "num_input_tokens_seen": 16020825, - "step": 760 - }, - { - "epoch": 0.09150483977634823, - "flos": 15378739463040.0, - "grad_norm": 3.732482047322069, - "learning_rate": 3.960518737096054e-06, - "loss": 0.744, - "num_input_tokens_seen": 16040175, - "step": 761 - }, - { - "epoch": 0.09162508266698731, - "flos": 16728875508960.0, - "grad_norm": 2.9734411819924325, - "learning_rate": 3.960364573328334e-06, - "loss": 0.7057, - "num_input_tokens_seen": 16059220, - "step": 762 - }, - { - "epoch": 0.0917453255576264, - "flos": 15890469040560.0, - "grad_norm": 2.4165090860314007, - "learning_rate": 3.9602101121748675e-06, - "loss": 0.8718, - "num_input_tokens_seen": 16079435, - "step": 763 - }, - { - "epoch": 0.0918655684482655, - "flos": 10624678849800.0, - "grad_norm": 2.4319057335402974, - "learning_rate": 3.960055353659085e-06, - "loss": 0.7049, - "num_input_tokens_seen": 16096265, - "step": 764 - }, - { - "epoch": 0.09198581133890459, - "flos": 17155833187680.0, - "grad_norm": 2.1011265866302375, - "learning_rate": 3.959900297804465e-06, - "loss": 0.8256, - "num_input_tokens_seen": 16116155, - "step": 765 - }, - { - "epoch": 0.09210605422954368, - "flos": 12242613243720.0, - "grad_norm": 3.1504012200496048, - "learning_rate": 3.9597449446345276e-06, - "loss": 0.7551, - "num_input_tokens_seen": 16133120, - "step": 766 - }, - { - "epoch": 0.09222629712018277, - "flos": 16593583296720.0, - "grad_norm": 2.754970912883648, - "learning_rate": 3.95958929417284e-06, - "loss": 0.8137, - "num_input_tokens_seen": 16150995, - "step": 767 - }, - { - "epoch": 0.09234654001082186, - "flos": 51608178459840.0, - "grad_norm": 0.7760766211367277, - "learning_rate": 3.9594333464430145e-06, - "loss": 0.6033, - 
"num_input_tokens_seen": 16220205, - "step": 768 - }, - { - "epoch": 0.09246678290146094, - "flos": 14623015336080.0, - "grad_norm": 3.3923998990678137, - "learning_rate": 3.959277101468709e-06, - "loss": 0.8647, - "num_input_tokens_seen": 16239475, - "step": 769 - }, - { - "epoch": 0.09258702579210004, - "flos": 12947247177960.0, - "grad_norm": 6.171673194386636, - "learning_rate": 3.959120559273624e-06, - "loss": 0.7691, - "num_input_tokens_seen": 16256980, - "step": 770 - }, - { - "epoch": 0.09270726868273914, - "flos": 15270575345160.0, - "grad_norm": 2.389954017771423, - "learning_rate": 3.958963719881509e-06, - "loss": 0.8296, - "num_input_tokens_seen": 16274790, - "step": 771 - }, - { - "epoch": 0.09282751157337822, - "flos": 12405413469840.0, - "grad_norm": 2.972134397720398, - "learning_rate": 3.958806583316154e-06, - "loss": 0.912, - "num_input_tokens_seen": 16292480, - "step": 772 - }, - { - "epoch": 0.09294775446401732, - "flos": 23882551318920.0, - "grad_norm": 2.003564043179571, - "learning_rate": 3.9586491496013985e-06, - "loss": 0.7768, - "num_input_tokens_seen": 16314595, - "step": 773 - }, - { - "epoch": 0.0930679973546564, - "flos": 13326597259560.0, - "grad_norm": 2.214360475376782, - "learning_rate": 3.958491418761124e-06, - "loss": 0.7991, - "num_input_tokens_seen": 16331885, - "step": 774 - }, - { - "epoch": 0.0931882402452955, - "flos": 15427550138640.0, - "grad_norm": 3.8911987862064326, - "learning_rate": 3.958333390819258e-06, - "loss": 0.7209, - "num_input_tokens_seen": 16348535, - "step": 775 - }, - { - "epoch": 0.0933084831359346, - "flos": 17728214265840.0, - "grad_norm": 2.522090304234974, - "learning_rate": 3.9581750657997754e-06, - "loss": 0.7828, - "num_input_tokens_seen": 16367620, - "step": 776 - }, - { - "epoch": 0.09342872602657368, - "flos": 18669750918480.0, - "grad_norm": 1.824036027468864, - "learning_rate": 3.95801644372669e-06, - "loss": 0.885, - "num_input_tokens_seen": 16387245, - "step": 777 - }, - { - "epoch": 0.09354896891721277, - "flos": 16945045444920.0, - "grad_norm": 2.005049173223, - "learning_rate": 3.957857524624068e-06, - "loss": 0.8222, - "num_input_tokens_seen": 16405845, - "step": 778 - }, - { - "epoch": 0.09366921180785186, - "flos": 17781615635640.0, - "grad_norm": 1.7039697634382818, - "learning_rate": 3.957698308516016e-06, - "loss": 0.8888, - "num_input_tokens_seen": 16426865, - "step": 779 - }, - { - "epoch": 0.09378945469849095, - "flos": 13674070252800.0, - "grad_norm": 2.1436947162050384, - "learning_rate": 3.957538795426688e-06, - "loss": 0.8161, - "num_input_tokens_seen": 16444010, - "step": 780 - }, - { - "epoch": 0.09390969758913004, - "flos": 16996230617520.0, - "grad_norm": 2.944962781790767, - "learning_rate": 3.9573789853802804e-06, - "loss": 0.7576, - "num_input_tokens_seen": 16462205, - "step": 781 - }, - { - "epoch": 0.09402994047976913, - "flos": 14353127430720.0, - "grad_norm": 2.379052268872747, - "learning_rate": 3.957218878401037e-06, - "loss": 0.7407, - "num_input_tokens_seen": 16480415, - "step": 782 - }, - { - "epoch": 0.09415018337040823, - "flos": 21585718046880.0, - "grad_norm": 2.2727805955006795, - "learning_rate": 3.957058474513246e-06, - "loss": 0.8707, - "num_input_tokens_seen": 16499990, - "step": 783 - }, - { - "epoch": 0.09427042626104731, - "flos": 17998197151080.0, - "grad_norm": 3.294430518018147, - "learning_rate": 3.956897773741241e-06, - "loss": 0.7679, - "num_input_tokens_seen": 16518700, - "step": 784 - }, - { - "epoch": 0.09439066915168641, - "flos": 19321015142880.0, - 
"grad_norm": 1.8228843104806083, - "learning_rate": 3.956736776109398e-06, - "loss": 0.7097, - "num_input_tokens_seen": 16539595, - "step": 785 - }, - { - "epoch": 0.09451091204232549, - "flos": 14190643804200.0, - "grad_norm": 2.0436094603206985, - "learning_rate": 3.956575481642143e-06, - "loss": 0.8175, - "num_input_tokens_seen": 16558205, - "step": 786 - }, - { - "epoch": 0.09463115493296459, - "flos": 18586308738000.0, - "grad_norm": 4.279562958837309, - "learning_rate": 3.956413890363943e-06, - "loss": 0.7213, - "num_input_tokens_seen": 16574905, - "step": 787 - }, - { - "epoch": 0.09475139782360369, - "flos": 7305019621920.0, - "grad_norm": 2.8032550181979525, - "learning_rate": 3.956252002299312e-06, - "loss": 0.8081, - "num_input_tokens_seen": 16590525, - "step": 788 - }, - { - "epoch": 0.09487164071424277, - "flos": 12564319520880.0, - "grad_norm": 2.3282480911383048, - "learning_rate": 3.956089817472807e-06, - "loss": 0.8958, - "num_input_tokens_seen": 16607550, - "step": 789 - }, - { - "epoch": 0.09499188360488187, - "flos": 22644220286280.0, - "grad_norm": 2.3473227164019272, - "learning_rate": 3.955927335909032e-06, - "loss": 0.8441, - "num_input_tokens_seen": 16630480, - "step": 790 - }, - { - "epoch": 0.09511212649552095, - "flos": 21535419353160.0, - "grad_norm": 2.3365943010836188, - "learning_rate": 3.955764557632634e-06, - "loss": 0.7516, - "num_input_tokens_seen": 16650010, - "step": 791 - }, - { - "epoch": 0.09523236938616005, - "flos": 7494061463520.0, - "grad_norm": 3.9066542759548017, - "learning_rate": 3.955601482668309e-06, - "loss": 0.9265, - "num_input_tokens_seen": 16667590, - "step": 792 - }, - { - "epoch": 0.09535261227679913, - "flos": 13920091039680.0, - "grad_norm": 2.9232101245986666, - "learning_rate": 3.955438111040794e-06, - "loss": 0.8683, - "num_input_tokens_seen": 16685585, - "step": 793 - }, - { - "epoch": 0.09547285516743823, - "flos": 15297545139720.0, - "grad_norm": 2.3349451106806254, - "learning_rate": 3.955274442774873e-06, - "loss": 0.7948, - "num_input_tokens_seen": 16703885, - "step": 794 - }, - { - "epoch": 0.09559309805807732, - "flos": 22129483012560.0, - "grad_norm": 2.948913477578172, - "learning_rate": 3.9551104778953725e-06, - "loss": 0.7034, - "num_input_tokens_seen": 16723900, - "step": 795 - }, - { - "epoch": 0.0957133409487164, - "flos": 15403619700240.0, - "grad_norm": 2.2996175572421635, - "learning_rate": 3.954946216427167e-06, - "loss": 0.8374, - "num_input_tokens_seen": 16744080, - "step": 796 - }, - { - "epoch": 0.0958335838393555, - "flos": 52581904309080.0, - "grad_norm": 0.8181931137860982, - "learning_rate": 3.954781658395176e-06, - "loss": 0.6405, - "num_input_tokens_seen": 16800055, - "step": 797 - }, - { - "epoch": 0.09595382672999458, - "flos": 16000786035720.0, - "grad_norm": 2.581666028946399, - "learning_rate": 3.95461680382436e-06, - "loss": 0.9047, - "num_input_tokens_seen": 16818700, - "step": 798 - }, - { - "epoch": 0.09607406962063368, - "flos": 13648841756040.0, - "grad_norm": 2.8109798137709445, - "learning_rate": 3.9544516527397295e-06, - "loss": 0.8417, - "num_input_tokens_seen": 16834770, - "step": 799 - }, - { - "epoch": 0.09619431251127276, - "flos": 16515808249080.0, - "grad_norm": 3.3982891883496036, - "learning_rate": 3.954286205166338e-06, - "loss": 0.7954, - "num_input_tokens_seen": 16855655, - "step": 800 - }, - { - "epoch": 0.09631455540191186, - "flos": 10356310622520.0, - "grad_norm": 2.9178834466070436, - "learning_rate": 3.954120461129282e-06, - "loss": 0.814, - "num_input_tokens_seen": 
16872785, - "step": 801 - }, - { - "epoch": 0.09643479829255096, - "flos": 15162379567320.0, - "grad_norm": 2.520744185539636, - "learning_rate": 3.953954420653706e-06, - "loss": 0.8354, - "num_input_tokens_seen": 16889530, - "step": 802 - }, - { - "epoch": 0.09655504118319004, - "flos": 17885189059320.0, - "grad_norm": 2.0430461219738656, - "learning_rate": 3.953788083764798e-06, - "loss": 0.8704, - "num_input_tokens_seen": 16908485, - "step": 803 - }, - { - "epoch": 0.09667528407382914, - "flos": 13461414572400.0, - "grad_norm": 5.179113203751247, - "learning_rate": 3.953621450487792e-06, - "loss": 0.9062, - "num_input_tokens_seen": 16926825, - "step": 804 - }, - { - "epoch": 0.09679552696446822, - "flos": 52229686813200.0, - "grad_norm": 0.8384665264006176, - "learning_rate": 3.953454520847964e-06, - "loss": 0.6425, - "num_input_tokens_seen": 16991390, - "step": 805 - }, - { - "epoch": 0.09691576985510732, - "flos": 16054409025240.0, - "grad_norm": 2.7987068187249866, - "learning_rate": 3.9532872948706395e-06, - "loss": 0.7161, - "num_input_tokens_seen": 17010605, - "step": 806 - }, - { - "epoch": 0.09703601274574641, - "flos": 13108242786360.0, - "grad_norm": 2.6336922010715966, - "learning_rate": 3.9531197725811845e-06, - "loss": 0.8195, - "num_input_tokens_seen": 17025710, - "step": 807 - }, - { - "epoch": 0.0971562556363855, - "flos": 16214423174880.0, - "grad_norm": 2.1470460026809333, - "learning_rate": 3.952951954005013e-06, - "loss": 0.8612, - "num_input_tokens_seen": 17045115, - "step": 808 - }, - { - "epoch": 0.0972764985270246, - "flos": 18942931459680.0, - "grad_norm": 2.1623291345026985, - "learning_rate": 3.952783839167584e-06, - "loss": 0.8385, - "num_input_tokens_seen": 17064880, - "step": 809 - }, - { - "epoch": 0.09739674141766368, - "flos": 14865648507240.0, - "grad_norm": 3.150335832889146, - "learning_rate": 3.952615428094398e-06, - "loss": 0.719, - "num_input_tokens_seen": 17084120, - "step": 810 - }, - { - "epoch": 0.09751698430830277, - "flos": 11464193416800.0, - "grad_norm": 3.0950303944218414, - "learning_rate": 3.952446720811004e-06, - "loss": 0.7293, - "num_input_tokens_seen": 17102165, - "step": 811 - }, - { - "epoch": 0.09763722719894186, - "flos": 46972440320280.0, - "grad_norm": 0.8090019485723655, - "learning_rate": 3.952277717342995e-06, - "loss": 0.6655, - "num_input_tokens_seen": 17168320, - "step": 812 - }, - { - "epoch": 0.09775747008958095, - "flos": 16161939943920.0, - "grad_norm": 2.4728611383628567, - "learning_rate": 3.952108417716009e-06, - "loss": 0.8425, - "num_input_tokens_seen": 17187495, - "step": 813 - }, - { - "epoch": 0.09787771298022005, - "flos": 15565976686920.0, - "grad_norm": 2.0117538300946243, - "learning_rate": 3.951938821955727e-06, - "loss": 0.8378, - "num_input_tokens_seen": 17206615, - "step": 814 - }, - { - "epoch": 0.09799795587085913, - "flos": 16135065129240.0, - "grad_norm": 1.7669580204353568, - "learning_rate": 3.9517689300878786e-06, - "loss": 0.753, - "num_input_tokens_seen": 17226070, - "step": 815 - }, - { - "epoch": 0.09811819876149823, - "flos": 16215816213120.0, - "grad_norm": 2.1178167892982263, - "learning_rate": 3.951598742138236e-06, - "loss": 0.7692, - "num_input_tokens_seen": 17244515, - "step": 816 - }, - { - "epoch": 0.09823844165213731, - "flos": 16264880168400.0, - "grad_norm": 2.6387373819415436, - "learning_rate": 3.951428258132615e-06, - "loss": 0.7763, - "num_input_tokens_seen": 17262355, - "step": 817 - }, - { - "epoch": 0.09835868454277641, - "flos": 16454966788680.0, - "grad_norm": 
2.5826075570086267, - "learning_rate": 3.951257478096879e-06, - "loss": 0.8305, - "num_input_tokens_seen": 17280440, - "step": 818 - }, - { - "epoch": 0.0984789274334155, - "flos": 11922553284480.0, - "grad_norm": 4.853289654573628, - "learning_rate": 3.951086402056936e-06, - "loss": 0.6814, - "num_input_tokens_seen": 17294760, - "step": 819 - }, - { - "epoch": 0.09859917032405459, - "flos": 17755025760600.0, - "grad_norm": 2.417349740756294, - "learning_rate": 3.950915030038735e-06, - "loss": 0.8234, - "num_input_tokens_seen": 17314275, - "step": 820 - }, - { - "epoch": 0.09871941321469369, - "flos": 12705215546040.0, - "grad_norm": 4.690734154475074, - "learning_rate": 3.9507433620682765e-06, - "loss": 0.8167, - "num_input_tokens_seen": 17330930, - "step": 821 - }, - { - "epoch": 0.09883965610533277, - "flos": 20888524203240.0, - "grad_norm": 1.9020321569281549, - "learning_rate": 3.9505713981716e-06, - "loss": 0.87, - "num_input_tokens_seen": 17353480, - "step": 822 - }, - { - "epoch": 0.09895989899597187, - "flos": 17347946045400.0, - "grad_norm": 3.703638998808617, - "learning_rate": 3.950399138374795e-06, - "loss": 0.7963, - "num_input_tokens_seen": 17372280, - "step": 823 - }, - { - "epoch": 0.09908014188661095, - "flos": 18077491876800.0, - "grad_norm": 2.199267425811512, - "learning_rate": 3.95022658270399e-06, - "loss": 0.7174, - "num_input_tokens_seen": 17392365, - "step": 824 - }, - { - "epoch": 0.09920038477725004, - "flos": 10221746589360.0, - "grad_norm": 2.164827287405546, - "learning_rate": 3.9500537311853635e-06, - "loss": 0.7677, - "num_input_tokens_seen": 17410040, - "step": 825 - }, - { - "epoch": 0.09932062766788914, - "flos": 9735277168560.0, - "grad_norm": 4.01766120553986, - "learning_rate": 3.949880583845136e-06, - "loss": 0.8103, - "num_input_tokens_seen": 17427835, - "step": 826 - }, - { - "epoch": 0.09944087055852822, - "flos": 14245089952680.0, - "grad_norm": 2.437858750768036, - "learning_rate": 3.949707140709575e-06, - "loss": 0.8004, - "num_input_tokens_seen": 17447285, - "step": 827 - }, - { - "epoch": 0.09956111344916732, - "flos": 12948703536120.0, - "grad_norm": 2.349071108076718, - "learning_rate": 3.949533401804991e-06, - "loss": 0.8127, - "num_input_tokens_seen": 17463910, - "step": 828 - }, - { - "epoch": 0.0996813563398064, - "flos": 13110332343720.0, - "grad_norm": 2.4785775732260276, - "learning_rate": 3.949359367157739e-06, - "loss": 0.9021, - "num_input_tokens_seen": 17482325, - "step": 829 - }, - { - "epoch": 0.0998015992304455, - "flos": 12732533600160.0, - "grad_norm": 4.247888826082877, - "learning_rate": 3.949185036794222e-06, - "loss": 0.7471, - "num_input_tokens_seen": 17500055, - "step": 830 - }, - { - "epoch": 0.0999218421210846, - "flos": 18973067250240.0, - "grad_norm": 1.9314146767932308, - "learning_rate": 3.949010410740884e-06, - "loss": 0.7621, - "num_input_tokens_seen": 17522600, - "step": 831 - }, - { - "epoch": 0.10004208501172368, - "flos": 15514474914720.0, - "grad_norm": 2.2030287917753952, - "learning_rate": 3.948835489024216e-06, - "loss": 0.853, - "num_input_tokens_seen": 17542055, - "step": 832 - }, - { - "epoch": 0.10016232790236278, - "flos": 12652257415680.0, - "grad_norm": 2.2261212898929053, - "learning_rate": 3.948660271670755e-06, - "loss": 0.8804, - "num_input_tokens_seen": 17558925, - "step": 833 - }, - { - "epoch": 0.10028257079300186, - "flos": 18808145806800.0, - "grad_norm": 2.5855833427645667, - "learning_rate": 3.948484758707079e-06, - "loss": 0.8311, - "num_input_tokens_seen": 17578245, - "step": 834 - 
}, - { - "epoch": 0.10040281368364096, - "flos": 18430378723200.0, - "grad_norm": 2.531789086474929, - "learning_rate": 3.948308950159815e-06, - "loss": 0.8251, - "num_input_tokens_seen": 17596645, - "step": 835 - }, - { - "epoch": 0.10052305657428004, - "flos": 12840824357880.0, - "grad_norm": 2.602893294226475, - "learning_rate": 3.9481328460556326e-06, - "loss": 0.7387, - "num_input_tokens_seen": 17613585, - "step": 836 - }, - { - "epoch": 0.10064329946491914, - "flos": 13622790100320.0, - "grad_norm": 3.013408894168222, - "learning_rate": 3.9479564464212455e-06, - "loss": 0.8641, - "num_input_tokens_seen": 17632465, - "step": 837 - }, - { - "epoch": 0.10076354235555823, - "flos": 12541243901400.0, - "grad_norm": 3.0606243072627386, - "learning_rate": 3.947779751283414e-06, - "loss": 0.7515, - "num_input_tokens_seen": 17649355, - "step": 838 - }, - { - "epoch": 0.10088378524619732, - "flos": 16806175657200.0, - "grad_norm": 2.3468217175729955, - "learning_rate": 3.947602760668944e-06, - "loss": 0.7405, - "num_input_tokens_seen": 17668865, - "step": 839 - }, - { - "epoch": 0.10100402813683641, - "flos": 27850752014640.0, - "grad_norm": 1.9106112759252918, - "learning_rate": 3.947425474604684e-06, - "loss": 0.6969, - "num_input_tokens_seen": 17692520, - "step": 840 - }, - { - "epoch": 0.1011242710274755, - "flos": 15756759826320.0, - "grad_norm": 2.22784943448209, - "learning_rate": 3.947247893117528e-06, - "loss": 0.9116, - "num_input_tokens_seen": 17710745, - "step": 841 - }, - { - "epoch": 0.10124451391811459, - "flos": 9893296740720.0, - "grad_norm": 3.627580447734434, - "learning_rate": 3.947070016234413e-06, - "loss": 0.6691, - "num_input_tokens_seen": 17726255, - "step": 842 - }, - { - "epoch": 0.10136475680875369, - "flos": 12133974226440.0, - "grad_norm": 3.428744453650984, - "learning_rate": 3.946891843982326e-06, - "loss": 0.7264, - "num_input_tokens_seen": 17743640, - "step": 843 - }, - { - "epoch": 0.10148499969939277, - "flos": 14215239101760.0, - "grad_norm": 3.0995507622352854, - "learning_rate": 3.9467133763882935e-06, - "loss": 0.7343, - "num_input_tokens_seen": 17761825, - "step": 844 - }, - { - "epoch": 0.10160524259003187, - "flos": 15432172492800.0, - "grad_norm": 2.5821076120693336, - "learning_rate": 3.9465346134793905e-06, - "loss": 0.8452, - "num_input_tokens_seen": 17781355, - "step": 845 - }, - { - "epoch": 0.10172548548067095, - "flos": 12921765401520.0, - "grad_norm": 2.08491073655167, - "learning_rate": 3.9463555552827335e-06, - "loss": 0.7846, - "num_input_tokens_seen": 17798245, - "step": 846 - }, - { - "epoch": 0.10184572837131005, - "flos": 15432045852960.0, - "grad_norm": 3.7087561691330584, - "learning_rate": 3.946176201825487e-06, - "loss": 0.8352, - "num_input_tokens_seen": 17816000, - "step": 847 - }, - { - "epoch": 0.10196597126194913, - "flos": 19104908526840.0, - "grad_norm": 2.463662905999559, - "learning_rate": 3.9459965531348575e-06, - "loss": 0.8168, - "num_input_tokens_seen": 17835375, - "step": 848 - }, - { - "epoch": 0.10208621415258823, - "flos": 21507911339280.0, - "grad_norm": 3.0669161038611015, - "learning_rate": 3.945816609238098e-06, - "loss": 0.8388, - "num_input_tokens_seen": 17854505, - "step": 849 - }, - { - "epoch": 0.10220645704322733, - "flos": 17431071626280.0, - "grad_norm": 2.1855078208781658, - "learning_rate": 3.945636370162507e-06, - "loss": 0.8406, - "num_input_tokens_seen": 17874335, - "step": 850 - }, - { - "epoch": 0.10232669993386641, - "flos": 16995977337840.0, - "grad_norm": 1.8845021118006484, - "learning_rate": 
3.945455835935425e-06, - "loss": 0.7817, - "num_input_tokens_seen": 17893240, - "step": 851 - }, - { - "epoch": 0.1024469428245055, - "flos": 16776673065840.0, - "grad_norm": 4.074114819204117, - "learning_rate": 3.94527500658424e-06, - "loss": 0.7292, - "num_input_tokens_seen": 17910625, - "step": 852 - }, - { - "epoch": 0.10256718571514459, - "flos": 23021480810520.0, - "grad_norm": 2.8309359412078994, - "learning_rate": 3.945093882136382e-06, - "loss": 0.7898, - "num_input_tokens_seen": 17934120, - "step": 853 - }, - { - "epoch": 0.10268742860578368, - "flos": 17185810678440.0, - "grad_norm": 1.8422457490857223, - "learning_rate": 3.944912462619329e-06, - "loss": 0.8313, - "num_input_tokens_seen": 17952805, - "step": 854 - }, - { - "epoch": 0.10280767149642277, - "flos": 18699886709040.0, - "grad_norm": 2.4844776828106703, - "learning_rate": 3.9447307480606025e-06, - "loss": 0.7957, - "num_input_tokens_seen": 17972610, - "step": 855 - }, - { - "epoch": 0.10292791438706186, - "flos": 12651624216480.0, - "grad_norm": 4.0756353620173105, - "learning_rate": 3.944548738487767e-06, - "loss": 0.8846, - "num_input_tokens_seen": 17989845, - "step": 856 - }, - { - "epoch": 0.10304815727770096, - "flos": 19833947798880.0, - "grad_norm": 2.378402080259623, - "learning_rate": 3.944366433928434e-06, - "loss": 0.8935, - "num_input_tokens_seen": 18009545, - "step": 857 - }, - { - "epoch": 0.10316840016834004, - "flos": 16673606201520.0, - "grad_norm": 1.7982722944778609, - "learning_rate": 3.9441838344102594e-06, - "loss": 0.8191, - "num_input_tokens_seen": 18028990, - "step": 858 - }, - { - "epoch": 0.10328864305897914, - "flos": 15135979652040.0, - "grad_norm": 2.9491083187623777, - "learning_rate": 3.944000939960943e-06, - "loss": 0.666, - "num_input_tokens_seen": 18047435, - "step": 859 - }, - { - "epoch": 0.10340888594961822, - "flos": 20889822261600.0, - "grad_norm": 5.138597268201277, - "learning_rate": 3.943817750608229e-06, - "loss": 0.7941, - "num_input_tokens_seen": 18069705, - "step": 860 - }, - { - "epoch": 0.10352912884025732, - "flos": 9708054094320.0, - "grad_norm": 2.6072440211755805, - "learning_rate": 3.943634266379908e-06, - "loss": 0.8008, - "num_input_tokens_seen": 18086320, - "step": 861 - }, - { - "epoch": 0.10364937173089642, - "flos": 18728439501600.0, - "grad_norm": 1.8785388611141545, - "learning_rate": 3.943450487303815e-06, - "loss": 0.8407, - "num_input_tokens_seen": 18106535, - "step": 862 - }, - { - "epoch": 0.1037696146215355, - "flos": 15514094995200.0, - "grad_norm": 2.209459977087239, - "learning_rate": 3.943266413407827e-06, - "loss": 0.8343, - "num_input_tokens_seen": 18125530, - "step": 863 - }, - { - "epoch": 0.1038898575121746, - "flos": 18911655910560.0, - "grad_norm": 2.17007734152089, - "learning_rate": 3.94308204471987e-06, - "loss": 0.8296, - "num_input_tokens_seen": 18144265, - "step": 864 - }, - { - "epoch": 0.10401010040281368, - "flos": 13920629259000.0, - "grad_norm": 2.51682020902141, - "learning_rate": 3.942897381267912e-06, - "loss": 0.7303, - "num_input_tokens_seen": 18160350, - "step": 865 - }, - { - "epoch": 0.10413034329345278, - "flos": 11915999672760.0, - "grad_norm": 3.4551813553332025, - "learning_rate": 3.942712423079965e-06, - "loss": 0.6635, - "num_input_tokens_seen": 18176460, - "step": 866 - }, - { - "epoch": 0.10425058618409186, - "flos": 12569100174840.0, - "grad_norm": 3.0166467704608597, - "learning_rate": 3.942527170184088e-06, - "loss": 0.8954, - "num_input_tokens_seen": 18192800, - "step": 867 - }, - { - "epoch": 
0.10437082907473096, - "flos": 13110142383960.0, - "grad_norm": 2.4020937409606953, - "learning_rate": 3.942341622608385e-06, - "loss": 0.7579, - "num_input_tokens_seen": 18209550, - "step": 868 - }, - { - "epoch": 0.10449107196537005, - "flos": 26665189152600.0, - "grad_norm": 2.551487402853274, - "learning_rate": 3.942155780381001e-06, - "loss": 0.7618, - "num_input_tokens_seen": 18233005, - "step": 869 - }, - { - "epoch": 0.10461131485600914, - "flos": 17427810650400.0, - "grad_norm": 2.33059653361197, - "learning_rate": 3.94196964353013e-06, - "loss": 0.7451, - "num_input_tokens_seen": 18252175, - "step": 870 - }, - { - "epoch": 0.10473155774664823, - "flos": 13434413117880.0, - "grad_norm": 3.1403618697587494, - "learning_rate": 3.941783212084008e-06, - "loss": 0.7915, - "num_input_tokens_seen": 18269650, - "step": 871 - }, - { - "epoch": 0.10485180063728732, - "flos": 18753319738800.0, - "grad_norm": 3.6959416376105083, - "learning_rate": 3.941596486070916e-06, - "loss": 0.7522, - "num_input_tokens_seen": 18287415, - "step": 872 - }, - { - "epoch": 0.10497204352792641, - "flos": 19860569333880.0, - "grad_norm": 6.709181390393258, - "learning_rate": 3.941409465519182e-06, - "loss": 0.5652, - "num_input_tokens_seen": 18307660, - "step": 873 - }, - { - "epoch": 0.10509228641856551, - "flos": 24131263202400.0, - "grad_norm": 1.7853232541420603, - "learning_rate": 3.941222150457176e-06, - "loss": 0.8397, - "num_input_tokens_seen": 18330635, - "step": 874 - }, - { - "epoch": 0.10521252930920459, - "flos": 10410281871600.0, - "grad_norm": 4.852565692376366, - "learning_rate": 3.941034540913311e-06, - "loss": 0.6996, - "num_input_tokens_seen": 18347885, - "step": 875 - }, - { - "epoch": 0.10533277219984369, - "flos": 15862676087040.0, - "grad_norm": 1.9854287180846701, - "learning_rate": 3.940846636916051e-06, - "loss": 0.8108, - "num_input_tokens_seen": 18367640, - "step": 876 - }, - { - "epoch": 0.10545301509048277, - "flos": 16293812880480.0, - "grad_norm": 2.475605102705647, - "learning_rate": 3.940658438493899e-06, - "loss": 0.8467, - "num_input_tokens_seen": 18385205, - "step": 877 - }, - { - "epoch": 0.10557325798112187, - "flos": 16242374428200.0, - "grad_norm": 3.3536882394968166, - "learning_rate": 3.940469945675405e-06, - "loss": 0.745, - "num_input_tokens_seen": 18403310, - "step": 878 - }, - { - "epoch": 0.10569350087176095, - "flos": 18889055190480.0, - "grad_norm": 2.12396271015918, - "learning_rate": 3.940281158489163e-06, - "loss": 0.8918, - "num_input_tokens_seen": 18422260, - "step": 879 - }, - { - "epoch": 0.10581374376240005, - "flos": 12624781061760.0, - "grad_norm": 2.116100750260702, - "learning_rate": 3.940092076963812e-06, - "loss": 0.8143, - "num_input_tokens_seen": 18439475, - "step": 880 - }, - { - "epoch": 0.10593398665303914, - "flos": 25233985423200.0, - "grad_norm": 2.356461399622616, - "learning_rate": 3.9399027011280355e-06, - "loss": 0.7752, - "num_input_tokens_seen": 18461290, - "step": 881 - }, - { - "epoch": 0.10605422954367823, - "flos": 17025574909080.0, - "grad_norm": 2.5870488017595545, - "learning_rate": 3.939713031010561e-06, - "loss": 0.755, - "num_input_tokens_seen": 18479375, - "step": 882 - }, - { - "epoch": 0.10617447243431732, - "flos": 16701525794880.0, - "grad_norm": 2.4212792432887027, - "learning_rate": 3.939523066640163e-06, - "loss": 0.7625, - "num_input_tokens_seen": 18497990, - "step": 883 - }, - { - "epoch": 0.10629471532495641, - "flos": 17860467121920.0, - "grad_norm": 2.2213265945375094, - "learning_rate": 3.939332808045657e-06, 
- "loss": 0.793, - "num_input_tokens_seen": 18517360, - "step": 884 - }, - { - "epoch": 0.1064149582155955, - "flos": 15432615732240.0, - "grad_norm": 4.581895544932029, - "learning_rate": 3.939142255255906e-06, - "loss": 0.8282, - "num_input_tokens_seen": 18537965, - "step": 885 - }, - { - "epoch": 0.1065352011062346, - "flos": 15133731794880.0, - "grad_norm": 2.4745700655421627, - "learning_rate": 3.938951408299817e-06, - "loss": 0.8628, - "num_input_tokens_seen": 18556525, - "step": 886 - }, - { - "epoch": 0.10665544399687368, - "flos": 46190189638200.0, - "grad_norm": 0.8179001350371006, - "learning_rate": 3.938760267206342e-06, - "loss": 0.5719, - "num_input_tokens_seen": 18618065, - "step": 887 - }, - { - "epoch": 0.10677568688751278, - "flos": 19158753136080.0, - "grad_norm": 2.5071969390923488, - "learning_rate": 3.938568832004475e-06, - "loss": 0.7592, - "num_input_tokens_seen": 18636490, - "step": 888 - }, - { - "epoch": 0.10689592977815186, - "flos": 9275999162040.0, - "grad_norm": 2.3117610678710543, - "learning_rate": 3.938377102723257e-06, - "loss": 0.7402, - "num_input_tokens_seen": 18653345, - "step": 889 - }, - { - "epoch": 0.10701617266879096, - "flos": 16188308199240.0, - "grad_norm": 2.5485585271092117, - "learning_rate": 3.938185079391774e-06, - "loss": 0.8194, - "num_input_tokens_seen": 18670110, - "step": 890 - }, - { - "epoch": 0.10713641555943004, - "flos": 14424823766040.0, - "grad_norm": 2.8951680542127063, - "learning_rate": 3.937992762039157e-06, - "loss": 1.0528, - "num_input_tokens_seen": 18683155, - "step": 891 - }, - { - "epoch": 0.10725665845006914, - "flos": 17540185542960.0, - "grad_norm": 2.421096873642961, - "learning_rate": 3.937800150694577e-06, - "loss": 0.7836, - "num_input_tokens_seen": 18704050, - "step": 892 - }, - { - "epoch": 0.10737690134070824, - "flos": 13542672215640.0, - "grad_norm": 2.447843564071499, - "learning_rate": 3.937607245387255e-06, - "loss": 0.7339, - "num_input_tokens_seen": 18723135, - "step": 893 - }, - { - "epoch": 0.10749714423134732, - "flos": 16617830334720.0, - "grad_norm": 2.7726346425418096, - "learning_rate": 3.937414046146455e-06, - "loss": 0.7026, - "num_input_tokens_seen": 18740810, - "step": 894 - }, - { - "epoch": 0.10761738712198642, - "flos": 15433122291600.0, - "grad_norm": 2.6609864136909187, - "learning_rate": 3.9372205530014845e-06, - "loss": 0.7362, - "num_input_tokens_seen": 18759010, - "step": 895 - }, - { - "epoch": 0.1077376300126255, - "flos": 17401790654640.0, - "grad_norm": 3.077493825576253, - "learning_rate": 3.937026765981696e-06, - "loss": 0.7122, - "num_input_tokens_seen": 18778800, - "step": 896 - }, - { - "epoch": 0.1078578729032646, - "flos": 15294537443520.0, - "grad_norm": 2.1283700810274384, - "learning_rate": 3.936832685116488e-06, - "loss": 0.7727, - "num_input_tokens_seen": 18796615, - "step": 897 - }, - { - "epoch": 0.10797811579390369, - "flos": 10788238914960.0, - "grad_norm": 2.7935916659485382, - "learning_rate": 3.936638310435301e-06, - "loss": 0.874, - "num_input_tokens_seen": 18814200, - "step": 898 - }, - { - "epoch": 0.10809835868454278, - "flos": 14271869787480.0, - "grad_norm": 3.0587024109150596, - "learning_rate": 3.936443641967623e-06, - "loss": 0.8122, - "num_input_tokens_seen": 18832750, - "step": 899 - }, - { - "epoch": 0.10821860157518187, - "flos": 13461731172000.0, - "grad_norm": 2.6645997699849975, - "learning_rate": 3.936248679742983e-06, - "loss": 0.8151, - "num_input_tokens_seen": 18850965, - "step": 900 - }, - { - "epoch": 0.10833884446582095, - "flos": 
36344437174680.0, - "grad_norm": 1.006574654184535, - "learning_rate": 3.936053423790959e-06, - "loss": 0.7468, - "num_input_tokens_seen": 18899005, - "step": 901 - }, - { - "epoch": 0.10845908735646005, - "flos": 14919271496760.0, - "grad_norm": 2.2654316786796684, - "learning_rate": 3.935857874141168e-06, - "loss": 0.7546, - "num_input_tokens_seen": 18917560, - "step": 902 - }, - { - "epoch": 0.10857933024709913, - "flos": 10194681814920.0, - "grad_norm": 3.7261242238917047, - "learning_rate": 3.935662030823279e-06, - "loss": 0.8217, - "num_input_tokens_seen": 18933465, - "step": 903 - }, - { - "epoch": 0.10869957313773823, - "flos": 9706882675800.0, - "grad_norm": 3.732618868673095, - "learning_rate": 3.935465893866998e-06, - "loss": 0.6937, - "num_input_tokens_seen": 18951410, - "step": 904 - }, - { - "epoch": 0.10881981602837733, - "flos": 18911402630880.0, - "grad_norm": 2.1631555963670848, - "learning_rate": 3.935269463302079e-06, - "loss": 0.7958, - "num_input_tokens_seen": 18969335, - "step": 905 - }, - { - "epoch": 0.10894005891901641, - "flos": 15189982561080.0, - "grad_norm": 2.2345785835192395, - "learning_rate": 3.935072739158322e-06, - "loss": 0.7404, - "num_input_tokens_seen": 18988765, - "step": 906 - }, - { - "epoch": 0.10906030180965551, - "flos": 19535633740800.0, - "grad_norm": 1.638689261561331, - "learning_rate": 3.934875721465569e-06, - "loss": 0.7785, - "num_input_tokens_seen": 19008905, - "step": 907 - }, - { - "epoch": 0.10918054470029459, - "flos": 26850400139040.0, - "grad_norm": 2.6632483348563714, - "learning_rate": 3.9346784102537076e-06, - "loss": 0.6844, - "num_input_tokens_seen": 19030760, - "step": 908 - }, - { - "epoch": 0.10930078759093369, - "flos": 15918926853240.0, - "grad_norm": 2.316954678075324, - "learning_rate": 3.934480805552669e-06, - "loss": 0.7574, - "num_input_tokens_seen": 19051490, - "step": 909 - }, - { - "epoch": 0.10942103048157277, - "flos": 16540023627120.0, - "grad_norm": 2.399646764827537, - "learning_rate": 3.93428290739243e-06, - "loss": 0.8564, - "num_input_tokens_seen": 19070580, - "step": 910 - }, - { - "epoch": 0.10954127337221187, - "flos": 10947588205440.0, - "grad_norm": 2.920166821874758, - "learning_rate": 3.9340847158030125e-06, - "loss": 0.7836, - "num_input_tokens_seen": 19083880, - "step": 911 - }, - { - "epoch": 0.10966151626285096, - "flos": 15836054552040.0, - "grad_norm": 2.1253148014037633, - "learning_rate": 3.9338862308144814e-06, - "loss": 0.7452, - "num_input_tokens_seen": 19102420, - "step": 912 - }, - { - "epoch": 0.10978175915349005, - "flos": 14704431279120.0, - "grad_norm": 2.1398848764702034, - "learning_rate": 3.933687452456946e-06, - "loss": 0.8401, - "num_input_tokens_seen": 19122040, - "step": 913 - }, - { - "epoch": 0.10990200204412914, - "flos": 14919936355920.0, - "grad_norm": 4.235609036624227, - "learning_rate": 3.933488380760562e-06, - "loss": 0.8372, - "num_input_tokens_seen": 19141120, - "step": 914 - }, - { - "epoch": 0.11002224493476823, - "flos": 12786631489080.0, - "grad_norm": 2.7837767535235747, - "learning_rate": 3.9332890157555286e-06, - "loss": 0.8647, - "num_input_tokens_seen": 19157775, - "step": 915 - }, - { - "epoch": 0.11014248782540732, - "flos": 8844260829360.0, - "grad_norm": 3.712889854435086, - "learning_rate": 3.933089357472088e-06, - "loss": 0.7438, - "num_input_tokens_seen": 19175525, - "step": 916 - }, - { - "epoch": 0.11026273071604642, - "flos": 16378109879880.0, - "grad_norm": 2.261909121076547, - "learning_rate": 3.932889405940529e-06, - "loss": 0.8486, - 
"num_input_tokens_seen": 19193340, - "step": 917 - }, - { - "epoch": 0.1103829736066855, - "flos": 14488039723440.0, - "grad_norm": 2.81139368974887, - "learning_rate": 3.932689161191184e-06, - "loss": 0.7926, - "num_input_tokens_seen": 19210765, - "step": 918 - }, - { - "epoch": 0.1105032164973246, - "flos": 16589499161880.0, - "grad_norm": 2.395383698661293, - "learning_rate": 3.93248862325443e-06, - "loss": 0.862, - "num_input_tokens_seen": 19229390, - "step": 919 - }, - { - "epoch": 0.11062345938796368, - "flos": 49019105350680.0, - "grad_norm": 0.9762551396194347, - "learning_rate": 3.932287792160688e-06, - "loss": 0.648, - "num_input_tokens_seen": 19287570, - "step": 920 - }, - { - "epoch": 0.11074370227860278, - "flos": 16026299472120.0, - "grad_norm": 2.947833613830934, - "learning_rate": 3.932086667940424e-06, - "loss": 0.7891, - "num_input_tokens_seen": 19303995, - "step": 921 - }, - { - "epoch": 0.11086394516924186, - "flos": 21022075117680.0, - "grad_norm": 2.0800649436568857, - "learning_rate": 3.93188525062415e-06, - "loss": 0.8042, - "num_input_tokens_seen": 19324180, - "step": 922 - }, - { - "epoch": 0.11098418805988096, - "flos": 17970879096960.0, - "grad_norm": 10.130806470666279, - "learning_rate": 3.931683540242418e-06, - "loss": 0.8552, - "num_input_tokens_seen": 19344965, - "step": 923 - }, - { - "epoch": 0.11110443095052006, - "flos": 16804529339280.0, - "grad_norm": 5.507138388212948, - "learning_rate": 3.9314815368258295e-06, - "loss": 0.8779, - "num_input_tokens_seen": 19361165, - "step": 924 - }, - { - "epoch": 0.11122467384115914, - "flos": 13837028778720.0, - "grad_norm": 1.9564900211398535, - "learning_rate": 3.9312792404050275e-06, - "loss": 0.7572, - "num_input_tokens_seen": 19378940, - "step": 925 - }, - { - "epoch": 0.11134491673179824, - "flos": 18376280834280.0, - "grad_norm": 2.6812354418663906, - "learning_rate": 3.9310766510107e-06, - "loss": 0.7654, - "num_input_tokens_seen": 19397835, - "step": 926 - }, - { - "epoch": 0.11146515962243732, - "flos": 17944194242040.0, - "grad_norm": 2.0732794464975153, - "learning_rate": 3.9308737686735806e-06, - "loss": 0.9167, - "num_input_tokens_seen": 19417515, - "step": 927 - }, - { - "epoch": 0.11158540251307641, - "flos": 16348702268400.0, - "grad_norm": 3.584780437816731, - "learning_rate": 3.9306705934244455e-06, - "loss": 0.8081, - "num_input_tokens_seen": 19437315, - "step": 928 - }, - { - "epoch": 0.11170564540371551, - "flos": 14542295912160.0, - "grad_norm": 2.1295155064631124, - "learning_rate": 3.930467125294116e-06, - "loss": 0.8703, - "num_input_tokens_seen": 19456585, - "step": 929 - }, - { - "epoch": 0.1118258882943546, - "flos": 47614998055680.0, - "grad_norm": 0.9733544719917623, - "learning_rate": 3.930263364313458e-06, - "loss": 0.6283, - "num_input_tokens_seen": 19506875, - "step": 930 - }, - { - "epoch": 0.11194613118499369, - "flos": 12543808358160.0, - "grad_norm": 2.739458862107211, - "learning_rate": 3.930059310513384e-06, - "loss": 0.8224, - "num_input_tokens_seen": 19525635, - "step": 931 - }, - { - "epoch": 0.11206637407563277, - "flos": 23393485781400.0, - "grad_norm": 1.7795505782482899, - "learning_rate": 3.929854963924846e-06, - "loss": 0.8185, - "num_input_tokens_seen": 19545620, - "step": 932 - }, - { - "epoch": 0.11218661696627187, - "flos": 16054504005120.0, - "grad_norm": 2.4152875341308904, - "learning_rate": 3.929650324578845e-06, - "loss": 0.7553, - "num_input_tokens_seen": 19564805, - "step": 933 - }, - { - "epoch": 0.11230685985691095, - "flos": 18963759222000.0, - 
"grad_norm": 3.077508390215322, - "learning_rate": 3.929445392506423e-06, - "loss": 0.8135, - "num_input_tokens_seen": 19582465, - "step": 934 - }, - { - "epoch": 0.11242710274755005, - "flos": 16265228427960.0, - "grad_norm": 2.1396346202771572, - "learning_rate": 3.92924016773867e-06, - "loss": 0.7482, - "num_input_tokens_seen": 19598680, - "step": 935 - }, - { - "epoch": 0.11254734563818915, - "flos": 12921322162080.0, - "grad_norm": 3.0332251337337395, - "learning_rate": 3.9290346503067175e-06, - "loss": 0.7266, - "num_input_tokens_seen": 19615065, - "step": 936 - }, - { - "epoch": 0.11266758852882823, - "flos": 40465686828840.0, - "grad_norm": 2.5171426495582443, - "learning_rate": 3.9288288402417415e-06, - "loss": 0.7703, - "num_input_tokens_seen": 19641045, - "step": 937 - }, - { - "epoch": 0.11278783141946733, - "flos": 13783975668480.0, - "grad_norm": 3.4685560292489943, - "learning_rate": 3.928622737574964e-06, - "loss": 0.6796, - "num_input_tokens_seen": 19656100, - "step": 938 - }, - { - "epoch": 0.11290807431010641, - "flos": 19402209466200.0, - "grad_norm": 2.3366818470083137, - "learning_rate": 3.928416342337652e-06, - "loss": 0.8997, - "num_input_tokens_seen": 19675555, - "step": 939 - }, - { - "epoch": 0.1130283172007455, - "flos": 16620458111400.0, - "grad_norm": 2.2767738427459068, - "learning_rate": 3.928209654561113e-06, - "loss": 0.8091, - "num_input_tokens_seen": 19696110, - "step": 940 - }, - { - "epoch": 0.1131485600913846, - "flos": 16997180416320.0, - "grad_norm": 3.1617026488636695, - "learning_rate": 3.928002674276703e-06, - "loss": 0.7855, - "num_input_tokens_seen": 19715220, - "step": 941 - }, - { - "epoch": 0.11326880298202369, - "flos": 10221714929400.0, - "grad_norm": 2.654527352426928, - "learning_rate": 3.92779540151582e-06, - "loss": 0.7384, - "num_input_tokens_seen": 19732025, - "step": 942 - }, - { - "epoch": 0.11338904587266278, - "flos": 11894950290720.0, - "grad_norm": 2.2654664082761387, - "learning_rate": 3.927587836309907e-06, - "loss": 0.8465, - "num_input_tokens_seen": 19749575, - "step": 943 - }, - { - "epoch": 0.11350928876330187, - "flos": 17890286312880.0, - "grad_norm": 3.363546646758727, - "learning_rate": 3.927379978690452e-06, - "loss": 0.7617, - "num_input_tokens_seen": 19768560, - "step": 944 - }, - { - "epoch": 0.11362953165394096, - "flos": 17942389624320.0, - "grad_norm": 2.541166777516975, - "learning_rate": 3.927171828688987e-06, - "loss": 0.8489, - "num_input_tokens_seen": 19787805, - "step": 945 - }, - { - "epoch": 0.11374977454458005, - "flos": 17619765208320.0, - "grad_norm": 2.3006057024630637, - "learning_rate": 3.926963386337088e-06, - "loss": 0.7951, - "num_input_tokens_seen": 19805755, - "step": 946 - }, - { - "epoch": 0.11387001743521914, - "flos": 29014410675720.0, - "grad_norm": 4.4756214157919025, - "learning_rate": 3.926754651666375e-06, - "loss": 0.6841, - "num_input_tokens_seen": 19826035, - "step": 947 - }, - { - "epoch": 0.11399026032585824, - "flos": 18375837594840.0, - "grad_norm": 3.936469291327875, - "learning_rate": 3.926545624708513e-06, - "loss": 0.7626, - "num_input_tokens_seen": 19844995, - "step": 948 - }, - { - "epoch": 0.11411050321649732, - "flos": 13105266750120.0, - "grad_norm": 3.9316779796722727, - "learning_rate": 3.926336305495213e-06, - "loss": 0.8478, - "num_input_tokens_seen": 19863275, - "step": 949 - }, - { - "epoch": 0.11423074610713642, - "flos": 16431637889520.0, - "grad_norm": 2.6257180639607736, - "learning_rate": 3.926126694058226e-06, - "loss": 0.8664, - "num_input_tokens_seen": 
19882145, - "step": 950 - }, - { - "epoch": 0.1143509889977755, - "flos": 14405357382000.0, - "grad_norm": 1.9846216484032393, - "learning_rate": 3.92591679042935e-06, - "loss": 0.8061, - "num_input_tokens_seen": 19901755, - "step": 951 - }, - { - "epoch": 0.1144712318884146, - "flos": 14482594210320.0, - "grad_norm": 1.6088696028822127, - "learning_rate": 3.92570659464043e-06, - "loss": 0.8114, - "num_input_tokens_seen": 19919535, - "step": 952 - }, - { - "epoch": 0.1145914747790537, - "flos": 10869116638680.0, - "grad_norm": 2.312931742187511, - "learning_rate": 3.925496106723349e-06, - "loss": 0.7861, - "num_input_tokens_seen": 19936695, - "step": 953 - }, - { - "epoch": 0.11471171766969278, - "flos": 14514597938520.0, - "grad_norm": 2.5095418986189637, - "learning_rate": 3.9252853267100405e-06, - "loss": 0.8238, - "num_input_tokens_seen": 19955660, - "step": 954 - }, - { - "epoch": 0.11483196056033187, - "flos": 16483962820680.0, - "grad_norm": 2.2694584478628217, - "learning_rate": 3.9250742546324786e-06, - "loss": 0.8235, - "num_input_tokens_seen": 19975615, - "step": 955 - }, - { - "epoch": 0.11495220345097096, - "flos": 20698057663440.0, - "grad_norm": 2.3267015035183625, - "learning_rate": 3.924862890522683e-06, - "loss": 0.8514, - "num_input_tokens_seen": 19995345, - "step": 956 - }, - { - "epoch": 0.11507244634161005, - "flos": 12758426956080.0, - "grad_norm": 3.611907202845579, - "learning_rate": 3.9246512344127174e-06, - "loss": 0.8488, - "num_input_tokens_seen": 20012725, - "step": 957 - }, - { - "epoch": 0.11519268923224914, - "flos": 16512705573000.0, - "grad_norm": 2.1679832664209946, - "learning_rate": 3.9244392863346895e-06, - "loss": 0.809, - "num_input_tokens_seen": 20031850, - "step": 958 - }, - { - "epoch": 0.11531293212288823, - "flos": 12381609671280.0, - "grad_norm": 2.3629227799863597, - "learning_rate": 3.9242270463207524e-06, - "loss": 0.8959, - "num_input_tokens_seen": 20049960, - "step": 959 - }, - { - "epoch": 0.11543317501352733, - "flos": 9006269556480.0, - "grad_norm": 3.5122539719184367, - "learning_rate": 3.924014514403102e-06, - "loss": 0.8285, - "num_input_tokens_seen": 20065835, - "step": 960 - }, - { - "epoch": 0.11555341790416641, - "flos": 14481517771680.0, - "grad_norm": 3.305476966871139, - "learning_rate": 3.92380169061398e-06, - "loss": 0.9079, - "num_input_tokens_seen": 20083335, - "step": 961 - }, - { - "epoch": 0.11567366079480551, - "flos": 18861578836560.0, - "grad_norm": 2.341922403491591, - "learning_rate": 3.9235885749856705e-06, - "loss": 0.8242, - "num_input_tokens_seen": 20101735, - "step": 962 - }, - { - "epoch": 0.1157939036854446, - "flos": 13299627465000.0, - "grad_norm": 2.437136598542904, - "learning_rate": 3.9233751675505035e-06, - "loss": 0.817, - "num_input_tokens_seen": 20120165, - "step": 963 - }, - { - "epoch": 0.11591414657608369, - "flos": 16888984638480.0, - "grad_norm": 4.272777778430287, - "learning_rate": 3.923161468340853e-06, - "loss": 0.8264, - "num_input_tokens_seen": 20139720, - "step": 964 - }, - { - "epoch": 0.11603438946672277, - "flos": 14215840641000.0, - "grad_norm": 2.101127607876032, - "learning_rate": 3.9229474773891374e-06, - "loss": 0.7996, - "num_input_tokens_seen": 20157980, - "step": 965 - }, - { - "epoch": 0.11615463235736187, - "flos": 19670356073760.0, - "grad_norm": 3.064488135405888, - "learning_rate": 3.922733194727818e-06, - "loss": 0.8334, - "num_input_tokens_seen": 20177495, - "step": 966 - }, - { - "epoch": 0.11627487524800097, - "flos": 13562740138920.0, - "grad_norm": 
4.975407776342916, - "learning_rate": 3.922518620389402e-06, - "loss": 0.8293, - "num_input_tokens_seen": 20194080, - "step": 967 - }, - { - "epoch": 0.11639511813864005, - "flos": 13245751195800.0, - "grad_norm": 3.1510335286353244, - "learning_rate": 3.922303754406439e-06, - "loss": 0.8916, - "num_input_tokens_seen": 20211640, - "step": 968 - }, - { - "epoch": 0.11651536102927915, - "flos": 15296690320800.0, - "grad_norm": 3.271246201684632, - "learning_rate": 3.922088596811526e-06, - "loss": 0.7781, - "num_input_tokens_seen": 20230490, - "step": 969 - }, - { - "epoch": 0.11663560391991823, - "flos": 12030654082440.0, - "grad_norm": 4.344003573913228, - "learning_rate": 3.9218731476373e-06, - "loss": 0.8431, - "num_input_tokens_seen": 20246395, - "step": 970 - }, - { - "epoch": 0.11675584681055733, - "flos": 14514597938520.0, - "grad_norm": 3.2215029600663585, - "learning_rate": 3.9216574069164455e-06, - "loss": 0.8391, - "num_input_tokens_seen": 20265090, - "step": 971 - }, - { - "epoch": 0.11687608970119642, - "flos": 16053395906520.0, - "grad_norm": 2.692871138018472, - "learning_rate": 3.921441374681691e-06, - "loss": 0.79, - "num_input_tokens_seen": 20284870, - "step": 972 - }, - { - "epoch": 0.1169963325918355, - "flos": 17620050147960.0, - "grad_norm": 3.074793372926683, - "learning_rate": 3.921225050965808e-06, - "loss": 0.6362, - "num_input_tokens_seen": 20304475, - "step": 973 - }, - { - "epoch": 0.1171165754824746, - "flos": 17107212471840.0, - "grad_norm": 2.5344350109215585, - "learning_rate": 3.921008435801612e-06, - "loss": 0.7207, - "num_input_tokens_seen": 20323280, - "step": 974 - }, - { - "epoch": 0.11723681837311369, - "flos": 13543147115040.0, - "grad_norm": 2.787684508918584, - "learning_rate": 3.920791529221963e-06, - "loss": 0.7314, - "num_input_tokens_seen": 20341675, - "step": 975 - }, - { - "epoch": 0.11735706126375278, - "flos": 17242188084480.0, - "grad_norm": 2.247002512320526, - "learning_rate": 3.920574331259768e-06, - "loss": 0.7512, - "num_input_tokens_seen": 20362595, - "step": 976 - }, - { - "epoch": 0.11747730415439187, - "flos": 16376843481480.0, - "grad_norm": 2.9067124849902077, - "learning_rate": 3.9203568419479716e-06, - "loss": 0.7758, - "num_input_tokens_seen": 20382870, - "step": 977 - }, - { - "epoch": 0.11759754704503096, - "flos": 16243260907080.0, - "grad_norm": 2.291442779239439, - "learning_rate": 3.92013906131957e-06, - "loss": 0.7298, - "num_input_tokens_seen": 20401520, - "step": 978 - }, - { - "epoch": 0.11771778993567006, - "flos": 16323885351120.0, - "grad_norm": 2.2143145001205986, - "learning_rate": 3.9199209894076e-06, - "loss": 0.7984, - "num_input_tokens_seen": 20421555, - "step": 979 - }, - { - "epoch": 0.11783803282630914, - "flos": 15567844624560.0, - "grad_norm": 2.125948862395622, - "learning_rate": 3.919702626245142e-06, - "loss": 0.8831, - "num_input_tokens_seen": 20440930, - "step": 980 - }, - { - "epoch": 0.11795827571694824, - "flos": 18589379754120.0, - "grad_norm": 3.4396146338871856, - "learning_rate": 3.919483971865322e-06, - "loss": 0.6358, - "num_input_tokens_seen": 20460645, - "step": 981 - }, - { - "epoch": 0.11807851860758732, - "flos": 17295051234960.0, - "grad_norm": 2.674134665305941, - "learning_rate": 3.91926502630131e-06, - "loss": 0.8528, - "num_input_tokens_seen": 20480980, - "step": 982 - }, - { - "epoch": 0.11819876149822642, - "flos": 18294389991840.0, - "grad_norm": 4.127171493941738, - "learning_rate": 3.91904578958632e-06, - "loss": 0.7022, - "num_input_tokens_seen": 20500115, - "step": 983 - 
}, - { - "epoch": 0.11831900438886551, - "flos": 16837134606720.0, - "grad_norm": 3.149067477295858, - "learning_rate": 3.918826261753608e-06, - "loss": 0.8356, - "num_input_tokens_seen": 20519415, - "step": 984 - }, - { - "epoch": 0.1184392472795046, - "flos": 20509269101520.0, - "grad_norm": 3.921349235643372, - "learning_rate": 3.918606442836478e-06, - "loss": 0.6817, - "num_input_tokens_seen": 20541355, - "step": 985 - }, - { - "epoch": 0.1185594901701437, - "flos": 14538939956400.0, - "grad_norm": 2.2478580876148886, - "learning_rate": 3.918386332868277e-06, - "loss": 0.7627, - "num_input_tokens_seen": 20559045, - "step": 986 - }, - { - "epoch": 0.11867973306078278, - "flos": 13808982545520.0, - "grad_norm": 2.9322495307853687, - "learning_rate": 3.918165931882394e-06, - "loss": 0.9354, - "num_input_tokens_seen": 20577165, - "step": 987 - }, - { - "epoch": 0.11879997595142187, - "flos": 12381578011320.0, - "grad_norm": 3.775057679803031, - "learning_rate": 3.917945239912264e-06, - "loss": 0.7324, - "num_input_tokens_seen": 20594360, - "step": 988 - }, - { - "epoch": 0.11892021884206096, - "flos": 12786853108800.0, - "grad_norm": 4.18706248260954, - "learning_rate": 3.917724256991367e-06, - "loss": 0.7492, - "num_input_tokens_seen": 20612825, - "step": 989 - }, - { - "epoch": 0.11904046173270005, - "flos": 22722470233320.0, - "grad_norm": 2.6887038440014717, - "learning_rate": 3.9175029831532245e-06, - "loss": 0.7895, - "num_input_tokens_seen": 20632060, - "step": 990 - }, - { - "epoch": 0.11916070462333915, - "flos": 14730894514320.0, - "grad_norm": 2.7339931548394976, - "learning_rate": 3.917281418431404e-06, - "loss": 0.8616, - "num_input_tokens_seen": 20650825, - "step": 991 - }, - { - "epoch": 0.11928094751397823, - "flos": 17242694643840.0, - "grad_norm": 2.6269863424363606, - "learning_rate": 3.917059562859516e-06, - "loss": 0.7488, - "num_input_tokens_seen": 20669870, - "step": 992 - }, - { - "epoch": 0.11940119040461733, - "flos": 17506060597440.0, - "grad_norm": 2.251873190289103, - "learning_rate": 3.916837416471218e-06, - "loss": 0.8747, - "num_input_tokens_seen": 20686210, - "step": 993 - }, - { - "epoch": 0.11952143329525641, - "flos": 10058978023200.0, - "grad_norm": 3.6503023205277123, - "learning_rate": 3.916614979300207e-06, - "loss": 0.7007, - "num_input_tokens_seen": 20700775, - "step": 994 - }, - { - "epoch": 0.11964167618589551, - "flos": 19806344805120.0, - "grad_norm": 1.7296327246286487, - "learning_rate": 3.9163922513802274e-06, - "loss": 0.7663, - "num_input_tokens_seen": 20722830, - "step": 995 - }, - { - "epoch": 0.1197619190765346, - "flos": 9113990434920.0, - "grad_norm": 3.1342309038198626, - "learning_rate": 3.916169232745067e-06, - "loss": 0.8064, - "num_input_tokens_seen": 20740225, - "step": 996 - }, - { - "epoch": 0.11988216196717369, - "flos": 12327226842720.0, - "grad_norm": 3.4332186306308032, - "learning_rate": 3.915945923428559e-06, - "loss": 0.8833, - "num_input_tokens_seen": 20756470, - "step": 997 - }, - { - "epoch": 0.12000240485781279, - "flos": 11814515806440.0, - "grad_norm": 3.3949006716955226, - "learning_rate": 3.915722323464577e-06, - "loss": 0.8126, - "num_input_tokens_seen": 20774795, - "step": 998 - }, - { - "epoch": 0.12012264774845187, - "flos": 36331583230920.0, - "grad_norm": 2.9608482810395675, - "learning_rate": 3.91549843288704e-06, - "loss": 0.6914, - "num_input_tokens_seen": 20798195, - "step": 999 - }, - { - "epoch": 0.12024289063909097, - "flos": 19779691610160.0, - "grad_norm": 2.3531114919814295, - "learning_rate": 
3.915274251729916e-06, - "loss": 0.7859, - "num_input_tokens_seen": 20819205, - "step": 1000 - }, - { - "epoch": 0.12036313352973005, - "flos": 14272154727120.0, - "grad_norm": 3.0865661753313676, - "learning_rate": 3.91504978002721e-06, - "loss": 0.888, - "num_input_tokens_seen": 20837980, - "step": 1001 - }, - { - "epoch": 0.12048337642036915, - "flos": 12591922514640.0, - "grad_norm": 2.74549809340378, - "learning_rate": 3.914825017812974e-06, - "loss": 0.753, - "num_input_tokens_seen": 20854350, - "step": 1002 - }, - { - "epoch": 0.12060361931100824, - "flos": 16728843849000.0, - "grad_norm": 2.855831932005934, - "learning_rate": 3.9145999651213065e-06, - "loss": 0.7163, - "num_input_tokens_seen": 20873310, - "step": 1003 - }, - { - "epoch": 0.12072386220164733, - "flos": 12192029610360.0, - "grad_norm": 4.253597083648663, - "learning_rate": 3.9143746219863465e-06, - "loss": 0.86, - "num_input_tokens_seen": 20890135, - "step": 1004 - }, - { - "epoch": 0.12084410509228642, - "flos": 40628333246520.0, - "grad_norm": 1.0565756391753636, - "learning_rate": 3.914148988442278e-06, - "loss": 0.7116, - "num_input_tokens_seen": 20945645, - "step": 1005 - }, - { - "epoch": 0.1209643479829255, - "flos": 19617429603360.0, - "grad_norm": 4.854818581394361, - "learning_rate": 3.91392306452333e-06, - "loss": 0.935, - "num_input_tokens_seen": 20962440, - "step": 1006 - }, - { - "epoch": 0.1210845908735646, - "flos": 7979739385320.0, - "grad_norm": 3.571213756640405, - "learning_rate": 3.913696850263774e-06, - "loss": 0.6306, - "num_input_tokens_seen": 20976525, - "step": 1007 - }, - { - "epoch": 0.1212048337642037, - "flos": 14973052786080.0, - "grad_norm": 2.2732709581127857, - "learning_rate": 3.913470345697929e-06, - "loss": 0.7731, - "num_input_tokens_seen": 20994875, - "step": 1008 - }, - { - "epoch": 0.12132507665484278, - "flos": 16350728505840.0, - "grad_norm": 2.516052967890669, - "learning_rate": 3.913243550860153e-06, - "loss": 0.8435, - "num_input_tokens_seen": 21012360, - "step": 1009 - }, - { - "epoch": 0.12144531954548188, - "flos": 21508196278920.0, - "grad_norm": 2.2813771878673488, - "learning_rate": 3.913016465784852e-06, - "loss": 0.752, - "num_input_tokens_seen": 21032755, - "step": 1010 - }, - { - "epoch": 0.12156556243612096, - "flos": 14973591005400.0, - "grad_norm": 5.0231658245087525, - "learning_rate": 3.912789090506474e-06, - "loss": 0.6978, - "num_input_tokens_seen": 21051735, - "step": 1011 - }, - { - "epoch": 0.12168580532676006, - "flos": 12003557648040.0, - "grad_norm": 3.2472601859429067, - "learning_rate": 3.9125614250595114e-06, - "loss": 0.71, - "num_input_tokens_seen": 21067665, - "step": 1012 - }, - { - "epoch": 0.12180604821739914, - "flos": 11166797497560.0, - "grad_norm": 6.53748983524619, - "learning_rate": 3.912333469478502e-06, - "loss": 0.8775, - "num_input_tokens_seen": 21085350, - "step": 1013 - }, - { - "epoch": 0.12192629110803824, - "flos": 14109829400400.0, - "grad_norm": 2.4256819798402627, - "learning_rate": 3.912105223798025e-06, - "loss": 0.7648, - "num_input_tokens_seen": 21104490, - "step": 1014 - }, - { - "epoch": 0.12204653399867733, - "flos": 35136178612680.0, - "grad_norm": 1.0209368374311236, - "learning_rate": 3.9118766880527065e-06, - "loss": 0.7115, - "num_input_tokens_seen": 21158645, - "step": 1015 - }, - { - "epoch": 0.12216677688931642, - "flos": 13298424386520.0, - "grad_norm": 1.8083466923420934, - "learning_rate": 3.9116478622772145e-06, - "loss": 0.7186, - "num_input_tokens_seen": 21176940, - "step": 1016 - }, - { - "epoch": 
0.12228701977995551, - "flos": 20183763629160.0, - "grad_norm": 2.861993310052822, - "learning_rate": 3.911418746506261e-06, - "loss": 0.8653, - "num_input_tokens_seen": 21196790, - "step": 1017 - }, - { - "epoch": 0.1224072626705946, - "flos": 15945136808760.0, - "grad_norm": 2.6108697504628293, - "learning_rate": 3.911189340774604e-06, - "loss": 0.7719, - "num_input_tokens_seen": 21216640, - "step": 1018 - }, - { - "epoch": 0.1225275055612337, - "flos": 15135251472960.0, - "grad_norm": 2.1561251618003046, - "learning_rate": 3.910959645117043e-06, - "loss": 0.7814, - "num_input_tokens_seen": 21235695, - "step": 1019 - }, - { - "epoch": 0.12264774845187278, - "flos": 42554839525560.0, - "grad_norm": 0.784250524606429, - "learning_rate": 3.910729659568423e-06, - "loss": 0.5877, - "num_input_tokens_seen": 21292600, - "step": 1020 - }, - { - "epoch": 0.12276799134251187, - "flos": 19348206557160.0, - "grad_norm": 2.3491346429827766, - "learning_rate": 3.9104993841636344e-06, - "loss": 0.8081, - "num_input_tokens_seen": 21312890, - "step": 1021 - }, - { - "epoch": 0.12288823423315097, - "flos": 15401371843080.0, - "grad_norm": 1.994888031095243, - "learning_rate": 3.910268818937608e-06, - "loss": 0.7939, - "num_input_tokens_seen": 21330765, - "step": 1022 - }, - { - "epoch": 0.12300847712379005, - "flos": 8925170213040.0, - "grad_norm": 3.8500669101994442, - "learning_rate": 3.9100379639253196e-06, - "loss": 0.8676, - "num_input_tokens_seen": 21347205, - "step": 1023 - }, - { - "epoch": 0.12312872001442915, - "flos": 12218112926040.0, - "grad_norm": 3.2310126329964293, - "learning_rate": 3.909806819161791e-06, - "loss": 0.8425, - "num_input_tokens_seen": 21362400, - "step": 1024 - }, - { - "epoch": 0.12324896290506823, - "flos": 13433938218480.0, - "grad_norm": 2.5618069339522256, - "learning_rate": 3.909575384682086e-06, - "loss": 0.8421, - "num_input_tokens_seen": 21381000, - "step": 1025 - }, - { - "epoch": 0.12336920579570733, - "flos": 13811673642120.0, - "grad_norm": 2.0323285100645134, - "learning_rate": 3.9093436605213144e-06, - "loss": 0.6704, - "num_input_tokens_seen": 21401220, - "step": 1026 - }, - { - "epoch": 0.12348944868634643, - "flos": 17484726275760.0, - "grad_norm": 2.351633875577919, - "learning_rate": 3.909111646714627e-06, - "loss": 0.7719, - "num_input_tokens_seen": 21421785, - "step": 1027 - }, - { - "epoch": 0.12360969157698551, - "flos": 13893754444320.0, - "grad_norm": 2.0676657979002893, - "learning_rate": 3.9088793432972206e-06, - "loss": 0.6967, - "num_input_tokens_seen": 21440325, - "step": 1028 - }, - { - "epoch": 0.1237299344676246, - "flos": 9600238236000.0, - "grad_norm": 2.7106597494264464, - "learning_rate": 3.908646750304336e-06, - "loss": 0.797, - "num_input_tokens_seen": 21457730, - "step": 1029 - }, - { - "epoch": 0.12385017735826369, - "flos": 14973970924920.0, - "grad_norm": 1.6224834322561068, - "learning_rate": 3.908413867771257e-06, - "loss": 0.8479, - "num_input_tokens_seen": 21476360, - "step": 1030 - }, - { - "epoch": 0.12397042024890279, - "flos": 12651750856320.0, - "grad_norm": 2.2937152953471838, - "learning_rate": 3.908180695733311e-06, - "loss": 0.7902, - "num_input_tokens_seen": 21495570, - "step": 1031 - }, - { - "epoch": 0.12409066313954187, - "flos": 14919651416280.0, - "grad_norm": 3.2874421321286746, - "learning_rate": 3.907947234225871e-06, - "loss": 0.8144, - "num_input_tokens_seen": 21514300, - "step": 1032 - }, - { - "epoch": 0.12421090603018096, - "flos": 15159118591440.0, - "grad_norm": 2.317974723908505, - "learning_rate": 
3.907713483284352e-06, - "loss": 0.8591, - "num_input_tokens_seen": 21533495, - "step": 1033 - }, - { - "epoch": 0.12433114892082006, - "flos": 17943940962360.0, - "grad_norm": 3.1834622408612385, - "learning_rate": 3.907479442944216e-06, - "loss": 0.9646, - "num_input_tokens_seen": 21551620, - "step": 1034 - }, - { - "epoch": 0.12445139181145914, - "flos": 14379337386240.0, - "grad_norm": 2.673098813691935, - "learning_rate": 3.907245113240963e-06, - "loss": 0.907, - "num_input_tokens_seen": 21569460, - "step": 1035 - }, - { - "epoch": 0.12457163470209824, - "flos": 34168839092640.0, - "grad_norm": 1.994447331469237, - "learning_rate": 3.907010494210144e-06, - "loss": 0.7315, - "num_input_tokens_seen": 21591840, - "step": 1036 - }, - { - "epoch": 0.12469187759273732, - "flos": 14893029881280.0, - "grad_norm": 3.5966722631779198, - "learning_rate": 3.9067755858873495e-06, - "loss": 0.9189, - "num_input_tokens_seen": 21608360, - "step": 1037 - }, - { - "epoch": 0.12481212048337642, - "flos": 51048393554400.0, - "grad_norm": 0.8402535791476926, - "learning_rate": 3.906540388308214e-06, - "loss": 0.6559, - "num_input_tokens_seen": 21667665, - "step": 1038 - }, - { - "epoch": 0.12493236337401552, - "flos": 13299975724560.0, - "grad_norm": 12.128664701701052, - "learning_rate": 3.906304901508417e-06, - "loss": 0.8046, - "num_input_tokens_seen": 21686285, - "step": 1039 - }, - { - "epoch": 0.12505260626465461, - "flos": 22047972089640.0, - "grad_norm": 4.102083532754103, - "learning_rate": 3.9060691255236835e-06, - "loss": 0.7394, - "num_input_tokens_seen": 21706570, - "step": 1040 - }, - { - "epoch": 0.1251728491552937, - "flos": 17897093204280.0, - "grad_norm": 1.9410773445768188, - "learning_rate": 3.905833060389778e-06, - "loss": 0.8092, - "num_input_tokens_seen": 21730410, - "step": 1041 - }, - { - "epoch": 0.12529309204593278, - "flos": 19882790134440.0, - "grad_norm": 4.138803678882822, - "learning_rate": 3.905596706142513e-06, - "loss": 0.7529, - "num_input_tokens_seen": 21751540, - "step": 1042 - }, - { - "epoch": 0.12541333493657186, - "flos": 22588064499960.0, - "grad_norm": 2.337639638228389, - "learning_rate": 3.9053600628177435e-06, - "loss": 0.8301, - "num_input_tokens_seen": 21770870, - "step": 1043 - }, - { - "epoch": 0.12553357782721097, - "flos": 17322812528520.0, - "grad_norm": 2.4863519060626253, - "learning_rate": 3.905123130451367e-06, - "loss": 0.8284, - "num_input_tokens_seen": 21791690, - "step": 1044 - }, - { - "epoch": 0.12565382071785006, - "flos": 18214018827480.0, - "grad_norm": 2.3927883756471915, - "learning_rate": 3.904885909079326e-06, - "loss": 0.7734, - "num_input_tokens_seen": 21810195, - "step": 1045 - }, - { - "epoch": 0.12577406360848914, - "flos": 15621530934000.0, - "grad_norm": 28.897255779771935, - "learning_rate": 3.904648398737607e-06, - "loss": 0.7567, - "num_input_tokens_seen": 21828480, - "step": 1046 - }, - { - "epoch": 0.12589430649912825, - "flos": 26557784873760.0, - "grad_norm": 2.538789892967905, - "learning_rate": 3.9044105994622406e-06, - "loss": 0.7659, - "num_input_tokens_seen": 21849345, - "step": 1047 - }, - { - "epoch": 0.12601454938976733, - "flos": 18564689476680.0, - "grad_norm": 2.92065306219215, - "learning_rate": 3.9041725112893005e-06, - "loss": 0.7928, - "num_input_tokens_seen": 21870290, - "step": 1048 - }, - { - "epoch": 0.12613479228040642, - "flos": 11328711244800.0, - "grad_norm": 3.2570349042015962, - "learning_rate": 3.903934134254904e-06, - "loss": 0.7367, - "num_input_tokens_seen": 21887800, - "step": 1049 - }, - { - 
"epoch": 0.1262550351710455, - "flos": 15702851897160.0, - "grad_norm": 2.8590000928229644, - "learning_rate": 3.903695468395213e-06, - "loss": 0.8289, - "num_input_tokens_seen": 21905390, - "step": 1050 - }, - { - "epoch": 0.1263752780616846, - "flos": 23182001519520.0, - "grad_norm": 5.944760029973022, - "learning_rate": 3.903456513746434e-06, - "loss": 0.5519, - "num_input_tokens_seen": 21926085, - "step": 1051 - }, - { - "epoch": 0.1264955209523237, - "flos": 21103649360520.0, - "grad_norm": 2.483957795434192, - "learning_rate": 3.903217270344815e-06, - "loss": 0.8614, - "num_input_tokens_seen": 21946055, - "step": 1052 - }, - { - "epoch": 0.12661576384296278, - "flos": 21453433530840.0, - "grad_norm": 2.433494210695051, - "learning_rate": 3.902977738226648e-06, - "loss": 0.8023, - "num_input_tokens_seen": 21966510, - "step": 1053 - }, - { - "epoch": 0.12673600673360189, - "flos": 15243827170320.0, - "grad_norm": 2.259975571964543, - "learning_rate": 3.902737917428273e-06, - "loss": 0.9026, - "num_input_tokens_seen": 21984395, - "step": 1054 - }, - { - "epoch": 0.12685624962424097, - "flos": 18509800088760.0, - "grad_norm": 2.0436231101305755, - "learning_rate": 3.902497807986068e-06, - "loss": 0.8155, - "num_input_tokens_seen": 22004135, - "step": 1055 - }, - { - "epoch": 0.12697649251488005, - "flos": 20185061687520.0, - "grad_norm": 2.5048035250304816, - "learning_rate": 3.902257409936458e-06, - "loss": 0.8185, - "num_input_tokens_seen": 22024620, - "step": 1056 - }, - { - "epoch": 0.12709673540551916, - "flos": 15541001469840.0, - "grad_norm": 2.4281904017846987, - "learning_rate": 3.902016723315912e-06, - "loss": 0.8172, - "num_input_tokens_seen": 22042280, - "step": 1057 - }, - { - "epoch": 0.12721697829615825, - "flos": 18564721136640.0, - "grad_norm": 5.088216572752712, - "learning_rate": 3.901775748160941e-06, - "loss": 0.6715, - "num_input_tokens_seen": 22061180, - "step": 1058 - }, - { - "epoch": 0.12733722118679733, - "flos": 45661938052920.0, - "grad_norm": 0.8100947210477285, - "learning_rate": 3.901534484508101e-06, - "loss": 0.6304, - "num_input_tokens_seen": 22123575, - "step": 1059 - }, - { - "epoch": 0.1274574640774364, - "flos": 19777317113160.0, - "grad_norm": 2.5952990398942144, - "learning_rate": 3.901292932393991e-06, - "loss": 0.7217, - "num_input_tokens_seen": 22142175, - "step": 1060 - }, - { - "epoch": 0.12757770696807552, - "flos": 16269470862600.0, - "grad_norm": 2.7519616153280135, - "learning_rate": 3.9010510918552555e-06, - "loss": 0.8412, - "num_input_tokens_seen": 22160970, - "step": 1061 - }, - { - "epoch": 0.1276979498587146, - "flos": 20940405894960.0, - "grad_norm": 3.2941712777796837, - "learning_rate": 3.900808962928581e-06, - "loss": 0.7332, - "num_input_tokens_seen": 22178305, - "step": 1062 - }, - { - "epoch": 0.1278181927493537, - "flos": 12705342185880.0, - "grad_norm": 3.1860090770604104, - "learning_rate": 3.900566545650698e-06, - "loss": 0.8832, - "num_input_tokens_seen": 22195695, - "step": 1063 - }, - { - "epoch": 0.1279384356399928, - "flos": 15456672810480.0, - "grad_norm": 2.6217309487091605, - "learning_rate": 3.900323840058381e-06, - "loss": 0.7998, - "num_input_tokens_seen": 22213125, - "step": 1064 - }, - { - "epoch": 0.12805867853063188, - "flos": 19481472531960.0, - "grad_norm": 3.004973262978934, - "learning_rate": 3.900080846188449e-06, - "loss": 0.7997, - "num_input_tokens_seen": 22231435, - "step": 1065 - }, - { - "epoch": 0.12817892142127096, - "flos": 11977157732760.0, - "grad_norm": 2.284100671831003, - "learning_rate": 
3.8998375640777625e-06, - "loss": 0.7947, - "num_input_tokens_seen": 22249025, - "step": 1066 - }, - { - "epoch": 0.12829916431191005, - "flos": 44781396669120.0, - "grad_norm": 0.7899577661282836, - "learning_rate": 3.899593993763229e-06, - "loss": 0.5456, - "num_input_tokens_seen": 22309705, - "step": 1067 - }, - { - "epoch": 0.12841940720254916, - "flos": 21857125630320.0, - "grad_norm": 2.9179295303589194, - "learning_rate": 3.899350135281796e-06, - "loss": 0.7835, - "num_input_tokens_seen": 22330425, - "step": 1068 - }, - { - "epoch": 0.12853965009318824, - "flos": 19018363670280.0, - "grad_norm": 2.369106710746284, - "learning_rate": 3.8991059886704585e-06, - "loss": 0.7881, - "num_input_tokens_seen": 22349650, - "step": 1069 - }, - { - "epoch": 0.12865989298382732, - "flos": 22641845789280.0, - "grad_norm": 2.697108104617003, - "learning_rate": 3.898861553966252e-06, - "loss": 0.8109, - "num_input_tokens_seen": 22369020, - "step": 1070 - }, - { - "epoch": 0.12878013587446643, - "flos": 18970439473560.0, - "grad_norm": 2.9903809195700166, - "learning_rate": 3.898616831206257e-06, - "loss": 0.8643, - "num_input_tokens_seen": 22389165, - "step": 1071 - }, - { - "epoch": 0.12890037876510552, - "flos": 17081509075680.0, - "grad_norm": 2.206371323488922, - "learning_rate": 3.8983718204276e-06, - "loss": 0.7535, - "num_input_tokens_seen": 22411105, - "step": 1072 - }, - { - "epoch": 0.1290206216557446, - "flos": 17269379498760.0, - "grad_norm": 2.1182949646547087, - "learning_rate": 3.898126521667446e-06, - "loss": 0.8226, - "num_input_tokens_seen": 22430980, - "step": 1073 - }, - { - "epoch": 0.12914086454638368, - "flos": 17701054511520.0, - "grad_norm": 2.258405033488919, - "learning_rate": 3.897880934963007e-06, - "loss": 0.8286, - "num_input_tokens_seen": 22450250, - "step": 1074 - }, - { - "epoch": 0.1292611074370228, - "flos": 14812310457360.0, - "grad_norm": 2.519186212142277, - "learning_rate": 3.89763506035154e-06, - "loss": 0.7697, - "num_input_tokens_seen": 22467820, - "step": 1075 - }, - { - "epoch": 0.12938135032766188, - "flos": 20073984853320.0, - "grad_norm": 4.020480652727367, - "learning_rate": 3.897388897870343e-06, - "loss": 0.8019, - "num_input_tokens_seen": 22488180, - "step": 1076 - }, - { - "epoch": 0.12950159321830096, - "flos": 21479991745920.0, - "grad_norm": 1.935384703154257, - "learning_rate": 3.89714244755676e-06, - "loss": 0.7229, - "num_input_tokens_seen": 22509260, - "step": 1077 - }, - { - "epoch": 0.12962183610894007, - "flos": 17970562497360.0, - "grad_norm": 2.8770146152945855, - "learning_rate": 3.896895709448175e-06, - "loss": 0.854, - "num_input_tokens_seen": 22528730, - "step": 1078 - }, - { - "epoch": 0.12974207899957915, - "flos": 8113416939600.0, - "grad_norm": 3.858561318062921, - "learning_rate": 3.896648683582019e-06, - "loss": 0.7424, - "num_input_tokens_seen": 22543785, - "step": 1079 - }, - { - "epoch": 0.12986232189021824, - "flos": 18106076329320.0, - "grad_norm": 3.801249178303332, - "learning_rate": 3.896401369995766e-06, - "loss": 0.7962, - "num_input_tokens_seen": 22563310, - "step": 1080 - }, - { - "epoch": 0.12998256478085732, - "flos": 17512170969720.0, - "grad_norm": 2.742013779052571, - "learning_rate": 3.896153768726932e-06, - "loss": 0.7671, - "num_input_tokens_seen": 22583340, - "step": 1081 - }, - { - "epoch": 0.13010280767149643, - "flos": 13596263545200.0, - "grad_norm": 2.6376092607908896, - "learning_rate": 3.8959058798130806e-06, - "loss": 0.8526, - "num_input_tokens_seen": 22601035, - "step": 1082 - }, - { - "epoch": 
0.1302230505621355, - "flos": 16674840939960.0, - "grad_norm": 2.056753302201838, - "learning_rate": 3.895657703291814e-06, - "loss": 0.7242, - "num_input_tokens_seen": 22620860, - "step": 1083 - }, - { - "epoch": 0.1303432934527746, - "flos": 15593927940240.0, - "grad_norm": 3.4120399192898723, - "learning_rate": 3.895409239200781e-06, - "loss": 0.7782, - "num_input_tokens_seen": 22636465, - "step": 1084 - }, - { - "epoch": 0.1304635363434137, - "flos": 15297101900280.0, - "grad_norm": 2.9302977879974543, - "learning_rate": 3.895160487577673e-06, - "loss": 0.8969, - "num_input_tokens_seen": 22653755, - "step": 1085 - }, - { - "epoch": 0.1305837792340528, - "flos": 46624207488000.0, - "grad_norm": 0.7747944959312069, - "learning_rate": 3.894911448460226e-06, - "loss": 0.6247, - "num_input_tokens_seen": 22712790, - "step": 1086 - }, - { - "epoch": 0.13070402212469187, - "flos": 19371757076040.0, - "grad_norm": 2.4268582175531948, - "learning_rate": 3.8946621218862195e-06, - "loss": 0.7172, - "num_input_tokens_seen": 22733510, - "step": 1087 - }, - { - "epoch": 0.13082426501533098, - "flos": 20293384105200.0, - "grad_norm": 2.65202417260225, - "learning_rate": 3.894412507893475e-06, - "loss": 0.8682, - "num_input_tokens_seen": 22753510, - "step": 1088 - }, - { - "epoch": 0.13094450790597006, - "flos": 18186574133520.0, - "grad_norm": 2.8115717733666328, - "learning_rate": 3.894162606519859e-06, - "loss": 0.6993, - "num_input_tokens_seen": 22772180, - "step": 1089 - }, - { - "epoch": 0.13106475079660915, - "flos": 13920787558800.0, - "grad_norm": 2.0887625205705143, - "learning_rate": 3.893912417803282e-06, - "loss": 0.7495, - "num_input_tokens_seen": 22791615, - "step": 1090 - }, - { - "epoch": 0.13118499368724823, - "flos": 21210800359680.0, - "grad_norm": 2.7875474934316737, - "learning_rate": 3.8936619417816975e-06, - "loss": 0.7459, - "num_input_tokens_seen": 22811665, - "step": 1091 - }, - { - "epoch": 0.13130523657788734, - "flos": 10383913616280.0, - "grad_norm": 2.2919150015630265, - "learning_rate": 3.8934111784931015e-06, - "loss": 0.7082, - "num_input_tokens_seen": 22828835, - "step": 1092 - }, - { - "epoch": 0.13142547946852642, - "flos": 51751222870920.0, - "grad_norm": 0.9649932374865512, - "learning_rate": 3.893160127975535e-06, - "loss": 0.6357, - "num_input_tokens_seen": 22889245, - "step": 1093 - }, - { - "epoch": 0.1315457223591655, - "flos": 33712600442280.0, - "grad_norm": 3.0457688108638443, - "learning_rate": 3.8929087902670826e-06, - "loss": 0.7966, - "num_input_tokens_seen": 22910595, - "step": 1094 - }, - { - "epoch": 0.13166596524980462, - "flos": 46354509542400.0, - "grad_norm": 0.8700992740795114, - "learning_rate": 3.8926571654058715e-06, - "loss": 0.6184, - "num_input_tokens_seen": 22966960, - "step": 1095 - }, - { - "epoch": 0.1317862081404437, - "flos": 17268746299560.0, - "grad_norm": 2.5883327120919186, - "learning_rate": 3.892405253430074e-06, - "loss": 0.744, - "num_input_tokens_seen": 22984200, - "step": 1096 - }, - { - "epoch": 0.13190645103108278, - "flos": 14806674984480.0, - "grad_norm": 2.1741259868652265, - "learning_rate": 3.892153054377904e-06, - "loss": 0.8119, - "num_input_tokens_seen": 23001325, - "step": 1097 - }, - { - "epoch": 0.13202669392172187, - "flos": 39377084950200.0, - "grad_norm": 0.9781939802130032, - "learning_rate": 3.891900568287619e-06, - "loss": 0.6302, - "num_input_tokens_seen": 23053430, - "step": 1098 - }, - { - "epoch": 0.13214693681236098, - "flos": 11544121341720.0, - "grad_norm": 5.549442352597663, - "learning_rate": 
[deleted training log, trainer_state.json-style "log_history": one JSON record per optimizer step for steps 1099-1428, each with "epoch", "flos", "grad_norm", "learning_rate", "loss", and "num_input_tokens_seen"; the learning rate decays smoothly from ~3.8916e-06 at step 1099 to ~3.7932e-06 at step 1428, the loss fluctuates between roughly 0.51 and 0.96, and num_input_tokens_seen grows from ~23.07M to ~29.77M tokens; the log continues from step 1429 below]
19508537306400.0, - "grad_norm": 2.8088293069627914, - "learning_rate": 3.792811382236317e-06, - "loss": 0.6611, - "num_input_tokens_seen": 29785500, - "step": 1429 - }, - { - "epoch": 0.17194733361390008, - "flos": 20645099533080.0, - "grad_norm": 4.662923446939858, - "learning_rate": 3.792465979139279e-06, - "loss": 0.7577, - "num_input_tokens_seen": 29807825, - "step": 1430 - }, - { - "epoch": 0.17206757650453916, - "flos": 48313363317840.0, - "grad_norm": 1.0605831807422346, - "learning_rate": 3.792120304126576e-06, - "loss": 0.7323, - "num_input_tokens_seen": 29870920, - "step": 1431 - }, - { - "epoch": 0.17218781939517827, - "flos": 16297073856360.0, - "grad_norm": 2.0912727390291175, - "learning_rate": 3.791774357250649e-06, - "loss": 0.8246, - "num_input_tokens_seen": 29889470, - "step": 1432 - }, - { - "epoch": 0.17230806228581735, - "flos": 10275781158360.0, - "grad_norm": 3.808507360799076, - "learning_rate": 3.7914281385639757e-06, - "loss": 0.7716, - "num_input_tokens_seen": 29907065, - "step": 1433 - }, - { - "epoch": 0.17242830517645644, - "flos": 15136201271760.0, - "grad_norm": 2.309301793818396, - "learning_rate": 3.7910816481190784e-06, - "loss": 0.7715, - "num_input_tokens_seen": 29926600, - "step": 1434 - }, - { - "epoch": 0.17254854806709552, - "flos": 22588444419480.0, - "grad_norm": 2.195612007969207, - "learning_rate": 3.7907348859685193e-06, - "loss": 0.7378, - "num_input_tokens_seen": 29948025, - "step": 1435 - }, - { - "epoch": 0.17266879095773463, - "flos": 19509803704800.0, - "grad_norm": 2.412517855879567, - "learning_rate": 3.790387852164902e-06, - "loss": 0.7819, - "num_input_tokens_seen": 29968475, - "step": 1436 - }, - { - "epoch": 0.1727890338483737, - "flos": 14811550618320.0, - "grad_norm": 4.075647529688366, - "learning_rate": 3.7900405467608707e-06, - "loss": 0.7473, - "num_input_tokens_seen": 29987740, - "step": 1437 - }, - { - "epoch": 0.1729092767390128, - "flos": 13266452318280.0, - "grad_norm": 2.9090126606864706, - "learning_rate": 3.7896929698091114e-06, - "loss": 0.7491, - "num_input_tokens_seen": 30000275, - "step": 1438 - }, - { - "epoch": 0.1730295196296519, - "flos": 19616796404160.0, - "grad_norm": 3.512421229012068, - "learning_rate": 3.7893451213623518e-06, - "loss": 0.6762, - "num_input_tokens_seen": 30017225, - "step": 1439 - }, - { - "epoch": 0.173149762520291, - "flos": 17458294700520.0, - "grad_norm": 2.7163180293320974, - "learning_rate": 3.7889970014733606e-06, - "loss": 0.8079, - "num_input_tokens_seen": 30036050, - "step": 1440 - }, - { - "epoch": 0.17327000541093007, - "flos": 17107244131800.0, - "grad_norm": 2.0365812324714865, - "learning_rate": 3.7886486101949463e-06, - "loss": 0.7594, - "num_input_tokens_seen": 30056950, - "step": 1441 - }, - { - "epoch": 0.17339024830156918, - "flos": 13297854507240.0, - "grad_norm": 2.1871655889755197, - "learning_rate": 3.7882999475799594e-06, - "loss": 0.8598, - "num_input_tokens_seen": 30074705, - "step": 1442 - }, - { - "epoch": 0.17351049119220827, - "flos": 17080527616920.0, - "grad_norm": 2.0309153354490097, - "learning_rate": 3.787951013681293e-06, - "loss": 0.8021, - "num_input_tokens_seen": 30092470, - "step": 1443 - }, - { - "epoch": 0.17363073408284735, - "flos": 17429045388840.0, - "grad_norm": 2.1346875566480583, - "learning_rate": 3.787601808551879e-06, - "loss": 0.766, - "num_input_tokens_seen": 30112005, - "step": 1444 - }, - { - "epoch": 0.17375097697348643, - "flos": 13515734081040.0, - "grad_norm": 2.6696902260403053, - "learning_rate": 3.7872523322446926e-06, - 
"loss": 0.815, - "num_input_tokens_seen": 30130610, - "step": 1445 - }, - { - "epoch": 0.17387121986412554, - "flos": 28585933318920.0, - "grad_norm": 2.395968380867473, - "learning_rate": 3.7869025848127478e-06, - "loss": 0.5807, - "num_input_tokens_seen": 30154525, - "step": 1446 - }, - { - "epoch": 0.17399146275476463, - "flos": 14892523321920.0, - "grad_norm": 3.0068303870422137, - "learning_rate": 3.786552566309102e-06, - "loss": 0.7712, - "num_input_tokens_seen": 30172455, - "step": 1447 - }, - { - "epoch": 0.1741117056454037, - "flos": 13994985030960.0, - "grad_norm": 3.2346032103593165, - "learning_rate": 3.7862022767868517e-06, - "loss": 0.8546, - "num_input_tokens_seen": 30189765, - "step": 1448 - }, - { - "epoch": 0.17423194853604282, - "flos": 18591817571040.0, - "grad_norm": 2.276329377333726, - "learning_rate": 3.7858517162991367e-06, - "loss": 0.8264, - "num_input_tokens_seen": 30209560, - "step": 1449 - }, - { - "epoch": 0.1743521914266819, - "flos": 18456810298440.0, - "grad_norm": 3.506752176753099, - "learning_rate": 3.7855008848991363e-06, - "loss": 0.5952, - "num_input_tokens_seen": 30227485, - "step": 1450 - }, - { - "epoch": 0.17447243431732098, - "flos": 18808177466760.0, - "grad_norm": 2.2552359375941817, - "learning_rate": 3.7851497826400714e-06, - "loss": 0.7657, - "num_input_tokens_seen": 30247345, - "step": 1451 - }, - { - "epoch": 0.17459267720796007, - "flos": 26663986074120.0, - "grad_norm": 2.3958869817504516, - "learning_rate": 3.7847984095752034e-06, - "loss": 0.7377, - "num_input_tokens_seen": 30270520, - "step": 1452 - }, - { - "epoch": 0.17471292009859918, - "flos": 14622730396440.0, - "grad_norm": 2.276389705721688, - "learning_rate": 3.784446765757836e-06, - "loss": 0.7791, - "num_input_tokens_seen": 30288885, - "step": 1453 - }, - { - "epoch": 0.17483316298923826, - "flos": 20401294943400.0, - "grad_norm": 2.6946614681642354, - "learning_rate": 3.7840948512413133e-06, - "loss": 0.769, - "num_input_tokens_seen": 30306190, - "step": 1454 - }, - { - "epoch": 0.17495340587987734, - "flos": 32389275891120.0, - "grad_norm": 3.008872264094264, - "learning_rate": 3.7837426660790196e-06, - "loss": 0.7627, - "num_input_tokens_seen": 30327325, - "step": 1455 - }, - { - "epoch": 0.17507364877051645, - "flos": 15269942145960.0, - "grad_norm": 2.556783580353237, - "learning_rate": 3.783390210324382e-06, - "loss": 0.7978, - "num_input_tokens_seen": 30346770, - "step": 1456 - }, - { - "epoch": 0.17519389166115554, - "flos": 18105823049640.0, - "grad_norm": 2.7728429022611643, - "learning_rate": 3.7830374840308676e-06, - "loss": 0.7006, - "num_input_tokens_seen": 30366645, - "step": 1457 - }, - { - "epoch": 0.17531413455179462, - "flos": 17424328054800.0, - "grad_norm": 2.639567597986382, - "learning_rate": 3.7826844872519842e-06, - "loss": 0.8016, - "num_input_tokens_seen": 30384220, - "step": 1458 - }, - { - "epoch": 0.1754343774424337, - "flos": 17998450430760.0, - "grad_norm": 2.5412663212227207, - "learning_rate": 3.782331220041282e-06, - "loss": 0.7138, - "num_input_tokens_seen": 30404005, - "step": 1459 - }, - { - "epoch": 0.17555462033307281, - "flos": 13219287960600.0, - "grad_norm": 3.5627963294211082, - "learning_rate": 3.7819776824523504e-06, - "loss": 0.82, - "num_input_tokens_seen": 30421590, - "step": 1460 - }, - { - "epoch": 0.1756748632237119, - "flos": 20805873521760.0, - "grad_norm": 2.1521326089580373, - "learning_rate": 3.7816238745388213e-06, - "loss": 0.8297, - "num_input_tokens_seen": 30440855, - "step": 1461 - }, - { - "epoch": 
0.17579510611435098, - "flos": 18856354943160.0, - "grad_norm": 2.1410425154889157, - "learning_rate": 3.781269796354367e-06, - "loss": 0.8633, - "num_input_tokens_seen": 30460195, - "step": 1462 - }, - { - "epoch": 0.1759153490049901, - "flos": 13569578690280.0, - "grad_norm": 3.6539198794629675, - "learning_rate": 3.7809154479527006e-06, - "loss": 0.859, - "num_input_tokens_seen": 30479120, - "step": 1463 - }, - { - "epoch": 0.17603559189562917, - "flos": 13591957790640.0, - "grad_norm": 2.3929155135203146, - "learning_rate": 3.780560829387577e-06, - "loss": 0.8147, - "num_input_tokens_seen": 30497340, - "step": 1464 - }, - { - "epoch": 0.17615583478626826, - "flos": 44612897650200.0, - "grad_norm": 0.8549581597781926, - "learning_rate": 3.7802059407127915e-06, - "loss": 0.607, - "num_input_tokens_seen": 30555610, - "step": 1465 - }, - { - "epoch": 0.17627607767690734, - "flos": 17289764021640.0, - "grad_norm": 2.2674599418760844, - "learning_rate": 3.7798507819821797e-06, - "loss": 0.8386, - "num_input_tokens_seen": 30572455, - "step": 1466 - }, - { - "epoch": 0.17639632056754645, - "flos": 12867192613200.0, - "grad_norm": 3.065105653220664, - "learning_rate": 3.7794953532496197e-06, - "loss": 0.7778, - "num_input_tokens_seen": 30588080, - "step": 1467 - }, - { - "epoch": 0.17651656345818553, - "flos": 42694152552720.0, - "grad_norm": 1.5880166692315298, - "learning_rate": 3.7791396545690295e-06, - "loss": 0.6312, - "num_input_tokens_seen": 30649035, - "step": 1468 - }, - { - "epoch": 0.17663680634882462, - "flos": 16782466838520.0, - "grad_norm": 2.689594605823851, - "learning_rate": 3.7787836859943685e-06, - "loss": 0.7851, - "num_input_tokens_seen": 30667480, - "step": 1469 - }, - { - "epoch": 0.17675704923946373, - "flos": 16566771801960.0, - "grad_norm": 4.085489106237174, - "learning_rate": 3.7784274475796363e-06, - "loss": 0.7672, - "num_input_tokens_seen": 30685830, - "step": 1470 - }, - { - "epoch": 0.1768772921301028, - "flos": 19888710546960.0, - "grad_norm": 2.61514995458638, - "learning_rate": 3.7780709393788745e-06, - "loss": 0.7535, - "num_input_tokens_seen": 30706025, - "step": 1471 - }, - { - "epoch": 0.1769975350207419, - "flos": 14001855242280.0, - "grad_norm": 2.2982387516439857, - "learning_rate": 3.777714161446165e-06, - "loss": 0.7429, - "num_input_tokens_seen": 30725450, - "step": 1472 - }, - { - "epoch": 0.177117777911381, - "flos": 26555030457240.0, - "grad_norm": 3.574281852456969, - "learning_rate": 3.7773571138356304e-06, - "loss": 0.6556, - "num_input_tokens_seen": 30745340, - "step": 1473 - }, - { - "epoch": 0.17723802080202009, - "flos": 16161655004280.0, - "grad_norm": 2.3722204708159405, - "learning_rate": 3.776999796601435e-06, - "loss": 0.8781, - "num_input_tokens_seen": 30763820, - "step": 1474 - }, - { - "epoch": 0.17735826369265917, - "flos": 22179338466840.0, - "grad_norm": 2.2289624581134055, - "learning_rate": 3.776642209797783e-06, - "loss": 0.695, - "num_input_tokens_seen": 30785370, - "step": 1475 - }, - { - "epoch": 0.17747850658329825, - "flos": 15648817328160.0, - "grad_norm": 2.7304409827920204, - "learning_rate": 3.7762843534789205e-06, - "loss": 0.7543, - "num_input_tokens_seen": 30803840, - "step": 1476 - }, - { - "epoch": 0.17759874947393736, - "flos": 12382052910720.0, - "grad_norm": 2.397088581700323, - "learning_rate": 3.7759262276991343e-06, - "loss": 0.8521, - "num_input_tokens_seen": 30821170, - "step": 1477 - }, - { - "epoch": 0.17771899236457644, - "flos": 8358013028280.0, - "grad_norm": 2.510092296195242, - "learning_rate": 
3.7755678325127506e-06, - "loss": 0.7818, - "num_input_tokens_seen": 30838570, - "step": 1478 - }, - { - "epoch": 0.17783923525521553, - "flos": 13732252276560.0, - "grad_norm": 1.9112115297007253, - "learning_rate": 3.7752091679741393e-06, - "loss": 0.7299, - "num_input_tokens_seen": 30856080, - "step": 1479 - }, - { - "epoch": 0.17795947814585464, - "flos": 22317226795800.0, - "grad_norm": 2.900237067168194, - "learning_rate": 3.774850234137708e-06, - "loss": 0.7563, - "num_input_tokens_seen": 30873095, - "step": 1480 - }, - { - "epoch": 0.17807972103649372, - "flos": 17862651659160.0, - "grad_norm": 2.823287282640976, - "learning_rate": 3.7744910310579076e-06, - "loss": 0.8103, - "num_input_tokens_seen": 30891740, - "step": 1481 - }, - { - "epoch": 0.1781999639271328, - "flos": 14837475634200.0, - "grad_norm": 2.401147460813383, - "learning_rate": 3.774131558789229e-06, - "loss": 0.8442, - "num_input_tokens_seen": 30910790, - "step": 1482 - }, - { - "epoch": 0.1783202068177719, - "flos": 11598314210520.0, - "grad_norm": 3.968234291512282, - "learning_rate": 3.773771817386203e-06, - "loss": 0.6798, - "num_input_tokens_seen": 30927840, - "step": 1483 - }, - { - "epoch": 0.178440449708411, - "flos": 15082103382840.0, - "grad_norm": 1.8548559843438654, - "learning_rate": 3.773411806903403e-06, - "loss": 0.7811, - "num_input_tokens_seen": 30946640, - "step": 1484 - }, - { - "epoch": 0.17856069259905008, - "flos": 15861821268120.0, - "grad_norm": 3.8692093929243407, - "learning_rate": 3.7730515273954415e-06, - "loss": 0.92, - "num_input_tokens_seen": 30964970, - "step": 1485 - }, - { - "epoch": 0.17868093548968916, - "flos": 19478148236160.0, - "grad_norm": 2.0039398012085283, - "learning_rate": 3.772690978916973e-06, - "loss": 0.8226, - "num_input_tokens_seen": 30984445, - "step": 1486 - }, - { - "epoch": 0.17880117838032827, - "flos": 13563341678160.0, - "grad_norm": 2.3213080636909003, - "learning_rate": 3.772330161522693e-06, - "loss": 0.8493, - "num_input_tokens_seen": 31002075, - "step": 1487 - }, - { - "epoch": 0.17892142127096736, - "flos": 19455325896360.0, - "grad_norm": 2.9436751046341825, - "learning_rate": 3.7719690752673365e-06, - "loss": 0.7871, - "num_input_tokens_seen": 31022590, - "step": 1488 - }, - { - "epoch": 0.17904166416160644, - "flos": 17479660682160.0, - "grad_norm": 2.299566858390087, - "learning_rate": 3.7716077202056796e-06, - "loss": 0.7629, - "num_input_tokens_seen": 31040785, - "step": 1489 - }, - { - "epoch": 0.17916190705224552, - "flos": 13973777349120.0, - "grad_norm": 2.284997240060688, - "learning_rate": 3.7712460963925404e-06, - "loss": 0.9187, - "num_input_tokens_seen": 31056445, - "step": 1490 - }, - { - "epoch": 0.17928214994288463, - "flos": 18426927787560.0, - "grad_norm": 1.9300184699642322, - "learning_rate": 3.7708842038827775e-06, - "loss": 0.7387, - "num_input_tokens_seen": 31075125, - "step": 1491 - }, - { - "epoch": 0.17940239283352372, - "flos": 16379566238040.0, - "grad_norm": 2.4573047770729493, - "learning_rate": 3.770522042731288e-06, - "loss": 0.8349, - "num_input_tokens_seen": 31096740, - "step": 1492 - }, - { - "epoch": 0.1795226357241628, - "flos": 16965999847080.0, - "grad_norm": 2.1942510787928184, - "learning_rate": 3.7701596129930122e-06, - "loss": 0.8666, - "num_input_tokens_seen": 31115185, - "step": 1493 - }, - { - "epoch": 0.1796428786148019, - "flos": 16161496704480.0, - "grad_norm": 2.0795176722588966, - "learning_rate": 3.7697969147229315e-06, - "loss": 0.7112, - "num_input_tokens_seen": 31133065, - "step": 1494 - }, - { 
- "epoch": 0.179763121505441, - "flos": 15594814419120.0, - "grad_norm": 2.2139840316058765, - "learning_rate": 3.7694339479760647e-06, - "loss": 0.8439, - "num_input_tokens_seen": 31151815, - "step": 1495 - }, - { - "epoch": 0.17988336439608008, - "flos": 50261203918560.0, - "grad_norm": 0.7796993889217568, - "learning_rate": 3.769070712807476e-06, - "loss": 0.5996, - "num_input_tokens_seen": 31213565, - "step": 1496 - }, - { - "epoch": 0.18000360728671919, - "flos": 16053997445760.0, - "grad_norm": 2.262782661948253, - "learning_rate": 3.768707209272266e-06, - "loss": 0.7658, - "num_input_tokens_seen": 31233415, - "step": 1497 - }, - { - "epoch": 0.18012385017735827, - "flos": 13863871933440.0, - "grad_norm": 10.165935510911513, - "learning_rate": 3.768343437425579e-06, - "loss": 0.7454, - "num_input_tokens_seen": 31251705, - "step": 1498 - }, - { - "epoch": 0.18024409306799735, - "flos": 14427008303280.0, - "grad_norm": 2.4748612104862433, - "learning_rate": 3.7679793973225987e-06, - "loss": 0.8414, - "num_input_tokens_seen": 31267235, - "step": 1499 - }, - { - "epoch": 0.18036433595863643, - "flos": 49571450165520.0, - "grad_norm": 0.8779912636350491, - "learning_rate": 3.767615089018549e-06, - "loss": 0.6321, - "num_input_tokens_seen": 31329300, - "step": 1500 - }, - { - "epoch": 0.18048457884927555, - "flos": 13267908676440.0, - "grad_norm": 2.252649654451007, - "learning_rate": 3.7672505125686966e-06, - "loss": 0.8419, - "num_input_tokens_seen": 31345385, - "step": 1501 - }, - { - "epoch": 0.18060482173991463, - "flos": 11516201748360.0, - "grad_norm": 3.5475195748539807, - "learning_rate": 3.7668856680283455e-06, - "loss": 0.8426, - "num_input_tokens_seen": 31362130, - "step": 1502 - }, - { - "epoch": 0.1807250646305537, - "flos": 13568977151040.0, - "grad_norm": 2.442249659005324, - "learning_rate": 3.7665205554528437e-06, - "loss": 0.8107, - "num_input_tokens_seen": 31381205, - "step": 1503 - }, - { - "epoch": 0.18084530752119282, - "flos": 16945108764840.0, - "grad_norm": 1.956694735835308, - "learning_rate": 3.7661551748975782e-06, - "loss": 0.732, - "num_input_tokens_seen": 31399100, - "step": 1504 - }, - { - "epoch": 0.1809655504118319, - "flos": 44074704837480.0, - "grad_norm": 0.8381530777497229, - "learning_rate": 3.7657895264179772e-06, - "loss": 0.6093, - "num_input_tokens_seen": 31454795, - "step": 1505 - }, - { - "epoch": 0.181085793302471, - "flos": 32525676201960.0, - "grad_norm": 1.775956301907623, - "learning_rate": 3.765423610069509e-06, - "loss": 0.7337, - "num_input_tokens_seen": 31479905, - "step": 1506 - }, - { - "epoch": 0.18120603619311007, - "flos": 25640463599160.0, - "grad_norm": 2.466094314912539, - "learning_rate": 3.765057425907683e-06, - "loss": 0.7152, - "num_input_tokens_seen": 31501085, - "step": 1507 - }, - { - "epoch": 0.18132627908374918, - "flos": 15729568412040.0, - "grad_norm": 2.239355977664594, - "learning_rate": 3.764690973988048e-06, - "loss": 0.7718, - "num_input_tokens_seen": 31521145, - "step": 1508 - }, - { - "epoch": 0.18144652197438826, - "flos": 21317539779360.0, - "grad_norm": 2.1462859876900353, - "learning_rate": 3.7643242543661967e-06, - "loss": 0.7293, - "num_input_tokens_seen": 31543525, - "step": 1509 - }, - { - "epoch": 0.18156676486502735, - "flos": 50641345499160.0, - "grad_norm": 0.8430602115088457, - "learning_rate": 3.7639572670977573e-06, - "loss": 0.635, - "num_input_tokens_seen": 31598740, - "step": 1510 - }, - { - "epoch": 0.18168700775566646, - "flos": 19404204043680.0, - "grad_norm": 1.6115505735419458, - 
"learning_rate": 3.7635900122384042e-06, - "loss": 0.7536, - "num_input_tokens_seen": 31621455, - "step": 1511 - }, - { - "epoch": 0.18180725064630554, - "flos": 10917389094960.0, - "grad_norm": 2.663644797057017, - "learning_rate": 3.7632224898438477e-06, - "loss": 0.8496, - "num_input_tokens_seen": 31637650, - "step": 1512 - }, - { - "epoch": 0.18192749353694462, - "flos": 14379400706160.0, - "grad_norm": 1.8367107305185346, - "learning_rate": 3.762854699969842e-06, - "loss": 0.7801, - "num_input_tokens_seen": 31657880, - "step": 1513 - }, - { - "epoch": 0.1820477364275837, - "flos": 15134333334120.0, - "grad_norm": 2.9221471975902866, - "learning_rate": 3.762486642672179e-06, - "loss": 0.7109, - "num_input_tokens_seen": 31674540, - "step": 1514 - }, - { - "epoch": 0.18216797931822282, - "flos": 12489552169440.0, - "grad_norm": 2.529102309118837, - "learning_rate": 3.7621183180066946e-06, - "loss": 0.8379, - "num_input_tokens_seen": 31692220, - "step": 1515 - }, - { - "epoch": 0.1822882222088619, - "flos": 21454699929240.0, - "grad_norm": 1.8959384372068158, - "learning_rate": 3.7617497260292625e-06, - "loss": 0.7221, - "num_input_tokens_seen": 31713995, - "step": 1516 - }, - { - "epoch": 0.18240846509950098, - "flos": 12912995592600.0, - "grad_norm": 3.499106429337783, - "learning_rate": 3.7613808667957967e-06, - "loss": 0.7751, - "num_input_tokens_seen": 31726405, - "step": 1517 - }, - { - "epoch": 0.1825287079901401, - "flos": 10758261424200.0, - "grad_norm": 2.930113250314392, - "learning_rate": 3.7610117403622547e-06, - "loss": 0.9031, - "num_input_tokens_seen": 31742685, - "step": 1518 - }, - { - "epoch": 0.18264895088077918, - "flos": 16055042224440.0, - "grad_norm": 2.209545007016378, - "learning_rate": 3.7606423467846313e-06, - "loss": 0.884, - "num_input_tokens_seen": 31762010, - "step": 1519 - }, - { - "epoch": 0.18276919377141826, - "flos": 15270765304920.0, - "grad_norm": 2.0057459783398457, - "learning_rate": 3.760272686118964e-06, - "loss": 0.7769, - "num_input_tokens_seen": 31779950, - "step": 1520 - }, - { - "epoch": 0.18288943666205737, - "flos": 15702218697960.0, - "grad_norm": 2.4549525554291183, - "learning_rate": 3.7599027584213297e-06, - "loss": 0.902, - "num_input_tokens_seen": 31798550, - "step": 1521 - }, - { - "epoch": 0.18300967955269645, - "flos": 15753150590880.0, - "grad_norm": 3.100025786015723, - "learning_rate": 3.7595325637478465e-06, - "loss": 0.7719, - "num_input_tokens_seen": 31816295, - "step": 1522 - }, - { - "epoch": 0.18312992244333554, - "flos": 21183387325680.0, - "grad_norm": 1.825750324736588, - "learning_rate": 3.7591621021546723e-06, - "loss": 0.8096, - "num_input_tokens_seen": 31838010, - "step": 1523 - }, - { - "epoch": 0.18325016533397462, - "flos": 14703323180520.0, - "grad_norm": 5.070859059644944, - "learning_rate": 3.7587913736980062e-06, - "loss": 0.7982, - "num_input_tokens_seen": 31857370, - "step": 1524 - }, - { - "epoch": 0.18337040822461373, - "flos": 17078026480080.0, - "grad_norm": 2.0581502418580384, - "learning_rate": 3.7584203784340865e-06, - "loss": 0.8336, - "num_input_tokens_seen": 31876260, - "step": 1525 - }, - { - "epoch": 0.1834906511152528, - "flos": 18321898005720.0, - "grad_norm": 3.377956367116018, - "learning_rate": 3.7580491164191938e-06, - "loss": 0.8393, - "num_input_tokens_seen": 31894290, - "step": 1526 - }, - { - "epoch": 0.1836108940058919, - "flos": 49587718893600.0, - "grad_norm": 0.7564304766984115, - "learning_rate": 3.757677587709648e-06, - "loss": 0.6248, - "num_input_tokens_seen": 31957275, - 
"step": 1527 - }, - { - "epoch": 0.183731136896531, - "flos": 18861452196720.0, - "grad_norm": 2.248955802215193, - "learning_rate": 3.7573057923618095e-06, - "loss": 0.7419, - "num_input_tokens_seen": 31977090, - "step": 1528 - }, - { - "epoch": 0.1838513797871701, - "flos": 14947159430160.0, - "grad_norm": 2.443752778580981, - "learning_rate": 3.7569337304320793e-06, - "loss": 0.7203, - "num_input_tokens_seen": 31996395, - "step": 1529 - }, - { - "epoch": 0.18397162267780917, - "flos": 50559929556120.0, - "grad_norm": 0.8462372254008951, - "learning_rate": 3.756561401976899e-06, - "loss": 0.6525, - "num_input_tokens_seen": 32055820, - "step": 1530 - }, - { - "epoch": 0.18409186556844825, - "flos": 23451604485240.0, - "grad_norm": 2.5414945721340363, - "learning_rate": 3.7561888070527514e-06, - "loss": 0.8141, - "num_input_tokens_seen": 32077580, - "step": 1531 - }, - { - "epoch": 0.18421210845908736, - "flos": 14703703100040.0, - "grad_norm": 2.4840070344023686, - "learning_rate": 3.7558159457161577e-06, - "loss": 0.7775, - "num_input_tokens_seen": 32095265, - "step": 1532 - }, - { - "epoch": 0.18433235134972645, - "flos": 16916207712720.0, - "grad_norm": 2.949790242383356, - "learning_rate": 3.755442818023681e-06, - "loss": 0.7596, - "num_input_tokens_seen": 32114610, - "step": 1533 - }, - { - "epoch": 0.18445259424036553, - "flos": 13349292959520.0, - "grad_norm": 2.814933741113263, - "learning_rate": 3.7550694240319246e-06, - "loss": 0.7429, - "num_input_tokens_seen": 32132205, - "step": 1534 - }, - { - "epoch": 0.18457283713100464, - "flos": 15594339519720.0, - "grad_norm": 3.1843973655027566, - "learning_rate": 3.7546957637975326e-06, - "loss": 0.7365, - "num_input_tokens_seen": 32149335, - "step": 1535 - }, - { - "epoch": 0.18469308002164372, - "flos": 14892491661960.0, - "grad_norm": 2.1745458413750343, - "learning_rate": 3.7543218373771873e-06, - "loss": 0.733, - "num_input_tokens_seen": 32168380, - "step": 1536 - }, - { - "epoch": 0.1848133229122828, - "flos": 19377962428200.0, - "grad_norm": 1.4641170886436723, - "learning_rate": 3.753947644827615e-06, - "loss": 0.7778, - "num_input_tokens_seen": 32191560, - "step": 1537 - }, - { - "epoch": 0.1849335658029219, - "flos": 52028677506720.0, - "grad_norm": 1.0310310940609266, - "learning_rate": 3.753573186205579e-06, - "loss": 0.5929, - "num_input_tokens_seen": 32259400, - "step": 1538 - }, - { - "epoch": 0.185053808693561, - "flos": 12678594011040.0, - "grad_norm": 2.6063903368300654, - "learning_rate": 3.753198461567885e-06, - "loss": 0.7518, - "num_input_tokens_seen": 32276365, - "step": 1539 - }, - { - "epoch": 0.18517405158420008, - "flos": 21264803268720.0, - "grad_norm": 2.0655453480470642, - "learning_rate": 3.7528234709713783e-06, - "loss": 0.9024, - "num_input_tokens_seen": 32298830, - "step": 1540 - }, - { - "epoch": 0.18529429447483917, - "flos": 19642879719840.0, - "grad_norm": 2.9730766490367118, - "learning_rate": 3.7524482144729447e-06, - "loss": 0.8238, - "num_input_tokens_seen": 32318005, - "step": 1541 - }, - { - "epoch": 0.18541453736547828, - "flos": 9863319249960.0, - "grad_norm": 2.9083219956083197, - "learning_rate": 3.7520726921295106e-06, - "loss": 0.8256, - "num_input_tokens_seen": 32334445, - "step": 1542 - }, - { - "epoch": 0.18553478025611736, - "flos": 17593713552600.0, - "grad_norm": 2.5709308471816175, - "learning_rate": 3.751696903998042e-06, - "loss": 0.7205, - "num_input_tokens_seen": 32352800, - "step": 1543 - }, - { - "epoch": 0.18565502314675644, - "flos": 18969901254240.0, - "grad_norm": 
2.2827543759110323, - "learning_rate": 3.7513208501355456e-06, - "loss": 0.6785, - "num_input_tokens_seen": 32373625, - "step": 1544 - }, - { - "epoch": 0.18577526603739553, - "flos": 14325967676400.0, - "grad_norm": 2.462382446426526, - "learning_rate": 3.750944530599069e-06, - "loss": 0.8138, - "num_input_tokens_seen": 32392915, - "step": 1545 - }, - { - "epoch": 0.18589550892803464, - "flos": 13485281690880.0, - "grad_norm": 3.8384450885660737, - "learning_rate": 3.7505679454456992e-06, - "loss": 0.7847, - "num_input_tokens_seen": 32409245, - "step": 1546 - }, - { - "epoch": 0.18601575181867372, - "flos": 17241681525120.0, - "grad_norm": 2.1127549959935497, - "learning_rate": 3.750191094732564e-06, - "loss": 0.6747, - "num_input_tokens_seen": 32429830, - "step": 1547 - }, - { - "epoch": 0.1861359947093128, - "flos": 19321553362200.0, - "grad_norm": 2.0345952584237943, - "learning_rate": 3.7498139785168313e-06, - "loss": 0.7379, - "num_input_tokens_seen": 32450155, - "step": 1548 - }, - { - "epoch": 0.1862562375999519, - "flos": 17079799437840.0, - "grad_norm": 2.344536407780273, - "learning_rate": 3.749436596855709e-06, - "loss": 0.7623, - "num_input_tokens_seen": 32469175, - "step": 1549 - }, - { - "epoch": 0.186376480490591, - "flos": 12133087747560.0, - "grad_norm": 2.3080622388754923, - "learning_rate": 3.749058949806446e-06, - "loss": 0.8996, - "num_input_tokens_seen": 32485620, - "step": 1550 - }, - { - "epoch": 0.18649672338123008, - "flos": 15701142259320.0, - "grad_norm": 2.1953829825276325, - "learning_rate": 3.748681037426331e-06, - "loss": 0.8247, - "num_input_tokens_seen": 32504550, - "step": 1551 - }, - { - "epoch": 0.1866169662718692, - "flos": 8925423492720.0, - "grad_norm": 2.3203093146613765, - "learning_rate": 3.7483028597726936e-06, - "loss": 0.8995, - "num_input_tokens_seen": 32521040, - "step": 1552 - }, - { - "epoch": 0.18673720916250827, - "flos": 17264123945400.0, - "grad_norm": 2.2759683162180053, - "learning_rate": 3.7479244169029017e-06, - "loss": 0.6148, - "num_input_tokens_seen": 32540550, - "step": 1553 - }, - { - "epoch": 0.18685745205314735, - "flos": 14405958921240.0, - "grad_norm": 3.3836854861041785, - "learning_rate": 3.7475457088743658e-06, - "loss": 0.6926, - "num_input_tokens_seen": 32557520, - "step": 1554 - }, - { - "epoch": 0.18697769494378644, - "flos": 25126517824440.0, - "grad_norm": 2.7711461017353605, - "learning_rate": 3.7471667357445348e-06, - "loss": 0.722, - "num_input_tokens_seen": 32577070, - "step": 1555 - }, - { - "epoch": 0.18709793783442555, - "flos": 25154152478160.0, - "grad_norm": 2.6602295628778867, - "learning_rate": 3.7467874975709e-06, - "loss": 0.7088, - "num_input_tokens_seen": 32597595, - "step": 1556 - }, - { - "epoch": 0.18721818072506463, - "flos": 29959398264000.0, - "grad_norm": 3.851985389753068, - "learning_rate": 3.7464079944109904e-06, - "loss": 0.7764, - "num_input_tokens_seen": 32619175, - "step": 1557 - }, - { - "epoch": 0.18733842361570371, - "flos": 16210212400200.0, - "grad_norm": 2.561603389281938, - "learning_rate": 3.746028226322376e-06, - "loss": 0.7646, - "num_input_tokens_seen": 32634775, - "step": 1558 - }, - { - "epoch": 0.18745866650634282, - "flos": 13811262062640.0, - "grad_norm": 2.4431703587398386, - "learning_rate": 3.745648193362669e-06, - "loss": 0.741, - "num_input_tokens_seen": 32653850, - "step": 1559 - }, - { - "epoch": 0.1875789093969819, - "flos": 14106821704200.0, - "grad_norm": 3.655578682068471, - "learning_rate": 3.745267895589518e-06, - "loss": 0.7163, - "num_input_tokens_seen": 
32672110, - "step": 1560 - }, - { - "epoch": 0.187699152287621, - "flos": 12408294526200.0, - "grad_norm": 2.8135243563712122, - "learning_rate": 3.7448873330606154e-06, - "loss": 0.7914, - "num_input_tokens_seen": 32689600, - "step": 1561 - }, - { - "epoch": 0.18781939517826007, - "flos": 16351013445480.0, - "grad_norm": 5.086520628893025, - "learning_rate": 3.7445065058336914e-06, - "loss": 0.8512, - "num_input_tokens_seen": 32708190, - "step": 1562 - }, - { - "epoch": 0.18793963806889918, - "flos": 10626673427280.0, - "grad_norm": 2.161912862639698, - "learning_rate": 3.7441254139665176e-06, - "loss": 0.845, - "num_input_tokens_seen": 32724095, - "step": 1563 - }, - { - "epoch": 0.18805988095953827, - "flos": 12732565260120.0, - "grad_norm": 2.886990298247941, - "learning_rate": 3.743744057516905e-06, - "loss": 0.81, - "num_input_tokens_seen": 32741875, - "step": 1564 - }, - { - "epoch": 0.18818012385017735, - "flos": 10945910227560.0, - "grad_norm": 3.234271503948746, - "learning_rate": 3.743362436542706e-06, - "loss": 0.8601, - "num_input_tokens_seen": 32756285, - "step": 1565 - }, - { - "epoch": 0.18830036674081646, - "flos": 35003984585160.0, - "grad_norm": 2.2950509022904053, - "learning_rate": 3.7429805511018115e-06, - "loss": 0.7534, - "num_input_tokens_seen": 32777665, - "step": 1566 - }, - { - "epoch": 0.18842060963145554, - "flos": 22048193709360.0, - "grad_norm": 3.1838357845112513, - "learning_rate": 3.7425984012521524e-06, - "loss": 0.7543, - "num_input_tokens_seen": 32797585, - "step": 1567 - }, - { - "epoch": 0.18854085252209463, - "flos": 51857709010920.0, - "grad_norm": 0.7328166660605235, - "learning_rate": 3.7422159870517025e-06, - "loss": 0.6186, - "num_input_tokens_seen": 32862560, - "step": 1568 - }, - { - "epoch": 0.1886610954127337, - "flos": 15568857743280.0, - "grad_norm": 1.849049468324148, - "learning_rate": 3.7418333085584717e-06, - "loss": 0.7734, - "num_input_tokens_seen": 32883465, - "step": 1569 - }, - { - "epoch": 0.18878133830337282, - "flos": 12590656116240.0, - "grad_norm": 2.3669765869452473, - "learning_rate": 3.7414503658305128e-06, - "loss": 0.8951, - "num_input_tokens_seen": 32900420, - "step": 1570 - }, - { - "epoch": 0.1889015811940119, - "flos": 18888453651240.0, - "grad_norm": 3.0421539508554734, - "learning_rate": 3.7410671589259185e-06, - "loss": 0.7639, - "num_input_tokens_seen": 32918740, - "step": 1571 - }, - { - "epoch": 0.18902182408465099, - "flos": 15378327883560.0, - "grad_norm": 3.5593049294504886, - "learning_rate": 3.7406836879028205e-06, - "loss": 0.7713, - "num_input_tokens_seen": 32938685, - "step": 1572 - }, - { - "epoch": 0.1891420669752901, - "flos": 16295870777880.0, - "grad_norm": 2.1718011198582454, - "learning_rate": 3.7402999528193907e-06, - "loss": 0.7505, - "num_input_tokens_seen": 32957905, - "step": 1573 - }, - { - "epoch": 0.18926230986592918, - "flos": 16107810395040.0, - "grad_norm": 3.3145601595858345, - "learning_rate": 3.739915953733842e-06, - "loss": 0.8162, - "num_input_tokens_seen": 32975670, - "step": 1574 - }, - { - "epoch": 0.18938255275656826, - "flos": 17917224447480.0, - "grad_norm": 2.0472021642237466, - "learning_rate": 3.7395316907044264e-06, - "loss": 0.8006, - "num_input_tokens_seen": 32996175, - "step": 1575 - }, - { - "epoch": 0.18950279564720737, - "flos": 17890982832000.0, - "grad_norm": 3.7299386331863995, - "learning_rate": 3.7391471637894364e-06, - "loss": 0.7801, - "num_input_tokens_seen": 33018160, - "step": 1576 - }, - { - "epoch": 0.18962303853784646, - "flos": 14434226774160.0, - 
"grad_norm": 2.2589437957026632, - "learning_rate": 3.738762373047205e-06, - "loss": 0.8372, - "num_input_tokens_seen": 33037800, - "step": 1577 - }, - { - "epoch": 0.18974328142848554, - "flos": 15378201243720.0, - "grad_norm": 2.1518779049736576, - "learning_rate": 3.738377318536103e-06, - "loss": 0.8269, - "num_input_tokens_seen": 33057405, - "step": 1578 - }, - { - "epoch": 0.18986352431912462, - "flos": 9408505297800.0, - "grad_norm": 2.9294000510025047, - "learning_rate": 3.7379920003145447e-06, - "loss": 0.6986, - "num_input_tokens_seen": 33071400, - "step": 1579 - }, - { - "epoch": 0.18998376720976373, - "flos": 17402835433320.0, - "grad_norm": 2.2072627393463136, - "learning_rate": 3.7376064184409817e-06, - "loss": 0.8288, - "num_input_tokens_seen": 33090700, - "step": 1580 - }, - { - "epoch": 0.19010401010040281, - "flos": 16809499953000.0, - "grad_norm": 1.5649471955333634, - "learning_rate": 3.7372205729739063e-06, - "loss": 0.8557, - "num_input_tokens_seen": 33112235, - "step": 1581 - }, - { - "epoch": 0.1902242529910419, - "flos": 13974853787760.0, - "grad_norm": 3.299751509356202, - "learning_rate": 3.7368344639718514e-06, - "loss": 0.7041, - "num_input_tokens_seen": 33129890, - "step": 1582 - }, - { - "epoch": 0.190344495881681, - "flos": 18672980234400.0, - "grad_norm": 1.9675596362410257, - "learning_rate": 3.7364480914933895e-06, - "loss": 0.7967, - "num_input_tokens_seen": 33149850, - "step": 1583 - }, - { - "epoch": 0.1904647387723201, - "flos": 19641011782200.0, - "grad_norm": 2.4352645404468913, - "learning_rate": 3.7360614555971325e-06, - "loss": 0.8024, - "num_input_tokens_seen": 33169225, - "step": 1584 - }, - { - "epoch": 0.19058498166295917, - "flos": 17565667319400.0, - "grad_norm": 2.81173356527425, - "learning_rate": 3.735674556341733e-06, - "loss": 0.828, - "num_input_tokens_seen": 33188560, - "step": 1585 - }, - { - "epoch": 0.19070522455359826, - "flos": 20752092232440.0, - "grad_norm": 2.1771307275410603, - "learning_rate": 3.7352873937858835e-06, - "loss": 0.8186, - "num_input_tokens_seen": 33209815, - "step": 1586 - }, - { - "epoch": 0.19082546744423737, - "flos": 18802573653840.0, - "grad_norm": 2.2489356596526555, - "learning_rate": 3.734899967988316e-06, - "loss": 0.701, - "num_input_tokens_seen": 33227715, - "step": 1587 - }, - { - "epoch": 0.19094571033487645, - "flos": 14406940380000.0, - "grad_norm": 2.083328043613076, - "learning_rate": 3.7345122790078026e-06, - "loss": 0.8251, - "num_input_tokens_seen": 33245000, - "step": 1588 - }, - { - "epoch": 0.19106595322551553, - "flos": 15810667755480.0, - "grad_norm": 3.737180669404936, - "learning_rate": 3.7341243269031556e-06, - "loss": 0.9121, - "num_input_tokens_seen": 33263710, - "step": 1589 - }, - { - "epoch": 0.19118619611615464, - "flos": 21938604893280.0, - "grad_norm": 2.385678927956598, - "learning_rate": 3.7337361117332275e-06, - "loss": 0.7634, - "num_input_tokens_seen": 33285170, - "step": 1590 - }, - { - "epoch": 0.19130643900679373, - "flos": 12596481548880.0, - "grad_norm": 3.2887567300992253, - "learning_rate": 3.7333476335569087e-06, - "loss": 0.7484, - "num_input_tokens_seen": 33302890, - "step": 1591 - }, - { - "epoch": 0.1914266818974328, - "flos": 18186194214000.0, - "grad_norm": 4.093005163559032, - "learning_rate": 3.7329588924331325e-06, - "loss": 0.6466, - "num_input_tokens_seen": 33323815, - "step": 1592 - }, - { - "epoch": 0.1915469247880719, - "flos": 13839181656000.0, - "grad_norm": 2.3827475617111804, - "learning_rate": 3.732569888420871e-06, - "loss": 0.8008, - 
"num_input_tokens_seen": 33343070, - "step": 1593 - }, - { - "epoch": 0.191667167678711, - "flos": 15378992742720.0, - "grad_norm": 2.349277438010123, - "learning_rate": 3.732180621579134e-06, - "loss": 0.808, - "num_input_tokens_seen": 33362005, - "step": 1594 - }, - { - "epoch": 0.1917874105693501, - "flos": 25150923162240.0, - "grad_norm": 2.041027890401244, - "learning_rate": 3.7317910919669745e-06, - "loss": 0.7907, - "num_input_tokens_seen": 33382920, - "step": 1595 - }, - { - "epoch": 0.19190765345998917, - "flos": 17242251404400.0, - "grad_norm": 2.8288382795164724, - "learning_rate": 3.7314012996434826e-06, - "loss": 0.7465, - "num_input_tokens_seen": 33401370, - "step": 1596 - }, - { - "epoch": 0.19202789635062828, - "flos": 14511336962640.0, - "grad_norm": 1.975214683357007, - "learning_rate": 3.7310112446677907e-06, - "loss": 0.7835, - "num_input_tokens_seen": 33419000, - "step": 1597 - }, - { - "epoch": 0.19214813924126736, - "flos": 15297513479760.0, - "grad_norm": 2.3436873364018815, - "learning_rate": 3.7306209270990695e-06, - "loss": 0.6722, - "num_input_tokens_seen": 33436725, - "step": 1598 - }, - { - "epoch": 0.19226838213190645, - "flos": 19321141782720.0, - "grad_norm": 2.4941743685621174, - "learning_rate": 3.7302303469965292e-06, - "loss": 0.8471, - "num_input_tokens_seen": 33455985, - "step": 1599 - }, - { - "epoch": 0.19238862502254553, - "flos": 15243985470120.0, - "grad_norm": 2.0760202863874397, - "learning_rate": 3.7298395044194206e-06, - "loss": 0.6906, - "num_input_tokens_seen": 33474515, - "step": 1600 - }, - { - "epoch": 0.19250886791318464, - "flos": 15891925398720.0, - "grad_norm": 1.9697278939556109, - "learning_rate": 3.7294483994270356e-06, - "loss": 0.9089, - "num_input_tokens_seen": 33492560, - "step": 1601 - }, - { - "epoch": 0.19262911080382372, - "flos": 17106927532200.0, - "grad_norm": 2.4420337084319037, - "learning_rate": 3.7290570320787033e-06, - "loss": 0.7485, - "num_input_tokens_seen": 33511860, - "step": 1602 - }, - { - "epoch": 0.1927493536944628, - "flos": 16052636067480.0, - "grad_norm": 2.823940743551394, - "learning_rate": 3.728665402433793e-06, - "loss": 0.6967, - "num_input_tokens_seen": 33530150, - "step": 1603 - }, - { - "epoch": 0.19286959658510192, - "flos": 12058098776400.0, - "grad_norm": 3.2531587948908043, - "learning_rate": 3.7282735105517164e-06, - "loss": 0.8396, - "num_input_tokens_seen": 33547995, - "step": 1604 - }, - { - "epoch": 0.192989839475741, - "flos": 15811237634760.0, - "grad_norm": 2.5360519637190215, - "learning_rate": 3.727881356491922e-06, - "loss": 0.6678, - "num_input_tokens_seen": 33566125, - "step": 1605 - }, - { - "epoch": 0.19311008236638008, - "flos": 14082796285920.0, - "grad_norm": 2.080980332691794, - "learning_rate": 3.7274889403139002e-06, - "loss": 0.7486, - "num_input_tokens_seen": 33583470, - "step": 1606 - }, - { - "epoch": 0.1932303252570192, - "flos": 21017009524080.0, - "grad_norm": 3.065557549499672, - "learning_rate": 3.727096262077179e-06, - "loss": 0.7737, - "num_input_tokens_seen": 33602185, - "step": 1607 - }, - { - "epoch": 0.19335056814765827, - "flos": 13407411663360.0, - "grad_norm": 1.8540197878607707, - "learning_rate": 3.7267033218413285e-06, - "loss": 0.8356, - "num_input_tokens_seen": 33619700, - "step": 1608 - }, - { - "epoch": 0.19347081103829736, - "flos": 9627049730760.0, - "grad_norm": 2.4692630476159345, - "learning_rate": 3.726310119665957e-06, - "loss": 0.7961, - "num_input_tokens_seen": 33635755, - "step": 1609 - }, - { - "epoch": 0.19359105392893644, - "flos": 
14836209235800.0, - "grad_norm": 1.9849328886357078, - "learning_rate": 3.725916655610713e-06, - "loss": 0.8369, - "num_input_tokens_seen": 33654805, - "step": 1610 - }, - { - "epoch": 0.19371129681957555, - "flos": 14973021126120.0, - "grad_norm": 3.031917695118577, - "learning_rate": 3.725522929735284e-06, - "loss": 0.7288, - "num_input_tokens_seen": 33671460, - "step": 1611 - }, - { - "epoch": 0.19383153971021463, - "flos": 22344798129600.0, - "grad_norm": 3.1635643837751313, - "learning_rate": 3.725128942099399e-06, - "loss": 0.7188, - "num_input_tokens_seen": 33691580, - "step": 1612 - }, - { - "epoch": 0.19395178260085372, - "flos": 17996297553480.0, - "grad_norm": 2.049339616056469, - "learning_rate": 3.7247346927628245e-06, - "loss": 0.7881, - "num_input_tokens_seen": 33711235, - "step": 1613 - }, - { - "epoch": 0.19407202549149283, - "flos": 21238624973160.0, - "grad_norm": 2.4972160096940486, - "learning_rate": 3.7243401817853694e-06, - "loss": 0.768, - "num_input_tokens_seen": 33731645, - "step": 1614 - }, - { - "epoch": 0.1941922683821319, - "flos": 13137713717760.0, - "grad_norm": 2.593866736301408, - "learning_rate": 3.723945409226879e-06, - "loss": 0.709, - "num_input_tokens_seen": 33749855, - "step": 1615 - }, - { - "epoch": 0.194312511272771, - "flos": 7006610583960.0, - "grad_norm": 2.58408282287777, - "learning_rate": 3.723550375147241e-06, - "loss": 0.7969, - "num_input_tokens_seen": 33764350, - "step": 1616 - }, - { - "epoch": 0.19443275416341008, - "flos": 19806946344360.0, - "grad_norm": 1.9817032136212605, - "learning_rate": 3.7231550796063816e-06, - "loss": 0.7798, - "num_input_tokens_seen": 33784080, - "step": 1617 - }, - { - "epoch": 0.1945529970540492, - "flos": 11540005546920.0, - "grad_norm": 2.1278820689012847, - "learning_rate": 3.722759522664266e-06, - "loss": 0.6271, - "num_input_tokens_seen": 33801100, - "step": 1618 - }, - { - "epoch": 0.19467323994468827, - "flos": 14107138303800.0, - "grad_norm": 2.05953025869362, - "learning_rate": 3.7223637043809016e-06, - "loss": 0.7909, - "num_input_tokens_seen": 33819800, - "step": 1619 - }, - { - "epoch": 0.19479348283532735, - "flos": 17673261558000.0, - "grad_norm": 1.8534693174536738, - "learning_rate": 3.7219676248163322e-06, - "loss": 0.8457, - "num_input_tokens_seen": 33836685, - "step": 1620 - }, - { - "epoch": 0.19491372572596646, - "flos": 18888833570760.0, - "grad_norm": 2.0823467080069573, - "learning_rate": 3.721571284030643e-06, - "loss": 0.905, - "num_input_tokens_seen": 33856215, - "step": 1621 - }, - { - "epoch": 0.19503396861660555, - "flos": 14351797712400.0, - "grad_norm": 2.434228707105915, - "learning_rate": 3.7211746820839587e-06, - "loss": 0.7598, - "num_input_tokens_seen": 33873030, - "step": 1622 - }, - { - "epoch": 0.19515421150724463, - "flos": 15379056062640.0, - "grad_norm": 1.9912378777986885, - "learning_rate": 3.7207778190364437e-06, - "loss": 0.808, - "num_input_tokens_seen": 33891175, - "step": 1623 - }, - { - "epoch": 0.1952744543978837, - "flos": 24206378813400.0, - "grad_norm": 1.9163281791450928, - "learning_rate": 3.720380694948302e-06, - "loss": 0.7324, - "num_input_tokens_seen": 33913780, - "step": 1624 - }, - { - "epoch": 0.19539469728852282, - "flos": 47214693571920.0, - "grad_norm": 0.9824684389883707, - "learning_rate": 3.719983309879777e-06, - "loss": 0.7362, - "num_input_tokens_seen": 33973280, - "step": 1625 - }, - { - "epoch": 0.1955149401791619, - "flos": 9708560653680.0, - "grad_norm": 1.8677773282238834, - "learning_rate": 3.719585663891151e-06, - "loss": 
0.7662, - "num_input_tokens_seen": 33990535, - "step": 1626 - }, - { - "epoch": 0.195635183069801, - "flos": 13673057134080.0, - "grad_norm": 3.0377153485101624, - "learning_rate": 3.719187757042747e-06, - "loss": 0.7648, - "num_input_tokens_seen": 34008075, - "step": 1627 - }, - { - "epoch": 0.1957554259604401, - "flos": 51116801745240.0, - "grad_norm": 0.7753556865824806, - "learning_rate": 3.7187895893949275e-06, - "loss": 0.5895, - "num_input_tokens_seen": 34074265, - "step": 1628 - }, - { - "epoch": 0.19587566885107918, - "flos": 15405835897440.0, - "grad_norm": 2.242716550715999, - "learning_rate": 3.7183911610080937e-06, - "loss": 0.719, - "num_input_tokens_seen": 34090850, - "step": 1629 - }, - { - "epoch": 0.19599591174171827, - "flos": 16237910373840.0, - "grad_norm": 3.020750127939949, - "learning_rate": 3.7179924719426872e-06, - "loss": 0.723, - "num_input_tokens_seen": 34108465, - "step": 1630 - }, - { - "epoch": 0.19611615463235738, - "flos": 17403247012800.0, - "grad_norm": 2.572248106877278, - "learning_rate": 3.7175935222591885e-06, - "loss": 0.7331, - "num_input_tokens_seen": 34127485, - "step": 1631 - }, - { - "epoch": 0.19623639752299646, - "flos": 20992065966960.0, - "grad_norm": 2.084483236222125, - "learning_rate": 3.717194312018118e-06, - "loss": 0.7422, - "num_input_tokens_seen": 34146190, - "step": 1632 - }, - { - "epoch": 0.19635664041363554, - "flos": 15378771123000.0, - "grad_norm": 2.5862721575252916, - "learning_rate": 3.716794841280036e-06, - "loss": 0.7383, - "num_input_tokens_seen": 34164615, - "step": 1633 - }, - { - "epoch": 0.19647688330427462, - "flos": 13785400366680.0, - "grad_norm": 2.1789905158401535, - "learning_rate": 3.7163951101055407e-06, - "loss": 0.7532, - "num_input_tokens_seen": 34182395, - "step": 1634 - }, - { - "epoch": 0.19659712619491373, - "flos": 17754107621760.0, - "grad_norm": 2.1525038090228006, - "learning_rate": 3.715995118555273e-06, - "loss": 0.7698, - "num_input_tokens_seen": 34202090, - "step": 1635 - }, - { - "epoch": 0.19671736908555282, - "flos": 18105664749840.0, - "grad_norm": 4.7009414216988645, - "learning_rate": 3.71559486668991e-06, - "loss": 0.8244, - "num_input_tokens_seen": 34220670, - "step": 1636 - }, - { - "epoch": 0.1968376119761919, - "flos": 17458073080800.0, - "grad_norm": 2.543286281748753, - "learning_rate": 3.715194354570169e-06, - "loss": 0.7542, - "num_input_tokens_seen": 34240395, - "step": 1637 - }, - { - "epoch": 0.196957854866831, - "flos": 13218686421360.0, - "grad_norm": 3.163678331953761, - "learning_rate": 3.714793582256809e-06, - "loss": 0.8148, - "num_input_tokens_seen": 34257180, - "step": 1638 - }, - { - "epoch": 0.1970780977574701, - "flos": 15837954149640.0, - "grad_norm": 2.5675276769466784, - "learning_rate": 3.7143925498106253e-06, - "loss": 0.8258, - "num_input_tokens_seen": 34275440, - "step": 1639 - }, - { - "epoch": 0.19719834064810918, - "flos": 15215021098080.0, - "grad_norm": 2.098559421340616, - "learning_rate": 3.7139912572924558e-06, - "loss": 0.7798, - "num_input_tokens_seen": 34294190, - "step": 1640 - }, - { - "epoch": 0.19731858353874826, - "flos": 17155643227920.0, - "grad_norm": 2.993302817185574, - "learning_rate": 3.7135897047631744e-06, - "loss": 0.7979, - "num_input_tokens_seen": 34311795, - "step": 1641 - }, - { - "epoch": 0.19743882642938737, - "flos": 17566047238920.0, - "grad_norm": 2.5059254043692487, - "learning_rate": 3.713187892283698e-06, - "loss": 0.7281, - "num_input_tokens_seen": 34331125, - "step": 1642 - }, - { - "epoch": 0.19755906932002645, - 
"flos": 10916217676440.0, - "grad_norm": 3.8149637485046677, - "learning_rate": 3.71278581991498e-06, - "loss": 0.8446, - "num_input_tokens_seen": 34346705, - "step": 1643 - }, - { - "epoch": 0.19767931221066554, - "flos": 14239549459680.0, - "grad_norm": 3.421881549428307, - "learning_rate": 3.712383487718015e-06, - "loss": 0.7721, - "num_input_tokens_seen": 34364665, - "step": 1644 - }, - { - "epoch": 0.19779955510130465, - "flos": 18860724017640.0, - "grad_norm": 2.32043775226833, - "learning_rate": 3.7119808957538365e-06, - "loss": 0.8484, - "num_input_tokens_seen": 34383380, - "step": 1645 - }, - { - "epoch": 0.19791979799194373, - "flos": 15190109200920.0, - "grad_norm": 2.4413276354530353, - "learning_rate": 3.711578044083517e-06, - "loss": 0.7785, - "num_input_tokens_seen": 34399900, - "step": 1646 - }, - { - "epoch": 0.1980400408825828, - "flos": 18750122082840.0, - "grad_norm": 2.379388769664556, - "learning_rate": 3.7111749327681698e-06, - "loss": 0.7395, - "num_input_tokens_seen": 34419655, - "step": 1647 - }, - { - "epoch": 0.1981602837732219, - "flos": 17214996670200.0, - "grad_norm": 5.719382996260187, - "learning_rate": 3.7107715618689455e-06, - "loss": 0.8403, - "num_input_tokens_seen": 34438350, - "step": 1648 - }, - { - "epoch": 0.198280526663861, - "flos": 16971888599640.0, - "grad_norm": 1.6092988523968081, - "learning_rate": 3.710367931447035e-06, - "loss": 0.8141, - "num_input_tokens_seen": 34459850, - "step": 1649 - }, - { - "epoch": 0.1984007695545001, - "flos": 15864828964320.0, - "grad_norm": 4.2258437809520055, - "learning_rate": 3.70996404156367e-06, - "loss": 0.8436, - "num_input_tokens_seen": 34479205, - "step": 1650 - }, - { - "epoch": 0.19852101244513917, - "flos": 26502832165920.0, - "grad_norm": 2.0625450822931604, - "learning_rate": 3.7095598922801187e-06, - "loss": 0.7089, - "num_input_tokens_seen": 34501000, - "step": 1651 - }, - { - "epoch": 0.19864125533577828, - "flos": 16912155237840.0, - "grad_norm": 4.844274873905802, - "learning_rate": 3.7091554836576914e-06, - "loss": 0.7466, - "num_input_tokens_seen": 34517395, - "step": 1652 - }, - { - "epoch": 0.19876149822641737, - "flos": 18025135285680.0, - "grad_norm": 2.024383088319784, - "learning_rate": 3.708750815757736e-06, - "loss": 0.8086, - "num_input_tokens_seen": 34537885, - "step": 1653 - }, - { - "epoch": 0.19888174111705645, - "flos": 23640741306720.0, - "grad_norm": 3.2484033036920614, - "learning_rate": 3.7083458886416407e-06, - "loss": 0.6966, - "num_input_tokens_seen": 34556800, - "step": 1654 - }, - { - "epoch": 0.19900198400769553, - "flos": 18025103625720.0, - "grad_norm": 3.3298528779668044, - "learning_rate": 3.707940702370832e-06, - "loss": 0.8584, - "num_input_tokens_seen": 34577365, - "step": 1655 - }, - { - "epoch": 0.19912222689833464, - "flos": 50080425326520.0, - "grad_norm": 0.7738293127352185, - "learning_rate": 3.707535257006777e-06, - "loss": 0.5893, - "num_input_tokens_seen": 34642710, - "step": 1656 - }, - { - "epoch": 0.19924246978897373, - "flos": 11463560217600.0, - "grad_norm": 3.122107884118626, - "learning_rate": 3.707129552610981e-06, - "loss": 0.8712, - "num_input_tokens_seen": 34661080, - "step": 1657 - }, - { - "epoch": 0.1993627126796128, - "flos": 12813601283640.0, - "grad_norm": 3.8134511792929233, - "learning_rate": 3.70672358924499e-06, - "loss": 0.7223, - "num_input_tokens_seen": 34680040, - "step": 1658 - }, - { - "epoch": 0.19948295557025192, - "flos": 30040244327760.0, - "grad_norm": 2.263277297564615, - "learning_rate": 3.706317366970386e-06, - 
"loss": 0.769, - "num_input_tokens_seen": 34700760, - "step": 1659 - }, - { - "epoch": 0.199603198460891, - "flos": 18375805934880.0, - "grad_norm": 2.24326226929124, - "learning_rate": 3.705910885848795e-06, - "loss": 0.8272, - "num_input_tokens_seen": 34718855, - "step": 1660 - }, - { - "epoch": 0.19972344135153008, - "flos": 14676764965440.0, - "grad_norm": 2.8036129734060795, - "learning_rate": 3.705504145941879e-06, - "loss": 0.8245, - "num_input_tokens_seen": 34736745, - "step": 1661 - }, - { - "epoch": 0.1998436842421692, - "flos": 17372351383200.0, - "grad_norm": 2.330910162406776, - "learning_rate": 3.7050971473113403e-06, - "loss": 0.7682, - "num_input_tokens_seen": 34756240, - "step": 1662 - }, - { - "epoch": 0.19996392713280828, - "flos": 26531479938360.0, - "grad_norm": 1.8963235144550679, - "learning_rate": 3.7046898900189196e-06, - "loss": 0.7924, - "num_input_tokens_seen": 34780295, - "step": 1663 - }, - { - "epoch": 0.20008417002344736, - "flos": 17321134550640.0, - "grad_norm": 1.9098601004409264, - "learning_rate": 3.704282374126398e-06, - "loss": 0.8229, - "num_input_tokens_seen": 34799695, - "step": 1664 - }, - { - "epoch": 0.20020441291408644, - "flos": 15999994536720.0, - "grad_norm": 2.317849852852564, - "learning_rate": 3.7038745996955954e-06, - "loss": 0.8627, - "num_input_tokens_seen": 34818760, - "step": 1665 - }, - { - "epoch": 0.20032465580472555, - "flos": 16966886325960.0, - "grad_norm": 2.897674706946139, - "learning_rate": 3.703466566788371e-06, - "loss": 0.6891, - "num_input_tokens_seen": 34837610, - "step": 1666 - }, - { - "epoch": 0.20044489869536464, - "flos": 17480452181160.0, - "grad_norm": 4.847769171101208, - "learning_rate": 3.703058275466622e-06, - "loss": 0.7304, - "num_input_tokens_seen": 34856565, - "step": 1667 - }, - { - "epoch": 0.20056514158600372, - "flos": 16054155745560.0, - "grad_norm": 1.9493142756292838, - "learning_rate": 3.7026497257922877e-06, - "loss": 0.7573, - "num_input_tokens_seen": 34876595, - "step": 1668 - }, - { - "epoch": 0.20068538447664283, - "flos": 17485422794880.0, - "grad_norm": 2.2252664642222046, - "learning_rate": 3.7022409178273436e-06, - "loss": 0.8229, - "num_input_tokens_seen": 34897295, - "step": 1669 - }, - { - "epoch": 0.2008056273672819, - "flos": 13462174411440.0, - "grad_norm": 1.9307582216741002, - "learning_rate": 3.7018318516338054e-06, - "loss": 0.7643, - "num_input_tokens_seen": 34916175, - "step": 1670 - }, - { - "epoch": 0.200925870257921, - "flos": 17322780868560.0, - "grad_norm": 2.7129802647852523, - "learning_rate": 3.7014225272737284e-06, - "loss": 0.7876, - "num_input_tokens_seen": 34935120, - "step": 1671 - }, - { - "epoch": 0.20104611314856008, - "flos": 11814990705840.0, - "grad_norm": 2.5839232929878957, - "learning_rate": 3.701012944809207e-06, - "loss": 0.7183, - "num_input_tokens_seen": 34951955, - "step": 1672 - }, - { - "epoch": 0.2011663560391992, - "flos": 15648627368400.0, - "grad_norm": 2.176827033864604, - "learning_rate": 3.700603104302374e-06, - "loss": 0.7728, - "num_input_tokens_seen": 34971485, - "step": 1673 - }, - { - "epoch": 0.20128659892983827, - "flos": 45872915755440.0, - "grad_norm": 0.8650061174959163, - "learning_rate": 3.7001930058154027e-06, - "loss": 0.5769, - "num_input_tokens_seen": 35036165, - "step": 1674 - }, - { - "epoch": 0.20140684182047736, - "flos": 20859654811080.0, - "grad_norm": 3.0220944282527586, - "learning_rate": 3.6997826494105037e-06, - "loss": 0.7831, - "num_input_tokens_seen": 35056330, - "step": 1675 - }, - { - "epoch": 
0.20152708471111647, - "flos": 20589798565680.0, - "grad_norm": 2.5719693765671128, - "learning_rate": 3.6993720351499286e-06, - "loss": 0.679, - "num_input_tokens_seen": 35077175, - "step": 1676 - }, - { - "epoch": 0.20164732760175555, - "flos": 17241491565360.0, - "grad_norm": 1.828951656881069, - "learning_rate": 3.6989611630959666e-06, - "loss": 0.757, - "num_input_tokens_seen": 35095450, - "step": 1677 - }, - { - "epoch": 0.20176757049239463, - "flos": 52438891557960.0, - "grad_norm": 0.7139165315912531, - "learning_rate": 3.6985500333109474e-06, - "loss": 0.607, - "num_input_tokens_seen": 35163500, - "step": 1678 - }, - { - "epoch": 0.20188781338303372, - "flos": 15672367847040.0, - "grad_norm": 2.6899025402353067, - "learning_rate": 3.6981386458572385e-06, - "loss": 0.7478, - "num_input_tokens_seen": 35181195, - "step": 1679 - }, - { - "epoch": 0.20200805627367283, - "flos": 8358108008160.0, - "grad_norm": 2.3660636223101306, - "learning_rate": 3.6977270007972468e-06, - "loss": 0.7364, - "num_input_tokens_seen": 35198450, - "step": 1680 - }, - { - "epoch": 0.2021282991643119, - "flos": 20938189697760.0, - "grad_norm": 2.5824971435176565, - "learning_rate": 3.6973150981934196e-06, - "loss": 0.6978, - "num_input_tokens_seen": 35219400, - "step": 1681 - }, - { - "epoch": 0.202248542054951, - "flos": 13076587317720.0, - "grad_norm": 3.0844971977538997, - "learning_rate": 3.6969029381082415e-06, - "loss": 0.8109, - "num_input_tokens_seen": 35235115, - "step": 1682 - }, - { - "epoch": 0.2023687849455901, - "flos": 14514249678960.0, - "grad_norm": 2.107051671897696, - "learning_rate": 3.696490520604237e-06, - "loss": 0.7916, - "num_input_tokens_seen": 35253525, - "step": 1683 - }, - { - "epoch": 0.20248902783622919, - "flos": 16512515613240.0, - "grad_norm": 1.8656908365514804, - "learning_rate": 3.696077845743968e-06, - "loss": 0.7908, - "num_input_tokens_seen": 35272835, - "step": 1684 - }, - { - "epoch": 0.20260927072686827, - "flos": 16619698272360.0, - "grad_norm": 2.4300092379861438, - "learning_rate": 3.69566491359004e-06, - "loss": 0.7055, - "num_input_tokens_seen": 35289200, - "step": 1685 - }, - { - "epoch": 0.20272951361750738, - "flos": 37574346657960.0, - "grad_norm": 1.8690843085372162, - "learning_rate": 3.695251724205092e-06, - "loss": 0.6891, - "num_input_tokens_seen": 35313280, - "step": 1686 - }, - { - "epoch": 0.20284975650814646, - "flos": 19482707270400.0, - "grad_norm": 1.904393652388617, - "learning_rate": 3.6948382776518054e-06, - "loss": 0.8482, - "num_input_tokens_seen": 35333705, - "step": 1687 - }, - { - "epoch": 0.20296999939878554, - "flos": 11679571853760.0, - "grad_norm": 3.621909303807162, - "learning_rate": 3.6944245739929e-06, - "loss": 0.7801, - "num_input_tokens_seen": 35349585, - "step": 1688 - }, - { - "epoch": 0.20309024228942463, - "flos": 14024234342640.0, - "grad_norm": 2.3146528584373125, - "learning_rate": 3.6940106132911332e-06, - "loss": 0.7104, - "num_input_tokens_seen": 35366490, - "step": 1689 - }, - { - "epoch": 0.20321048518006374, - "flos": 16702380613800.0, - "grad_norm": 2.261915129205182, - "learning_rate": 3.6935963956093037e-06, - "loss": 0.8639, - "num_input_tokens_seen": 35386295, - "step": 1690 - }, - { - "epoch": 0.20333072807070282, - "flos": 13945414516320.0, - "grad_norm": 1.9904576589052407, - "learning_rate": 3.6931819210102474e-06, - "loss": 0.6781, - "num_input_tokens_seen": 35405410, - "step": 1691 - }, - { - "epoch": 0.2034509709613419, - "flos": 13267465437000.0, - "grad_norm": 2.0753192747901883, - "learning_rate": 
3.6927671895568402e-06, - "loss": 0.8399, - "num_input_tokens_seen": 35424190, - "step": 1692 - }, - { - "epoch": 0.20357121385198101, - "flos": 16777527884760.0, - "grad_norm": 2.01713777299099, - "learning_rate": 3.692352201311996e-06, - "loss": 0.8559, - "num_input_tokens_seen": 35442760, - "step": 1693 - }, - { - "epoch": 0.2036914567426201, - "flos": 15296785300680.0, - "grad_norm": 2.4052523241498736, - "learning_rate": 3.6919369563386687e-06, - "loss": 0.7479, - "num_input_tokens_seen": 35462280, - "step": 1694 - }, - { - "epoch": 0.20381169963325918, - "flos": 11297783955240.0, - "grad_norm": 2.7310250824733617, - "learning_rate": 3.69152145469985e-06, - "loss": 0.7801, - "num_input_tokens_seen": 35479045, - "step": 1695 - }, - { - "epoch": 0.20393194252389826, - "flos": 21150560438520.0, - "grad_norm": 2.300529634612744, - "learning_rate": 3.691105696458572e-06, - "loss": 0.8088, - "num_input_tokens_seen": 35496060, - "step": 1696 - }, - { - "epoch": 0.20405218541453737, - "flos": 16455694967760.0, - "grad_norm": 3.466615724800639, - "learning_rate": 3.690689681677904e-06, - "loss": 0.6561, - "num_input_tokens_seen": 35514250, - "step": 1697 - }, - { - "epoch": 0.20417242830517646, - "flos": 18591912550920.0, - "grad_norm": 1.9851857128756778, - "learning_rate": 3.690273410420956e-06, - "loss": 0.8686, - "num_input_tokens_seen": 35533735, - "step": 1698 - }, - { - "epoch": 0.20429267119581554, - "flos": 10760952520800.0, - "grad_norm": 2.6350886526522075, - "learning_rate": 3.689856882750875e-06, - "loss": 0.7528, - "num_input_tokens_seen": 35548655, - "step": 1699 - }, - { - "epoch": 0.20441291408645465, - "flos": 12972412354800.0, - "grad_norm": 3.1099963310195524, - "learning_rate": 3.6894400987308486e-06, - "loss": 0.7741, - "num_input_tokens_seen": 35565895, - "step": 1700 - }, - { - "epoch": 0.20453315697709373, - "flos": 11976682833360.0, - "grad_norm": 2.2304302686588544, - "learning_rate": 3.6890230584241024e-06, - "loss": 0.825, - "num_input_tokens_seen": 35582545, - "step": 1701 - }, - { - "epoch": 0.20465339986773282, - "flos": 49190517085920.0, - "grad_norm": 0.9286733272730795, - "learning_rate": 3.6886057618939016e-06, - "loss": 0.693, - "num_input_tokens_seen": 35645085, - "step": 1702 - }, - { - "epoch": 0.2047736427583719, - "flos": 30876624558720.0, - "grad_norm": 2.296423835300324, - "learning_rate": 3.6881882092035492e-06, - "loss": 0.6713, - "num_input_tokens_seen": 35666190, - "step": 1703 - }, - { - "epoch": 0.204893885649011, - "flos": 51576550159800.0, - "grad_norm": 1.0984760630364905, - "learning_rate": 3.6877704004163873e-06, - "loss": 0.6732, - "num_input_tokens_seen": 35726315, - "step": 1704 - }, - { - "epoch": 0.2050141285396501, - "flos": 16242627707880.0, - "grad_norm": 2.3571106611257617, - "learning_rate": 3.6873523355957984e-06, - "loss": 0.7698, - "num_input_tokens_seen": 35745035, - "step": 1705 - }, - { - "epoch": 0.20513437143028918, - "flos": 34071792112080.0, - "grad_norm": 1.0588957296014623, - "learning_rate": 3.686934014805201e-06, - "loss": 0.7144, - "num_input_tokens_seen": 35795385, - "step": 1706 - }, - { - "epoch": 0.20525461432092829, - "flos": 16023260115960.0, - "grad_norm": 2.076294745526341, - "learning_rate": 3.6865154381080552e-06, - "loss": 0.7947, - "num_input_tokens_seen": 35815790, - "step": 1707 - }, - { - "epoch": 0.20537485721156737, - "flos": 15513018556560.0, - "grad_norm": 2.1907884131414392, - "learning_rate": 3.6860966055678585e-06, - "loss": 0.8092, - "num_input_tokens_seen": 35831865, - "step": 1708 - }, - { 
- "epoch": 0.20549510010220645, - "flos": 14755679771640.0, - "grad_norm": 1.8256845078895811, - "learning_rate": 3.685677517248147e-06, - "loss": 0.8495, - "num_input_tokens_seen": 35850475, - "step": 1709 - }, - { - "epoch": 0.20561534299284553, - "flos": 12406299948720.0, - "grad_norm": 1.9394755597464066, - "learning_rate": 3.6852581732124967e-06, - "loss": 0.7929, - "num_input_tokens_seen": 35867540, - "step": 1710 - }, - { - "epoch": 0.20573558588348465, - "flos": 16753217526840.0, - "grad_norm": 2.3690481395862353, - "learning_rate": 3.6848385735245213e-06, - "loss": 0.7473, - "num_input_tokens_seen": 35886350, - "step": 1711 - }, - { - "epoch": 0.20585582877412373, - "flos": 18050490422280.0, - "grad_norm": 2.185576001502466, - "learning_rate": 3.6844187182478734e-06, - "loss": 0.8495, - "num_input_tokens_seen": 35906925, - "step": 1712 - }, - { - "epoch": 0.2059760716647628, - "flos": 17727929326200.0, - "grad_norm": 2.8692742427617093, - "learning_rate": 3.683998607446246e-06, - "loss": 0.7352, - "num_input_tokens_seen": 35925295, - "step": 1713 - }, - { - "epoch": 0.20609631455540192, - "flos": 14784295884120.0, - "grad_norm": 2.2907808383529913, - "learning_rate": 3.6835782411833686e-06, - "loss": 0.7278, - "num_input_tokens_seen": 35944535, - "step": 1714 - }, - { - "epoch": 0.206216557446041, - "flos": 14514534618600.0, - "grad_norm": 2.1687859172210446, - "learning_rate": 3.68315761952301e-06, - "loss": 0.7275, - "num_input_tokens_seen": 35961485, - "step": 1715 - }, - { - "epoch": 0.2063368003366801, - "flos": 17645880183960.0, - "grad_norm": 2.784545317812789, - "learning_rate": 3.6827367425289797e-06, - "loss": 0.8176, - "num_input_tokens_seen": 35980980, - "step": 1716 - }, - { - "epoch": 0.2064570432273192, - "flos": 14866345026360.0, - "grad_norm": 3.001438272934793, - "learning_rate": 3.6823156102651225e-06, - "loss": 0.7061, - "num_input_tokens_seen": 35998855, - "step": 1717 - }, - { - "epoch": 0.20657728611795828, - "flos": 14999389381440.0, - "grad_norm": 2.3868054563065844, - "learning_rate": 3.6818942227953257e-06, - "loss": 0.6917, - "num_input_tokens_seen": 36019120, - "step": 1718 - }, - { - "epoch": 0.20669752900859736, - "flos": 15946149927480.0, - "grad_norm": 3.002284928241195, - "learning_rate": 3.681472580183512e-06, - "loss": 0.6786, - "num_input_tokens_seen": 36037490, - "step": 1719 - }, - { - "epoch": 0.20681777189923645, - "flos": 11004472170840.0, - "grad_norm": 2.3315287037358114, - "learning_rate": 3.6810506824936455e-06, - "loss": 0.855, - "num_input_tokens_seen": 36055290, - "step": 1720 - }, - { - "epoch": 0.20693801478987556, - "flos": 41615831178360.0, - "grad_norm": 1.06073513432537, - "learning_rate": 3.680628529789726e-06, - "loss": 0.6553, - "num_input_tokens_seen": 36107420, - "step": 1721 - }, - { - "epoch": 0.20705825768051464, - "flos": 15808799817840.0, - "grad_norm": 2.490177408202578, - "learning_rate": 3.680206122135796e-06, - "loss": 0.8364, - "num_input_tokens_seen": 36127745, - "step": 1722 - }, - { - "epoch": 0.20717850057115372, - "flos": 18943089759480.0, - "grad_norm": 2.3158293805037737, - "learning_rate": 3.6797834595959323e-06, - "loss": 0.767, - "num_input_tokens_seen": 36147365, - "step": 1723 - }, - { - "epoch": 0.20729874346179283, - "flos": 21371669328240.0, - "grad_norm": 3.233930916637519, - "learning_rate": 3.679360542234254e-06, - "loss": 0.7594, - "num_input_tokens_seen": 36166430, - "step": 1724 - }, - { - "epoch": 0.20741898635243192, - "flos": 21292437922440.0, - "grad_norm": 3.1361963732601934, - 
"learning_rate": 3.678937370114916e-06, - "loss": 0.7112, - "num_input_tokens_seen": 36185955, - "step": 1725 - }, - { - "epoch": 0.207539229243071, - "flos": 11328141365520.0, - "grad_norm": 2.482027204040169, - "learning_rate": 3.678513943302114e-06, - "loss": 0.7769, - "num_input_tokens_seen": 36202450, - "step": 1726 - }, - { - "epoch": 0.20765947213371008, - "flos": 15000022580640.0, - "grad_norm": 2.7766162117918727, - "learning_rate": 3.678090261860082e-06, - "loss": 0.8341, - "num_input_tokens_seen": 36221900, - "step": 1727 - }, - { - "epoch": 0.2077797150243492, - "flos": 14136735875040.0, - "grad_norm": 2.288851679444967, - "learning_rate": 3.6776663258530906e-06, - "loss": 0.7621, - "num_input_tokens_seen": 36240270, - "step": 1728 - }, - { - "epoch": 0.20789995791498828, - "flos": 15968022468480.0, - "grad_norm": 2.1891177054066784, - "learning_rate": 3.6772421353454516e-06, - "loss": 0.7003, - "num_input_tokens_seen": 36258585, - "step": 1729 - }, - { - "epoch": 0.20802020080562736, - "flos": 16944855485160.0, - "grad_norm": 4.4548918849443435, - "learning_rate": 3.6768176904015153e-06, - "loss": 0.8644, - "num_input_tokens_seen": 36278110, - "step": 1730 - }, - { - "epoch": 0.20814044369626647, - "flos": 16888034839680.0, - "grad_norm": 2.47346991820788, - "learning_rate": 3.6763929910856674e-06, - "loss": 0.5912, - "num_input_tokens_seen": 36296280, - "step": 1731 - }, - { - "epoch": 0.20826068658690555, - "flos": 14324163058680.0, - "grad_norm": 2.608311419247677, - "learning_rate": 3.6759680374623365e-06, - "loss": 0.7562, - "num_input_tokens_seen": 36313915, - "step": 1732 - }, - { - "epoch": 0.20838092947754464, - "flos": 18591595951320.0, - "grad_norm": 2.9109131513591846, - "learning_rate": 3.675542829595986e-06, - "loss": 0.7252, - "num_input_tokens_seen": 36333300, - "step": 1733 - }, - { - "epoch": 0.20850117236818372, - "flos": 17620556707320.0, - "grad_norm": 1.569944727708298, - "learning_rate": 3.6751173675511213e-06, - "loss": 0.7757, - "num_input_tokens_seen": 36355065, - "step": 1734 - }, - { - "epoch": 0.20862141525882283, - "flos": 14671192812480.0, - "grad_norm": 2.4180485183795524, - "learning_rate": 3.674691651392283e-06, - "loss": 0.8729, - "num_input_tokens_seen": 36372455, - "step": 1735 - }, - { - "epoch": 0.2087416581494619, - "flos": 28686309086640.0, - "grad_norm": 2.5418220430836724, - "learning_rate": 3.674265681184053e-06, - "loss": 0.7448, - "num_input_tokens_seen": 36395435, - "step": 1736 - }, - { - "epoch": 0.208861901040101, - "flos": 19130073703680.0, - "grad_norm": 1.9241762727299667, - "learning_rate": 3.6738394569910504e-06, - "loss": 0.8484, - "num_input_tokens_seen": 36415695, - "step": 1737 - }, - { - "epoch": 0.2089821439307401, - "flos": 20829550680480.0, - "grad_norm": 2.554517734498134, - "learning_rate": 3.6734129788779333e-06, - "loss": 0.8108, - "num_input_tokens_seen": 36434590, - "step": 1738 - }, - { - "epoch": 0.2091023868213792, - "flos": 15406152497040.0, - "grad_norm": 2.179767894287722, - "learning_rate": 3.6729862469093976e-06, - "loss": 0.8927, - "num_input_tokens_seen": 36453405, - "step": 1739 - }, - { - "epoch": 0.20922262971201827, - "flos": 16431701209440.0, - "grad_norm": 3.856004733751486, - "learning_rate": 3.6725592611501782e-06, - "loss": 0.7891, - "num_input_tokens_seen": 36471800, - "step": 1740 - }, - { - "epoch": 0.20934287260265738, - "flos": 20425225381800.0, - "grad_norm": 2.111666642540036, - "learning_rate": 3.6721320216650496e-06, - "loss": 0.7481, - "num_input_tokens_seen": 36492135, - "step": 
1741 - }, - { - "epoch": 0.20946311549329646, - "flos": 11976366233760.0, - "grad_norm": 2.1289091617669307, - "learning_rate": 3.6717045285188215e-06, - "loss": 0.831, - "num_input_tokens_seen": 36509550, - "step": 1742 - }, - { - "epoch": 0.20958335838393555, - "flos": 16459367523120.0, - "grad_norm": 3.5596802912965537, - "learning_rate": 3.671276781776346e-06, - "loss": 0.849, - "num_input_tokens_seen": 36527925, - "step": 1743 - }, - { - "epoch": 0.20970360127457463, - "flos": 18480645756960.0, - "grad_norm": 2.8420143280205026, - "learning_rate": 3.6708487815025128e-06, - "loss": 0.6542, - "num_input_tokens_seen": 36548225, - "step": 1744 - }, - { - "epoch": 0.20982384416521374, - "flos": 13489049226120.0, - "grad_norm": 2.552815255807918, - "learning_rate": 3.6704205277622463e-06, - "loss": 0.7214, - "num_input_tokens_seen": 36566385, - "step": 1745 - }, - { - "epoch": 0.20994408705585282, - "flos": 18591152711880.0, - "grad_norm": 1.8561687875572122, - "learning_rate": 3.6699920206205146e-06, - "loss": 0.7833, - "num_input_tokens_seen": 36586845, - "step": 1746 - }, - { - "epoch": 0.2100643299464919, - "flos": 15591585103200.0, - "grad_norm": 1.817532360518173, - "learning_rate": 3.669563260142321e-06, - "loss": 0.8095, - "num_input_tokens_seen": 36605455, - "step": 1747 - }, - { - "epoch": 0.21018457283713102, - "flos": 14136704215080.0, - "grad_norm": 2.537724820404908, - "learning_rate": 3.6691342463927083e-06, - "loss": 0.817, - "num_input_tokens_seen": 36624170, - "step": 1748 - }, - { - "epoch": 0.2103048157277701, - "flos": 20779315306680.0, - "grad_norm": 2.1472537576986963, - "learning_rate": 3.668704979436758e-06, - "loss": 0.8027, - "num_input_tokens_seen": 36643985, - "step": 1749 - }, - { - "epoch": 0.21042505861840918, - "flos": 12732755219880.0, - "grad_norm": 2.4676015572871814, - "learning_rate": 3.668275459339588e-06, - "loss": 0.7715, - "num_input_tokens_seen": 36662185, - "step": 1750 - }, - { - "epoch": 0.21054530150904827, - "flos": 10329372487920.0, - "grad_norm": 2.137813866309142, - "learning_rate": 3.667845686166358e-06, - "loss": 0.7856, - "num_input_tokens_seen": 36678830, - "step": 1751 - }, - { - "epoch": 0.21066554439968738, - "flos": 13590976331880.0, - "grad_norm": 1.8629921742887654, - "learning_rate": 3.6674156599822634e-06, - "loss": 0.8462, - "num_input_tokens_seen": 36694345, - "step": 1752 - }, - { - "epoch": 0.21078578729032646, - "flos": 17322210989280.0, - "grad_norm": 2.0895536132199988, - "learning_rate": 3.666985380852539e-06, - "loss": 0.795, - "num_input_tokens_seen": 36713070, - "step": 1753 - }, - { - "epoch": 0.21090603018096554, - "flos": 21530733679080.0, - "grad_norm": 2.5673496596792056, - "learning_rate": 3.6665548488424576e-06, - "loss": 0.7329, - "num_input_tokens_seen": 36731550, - "step": 1754 - }, - { - "epoch": 0.21102627307160465, - "flos": 17028455965440.0, - "grad_norm": 1.8744999536660292, - "learning_rate": 3.6661240640173307e-06, - "loss": 0.8629, - "num_input_tokens_seen": 36752740, - "step": 1755 - }, - { - "epoch": 0.21114651596224374, - "flos": 40250376203160.0, - "grad_norm": 0.939190104061514, - "learning_rate": 3.6656930264425085e-06, - "loss": 0.6301, - "num_input_tokens_seen": 36816505, - "step": 1756 - }, - { - "epoch": 0.21126675885288282, - "flos": 15756633186480.0, - "grad_norm": 2.073028395307861, - "learning_rate": 3.665261736183378e-06, - "loss": 0.742, - "num_input_tokens_seen": 36836260, - "step": 1757 - }, - { - "epoch": 0.2113870017435219, - "flos": 7925578176480.0, - "grad_norm": 
4.094908106405117, - "learning_rate": 3.664830193305366e-06, - "loss": 0.8612, - "num_input_tokens_seen": 36853755, - "step": 1758 - }, - { - "epoch": 0.211507244634161, - "flos": 12138026701320.0, - "grad_norm": 3.3774392532092494, - "learning_rate": 3.6643983978739373e-06, - "loss": 0.7454, - "num_input_tokens_seen": 36870090, - "step": 1759 - }, - { - "epoch": 0.2116274875248001, - "flos": 15319797600240.0, - "grad_norm": 1.753975983658619, - "learning_rate": 3.663966349954596e-06, - "loss": 0.7993, - "num_input_tokens_seen": 36889990, - "step": 1760 - }, - { - "epoch": 0.21174773041543918, - "flos": 50286970634640.0, - "grad_norm": 1.0371112715709176, - "learning_rate": 3.6635340496128816e-06, - "loss": 0.6107, - "num_input_tokens_seen": 36946640, - "step": 1761 - }, - { - "epoch": 0.2118679733060783, - "flos": 15109168157280.0, - "grad_norm": 1.9101916974919015, - "learning_rate": 3.6631014969143747e-06, - "loss": 0.9042, - "num_input_tokens_seen": 36966050, - "step": 1762 - }, - { - "epoch": 0.21198821619671737, - "flos": 17000283092400.0, - "grad_norm": 1.9003640277879266, - "learning_rate": 3.662668691924693e-06, - "loss": 0.8691, - "num_input_tokens_seen": 36986820, - "step": 1763 - }, - { - "epoch": 0.21210845908735645, - "flos": 17943687682680.0, - "grad_norm": 3.023749283675779, - "learning_rate": 3.6622356347094927e-06, - "loss": 0.6963, - "num_input_tokens_seen": 37008105, - "step": 1764 - }, - { - "epoch": 0.21222870197799554, - "flos": 19861487472720.0, - "grad_norm": 2.1536461408461904, - "learning_rate": 3.6618023253344684e-06, - "loss": 0.7715, - "num_input_tokens_seen": 37026685, - "step": 1765 - }, - { - "epoch": 0.21234894486863465, - "flos": 12301016887200.0, - "grad_norm": 1.900415277272175, - "learning_rate": 3.6613687638653527e-06, - "loss": 0.8275, - "num_input_tokens_seen": 37044575, - "step": 1766 - }, - { - "epoch": 0.21246918775927373, - "flos": 17185873998360.0, - "grad_norm": 2.3512729200334066, - "learning_rate": 3.660934950367916e-06, - "loss": 0.765, - "num_input_tokens_seen": 37063540, - "step": 1767 - }, - { - "epoch": 0.21258943064991281, - "flos": 16377508340640.0, - "grad_norm": 1.8462460965558876, - "learning_rate": 3.660500884907968e-06, - "loss": 0.8204, - "num_input_tokens_seen": 37084000, - "step": 1768 - }, - { - "epoch": 0.21270967354055192, - "flos": 43806336610200.0, - "grad_norm": 0.8566338256621763, - "learning_rate": 3.660066567551356e-06, - "loss": 0.6176, - "num_input_tokens_seen": 37143865, - "step": 1769 - }, - { - "epoch": 0.212829916431191, - "flos": 15757266385680.0, - "grad_norm": 2.0610750072958197, - "learning_rate": 3.6596319983639657e-06, - "loss": 0.8136, - "num_input_tokens_seen": 37162165, - "step": 1770 - }, - { - "epoch": 0.2129501593218301, - "flos": 21266196306960.0, - "grad_norm": 1.99925253728868, - "learning_rate": 3.6591971774117214e-06, - "loss": 0.849, - "num_input_tokens_seen": 37184860, - "step": 1771 - }, - { - "epoch": 0.2130704022124692, - "flos": 13731587417400.0, - "grad_norm": 2.874668102180297, - "learning_rate": 3.6587621047605833e-06, - "loss": 0.7932, - "num_input_tokens_seen": 37201750, - "step": 1772 - }, - { - "epoch": 0.21319064510310828, - "flos": 10113550811520.0, - "grad_norm": 2.0446044908756527, - "learning_rate": 3.6583267804765542e-06, - "loss": 0.8579, - "num_input_tokens_seen": 37215805, - "step": 1773 - }, - { - "epoch": 0.21331088799374737, - "flos": 15324736554000.0, - "grad_norm": 2.1431128530972976, - "learning_rate": 3.6578912046256702e-06, - "loss": 0.8422, - 
"num_input_tokens_seen": 37234045, - "step": 1774 - }, - { - "epoch": 0.21343113088438645, - "flos": 13596643464720.0, - "grad_norm": 2.193596980743779, - "learning_rate": 3.6574553772740083e-06, - "loss": 0.7471, - "num_input_tokens_seen": 37251695, - "step": 1775 - }, - { - "epoch": 0.21355137377502556, - "flos": 49709560114200.0, - "grad_norm": 0.8834905805317715, - "learning_rate": 3.657019298487684e-06, - "loss": 0.6546, - "num_input_tokens_seen": 37316425, - "step": 1776 - }, - { - "epoch": 0.21367161666566464, - "flos": 25369182655560.0, - "grad_norm": 1.9506384317949492, - "learning_rate": 3.6565829683328495e-06, - "loss": 0.7993, - "num_input_tokens_seen": 37338770, - "step": 1777 - }, - { - "epoch": 0.21379185955630373, - "flos": 13866531370080.0, - "grad_norm": 1.873616598316198, - "learning_rate": 3.6561463868756965e-06, - "loss": 0.8405, - "num_input_tokens_seen": 37357190, - "step": 1778 - }, - { - "epoch": 0.21391210244694284, - "flos": 20696474665440.0, - "grad_norm": 1.8033216187430963, - "learning_rate": 3.655709554182452e-06, - "loss": 0.775, - "num_input_tokens_seen": 37377250, - "step": 1779 - }, - { - "epoch": 0.21403234533758192, - "flos": 12730792302360.0, - "grad_norm": 2.0164433673574287, - "learning_rate": 3.6552724703193855e-06, - "loss": 0.8456, - "num_input_tokens_seen": 37394160, - "step": 1780 - }, - { - "epoch": 0.214152588228221, - "flos": 38032679357040.0, - "grad_norm": 0.7781331640221231, - "learning_rate": 3.654835135352801e-06, - "loss": 0.5756, - "num_input_tokens_seen": 37448690, - "step": 1781 - }, - { - "epoch": 0.21427283111886009, - "flos": 14241037477800.0, - "grad_norm": 2.1374523849091718, - "learning_rate": 3.654397549349043e-06, - "loss": 0.8637, - "num_input_tokens_seen": 37465785, - "step": 1782 - }, - { - "epoch": 0.2143930740094992, - "flos": 14677366504680.0, - "grad_norm": 2.3975157317758207, - "learning_rate": 3.653959712374491e-06, - "loss": 0.73, - "num_input_tokens_seen": 37483610, - "step": 1783 - }, - { - "epoch": 0.21451331690013828, - "flos": 15945548388240.0, - "grad_norm": 2.0788818357941423, - "learning_rate": 3.6535216244955663e-06, - "loss": 0.8134, - "num_input_tokens_seen": 37503225, - "step": 1784 - }, - { - "epoch": 0.21463355979077736, - "flos": 24127210727520.0, - "grad_norm": 1.8045914529450249, - "learning_rate": 3.653083285778726e-06, - "loss": 0.686, - "num_input_tokens_seen": 37524315, - "step": 1785 - }, - { - "epoch": 0.21475380268141647, - "flos": 15757076425920.0, - "grad_norm": 2.7767616659935808, - "learning_rate": 3.6526446962904653e-06, - "loss": 0.7873, - "num_input_tokens_seen": 37542750, - "step": 1786 - }, - { - "epoch": 0.21487404557205556, - "flos": 23612790053400.0, - "grad_norm": 1.7398361966432077, - "learning_rate": 3.652205856097318e-06, - "loss": 0.7226, - "num_input_tokens_seen": 37565655, - "step": 1787 - }, - { - "epoch": 0.21499428846269464, - "flos": 9191480542920.0, - "grad_norm": 3.1967253977207077, - "learning_rate": 3.651766765265856e-06, - "loss": 0.7768, - "num_input_tokens_seen": 37582385, - "step": 1788 - }, - { - "epoch": 0.21511453135333372, - "flos": 17182897962120.0, - "grad_norm": 4.242593707172034, - "learning_rate": 3.65132742386269e-06, - "loss": 0.7829, - "num_input_tokens_seen": 37597325, - "step": 1789 - }, - { - "epoch": 0.21523477424397283, - "flos": 19753228374960.0, - "grad_norm": 2.510618832205275, - "learning_rate": 3.6508878319544656e-06, - "loss": 0.8257, - "num_input_tokens_seen": 37617260, - "step": 1790 - }, - { - "epoch": 0.21535501713461191, - "flos": 
13812338501280.0, - "grad_norm": 3.4193593711265757, - "learning_rate": 3.65044798960787e-06, - "loss": 0.7914, - "num_input_tokens_seen": 37635320, - "step": 1791 - }, - { - "epoch": 0.215475260025251, - "flos": 13056772674120.0, - "grad_norm": 5.828701941848141, - "learning_rate": 3.650007896889627e-06, - "loss": 0.7627, - "num_input_tokens_seen": 37653620, - "step": 1792 - }, - { - "epoch": 0.2155955029158901, - "flos": 12138438280800.0, - "grad_norm": 4.127404639137746, - "learning_rate": 3.6495675538664974e-06, - "loss": 0.7858, - "num_input_tokens_seen": 37672355, - "step": 1793 - }, - { - "epoch": 0.2157157458065292, - "flos": 17294164756080.0, - "grad_norm": 2.026851292888224, - "learning_rate": 3.649126960605282e-06, - "loss": 0.8105, - "num_input_tokens_seen": 37693060, - "step": 1794 - }, - { - "epoch": 0.21583598869716827, - "flos": 16188656458800.0, - "grad_norm": 2.5652345162721715, - "learning_rate": 3.6486861171728174e-06, - "loss": 0.806, - "num_input_tokens_seen": 37711175, - "step": 1795 - }, - { - "epoch": 0.21595623158780738, - "flos": 17161310360760.0, - "grad_norm": 2.0951924835128515, - "learning_rate": 3.6482450236359803e-06, - "loss": 0.7723, - "num_input_tokens_seen": 37732750, - "step": 1796 - }, - { - "epoch": 0.21607647447844647, - "flos": 19725720361080.0, - "grad_norm": 2.5037169481629817, - "learning_rate": 3.647803680061683e-06, - "loss": 0.7505, - "num_input_tokens_seen": 37752885, - "step": 1797 - }, - { - "epoch": 0.21619671736908555, - "flos": 10539811971120.0, - "grad_norm": 3.306322058773986, - "learning_rate": 3.6473620865168776e-06, - "loss": 0.7216, - "num_input_tokens_seen": 37769475, - "step": 1798 - }, - { - "epoch": 0.21631696025972463, - "flos": 13083330889200.0, - "grad_norm": 2.2487077180832524, - "learning_rate": 3.646920243068554e-06, - "loss": 0.8071, - "num_input_tokens_seen": 37787090, - "step": 1799 - }, - { - "epoch": 0.21643720315036374, - "flos": 17916971167800.0, - "grad_norm": 1.7744885661513792, - "learning_rate": 3.6464781497837384e-06, - "loss": 0.7241, - "num_input_tokens_seen": 37808785, - "step": 1800 - }, - { - "epoch": 0.21655744604100283, - "flos": 20885959746480.0, - "grad_norm": 5.713628758404521, - "learning_rate": 3.6460358067294965e-06, - "loss": 0.7182, - "num_input_tokens_seen": 37829735, - "step": 1801 - }, - { - "epoch": 0.2166776889316419, - "flos": 14727126979080.0, - "grad_norm": 2.375013150630481, - "learning_rate": 3.645593213972932e-06, - "loss": 0.7575, - "num_input_tokens_seen": 37848360, - "step": 1802 - }, - { - "epoch": 0.21679793182228102, - "flos": 11057145361560.0, - "grad_norm": 2.57420819154464, - "learning_rate": 3.6451503715811852e-06, - "loss": 0.7638, - "num_input_tokens_seen": 37866390, - "step": 1803 - }, - { - "epoch": 0.2169181747129201, - "flos": 12678878950680.0, - "grad_norm": 2.337427792989329, - "learning_rate": 3.6447072796214345e-06, - "loss": 0.7934, - "num_input_tokens_seen": 37884675, - "step": 1804 - }, - { - "epoch": 0.21703841760355919, - "flos": 43305656358720.0, - "grad_norm": 0.96274916903951, - "learning_rate": 3.644263938160898e-06, - "loss": 0.6625, - "num_input_tokens_seen": 37940360, - "step": 1805 - }, - { - "epoch": 0.21715866049419827, - "flos": 16404224855520.0, - "grad_norm": 3.7351981177633498, - "learning_rate": 3.6438203472668293e-06, - "loss": 0.706, - "num_input_tokens_seen": 37959725, - "step": 1806 - }, - { - "epoch": 0.21727890338483738, - "flos": 12570176613480.0, - "grad_norm": 5.500517560709078, - "learning_rate": 3.6433765070065206e-06, - "loss": 
0.81, - "num_input_tokens_seen": 37977235, - "step": 1807 - }, - { - "epoch": 0.21739914627547646, - "flos": 9754553592840.0, - "grad_norm": 3.1111261238765286, - "learning_rate": 3.6429324174473025e-06, - "loss": 0.8669, - "num_input_tokens_seen": 37990495, - "step": 1808 - }, - { - "epoch": 0.21751938916611555, - "flos": 15324578254200.0, - "grad_norm": 3.3267286353241885, - "learning_rate": 3.6424880786565425e-06, - "loss": 0.8316, - "num_input_tokens_seen": 38006360, - "step": 1809 - }, - { - "epoch": 0.21763963205675466, - "flos": 20238937956720.0, - "grad_norm": 2.479784677448586, - "learning_rate": 3.6420434907016482e-06, - "loss": 0.7719, - "num_input_tokens_seen": 38025770, - "step": 1810 - }, - { - "epoch": 0.21775987494739374, - "flos": 15672589466760.0, - "grad_norm": 2.0140210060834907, - "learning_rate": 3.6415986536500606e-06, - "loss": 0.7994, - "num_input_tokens_seen": 38043820, - "step": 1811 - }, - { - "epoch": 0.21788011783803282, - "flos": 13380821788320.0, - "grad_norm": 2.1580264748477482, - "learning_rate": 3.641153567569263e-06, - "loss": 0.8065, - "num_input_tokens_seen": 38061855, - "step": 1812 - }, - { - "epoch": 0.2180003607286719, - "flos": 22209505917360.0, - "grad_norm": 4.494153145014612, - "learning_rate": 3.640708232526774e-06, - "loss": 0.9375, - "num_input_tokens_seen": 38080230, - "step": 1813 - }, - { - "epoch": 0.21812060361931102, - "flos": 18669750918480.0, - "grad_norm": 2.1551997626312285, - "learning_rate": 3.6402626485901504e-06, - "loss": 0.7641, - "num_input_tokens_seen": 38099045, - "step": 1814 - }, - { - "epoch": 0.2182408465099501, - "flos": 16026647731680.0, - "grad_norm": 2.0971126597952563, - "learning_rate": 3.639816815826988e-06, - "loss": 0.7548, - "num_input_tokens_seen": 38118090, - "step": 1815 - }, - { - "epoch": 0.21836108940058918, - "flos": 17320849611000.0, - "grad_norm": 2.191736206672667, - "learning_rate": 3.6393707343049176e-06, - "loss": 0.7621, - "num_input_tokens_seen": 38138140, - "step": 1816 - }, - { - "epoch": 0.2184813322912283, - "flos": 18078726615240.0, - "grad_norm": 2.9624026254352307, - "learning_rate": 3.6389244040916104e-06, - "loss": 0.7275, - "num_input_tokens_seen": 38156935, - "step": 1817 - }, - { - "epoch": 0.21860157518186737, - "flos": 19479636254280.0, - "grad_norm": 2.6760770536770417, - "learning_rate": 3.6384778252547747e-06, - "loss": 0.7812, - "num_input_tokens_seen": 38172535, - "step": 1818 - }, - { - "epoch": 0.21872181807250646, - "flos": 15270353725440.0, - "grad_norm": 2.822273903804899, - "learning_rate": 3.638030997862155e-06, - "loss": 0.759, - "num_input_tokens_seen": 38191190, - "step": 1819 - }, - { - "epoch": 0.21884206096314554, - "flos": 45118268067120.0, - "grad_norm": 0.830966680329268, - "learning_rate": 3.6375839219815356e-06, - "loss": 0.6178, - "num_input_tokens_seen": 38248710, - "step": 1820 - }, - { - "epoch": 0.21896230385378465, - "flos": 17184892539600.0, - "grad_norm": 2.4067280282134766, - "learning_rate": 3.6371365976807375e-06, - "loss": 0.8131, - "num_input_tokens_seen": 38268825, - "step": 1821 - }, - { - "epoch": 0.21908254674442373, - "flos": 18375362695440.0, - "grad_norm": 2.1974715615299574, - "learning_rate": 3.6366890250276185e-06, - "loss": 0.8118, - "num_input_tokens_seen": 38289500, - "step": 1822 - }, - { - "epoch": 0.21920278963506282, - "flos": 17214933350280.0, - "grad_norm": 1.9413052998936102, - "learning_rate": 3.6362412040900764e-06, - "loss": 0.8793, - "num_input_tokens_seen": 38309010, - "step": 1823 - }, - { - "epoch": 
0.21932303252570193, - "flos": 21454414989600.0, - "grad_norm": 2.0067097645099286, - "learning_rate": 3.635793134936044e-06, - "loss": 0.7852, - "num_input_tokens_seen": 38329740, - "step": 1824 - }, - { - "epoch": 0.219443275416341, - "flos": 15210652023600.0, - "grad_norm": 2.4839908134855717, - "learning_rate": 3.635344817633494e-06, - "loss": 0.7158, - "num_input_tokens_seen": 38348775, - "step": 1825 - }, - { - "epoch": 0.2195635183069801, - "flos": 10545257484240.0, - "grad_norm": 4.162950435243488, - "learning_rate": 3.634896252250436e-06, - "loss": 0.7425, - "num_input_tokens_seen": 38365260, - "step": 1826 - }, - { - "epoch": 0.2196837611976192, - "flos": 17754962440680.0, - "grad_norm": 2.551108183070831, - "learning_rate": 3.6344474388549157e-06, - "loss": 0.8022, - "num_input_tokens_seen": 38384635, - "step": 1827 - }, - { - "epoch": 0.2198040040882583, - "flos": 13189120510080.0, - "grad_norm": 3.471589447007555, - "learning_rate": 3.6339983775150183e-06, - "loss": 0.7972, - "num_input_tokens_seen": 38400915, - "step": 1828 - }, - { - "epoch": 0.21992424697889737, - "flos": 12974470252200.0, - "grad_norm": 4.044674779693211, - "learning_rate": 3.6335490682988664e-06, - "loss": 0.8336, - "num_input_tokens_seen": 38416245, - "step": 1829 - }, - { - "epoch": 0.22004448986953645, - "flos": 12866686053840.0, - "grad_norm": 2.277299465056395, - "learning_rate": 3.63309951127462e-06, - "loss": 0.812, - "num_input_tokens_seen": 38432875, - "step": 1830 - }, - { - "epoch": 0.22016473276017556, - "flos": 16298878474080.0, - "grad_norm": 2.118653300281387, - "learning_rate": 3.6326497065104757e-06, - "loss": 0.7355, - "num_input_tokens_seen": 38453060, - "step": 1831 - }, - { - "epoch": 0.22028497565081465, - "flos": 18726413264160.0, - "grad_norm": 2.812179292831543, - "learning_rate": 3.6321996540746697e-06, - "loss": 0.7603, - "num_input_tokens_seen": 38471855, - "step": 1832 - }, - { - "epoch": 0.22040521854145373, - "flos": 26638630937520.0, - "grad_norm": 1.9126270127350367, - "learning_rate": 3.6317493540354733e-06, - "loss": 0.7817, - "num_input_tokens_seen": 38494990, - "step": 1833 - }, - { - "epoch": 0.22052546143209284, - "flos": 8574404583960.0, - "grad_norm": 2.199919079246033, - "learning_rate": 3.6312988064611976e-06, - "loss": 0.7545, - "num_input_tokens_seen": 38513020, - "step": 1834 - }, - { - "epoch": 0.22064570432273192, - "flos": 17729955563640.0, - "grad_norm": 2.000187388224587, - "learning_rate": 3.6308480114201896e-06, - "loss": 0.7894, - "num_input_tokens_seen": 38534660, - "step": 1835 - }, - { - "epoch": 0.220765947213371, - "flos": 13083552508920.0, - "grad_norm": 2.05336820651244, - "learning_rate": 3.630396968980835e-06, - "loss": 0.7501, - "num_input_tokens_seen": 38552255, - "step": 1836 - }, - { - "epoch": 0.2208861901040101, - "flos": 19614611866920.0, - "grad_norm": 3.155584586223375, - "learning_rate": 3.6299456792115575e-06, - "loss": 0.8339, - "num_input_tokens_seen": 38573230, - "step": 1837 - }, - { - "epoch": 0.2210064329946492, - "flos": 12999097209720.0, - "grad_norm": 2.35587004980301, - "learning_rate": 3.629494142180815e-06, - "loss": 0.794, - "num_input_tokens_seen": 38591695, - "step": 1838 - }, - { - "epoch": 0.22112667588528828, - "flos": 13110174043920.0, - "grad_norm": 2.7739523826093135, - "learning_rate": 3.6290423579571075e-06, - "loss": 0.8323, - "num_input_tokens_seen": 38607955, - "step": 1839 - }, - { - "epoch": 0.22124691877592736, - "flos": 13407601623120.0, - "grad_norm": 1.9073386913803418, - "learning_rate": 
3.6285903266089694e-06, - "loss": 0.7824, - "num_input_tokens_seen": 38626950, - "step": 1840 - }, - { - "epoch": 0.22136716166656648, - "flos": 15216319156440.0, - "grad_norm": 2.1136707786003095, - "learning_rate": 3.628138048204974e-06, - "loss": 0.757, - "num_input_tokens_seen": 38647355, - "step": 1841 - }, - { - "epoch": 0.22148740455720556, - "flos": 12894320707560.0, - "grad_norm": 2.023287380938926, - "learning_rate": 3.6276855228137304e-06, - "loss": 0.7508, - "num_input_tokens_seen": 38665280, - "step": 1842 - }, - { - "epoch": 0.22160764744784464, - "flos": 15892495278000.0, - "grad_norm": 2.1968214611838888, - "learning_rate": 3.6272327505038874e-06, - "loss": 0.8019, - "num_input_tokens_seen": 38681465, - "step": 1843 - }, - { - "epoch": 0.22172789033848372, - "flos": 17400017696880.0, - "grad_norm": 2.178693975624527, - "learning_rate": 3.626779731344131e-06, - "loss": 0.7708, - "num_input_tokens_seen": 38700975, - "step": 1844 - }, - { - "epoch": 0.22184813322912283, - "flos": 12381134771880.0, - "grad_norm": 3.543911002133101, - "learning_rate": 3.6263264654031814e-06, - "loss": 0.8356, - "num_input_tokens_seen": 38717595, - "step": 1845 - }, - { - "epoch": 0.22196837611976192, - "flos": 45572037240600.0, - "grad_norm": 0.6954383950432114, - "learning_rate": 3.6258729527498008e-06, - "loss": 0.5936, - "num_input_tokens_seen": 38778160, - "step": 1846 - }, - { - "epoch": 0.222088619010401, - "flos": 18727964602200.0, - "grad_norm": 2.93954591319497, - "learning_rate": 3.6254191934527854e-06, - "loss": 0.6061, - "num_input_tokens_seen": 38797235, - "step": 1847 - }, - { - "epoch": 0.2222088619010401, - "flos": 14110335959760.0, - "grad_norm": 2.9596833564891214, - "learning_rate": 3.6249651875809715e-06, - "loss": 0.6254, - "num_input_tokens_seen": 38816835, - "step": 1848 - }, - { - "epoch": 0.2223291047916792, - "flos": 13948010633040.0, - "grad_norm": 2.115015471201194, - "learning_rate": 3.62451093520323e-06, - "loss": 0.8713, - "num_input_tokens_seen": 38834460, - "step": 1849 - }, - { - "epoch": 0.22244934768231828, - "flos": 15243890490240.0, - "grad_norm": 4.159599159442689, - "learning_rate": 3.6240564363884714e-06, - "loss": 0.8814, - "num_input_tokens_seen": 38854125, - "step": 1850 - }, - { - "epoch": 0.2225695905729574, - "flos": 11382587514000.0, - "grad_norm": 2.24480304287346, - "learning_rate": 3.623601691205643e-06, - "loss": 0.683, - "num_input_tokens_seen": 38872920, - "step": 1851 - }, - { - "epoch": 0.22268983346359647, - "flos": 18591184371840.0, - "grad_norm": 2.4331488286793435, - "learning_rate": 3.623146699723729e-06, - "loss": 0.7896, - "num_input_tokens_seen": 38892100, - "step": 1852 - }, - { - "epoch": 0.22281007635423555, - "flos": 9627556290120.0, - "grad_norm": 2.3351936304157785, - "learning_rate": 3.6226914620117507e-06, - "loss": 0.7708, - "num_input_tokens_seen": 38910440, - "step": 1853 - }, - { - "epoch": 0.22293031924487464, - "flos": 11166132638400.0, - "grad_norm": 2.3826185804160205, - "learning_rate": 3.622235978138768e-06, - "loss": 0.7889, - "num_input_tokens_seen": 38927785, - "step": 1854 - }, - { - "epoch": 0.22305056213551375, - "flos": 16512452293320.0, - "grad_norm": 2.5755374754910267, - "learning_rate": 3.621780248173877e-06, - "loss": 0.8049, - "num_input_tokens_seen": 38945705, - "step": 1855 - }, - { - "epoch": 0.22317080502615283, - "flos": 47833035929280.0, - "grad_norm": 0.8816568355396782, - "learning_rate": 3.6213242721862125e-06, - "loss": 0.6479, - "num_input_tokens_seen": 39003880, - "step": 1856 - }, - { - 
"epoch": 0.2232910479167919, - "flos": 18889150170360.0, - "grad_norm": 1.7526178534220365, - "learning_rate": 3.620868050244945e-06, - "loss": 0.7311, - "num_input_tokens_seen": 39024080, - "step": 1857 - }, - { - "epoch": 0.22341129080743102, - "flos": 17020129395960.0, - "grad_norm": 2.818086797337275, - "learning_rate": 3.6204115824192817e-06, - "loss": 0.7524, - "num_input_tokens_seen": 39041275, - "step": 1858 - }, - { - "epoch": 0.2235315336980701, - "flos": 15513271836240.0, - "grad_norm": 3.610600251147167, - "learning_rate": 3.619954868778471e-06, - "loss": 0.7496, - "num_input_tokens_seen": 39057690, - "step": 1859 - }, - { - "epoch": 0.2236517765887092, - "flos": 14541979312560.0, - "grad_norm": 2.3482874372485694, - "learning_rate": 3.6194979093917944e-06, - "loss": 0.8083, - "num_input_tokens_seen": 39076825, - "step": 1860 - }, - { - "epoch": 0.22377201947934827, - "flos": 16992716361960.0, - "grad_norm": 2.4075514011303003, - "learning_rate": 3.6190407043285724e-06, - "loss": 0.8722, - "num_input_tokens_seen": 39094280, - "step": 1861 - }, - { - "epoch": 0.22389226236998738, - "flos": 19642246520640.0, - "grad_norm": 2.4537844409361975, - "learning_rate": 3.618583253658163e-06, - "loss": 0.7305, - "num_input_tokens_seen": 39114100, - "step": 1862 - }, - { - "epoch": 0.22401250526062647, - "flos": 17700832891800.0, - "grad_norm": 2.000856216259562, - "learning_rate": 3.618125557449961e-06, - "loss": 0.8476, - "num_input_tokens_seen": 39131875, - "step": 1863 - }, - { - "epoch": 0.22413274815126555, - "flos": 12215928388800.0, - "grad_norm": 2.0716723312746503, - "learning_rate": 3.6176676157733983e-06, - "loss": 0.8149, - "num_input_tokens_seen": 39146605, - "step": 1864 - }, - { - "epoch": 0.22425299104190466, - "flos": 15621024374640.0, - "grad_norm": 2.6646029911305353, - "learning_rate": 3.6172094286979443e-06, - "loss": 0.7374, - "num_input_tokens_seen": 39163695, - "step": 1865 - }, - { - "epoch": 0.22437323393254374, - "flos": 23618488846200.0, - "grad_norm": 1.6946393623528484, - "learning_rate": 3.6167509962931064e-06, - "loss": 0.7971, - "num_input_tokens_seen": 39189115, - "step": 1866 - }, - { - "epoch": 0.22449347682318282, - "flos": 13136225699640.0, - "grad_norm": 7.198122616888634, - "learning_rate": 3.6162923186284276e-06, - "loss": 0.7581, - "num_input_tokens_seen": 39204795, - "step": 1867 - }, - { - "epoch": 0.2246137197138219, - "flos": 13650361434120.0, - "grad_norm": 2.482781721035964, - "learning_rate": 3.6158333957734888e-06, - "loss": 0.8433, - "num_input_tokens_seen": 39223105, - "step": 1868 - }, - { - "epoch": 0.22473396260446102, - "flos": 11350488805920.0, - "grad_norm": 2.51403018795513, - "learning_rate": 3.6153742277979088e-06, - "loss": 0.8178, - "num_input_tokens_seen": 39240255, - "step": 1869 - }, - { - "epoch": 0.2248542054951001, - "flos": 10518635949240.0, - "grad_norm": 2.5556760036260964, - "learning_rate": 3.6149148147713434e-06, - "loss": 0.7688, - "num_input_tokens_seen": 39258210, - "step": 1870 - }, - { - "epoch": 0.22497444838573918, - "flos": 14054781712680.0, - "grad_norm": 2.2129283339544146, - "learning_rate": 3.614455156763484e-06, - "loss": 0.8585, - "num_input_tokens_seen": 39276235, - "step": 1871 - }, - { - "epoch": 0.2250946912763783, - "flos": 12327986681760.0, - "grad_norm": 2.252885134305618, - "learning_rate": 3.613995253844061e-06, - "loss": 0.6911, - "num_input_tokens_seen": 39293635, - "step": 1872 - }, - { - "epoch": 0.22521493416701738, - "flos": 18079043214840.0, - "grad_norm": 2.3119459777315243, - 
"learning_rate": 3.6135351060828414e-06, - "loss": 0.7909, - "num_input_tokens_seen": 39313830, - "step": 1873 - }, - { - "epoch": 0.22533517705765646, - "flos": 13002611465280.0, - "grad_norm": 2.551246121677888, - "learning_rate": 3.6130747135496285e-06, - "loss": 0.6645, - "num_input_tokens_seen": 39332550, - "step": 1874 - }, - { - "epoch": 0.22545541994829554, - "flos": 24746787823320.0, - "grad_norm": 2.2512948231150705, - "learning_rate": 3.6126140763142646e-06, - "loss": 0.653, - "num_input_tokens_seen": 39357300, - "step": 1875 - }, - { - "epoch": 0.22557566283893465, - "flos": 14001380342880.0, - "grad_norm": 3.412511966670367, - "learning_rate": 3.6121531944466275e-06, - "loss": 0.831, - "num_input_tokens_seen": 39374345, - "step": 1876 - }, - { - "epoch": 0.22569590572957374, - "flos": 15186215025840.0, - "grad_norm": 2.8247842183031193, - "learning_rate": 3.611692068016633e-06, - "loss": 0.7683, - "num_input_tokens_seen": 39390395, - "step": 1877 - }, - { - "epoch": 0.22581614862021282, - "flos": 13462079431560.0, - "grad_norm": 2.396113558980567, - "learning_rate": 3.611230697094233e-06, - "loss": 0.7306, - "num_input_tokens_seen": 39406815, - "step": 1878 - }, - { - "epoch": 0.22593639151085193, - "flos": 14650428370080.0, - "grad_norm": 19.907340745970362, - "learning_rate": 3.6107690817494173e-06, - "loss": 0.864, - "num_input_tokens_seen": 39426755, - "step": 1879 - }, - { - "epoch": 0.226056634401491, - "flos": 9519202212480.0, - "grad_norm": 2.2307690954334816, - "learning_rate": 3.6103072220522117e-06, - "loss": 0.6941, - "num_input_tokens_seen": 39442005, - "step": 1880 - }, - { - "epoch": 0.2261768772921301, - "flos": 13865676551160.0, - "grad_norm": 1.9661957656549347, - "learning_rate": 3.609845118072682e-06, - "loss": 0.8971, - "num_input_tokens_seen": 39460395, - "step": 1881 - }, - { - "epoch": 0.2262971201827692, - "flos": 14595222382560.0, - "grad_norm": 9.558932966753696, - "learning_rate": 3.6093827698809276e-06, - "loss": 0.7725, - "num_input_tokens_seen": 39479215, - "step": 1882 - }, - { - "epoch": 0.2264173630734083, - "flos": 12138659900520.0, - "grad_norm": 3.107207496484343, - "learning_rate": 3.6089201775470864e-06, - "loss": 0.8331, - "num_input_tokens_seen": 39494390, - "step": 1883 - }, - { - "epoch": 0.22653760596404737, - "flos": 17863316518320.0, - "grad_norm": 1.9217160787237384, - "learning_rate": 3.6084573411413334e-06, - "loss": 0.7703, - "num_input_tokens_seen": 39513505, - "step": 1884 - }, - { - "epoch": 0.22665784885468646, - "flos": 13380441868800.0, - "grad_norm": 2.661089269999275, - "learning_rate": 3.607994260733881e-06, - "loss": 0.7877, - "num_input_tokens_seen": 39532465, - "step": 1885 - }, - { - "epoch": 0.22677809174532557, - "flos": 17617359051360.0, - "grad_norm": 2.007607772423565, - "learning_rate": 3.6075309363949776e-06, - "loss": 0.7343, - "num_input_tokens_seen": 39551355, - "step": 1886 - }, - { - "epoch": 0.22689833463596465, - "flos": 14891225263560.0, - "grad_norm": 2.895520328282394, - "learning_rate": 3.6070673681949094e-06, - "loss": 0.7903, - "num_input_tokens_seen": 39569440, - "step": 1887 - }, - { - "epoch": 0.22701857752660373, - "flos": 22104666095280.0, - "grad_norm": 2.8211359317217357, - "learning_rate": 3.606603556203999e-06, - "loss": 0.7831, - "num_input_tokens_seen": 39594105, - "step": 1888 - }, - { - "epoch": 0.22713882041724284, - "flos": 16458892623720.0, - "grad_norm": 2.150393840291931, - "learning_rate": 3.6061395004926066e-06, - "loss": 0.8323, - "num_input_tokens_seen": 39612760, - "step": 
1889 - }, - { - "epoch": 0.22725906330788193, - "flos": 15000434160120.0, - "grad_norm": 3.0954662148109664, - "learning_rate": 3.605675201131129e-06, - "loss": 0.8222, - "num_input_tokens_seen": 39630940, - "step": 1890 - }, - { - "epoch": 0.227379306198521, - "flos": 13867037929440.0, - "grad_norm": 3.4831868014627463, - "learning_rate": 3.60521065819e-06, - "loss": 0.7739, - "num_input_tokens_seen": 39647970, - "step": 1891 - }, - { - "epoch": 0.2274995490891601, - "flos": 15945326768520.0, - "grad_norm": 2.0563052230886614, - "learning_rate": 3.60474587173969e-06, - "loss": 0.8566, - "num_input_tokens_seen": 39666175, - "step": 1892 - }, - { - "epoch": 0.2276197919797992, - "flos": 13917304963200.0, - "grad_norm": 2.7680546492325067, - "learning_rate": 3.6042808418507084e-06, - "loss": 0.8224, - "num_input_tokens_seen": 39683580, - "step": 1893 - }, - { - "epoch": 0.22774003487043828, - "flos": 13731397457640.0, - "grad_norm": 2.1224787846056494, - "learning_rate": 3.6038155685935976e-06, - "loss": 0.7574, - "num_input_tokens_seen": 39699870, - "step": 1894 - }, - { - "epoch": 0.22786027776107737, - "flos": 16836533067480.0, - "grad_norm": 2.67380289047739, - "learning_rate": 3.6033500520389404e-06, - "loss": 0.686, - "num_input_tokens_seen": 39716260, - "step": 1895 - }, - { - "epoch": 0.22798052065171648, - "flos": 49185039912840.0, - "grad_norm": 0.8193197873948387, - "learning_rate": 3.6028842922573553e-06, - "loss": 0.6802, - "num_input_tokens_seen": 39780125, - "step": 1896 - }, - { - "epoch": 0.22810076354235556, - "flos": 45762598760280.0, - "grad_norm": 0.8144991196193819, - "learning_rate": 3.602418289319497e-06, - "loss": 0.6557, - "num_input_tokens_seen": 39838400, - "step": 1897 - }, - { - "epoch": 0.22822100643299464, - "flos": 17482953318000.0, - "grad_norm": 1.876474309495869, - "learning_rate": 3.601952043296059e-06, - "loss": 0.7245, - "num_input_tokens_seen": 39858115, - "step": 1898 - }, - { - "epoch": 0.22834124932363373, - "flos": 15347463913920.0, - "grad_norm": 2.235593023628711, - "learning_rate": 3.6014855542577696e-06, - "loss": 0.7963, - "num_input_tokens_seen": 39875045, - "step": 1899 - }, - { - "epoch": 0.22846149221427284, - "flos": 18242191700520.0, - "grad_norm": 1.9507761144360403, - "learning_rate": 3.6010188222753943e-06, - "loss": 0.8249, - "num_input_tokens_seen": 39895535, - "step": 1900 - }, - { - "epoch": 0.22858173510491192, - "flos": 41360031955200.0, - "grad_norm": 1.0109103592733057, - "learning_rate": 3.6005518474197372e-06, - "loss": 0.6671, - "num_input_tokens_seen": 39947300, - "step": 1901 - }, - { - "epoch": 0.228701977995551, - "flos": 17701244471280.0, - "grad_norm": 2.424004977872666, - "learning_rate": 3.6000846297616373e-06, - "loss": 0.7695, - "num_input_tokens_seen": 39965320, - "step": 1902 - }, - { - "epoch": 0.22882222088619011, - "flos": 15640269138960.0, - "grad_norm": 3.070395121542597, - "learning_rate": 3.5996171693719717e-06, - "loss": 0.7199, - "num_input_tokens_seen": 39981135, - "step": 1903 - }, - { - "epoch": 0.2289424637768292, - "flos": 47617974091920.0, - "grad_norm": 0.9215684154653087, - "learning_rate": 3.5991494663216528e-06, - "loss": 0.6619, - "num_input_tokens_seen": 40043840, - "step": 1904 - }, - { - "epoch": 0.22906270666746828, - "flos": 16215531273480.0, - "grad_norm": 2.2175391748316065, - "learning_rate": 3.5986815206816314e-06, - "loss": 0.8529, - "num_input_tokens_seen": 40062380, - "step": 1905 - }, - { - "epoch": 0.2291829495581074, - "flos": 18885920854440.0, - "grad_norm": 
2.3942699694319014, - "learning_rate": 3.598213332522895e-06, - "loss": 0.7278, - "num_input_tokens_seen": 40082130, - "step": 1906 - }, - { - "epoch": 0.22930319244874647, - "flos": 22882674342720.0, - "grad_norm": 2.392225141372392, - "learning_rate": 3.597744901916466e-06, - "loss": 0.7662, - "num_input_tokens_seen": 40103135, - "step": 1907 - }, - { - "epoch": 0.22942343533938556, - "flos": 17022155633400.0, - "grad_norm": 2.281761948087056, - "learning_rate": 3.5972762289334058e-06, - "loss": 0.7426, - "num_input_tokens_seen": 40122485, - "step": 1908 - }, - { - "epoch": 0.22954367823002464, - "flos": 10626166867920.0, - "grad_norm": 2.8432418872338823, - "learning_rate": 3.5968073136448116e-06, - "loss": 0.8307, - "num_input_tokens_seen": 40140225, - "step": 1909 - }, - { - "epoch": 0.22966392112066375, - "flos": 12219316004520.0, - "grad_norm": 2.061008376377044, - "learning_rate": 3.596338156121818e-06, - "loss": 0.8923, - "num_input_tokens_seen": 40158830, - "step": 1910 - }, - { - "epoch": 0.22978416401130283, - "flos": 49753115236440.0, - "grad_norm": 0.75906009816606, - "learning_rate": 3.595868756435595e-06, - "loss": 0.6126, - "num_input_tokens_seen": 40226230, - "step": 1911 - }, - { - "epoch": 0.22990440690194192, - "flos": 14514692918400.0, - "grad_norm": 2.2472671230290615, - "learning_rate": 3.5953991146573504e-06, - "loss": 0.7872, - "num_input_tokens_seen": 40244595, - "step": 1912 - }, - { - "epoch": 0.23002464979258103, - "flos": 9646896034320.0, - "grad_norm": 2.607812013620339, - "learning_rate": 3.5949292308583294e-06, - "loss": 0.8126, - "num_input_tokens_seen": 40257560, - "step": 1913 - }, - { - "epoch": 0.2301448926832201, - "flos": 16215847873080.0, - "grad_norm": 2.313037397472361, - "learning_rate": 3.594459105109811e-06, - "loss": 0.7844, - "num_input_tokens_seen": 40276460, - "step": 1914 - }, - { - "epoch": 0.2302651355738592, - "flos": 15135884672160.0, - "grad_norm": 2.4921663576383275, - "learning_rate": 3.593988737483115e-06, - "loss": 0.7973, - "num_input_tokens_seen": 40296120, - "step": 1915 - }, - { - "epoch": 0.23038537846449827, - "flos": 13570338529320.0, - "grad_norm": 2.181018928596112, - "learning_rate": 3.5935181280495947e-06, - "loss": 0.771, - "num_input_tokens_seen": 40314420, - "step": 1916 - }, - { - "epoch": 0.23050562135513739, - "flos": 47347516307280.0, - "grad_norm": 0.901601213387618, - "learning_rate": 3.5930472768806412e-06, - "loss": 0.5798, - "num_input_tokens_seen": 40372810, - "step": 1917 - }, - { - "epoch": 0.23062586424577647, - "flos": 12626554019520.0, - "grad_norm": 2.1134205770484833, - "learning_rate": 3.5925761840476826e-06, - "loss": 0.7472, - "num_input_tokens_seen": 40391140, - "step": 1918 - }, - { - "epoch": 0.23074610713641555, - "flos": 20428518017640.0, - "grad_norm": 2.3203480521923834, - "learning_rate": 3.592104849622183e-06, - "loss": 0.8058, - "num_input_tokens_seen": 40413115, - "step": 1919 - }, - { - "epoch": 0.23086635002705466, - "flos": 21158190488880.0, - "grad_norm": 1.9848127708248686, - "learning_rate": 3.591633273675644e-06, - "loss": 0.714, - "num_input_tokens_seen": 40435070, - "step": 1920 - }, - { - "epoch": 0.23098659291769374, - "flos": 46381664805360.0, - "grad_norm": 0.9770970216834738, - "learning_rate": 3.591161456279602e-06, - "loss": 0.6204, - "num_input_tokens_seen": 40480335, - "step": 1921 - }, - { - "epoch": 0.23110683580833283, - "flos": 17187520316280.0, - "grad_norm": 1.6853514521571482, - "learning_rate": 3.590689397505633e-06, - "loss": 0.782, - "num_input_tokens_seen": 
40500965, - "step": 1922 - }, - { - "epoch": 0.2312270786989719, - "flos": 19995481626600.0, - "grad_norm": 1.704906546752754, - "learning_rate": 3.590217097425347e-06, - "loss": 0.8495, - "num_input_tokens_seen": 40520585, - "step": 1923 - }, - { - "epoch": 0.23134732158961102, - "flos": 9627461310240.0, - "grad_norm": 2.8543530331395535, - "learning_rate": 3.589744556110391e-06, - "loss": 0.6787, - "num_input_tokens_seen": 40538295, - "step": 1924 - }, - { - "epoch": 0.2314675644802501, - "flos": 27179198247240.0, - "grad_norm": 1.9895521497695987, - "learning_rate": 3.58927177363245e-06, - "loss": 0.8302, - "num_input_tokens_seen": 40560840, - "step": 1925 - }, - { - "epoch": 0.2315878073708892, - "flos": 17458009760880.0, - "grad_norm": 2.9432419194692434, - "learning_rate": 3.5887987500632447e-06, - "loss": 0.719, - "num_input_tokens_seen": 40578565, - "step": 1926 - }, - { - "epoch": 0.2317080502615283, - "flos": 16863882781560.0, - "grad_norm": 2.1420738718738663, - "learning_rate": 3.5883254854745325e-06, - "loss": 0.8214, - "num_input_tokens_seen": 40596675, - "step": 1927 - }, - { - "epoch": 0.23182829315216738, - "flos": 8142381311640.0, - "grad_norm": 3.1706051845070444, - "learning_rate": 3.587851979938107e-06, - "loss": 0.7433, - "num_input_tokens_seen": 40613285, - "step": 1928 - }, - { - "epoch": 0.23194853604280646, - "flos": 14487944743560.0, - "grad_norm": 2.456209127071517, - "learning_rate": 3.5873782335257985e-06, - "loss": 0.7598, - "num_input_tokens_seen": 40631170, - "step": 1929 - }, - { - "epoch": 0.23206877893344555, - "flos": 11139986002800.0, - "grad_norm": 5.814010587899796, - "learning_rate": 3.5869042463094744e-06, - "loss": 0.772, - "num_input_tokens_seen": 40648605, - "step": 1930 - }, - { - "epoch": 0.23218902182408466, - "flos": 16621249610400.0, - "grad_norm": 2.2551842409980494, - "learning_rate": 3.586430018361038e-06, - "loss": 0.7597, - "num_input_tokens_seen": 40668095, - "step": 1931 - }, - { - "epoch": 0.23230926471472374, - "flos": 16620584751240.0, - "grad_norm": 2.6889854584141477, - "learning_rate": 3.5859555497524283e-06, - "loss": 0.7408, - "num_input_tokens_seen": 40685050, - "step": 1932 - }, - { - "epoch": 0.23242950760536282, - "flos": 14892333362160.0, - "grad_norm": 2.4349060928754165, - "learning_rate": 3.5854808405556237e-06, - "loss": 0.9077, - "num_input_tokens_seen": 40704005, - "step": 1933 - }, - { - "epoch": 0.23254975049600193, - "flos": 12326245383960.0, - "grad_norm": 3.130409746254212, - "learning_rate": 3.5850058908426355e-06, - "loss": 0.7368, - "num_input_tokens_seen": 40722275, - "step": 1934 - }, - { - "epoch": 0.23266999338664102, - "flos": 17052892963200.0, - "grad_norm": 2.6606601680324826, - "learning_rate": 3.584530700685514e-06, - "loss": 0.8275, - "num_input_tokens_seen": 40742255, - "step": 1935 - }, - { - "epoch": 0.2327902362772801, - "flos": 14295800225880.0, - "grad_norm": 2.6283136071057736, - "learning_rate": 3.5840552701563448e-06, - "loss": 0.8709, - "num_input_tokens_seen": 40758175, - "step": 1936 - }, - { - "epoch": 0.2329104791679192, - "flos": 12192662809560.0, - "grad_norm": 4.037574479210371, - "learning_rate": 3.5835795993272513e-06, - "loss": 0.7984, - "num_input_tokens_seen": 40776180, - "step": 1937 - }, - { - "epoch": 0.2330307220585583, - "flos": 16215816213120.0, - "grad_norm": 4.930153705297769, - "learning_rate": 3.583103688270391e-06, - "loss": 0.6916, - "num_input_tokens_seen": 40795680, - "step": 1938 - }, - { - "epoch": 0.23315096494919738, - "flos": 14109196201200.0, - 
"grad_norm": 2.87777128582995, - "learning_rate": 3.58262753705796e-06, - "loss": 0.8686, - "num_input_tokens_seen": 40810290, - "step": 1939 - }, - { - "epoch": 0.23327120783983646, - "flos": 39064908321000.0, - "grad_norm": 0.7697217289217011, - "learning_rate": 3.5821511457621902e-06, - "loss": 0.5664, - "num_input_tokens_seen": 40867310, - "step": 1940 - }, - { - "epoch": 0.23339145073047557, - "flos": 12488032491360.0, - "grad_norm": 3.7720803294199183, - "learning_rate": 3.5816745144553497e-06, - "loss": 0.7844, - "num_input_tokens_seen": 40882350, - "step": 1941 - }, - { - "epoch": 0.23351169362111465, - "flos": 9492454037640.0, - "grad_norm": 3.4235802151049146, - "learning_rate": 3.5811976432097424e-06, - "loss": 0.728, - "num_input_tokens_seen": 40899740, - "step": 1942 - }, - { - "epoch": 0.23363193651175373, - "flos": 11544089681760.0, - "grad_norm": 2.4132259363792334, - "learning_rate": 3.58072053209771e-06, - "loss": 0.8286, - "num_input_tokens_seen": 40916015, - "step": 1943 - }, - { - "epoch": 0.23375217940239285, - "flos": 15372660750720.0, - "grad_norm": 11.740355733911517, - "learning_rate": 3.5802431811916296e-06, - "loss": 0.7785, - "num_input_tokens_seen": 40932345, - "step": 1944 - }, - { - "epoch": 0.23387242229303193, - "flos": 15054595368960.0, - "grad_norm": 2.3180372597676033, - "learning_rate": 3.579765590563916e-06, - "loss": 0.7833, - "num_input_tokens_seen": 40951465, - "step": 1945 - }, - { - "epoch": 0.233992665183671, - "flos": 17781552315720.0, - "grad_norm": 2.8507496968742303, - "learning_rate": 3.579287760287017e-06, - "loss": 0.7937, - "num_input_tokens_seen": 40971935, - "step": 1946 - }, - { - "epoch": 0.2341129080743101, - "flos": 22129672972320.0, - "grad_norm": 1.7673655345222696, - "learning_rate": 3.578809690433421e-06, - "loss": 0.7071, - "num_input_tokens_seen": 40993365, - "step": 1947 - }, - { - "epoch": 0.2342331509649492, - "flos": 16674967579800.0, - "grad_norm": 3.4723704921006946, - "learning_rate": 3.578331381075651e-06, - "loss": 0.7841, - "num_input_tokens_seen": 41013585, - "step": 1948 - }, - { - "epoch": 0.2343533938555883, - "flos": 17295874393920.0, - "grad_norm": 4.6755526047766995, - "learning_rate": 3.5778528322862646e-06, - "loss": 0.6691, - "num_input_tokens_seen": 41032125, - "step": 1949 - }, - { - "epoch": 0.23447363674622737, - "flos": 17996645813040.0, - "grad_norm": 1.8563231739559174, - "learning_rate": 3.5773740441378585e-06, - "loss": 0.8515, - "num_input_tokens_seen": 41052600, - "step": 1950 - }, - { - "epoch": 0.23459387963686648, - "flos": 39140589319920.0, - "grad_norm": 2.989725001010133, - "learning_rate": 3.5768950167030633e-06, - "loss": 0.7242, - "num_input_tokens_seen": 41077020, - "step": 1951 - }, - { - "epoch": 0.23471412252750556, - "flos": 17538792504720.0, - "grad_norm": 2.9165411422603094, - "learning_rate": 3.576415750054548e-06, - "loss": 0.7632, - "num_input_tokens_seen": 41096860, - "step": 1952 - }, - { - "epoch": 0.23483436541814465, - "flos": 11437191962280.0, - "grad_norm": 2.190968950943907, - "learning_rate": 3.5759362442650172e-06, - "loss": 0.8375, - "num_input_tokens_seen": 41113330, - "step": 1953 - }, - { - "epoch": 0.23495460830878373, - "flos": 18267610157040.0, - "grad_norm": 3.3025797441717843, - "learning_rate": 3.5754564994072113e-06, - "loss": 0.8193, - "num_input_tokens_seen": 41131890, - "step": 1954 - }, - { - "epoch": 0.23507485119942284, - "flos": 22372401123360.0, - "grad_norm": 2.6638595190496526, - "learning_rate": 3.5749765155539067e-06, - "loss": 0.5922, - 
"num_input_tokens_seen": 41152095, - "step": 1955 - }, - { - "epoch": 0.23519509409006192, - "flos": 13377434172600.0, - "grad_norm": 2.9225471253915267, - "learning_rate": 3.574496292777917e-06, - "loss": 0.9052, - "num_input_tokens_seen": 41170025, - "step": 1956 - }, - { - "epoch": 0.235315336980701, - "flos": 21751114389720.0, - "grad_norm": 2.5062335501342656, - "learning_rate": 3.574015831152092e-06, - "loss": 0.6882, - "num_input_tokens_seen": 41190160, - "step": 1957 - }, - { - "epoch": 0.23543557987134012, - "flos": 13542672215640.0, - "grad_norm": 2.6016207851719795, - "learning_rate": 3.573535130749316e-06, - "loss": 0.8123, - "num_input_tokens_seen": 41207830, - "step": 1958 - }, - { - "epoch": 0.2355558227619792, - "flos": 18076985317440.0, - "grad_norm": 1.9036626297557209, - "learning_rate": 3.5730541916425127e-06, - "loss": 0.7166, - "num_input_tokens_seen": 41229030, - "step": 1959 - }, - { - "epoch": 0.23567606565261828, - "flos": 16053775826040.0, - "grad_norm": 2.046080456923934, - "learning_rate": 3.572573013904639e-06, - "loss": 0.8394, - "num_input_tokens_seen": 41248660, - "step": 1960 - }, - { - "epoch": 0.2357963085432574, - "flos": 9870157801320.0, - "grad_norm": 2.720202704618063, - "learning_rate": 3.572091597608689e-06, - "loss": 0.9057, - "num_input_tokens_seen": 41266505, - "step": 1961 - }, - { - "epoch": 0.23591655143389648, - "flos": 16159977026400.0, - "grad_norm": 2.8202704006444956, - "learning_rate": 3.571609942827694e-06, - "loss": 0.7036, - "num_input_tokens_seen": 41285340, - "step": 1962 - }, - { - "epoch": 0.23603679432453556, - "flos": 12406869828000.0, - "grad_norm": 2.280489232963532, - "learning_rate": 3.57112804963472e-06, - "loss": 0.8612, - "num_input_tokens_seen": 41303275, - "step": 1963 - }, - { - "epoch": 0.23615703721517464, - "flos": 14001127063200.0, - "grad_norm": 2.2183939688420704, - "learning_rate": 3.57064591810287e-06, - "loss": 0.7463, - "num_input_tokens_seen": 41320495, - "step": 1964 - }, - { - "epoch": 0.23627728010581375, - "flos": 13947535733640.0, - "grad_norm": 3.8455445357171567, - "learning_rate": 3.570163548305284e-06, - "loss": 0.7839, - "num_input_tokens_seen": 41339145, - "step": 1965 - }, - { - "epoch": 0.23639752299645284, - "flos": 10382868837600.0, - "grad_norm": 3.0034944527353975, - "learning_rate": 3.569680940315135e-06, - "loss": 0.6809, - "num_input_tokens_seen": 41355265, - "step": 1966 - }, - { - "epoch": 0.23651776588709192, - "flos": 16755813643560.0, - "grad_norm": 3.073776349004783, - "learning_rate": 3.5691980942056356e-06, - "loss": 0.7977, - "num_input_tokens_seen": 41374355, - "step": 1967 - }, - { - "epoch": 0.23663800877773103, - "flos": 13596548484840.0, - "grad_norm": 3.087005816613276, - "learning_rate": 3.5687150100500332e-06, - "loss": 0.7724, - "num_input_tokens_seen": 41393775, - "step": 1968 - }, - { - "epoch": 0.2367582516683701, - "flos": 18726033344640.0, - "grad_norm": 2.059287951283766, - "learning_rate": 3.568231687921611e-06, - "loss": 0.7333, - "num_input_tokens_seen": 41413670, - "step": 1969 - }, - { - "epoch": 0.2368784945590092, - "flos": 17053114582920.0, - "grad_norm": 1.6648154547661866, - "learning_rate": 3.5677481278936883e-06, - "loss": 0.7993, - "num_input_tokens_seen": 41432970, - "step": 1970 - }, - { - "epoch": 0.23699873744964828, - "flos": 51520113925200.0, - "grad_norm": 0.833403552593718, - "learning_rate": 3.5672643300396214e-06, - "loss": 0.5864, - "num_input_tokens_seen": 41501835, - "step": 1971 - }, - { - "epoch": 0.2371189803402874, - "flos": 
15972518182800.0, - "grad_norm": 5.420340109144786, - "learning_rate": 3.566780294432802e-06, - "loss": 0.6641, - "num_input_tokens_seen": 41518730, - "step": 1972 - }, - { - "epoch": 0.23723922323092647, - "flos": 16026932671320.0, - "grad_norm": 2.8864397422570858, - "learning_rate": 3.566296021146657e-06, - "loss": 0.7152, - "num_input_tokens_seen": 41537830, - "step": 1973 - }, - { - "epoch": 0.23735946612156555, - "flos": 24019964748480.0, - "grad_norm": 2.0694435455297557, - "learning_rate": 3.565811510254652e-06, - "loss": 0.7087, - "num_input_tokens_seen": 41558430, - "step": 1974 - }, - { - "epoch": 0.23747970901220466, - "flos": 52028234267280.0, - "grad_norm": 0.7843668371132964, - "learning_rate": 3.5653267618302845e-06, - "loss": 0.5855, - "num_input_tokens_seen": 41625730, - "step": 1975 - }, - { - "epoch": 0.23759995190284375, - "flos": 15243352270920.0, - "grad_norm": 2.1158455399888187, - "learning_rate": 3.564841775947093e-06, - "loss": 0.8425, - "num_input_tokens_seen": 41646340, - "step": 1976 - }, - { - "epoch": 0.23772019479348283, - "flos": 24176622942360.0, - "grad_norm": 2.6505834836795987, - "learning_rate": 3.5643565526786475e-06, - "loss": 0.7397, - "num_input_tokens_seen": 41666000, - "step": 1977 - }, - { - "epoch": 0.2378404376841219, - "flos": 23748905424600.0, - "grad_norm": 2.5861377465660773, - "learning_rate": 3.5638710920985574e-06, - "loss": 0.7534, - "num_input_tokens_seen": 41687180, - "step": 1978 - }, - { - "epoch": 0.23796068057476102, - "flos": 16832195652960.0, - "grad_norm": 4.879419524267085, - "learning_rate": 3.5633853942804655e-06, - "loss": 0.7876, - "num_input_tokens_seen": 41705225, - "step": 1979 - }, - { - "epoch": 0.2380809234654001, - "flos": 9790071576600.0, - "grad_norm": 4.170250737178462, - "learning_rate": 3.5628994592980527e-06, - "loss": 0.7473, - "num_input_tokens_seen": 41722850, - "step": 1980 - }, - { - "epoch": 0.2382011663560392, - "flos": 12299212269480.0, - "grad_norm": 2.608345056899499, - "learning_rate": 3.562413287225034e-06, - "loss": 0.6934, - "num_input_tokens_seen": 41740680, - "step": 1981 - }, - { - "epoch": 0.2383214092466783, - "flos": 13460686393320.0, - "grad_norm": 3.101499465570891, - "learning_rate": 3.5619268781351623e-06, - "loss": 0.8689, - "num_input_tokens_seen": 41758470, - "step": 1982 - }, - { - "epoch": 0.23844165213731738, - "flos": 14433720214800.0, - "grad_norm": 3.133210801373295, - "learning_rate": 3.5614402321022256e-06, - "loss": 0.7568, - "num_input_tokens_seen": 41776020, - "step": 1983 - }, - { - "epoch": 0.23856189502795647, - "flos": 17106515952720.0, - "grad_norm": 2.4440761768000825, - "learning_rate": 3.5609533492000463e-06, - "loss": 0.8443, - "num_input_tokens_seen": 41794630, - "step": 1984 - }, - { - "epoch": 0.23868213791859555, - "flos": 17185968978240.0, - "grad_norm": 6.218210168152288, - "learning_rate": 3.560466229502485e-06, - "loss": 0.7602, - "num_input_tokens_seen": 41813695, - "step": 1985 - }, - { - "epoch": 0.23880238080923466, - "flos": 12111025246800.0, - "grad_norm": 3.1709835509827276, - "learning_rate": 3.5599788730834384e-06, - "loss": 0.8836, - "num_input_tokens_seen": 41831375, - "step": 1986 - }, - { - "epoch": 0.23892262369987374, - "flos": 12652257415680.0, - "grad_norm": 4.9163754694854225, - "learning_rate": 3.559491280016836e-06, - "loss": 0.7777, - "num_input_tokens_seen": 41849040, - "step": 1987 - }, - { - "epoch": 0.23904286659051283, - "flos": 16324011990960.0, - "grad_norm": 2.233526749087998, - "learning_rate": 3.5590034503766465e-06, - 
"loss": 0.696, - "num_input_tokens_seen": 41868425, - "step": 1988 - }, - { - "epoch": 0.23916310948115194, - "flos": 15486428681520.0, - "grad_norm": 2.8677733443863636, - "learning_rate": 3.558515384236874e-06, - "loss": 0.7948, - "num_input_tokens_seen": 41885575, - "step": 1989 - }, - { - "epoch": 0.23928335237179102, - "flos": 10275812818320.0, - "grad_norm": 1.8265314574332647, - "learning_rate": 3.558027081671556e-06, - "loss": 0.814, - "num_input_tokens_seen": 41902280, - "step": 1990 - }, - { - "epoch": 0.2394035952624301, - "flos": 17404323451440.0, - "grad_norm": 2.19938049263687, - "learning_rate": 3.557538542754769e-06, - "loss": 0.668, - "num_input_tokens_seen": 41921695, - "step": 1991 - }, - { - "epoch": 0.2395238381530692, - "flos": 17727264467040.0, - "grad_norm": 2.2028142831713704, - "learning_rate": 3.557049767560623e-06, - "loss": 0.6503, - "num_input_tokens_seen": 41940330, - "step": 1992 - }, - { - "epoch": 0.2396440810437083, - "flos": 18532907368200.0, - "grad_norm": 2.174154718068184, - "learning_rate": 3.5565607561632655e-06, - "loss": 0.838, - "num_input_tokens_seen": 41958890, - "step": 1993 - }, - { - "epoch": 0.23976432393434738, - "flos": 20936163460320.0, - "grad_norm": 2.4738839764044993, - "learning_rate": 3.5560715086368787e-06, - "loss": 0.7672, - "num_input_tokens_seen": 41976480, - "step": 1994 - }, - { - "epoch": 0.23988456682498646, - "flos": 14239454479800.0, - "grad_norm": 1.898631772748468, - "learning_rate": 3.5555820250556816e-06, - "loss": 0.8056, - "num_input_tokens_seen": 41993400, - "step": 1995 - }, - { - "epoch": 0.24000480971562557, - "flos": 14811582278280.0, - "grad_norm": 2.619296462552537, - "learning_rate": 3.5550923054939278e-06, - "loss": 0.6728, - "num_input_tokens_seen": 42012575, - "step": 1996 - }, - { - "epoch": 0.24012505260626466, - "flos": 18642211244640.0, - "grad_norm": 4.353741687434867, - "learning_rate": 3.5546023500259083e-06, - "loss": 0.7114, - "num_input_tokens_seen": 42033390, - "step": 1997 - }, - { - "epoch": 0.24024529549690374, - "flos": 11322759172320.0, - "grad_norm": 2.867168873406799, - "learning_rate": 3.5541121587259477e-06, - "loss": 0.7876, - "num_input_tokens_seen": 42050945, - "step": 1998 - }, - { - "epoch": 0.24036553838754285, - "flos": 42093440301720.0, - "grad_norm": 0.8023684519203281, - "learning_rate": 3.553621731668408e-06, - "loss": 0.5834, - "num_input_tokens_seen": 42113875, - "step": 1999 - }, - { - "epoch": 0.24048578127818193, - "flos": 18292142134680.0, - "grad_norm": 1.9314238115136737, - "learning_rate": 3.553131068927688e-06, - "loss": 0.8218, - "num_input_tokens_seen": 42132000, - "step": 2000 - }, - { - "epoch": 0.24060602416882101, - "flos": 17079577818120.0, - "grad_norm": 2.329697455545462, - "learning_rate": 3.552640170578219e-06, - "loss": 0.7874, - "num_input_tokens_seen": 42151970, - "step": 2001 - }, - { - "epoch": 0.2407262670594601, - "flos": 10302307713480.0, - "grad_norm": 3.2549417045752302, - "learning_rate": 3.5521490366944703e-06, - "loss": 0.7629, - "num_input_tokens_seen": 42169340, - "step": 2002 - }, - { - "epoch": 0.2408465099500992, - "flos": 9924857229480.0, - "grad_norm": 4.654507509396596, - "learning_rate": 3.5516576673509474e-06, - "loss": 0.7806, - "num_input_tokens_seen": 42187060, - "step": 2003 - }, - { - "epoch": 0.2409667528407383, - "flos": 22938703489200.0, - "grad_norm": 1.7573132971710146, - "learning_rate": 3.5511660626221896e-06, - "loss": 0.8457, - "num_input_tokens_seen": 42207420, - "step": 2004 - }, - { - "epoch": 0.24108699573137737, 
- "flos": 16242564387960.0, - "grad_norm": 2.7069258235862677, - "learning_rate": 3.5506742225827744e-06, - "loss": 0.8539, - "num_input_tokens_seen": 42223995, - "step": 2005 - }, - { - "epoch": 0.24120723862201648, - "flos": 19130991842520.0, - "grad_norm": 2.748865240230549, - "learning_rate": 3.5501821473073116e-06, - "loss": 0.8816, - "num_input_tokens_seen": 42240300, - "step": 2006 - }, - { - "epoch": 0.24132748151265557, - "flos": 13596896744400.0, - "grad_norm": 2.230793249572642, - "learning_rate": 3.54968983687045e-06, - "loss": 0.8522, - "num_input_tokens_seen": 42256890, - "step": 2007 - }, - { - "epoch": 0.24144772440329465, - "flos": 11112224709240.0, - "grad_norm": 4.447839859858414, - "learning_rate": 3.549197291346872e-06, - "loss": 0.8743, - "num_input_tokens_seen": 42273135, - "step": 2008 - }, - { - "epoch": 0.24156796729393373, - "flos": 17592700433880.0, - "grad_norm": 2.494950917894506, - "learning_rate": 3.548704510811297e-06, - "loss": 0.7816, - "num_input_tokens_seen": 42292050, - "step": 2009 - }, - { - "epoch": 0.24168821018457284, - "flos": 19267170533640.0, - "grad_norm": 3.563611576599233, - "learning_rate": 3.5482114953384787e-06, - "loss": 0.7244, - "num_input_tokens_seen": 42311000, - "step": 2010 - }, - { - "epoch": 0.24180845307521193, - "flos": 13299532485120.0, - "grad_norm": 2.5871446448411186, - "learning_rate": 3.5477182450032077e-06, - "loss": 0.8226, - "num_input_tokens_seen": 42329320, - "step": 2011 - }, - { - "epoch": 0.241928695965851, - "flos": 14947032790320.0, - "grad_norm": 2.2433031875187073, - "learning_rate": 3.5472247598803097e-06, - "loss": 0.8117, - "num_input_tokens_seen": 42348385, - "step": 2012 - }, - { - "epoch": 0.24204893885649012, - "flos": 18726603223920.0, - "grad_norm": 2.166986686352301, - "learning_rate": 3.546731040044645e-06, - "loss": 0.8329, - "num_input_tokens_seen": 42363275, - "step": 2013 - }, - { - "epoch": 0.2421691817471292, - "flos": 22503102641400.0, - "grad_norm": 1.9328288713654622, - "learning_rate": 3.546237085571112e-06, - "loss": 0.7383, - "num_input_tokens_seen": 42381430, - "step": 2014 - }, - { - "epoch": 0.24228942463776829, - "flos": 16053965785800.0, - "grad_norm": 2.214792079423738, - "learning_rate": 3.5457428965346425e-06, - "loss": 0.7011, - "num_input_tokens_seen": 42400090, - "step": 2015 - }, - { - "epoch": 0.2424096675284074, - "flos": 24963875898120.0, - "grad_norm": 2.145000135515115, - "learning_rate": 3.545248473010205e-06, - "loss": 0.7394, - "num_input_tokens_seen": 42422615, - "step": 2016 - }, - { - "epoch": 0.24252991041904648, - "flos": 15838207429320.0, - "grad_norm": 4.33013494761431, - "learning_rate": 3.544753815072802e-06, - "loss": 0.8529, - "num_input_tokens_seen": 42440990, - "step": 2017 - }, - { - "epoch": 0.24265015330968556, - "flos": 15998221578960.0, - "grad_norm": 3.33179397006477, - "learning_rate": 3.544258922797474e-06, - "loss": 0.8774, - "num_input_tokens_seen": 42458830, - "step": 2018 - }, - { - "epoch": 0.24277039620032465, - "flos": 18779561354280.0, - "grad_norm": 2.4841491788521375, - "learning_rate": 3.543763796259295e-06, - "loss": 0.7692, - "num_input_tokens_seen": 42478505, - "step": 2019 - }, - { - "epoch": 0.24289063909096376, - "flos": 19266980573880.0, - "grad_norm": 3.1136079279380144, - "learning_rate": 3.5432684355333754e-06, - "loss": 0.8899, - "num_input_tokens_seen": 42496880, - "step": 2020 - }, - { - "epoch": 0.24301088198160284, - "flos": 18369315643080.0, - "grad_norm": 3.040848939966568, - "learning_rate": 3.5427728406948613e-06, - 
"loss": 0.7466, - "num_input_tokens_seen": 42515715, - "step": 2021 - }, - { - "epoch": 0.24313112487224192, - "flos": 51551674413960.0, - "grad_norm": 0.767779957101449, - "learning_rate": 3.542277011818934e-06, - "loss": 0.5956, - "num_input_tokens_seen": 42579270, - "step": 2022 - }, - { - "epoch": 0.24325136776288103, - "flos": 29906060214120.0, - "grad_norm": 3.2751060002768333, - "learning_rate": 3.5417809489808104e-06, - "loss": 0.7162, - "num_input_tokens_seen": 42600600, - "step": 2023 - }, - { - "epoch": 0.24337161065352012, - "flos": 18349216059840.0, - "grad_norm": 2.2741072150045376, - "learning_rate": 3.5412846522557422e-06, - "loss": 0.706, - "num_input_tokens_seen": 42621210, - "step": 2024 - }, - { - "epoch": 0.2434918535441592, - "flos": 13623676579200.0, - "grad_norm": 2.9721518851857622, - "learning_rate": 3.540788121719018e-06, - "loss": 0.7306, - "num_input_tokens_seen": 42639350, - "step": 2025 - }, - { - "epoch": 0.24361209643479828, - "flos": 17511981009960.0, - "grad_norm": 2.372797697823167, - "learning_rate": 3.5402913574459604e-06, - "loss": 0.8079, - "num_input_tokens_seen": 42658975, - "step": 2026 - }, - { - "epoch": 0.2437323393254374, - "flos": 21021536898360.0, - "grad_norm": 2.0826363605674576, - "learning_rate": 3.5397943595119297e-06, - "loss": 0.8367, - "num_input_tokens_seen": 42680115, - "step": 2027 - }, - { - "epoch": 0.24385258221607647, - "flos": 17241998124720.0, - "grad_norm": 3.613059405140702, - "learning_rate": 3.5392971279923177e-06, - "loss": 0.7432, - "num_input_tokens_seen": 42698055, - "step": 2028 - }, - { - "epoch": 0.24397282510671556, - "flos": 18563296438440.0, - "grad_norm": 3.1593915959647725, - "learning_rate": 3.5387996629625557e-06, - "loss": 0.811, - "num_input_tokens_seen": 42715365, - "step": 2029 - }, - { - "epoch": 0.24409306799735467, - "flos": 48062376408600.0, - "grad_norm": 0.8431154620152115, - "learning_rate": 3.5383019644981083e-06, - "loss": 0.5922, - "num_input_tokens_seen": 42778780, - "step": 2030 - }, - { - "epoch": 0.24421331088799375, - "flos": 14271964767360.0, - "grad_norm": 3.2351254684431985, - "learning_rate": 3.5378040326744763e-06, - "loss": 0.6975, - "num_input_tokens_seen": 42797985, - "step": 2031 - }, - { - "epoch": 0.24433355377863283, - "flos": 15405234358200.0, - "grad_norm": 2.7484913068734165, - "learning_rate": 3.5373058675671946e-06, - "loss": 0.8423, - "num_input_tokens_seen": 42815710, - "step": 2032 - }, - { - "epoch": 0.24445379666927192, - "flos": 16566075282840.0, - "grad_norm": 2.370655100938342, - "learning_rate": 3.536807469251836e-06, - "loss": 0.7033, - "num_input_tokens_seen": 42834585, - "step": 2033 - }, - { - "epoch": 0.24457403955991103, - "flos": 15540779850120.0, - "grad_norm": 2.6623758027387017, - "learning_rate": 3.5363088378040055e-06, - "loss": 0.8032, - "num_input_tokens_seen": 42853195, - "step": 2034 - }, - { - "epoch": 0.2446942824505501, - "flos": 49400228390040.0, - "grad_norm": 0.8082028978512473, - "learning_rate": 3.5358099732993463e-06, - "loss": 0.6743, - "num_input_tokens_seen": 42912025, - "step": 2035 - }, - { - "epoch": 0.2448145253411892, - "flos": 14918574977640.0, - "grad_norm": 3.56242303597819, - "learning_rate": 3.535310875813535e-06, - "loss": 0.8704, - "num_input_tokens_seen": 42930140, - "step": 2036 - }, - { - "epoch": 0.2449347682318283, - "flos": 21130682475000.0, - "grad_norm": 2.4161044376612786, - "learning_rate": 3.5348115454222843e-06, - "loss": 0.8054, - "num_input_tokens_seen": 42952445, - "step": 2037 - }, - { - "epoch": 
0.2450550111224674, - "flos": 16486179017880.0, - "grad_norm": 2.4266035618105004, - "learning_rate": 3.5343119822013425e-06, - "loss": 0.831, - "num_input_tokens_seen": 42971275, - "step": 2038 - }, - { - "epoch": 0.24517525401310647, - "flos": 15915919157040.0, - "grad_norm": 2.875191889454351, - "learning_rate": 3.533812186226493e-06, - "loss": 0.7654, - "num_input_tokens_seen": 42991705, - "step": 2039 - }, - { - "epoch": 0.24529549690374555, - "flos": 18346493303280.0, - "grad_norm": 2.7768818703273874, - "learning_rate": 3.5333121575735545e-06, - "loss": 0.7482, - "num_input_tokens_seen": 43011065, - "step": 2040 - }, - { - "epoch": 0.24541573979438466, - "flos": 23586706737720.0, - "grad_norm": 2.35587703272534, - "learning_rate": 3.532811896318381e-06, - "loss": 0.7371, - "num_input_tokens_seen": 43032855, - "step": 2041 - }, - { - "epoch": 0.24553598268502375, - "flos": 23019771172680.0, - "grad_norm": 2.5719444404581266, - "learning_rate": 3.5323114025368615e-06, - "loss": 0.7864, - "num_input_tokens_seen": 43047640, - "step": 2042 - }, - { - "epoch": 0.24565622557566283, - "flos": 10194428535240.0, - "grad_norm": 2.7080953248909627, - "learning_rate": 3.53181067630492e-06, - "loss": 0.7953, - "num_input_tokens_seen": 43064830, - "step": 2043 - }, - { - "epoch": 0.24577646846630194, - "flos": 12084562011600.0, - "grad_norm": 6.064210001668473, - "learning_rate": 3.5313097176985175e-06, - "loss": 0.7445, - "num_input_tokens_seen": 43082860, - "step": 2044 - }, - { - "epoch": 0.24589671135694102, - "flos": 13731967336920.0, - "grad_norm": 2.3457061742148166, - "learning_rate": 3.5308085267936482e-06, - "loss": 0.7875, - "num_input_tokens_seen": 43100295, - "step": 2045 - }, - { - "epoch": 0.2460169542475801, - "flos": 14569075746960.0, - "grad_norm": 2.2541144157554216, - "learning_rate": 3.530307103666342e-06, - "loss": 0.8923, - "num_input_tokens_seen": 43119095, - "step": 2046 - }, - { - "epoch": 0.24613719713821922, - "flos": 17701592730840.0, - "grad_norm": 2.741034719186736, - "learning_rate": 3.5298054483926658e-06, - "loss": 0.7799, - "num_input_tokens_seen": 43139510, - "step": 2047 - }, - { - "epoch": 0.2462574400288583, - "flos": 22177913768640.0, - "grad_norm": 2.795903724508187, - "learning_rate": 3.5293035610487187e-06, - "loss": 0.8147, - "num_input_tokens_seen": 43158595, - "step": 2048 - }, - { - "epoch": 0.24637768291949738, - "flos": 50841436666800.0, - "grad_norm": 0.6981451647381157, - "learning_rate": 3.5288014417106374e-06, - "loss": 0.6368, - "num_input_tokens_seen": 43224335, - "step": 2049 - }, - { - "epoch": 0.24649792581013646, - "flos": 25259277239880.0, - "grad_norm": 2.055895780066707, - "learning_rate": 3.528299090454593e-06, - "loss": 0.7433, - "num_input_tokens_seen": 43244590, - "step": 2050 - }, - { - "epoch": 0.24661816870077558, - "flos": 14377691068320.0, - "grad_norm": 3.2521293706932366, - "learning_rate": 3.527796507356792e-06, - "loss": 0.8085, - "num_input_tokens_seen": 43258200, - "step": 2051 - }, - { - "epoch": 0.24673841159141466, - "flos": 14614942046280.0, - "grad_norm": 3.324799644915917, - "learning_rate": 3.527293692493475e-06, - "loss": 0.8615, - "num_input_tokens_seen": 43273785, - "step": 2052 - }, - { - "epoch": 0.24685865448205374, - "flos": 15832192036920.0, - "grad_norm": 3.374259867575505, - "learning_rate": 3.52679064594092e-06, - "loss": 0.7116, - "num_input_tokens_seen": 43290845, - "step": 2053 - }, - { - "epoch": 0.24697889737269285, - "flos": 13104063671640.0, - "grad_norm": 2.648818177177945, - "learning_rate": 
3.5262873677754375e-06, - "loss": 0.7245, - "num_input_tokens_seen": 43308570, - "step": 2054 - }, - { - "epoch": 0.24709914026333193, - "flos": 20049991095000.0, - "grad_norm": 1.9998937817024285, - "learning_rate": 3.5257838580733745e-06, - "loss": 0.7893, - "num_input_tokens_seen": 43327895, - "step": 2055 - }, - { - "epoch": 0.24721938315397102, - "flos": 14077509072600.0, - "grad_norm": 3.41560171944798, - "learning_rate": 3.5252801169111138e-06, - "loss": 0.8486, - "num_input_tokens_seen": 43345280, - "step": 2056 - }, - { - "epoch": 0.2473396260446101, - "flos": 16972268519160.0, - "grad_norm": 2.2581093140961883, - "learning_rate": 3.524776144365072e-06, - "loss": 0.7759, - "num_input_tokens_seen": 43363455, - "step": 2057 - }, - { - "epoch": 0.2474598689352492, - "flos": 15460028766240.0, - "grad_norm": 2.4525169056223204, - "learning_rate": 3.5242719405117016e-06, - "loss": 0.7862, - "num_input_tokens_seen": 43382980, - "step": 2058 - }, - { - "epoch": 0.2475801118258883, - "flos": 15833743374960.0, - "grad_norm": 4.661728471426515, - "learning_rate": 3.5237675054274893e-06, - "loss": 0.7187, - "num_input_tokens_seen": 43401900, - "step": 2059 - }, - { - "epoch": 0.24770035471652738, - "flos": 16593804916440.0, - "grad_norm": 5.022827301221931, - "learning_rate": 3.5232628391889584e-06, - "loss": 0.7861, - "num_input_tokens_seen": 43419910, - "step": 2060 - }, - { - "epoch": 0.2478205976071665, - "flos": 16215879533040.0, - "grad_norm": 3.2836717660309485, - "learning_rate": 3.522757941872666e-06, - "loss": 0.6269, - "num_input_tokens_seen": 43437785, - "step": 2061 - }, - { - "epoch": 0.24794084049780557, - "flos": 18294928211160.0, - "grad_norm": 1.8931919673184276, - "learning_rate": 3.5222528135552042e-06, - "loss": 0.8121, - "num_input_tokens_seen": 43458965, - "step": 2062 - }, - { - "epoch": 0.24806108338844465, - "flos": 13353725353920.0, - "grad_norm": 3.146570300415445, - "learning_rate": 3.521747454313201e-06, - "loss": 0.7928, - "num_input_tokens_seen": 43477365, - "step": 2063 - }, - { - "epoch": 0.24818132627908374, - "flos": 14083366165200.0, - "grad_norm": 3.4398193857550443, - "learning_rate": 3.521241864223319e-06, - "loss": 0.6567, - "num_input_tokens_seen": 43496045, - "step": 2064 - }, - { - "epoch": 0.24830156916972285, - "flos": 51834190152000.0, - "grad_norm": 0.80807338316684, - "learning_rate": 3.5207360433622552e-06, - "loss": 0.6229, - "num_input_tokens_seen": 43557765, - "step": 2065 - }, - { - "epoch": 0.24842181206036193, - "flos": 29718981290040.0, - "grad_norm": 2.1675945887006534, - "learning_rate": 3.5202299918067437e-06, - "loss": 0.7324, - "num_input_tokens_seen": 43581080, - "step": 2066 - }, - { - "epoch": 0.248542054951001, - "flos": 14675276947320.0, - "grad_norm": 2.748661322314704, - "learning_rate": 3.519723709633551e-06, - "loss": 0.6835, - "num_input_tokens_seen": 43599560, - "step": 2067 - }, - { - "epoch": 0.24866229784164012, - "flos": 17215091650080.0, - "grad_norm": 2.5109022839491417, - "learning_rate": 3.519217196919479e-06, - "loss": 0.8074, - "num_input_tokens_seen": 43618265, - "step": 2068 - }, - { - "epoch": 0.2487825407322792, - "flos": 14514661258440.0, - "grad_norm": 2.1109546539264734, - "learning_rate": 3.518710453741367e-06, - "loss": 0.7135, - "num_input_tokens_seen": 43637185, - "step": 2069 - }, - { - "epoch": 0.2489027836229183, - "flos": 16107652095240.0, - "grad_norm": 2.5198942174356107, - "learning_rate": 3.518203480176086e-06, - "loss": 0.656, - "num_input_tokens_seen": 43655835, - "step": 2070 - }, - { - 
"epoch": 0.2490230265135574, - "flos": 17052481383720.0, - "grad_norm": 2.367363750077944, - "learning_rate": 3.517696276300545e-06, - "loss": 0.7787, - "num_input_tokens_seen": 43677095, - "step": 2071 - }, - { - "epoch": 0.24914326940419648, - "flos": 14486045145960.0, - "grad_norm": 7.096956712999005, - "learning_rate": 3.517188842191685e-06, - "loss": 0.6808, - "num_input_tokens_seen": 43694965, - "step": 2072 - }, - { - "epoch": 0.24926351229483557, - "flos": 14784074264400.0, - "grad_norm": 2.229161402140548, - "learning_rate": 3.5166811779264837e-06, - "loss": 0.7221, - "num_input_tokens_seen": 43715005, - "step": 2073 - }, - { - "epoch": 0.24938375518547465, - "flos": 17052671343480.0, - "grad_norm": 3.3675039899387524, - "learning_rate": 3.5161732835819545e-06, - "loss": 0.7609, - "num_input_tokens_seen": 43734035, - "step": 2074 - }, - { - "epoch": 0.24950399807611376, - "flos": 12625034341440.0, - "grad_norm": 4.269128722876629, - "learning_rate": 3.515665159235143e-06, - "loss": 0.8049, - "num_input_tokens_seen": 43752640, - "step": 2075 - }, - { - "epoch": 0.24962424096675284, - "flos": 13892678005680.0, - "grad_norm": 2.254012450785465, - "learning_rate": 3.5151568049631318e-06, - "loss": 0.7298, - "num_input_tokens_seen": 43771075, - "step": 2076 - }, - { - "epoch": 0.24974448385739192, - "flos": 24530142987960.0, - "grad_norm": 2.549712770778399, - "learning_rate": 3.5146482208430385e-06, - "loss": 0.781, - "num_input_tokens_seen": 43792625, - "step": 2077 - }, - { - "epoch": 0.24986472674803104, - "flos": 22020432415800.0, - "grad_norm": 3.1569583211328003, - "learning_rate": 3.514139406952014e-06, - "loss": 0.6662, - "num_input_tokens_seen": 43814370, - "step": 2078 - }, - { - "epoch": 0.24998496963867012, - "flos": 19508347346640.0, - "grad_norm": 3.202477515303261, - "learning_rate": 3.5136303633672454e-06, - "loss": 0.8283, - "num_input_tokens_seen": 43834220, - "step": 2079 - }, - { - "epoch": 0.25010521252930923, - "flos": 17245227440640.0, - "grad_norm": 2.3363103154429563, - "learning_rate": 3.5131210901659544e-06, - "loss": 0.7382, - "num_input_tokens_seen": 43855695, - "step": 2080 - }, - { - "epoch": 0.2502254554199483, - "flos": 17161278700800.0, - "grad_norm": 3.3041169235378427, - "learning_rate": 3.5126115874253967e-06, - "loss": 0.7995, - "num_input_tokens_seen": 43874970, - "step": 2081 - }, - { - "epoch": 0.2503456983105874, - "flos": 21097887247800.0, - "grad_norm": 2.1912527319569595, - "learning_rate": 3.5121018552228644e-06, - "loss": 0.7885, - "num_input_tokens_seen": 43893195, - "step": 2082 - }, - { - "epoch": 0.2504659412012265, - "flos": 13704364343160.0, - "grad_norm": 3.4891521737874522, - "learning_rate": 3.5115918936356827e-06, - "loss": 0.7583, - "num_input_tokens_seen": 43909670, - "step": 2083 - }, - { - "epoch": 0.25058618409186556, - "flos": 12300605307720.0, - "grad_norm": 2.3809431904660823, - "learning_rate": 3.5110817027412123e-06, - "loss": 0.7673, - "num_input_tokens_seen": 43928480, - "step": 2084 - }, - { - "epoch": 0.25070642698250467, - "flos": 17889653113680.0, - "grad_norm": 2.6786225541094875, - "learning_rate": 3.5105712826168493e-06, - "loss": 0.6655, - "num_input_tokens_seen": 43947850, - "step": 2085 - }, - { - "epoch": 0.2508266698731437, - "flos": 14806833284280.0, - "grad_norm": 2.718311230561635, - "learning_rate": 3.5100606333400235e-06, - "loss": 0.6926, - "num_input_tokens_seen": 43964705, - "step": 2086 - }, - { - "epoch": 0.25094691276378284, - "flos": 14056048111080.0, - "grad_norm": 3.180239617778707, - 
"learning_rate": 3.5095497549882006e-06, - "loss": 0.7576, - "num_input_tokens_seen": 43982870, - "step": 2087 - }, - { - "epoch": 0.25106715565442195, - "flos": 19752943435320.0, - "grad_norm": 3.4175841022403866, - "learning_rate": 3.50903864763888e-06, - "loss": 0.6998, - "num_input_tokens_seen": 44003380, - "step": 2088 - }, - { - "epoch": 0.251187398545061, - "flos": 35602353999120.0, - "grad_norm": 2.8693325739378555, - "learning_rate": 3.5085273113695965e-06, - "loss": 0.7377, - "num_input_tokens_seen": 44027670, - "step": 2089 - }, - { - "epoch": 0.2513076414357001, - "flos": 19806851364480.0, - "grad_norm": 3.4820150414787805, - "learning_rate": 3.508015746257919e-06, - "loss": 0.7692, - "num_input_tokens_seen": 44046430, - "step": 2090 - }, - { - "epoch": 0.2514278843263392, - "flos": 14217518618880.0, - "grad_norm": 2.801011080070679, - "learning_rate": 3.5075039523814518e-06, - "loss": 0.8136, - "num_input_tokens_seen": 44065340, - "step": 2091 - }, - { - "epoch": 0.2515481272169783, - "flos": 12294178335840.0, - "grad_norm": 4.226561371808422, - "learning_rate": 3.506991929817834e-06, - "loss": 0.8016, - "num_input_tokens_seen": 44081780, - "step": 2092 - }, - { - "epoch": 0.2516683701076174, - "flos": 17376752117640.0, - "grad_norm": 2.0336793896891385, - "learning_rate": 3.506479678644738e-06, - "loss": 0.8074, - "num_input_tokens_seen": 44101895, - "step": 2093 - }, - { - "epoch": 0.2517886129982565, - "flos": 20265527831760.0, - "grad_norm": 3.269073417812357, - "learning_rate": 3.505967198939873e-06, - "loss": 0.7196, - "num_input_tokens_seen": 44118655, - "step": 2094 - }, - { - "epoch": 0.25190885588889556, - "flos": 28012634101920.0, - "grad_norm": 3.001480055336535, - "learning_rate": 3.5054544907809813e-06, - "loss": 0.7585, - "num_input_tokens_seen": 44138875, - "step": 2095 - }, - { - "epoch": 0.25202909877953467, - "flos": 16293907860360.0, - "grad_norm": 2.700282106849854, - "learning_rate": 3.50494155424584e-06, - "loss": 0.7835, - "num_input_tokens_seen": 44157500, - "step": 2096 - }, - { - "epoch": 0.2521493416701738, - "flos": 15918230334120.0, - "grad_norm": 1.9984010026475698, - "learning_rate": 3.504428389412262e-06, - "loss": 0.8206, - "num_input_tokens_seen": 44178030, - "step": 2097 - }, - { - "epoch": 0.25226958456081283, - "flos": 20347703613840.0, - "grad_norm": 9.074245654824514, - "learning_rate": 3.5039149963580927e-06, - "loss": 0.7168, - "num_input_tokens_seen": 44197770, - "step": 2098 - }, - { - "epoch": 0.25238982745145194, - "flos": 22556219071560.0, - "grad_norm": 3.82994764884847, - "learning_rate": 3.503401375161215e-06, - "loss": 0.6732, - "num_input_tokens_seen": 44217235, - "step": 2099 - }, - { - "epoch": 0.252510070342091, - "flos": 14811582278280.0, - "grad_norm": 3.2884757272857823, - "learning_rate": 3.502887525899544e-06, - "loss": 0.8335, - "num_input_tokens_seen": 44235935, - "step": 2100 - }, - { - "epoch": 0.2526303132327301, - "flos": 16647744505560.0, - "grad_norm": 2.0126255399038566, - "learning_rate": 3.50237344865103e-06, - "loss": 0.8143, - "num_input_tokens_seen": 44256655, - "step": 2101 - }, - { - "epoch": 0.2527505561233692, - "flos": 22210139116560.0, - "grad_norm": 3.047287820090462, - "learning_rate": 3.501859143493658e-06, - "loss": 0.7483, - "num_input_tokens_seen": 44277005, - "step": 2102 - }, - { - "epoch": 0.2528707990140083, - "flos": 43105501871160.0, - "grad_norm": 0.850590141977984, - "learning_rate": 3.5013446105054488e-06, - "loss": 0.6398, - "num_input_tokens_seen": 44329645, - "step": 2103 - }, - { 
- "epoch": 0.2529910419046474, - "flos": 18052263380040.0, - "grad_norm": 3.2387341595596926, - "learning_rate": 3.5008298497644555e-06, - "loss": 0.7421, - "num_input_tokens_seen": 44348410, - "step": 2104 - }, - { - "epoch": 0.2531112847952865, - "flos": 17457946440960.0, - "grad_norm": 2.318343743365583, - "learning_rate": 3.500314861348767e-06, - "loss": 0.8692, - "num_input_tokens_seen": 44368765, - "step": 2105 - }, - { - "epoch": 0.25323152768592555, - "flos": 11760702857160.0, - "grad_norm": 4.003989852404621, - "learning_rate": 3.499799645336507e-06, - "loss": 0.7589, - "num_input_tokens_seen": 44385380, - "step": 2106 - }, - { - "epoch": 0.25335177057656466, - "flos": 20835217813320.0, - "grad_norm": 1.70272409462675, - "learning_rate": 3.4992842018058336e-06, - "loss": 0.8577, - "num_input_tokens_seen": 44408000, - "step": 2107 - }, - { - "epoch": 0.25347201346720377, - "flos": 13725128785560.0, - "grad_norm": 2.541894172950698, - "learning_rate": 3.4987685308349384e-06, - "loss": 0.8572, - "num_input_tokens_seen": 44425450, - "step": 2108 - }, - { - "epoch": 0.2535922563578428, - "flos": 11517721426440.0, - "grad_norm": 3.1948801508290763, - "learning_rate": 3.4982526325020497e-06, - "loss": 0.6033, - "num_input_tokens_seen": 44442140, - "step": 2109 - }, - { - "epoch": 0.25371249924848194, - "flos": 11889251497920.0, - "grad_norm": 2.708862653994171, - "learning_rate": 3.4977365068854273e-06, - "loss": 0.8069, - "num_input_tokens_seen": 44457480, - "step": 2110 - }, - { - "epoch": 0.25383274213912105, - "flos": 15918103694280.0, - "grad_norm": 2.973248576879261, - "learning_rate": 3.4972201540633676e-06, - "loss": 0.707, - "num_input_tokens_seen": 44476555, - "step": 2111 - }, - { - "epoch": 0.2539529850297601, - "flos": 15648184128960.0, - "grad_norm": 2.113941928444856, - "learning_rate": 3.4967035741142008e-06, - "loss": 0.832, - "num_input_tokens_seen": 44495095, - "step": 2112 - }, - { - "epoch": 0.2540732279203992, - "flos": 18483906732840.0, - "grad_norm": 3.2870044929513145, - "learning_rate": 3.4961867671162917e-06, - "loss": 0.8075, - "num_input_tokens_seen": 44514745, - "step": 2113 - }, - { - "epoch": 0.2541934708110383, - "flos": 14190738784080.0, - "grad_norm": 4.210666400059826, - "learning_rate": 3.4956697331480402e-06, - "loss": 0.7447, - "num_input_tokens_seen": 44533035, - "step": 2114 - }, - { - "epoch": 0.2543137137016774, - "flos": 17537051206920.0, - "grad_norm": 1.917771774090804, - "learning_rate": 3.495152472287879e-06, - "loss": 0.7878, - "num_input_tokens_seen": 44553465, - "step": 2115 - }, - { - "epoch": 0.2544339565923165, - "flos": 18753889618080.0, - "grad_norm": 2.3488153271728516, - "learning_rate": 3.4946349846142766e-06, - "loss": 0.7108, - "num_input_tokens_seen": 44572325, - "step": 2116 - }, - { - "epoch": 0.25455419948295555, - "flos": 15864828964320.0, - "grad_norm": 2.5214703808432346, - "learning_rate": 3.4941172702057353e-06, - "loss": 0.7414, - "num_input_tokens_seen": 44592105, - "step": 2117 - }, - { - "epoch": 0.25467444237359466, - "flos": 19240200739080.0, - "grad_norm": 2.3711409084903114, - "learning_rate": 3.4935993291407924e-06, - "loss": 0.7871, - "num_input_tokens_seen": 44610650, - "step": 2118 - }, - { - "epoch": 0.25479468526423377, - "flos": 19779913229880.0, - "grad_norm": 2.630703769264939, - "learning_rate": 3.4930811614980183e-06, - "loss": 0.6902, - "num_input_tokens_seen": 44632065, - "step": 2119 - }, - { - "epoch": 0.2549149281548728, - "flos": 17186633837400.0, - "grad_norm": 2.761931458392727, - 
"learning_rate": 3.4925627673560198e-06, - "loss": 0.7788, - "num_input_tokens_seen": 44652445, - "step": 2120 - }, - { - "epoch": 0.25503517104551193, - "flos": 18915993325080.0, - "grad_norm": 2.7660079949484775, - "learning_rate": 3.4920441467934357e-06, - "loss": 0.8653, - "num_input_tokens_seen": 44672680, - "step": 2121 - }, - { - "epoch": 0.25515541393615104, - "flos": 19531422966120.0, - "grad_norm": 2.442749825608479, - "learning_rate": 3.491525299888941e-06, - "loss": 0.802, - "num_input_tokens_seen": 44691245, - "step": 2122 - }, - { - "epoch": 0.2552756568267901, - "flos": 48631464850920.0, - "grad_norm": 0.9939772316111495, - "learning_rate": 3.491006226721244e-06, - "loss": 0.6791, - "num_input_tokens_seen": 44755175, - "step": 2123 - }, - { - "epoch": 0.2553958997174292, - "flos": 13083330889200.0, - "grad_norm": 2.599876430215978, - "learning_rate": 3.4904869273690882e-06, - "loss": 0.7527, - "num_input_tokens_seen": 44772785, - "step": 2124 - }, - { - "epoch": 0.2555161426080683, - "flos": 17106895872240.0, - "grad_norm": 2.5377760224949606, - "learning_rate": 3.489967401911251e-06, - "loss": 0.874, - "num_input_tokens_seen": 44791805, - "step": 2125 - }, - { - "epoch": 0.2556363854987074, - "flos": 29878425560400.0, - "grad_norm": 2.177065458330819, - "learning_rate": 3.4894476504265428e-06, - "loss": 0.6813, - "num_input_tokens_seen": 44815765, - "step": 2126 - }, - { - "epoch": 0.2557566283893465, - "flos": 50156300776560.0, - "grad_norm": 0.7665260617926096, - "learning_rate": 3.4889276729938104e-06, - "loss": 0.5649, - "num_input_tokens_seen": 44874015, - "step": 2127 - }, - { - "epoch": 0.2558768712799856, - "flos": 16564713904560.0, - "grad_norm": 4.076891014515629, - "learning_rate": 3.488407469691934e-06, - "loss": 0.7814, - "num_input_tokens_seen": 44894430, - "step": 2128 - }, - { - "epoch": 0.25599711417062465, - "flos": 19348269877080.0, - "grad_norm": 3.1407057718546842, - "learning_rate": 3.487887040599828e-06, - "loss": 0.7805, - "num_input_tokens_seen": 44913950, - "step": 2129 - }, - { - "epoch": 0.25611735706126376, - "flos": 16724506434480.0, - "grad_norm": 3.039064853539818, - "learning_rate": 3.4873663857964407e-06, - "loss": 0.7441, - "num_input_tokens_seen": 44930885, - "step": 2130 - }, - { - "epoch": 0.2562375999519028, - "flos": 17106959192160.0, - "grad_norm": 2.4908648481914, - "learning_rate": 3.4868455053607556e-06, - "loss": 0.6525, - "num_input_tokens_seen": 44950220, - "step": 2131 - }, - { - "epoch": 0.2563578428425419, - "flos": 16728843849000.0, - "grad_norm": 3.0330171448235266, - "learning_rate": 3.486324399371789e-06, - "loss": 0.6911, - "num_input_tokens_seen": 44969240, - "step": 2132 - }, - { - "epoch": 0.25647808573318104, - "flos": 15838619008800.0, - "grad_norm": 6.004985645286561, - "learning_rate": 3.485803067908593e-06, - "loss": 0.7645, - "num_input_tokens_seen": 44988470, - "step": 2133 - }, - { - "epoch": 0.2565983286238201, - "flos": 24776163774840.0, - "grad_norm": 2.395507215335739, - "learning_rate": 3.485281511050253e-06, - "loss": 0.778, - "num_input_tokens_seen": 45010325, - "step": 2134 - }, - { - "epoch": 0.2567185715144592, - "flos": 11814009247080.0, - "grad_norm": 2.961715233754672, - "learning_rate": 3.484759728875889e-06, - "loss": 0.8856, - "num_input_tokens_seen": 45025410, - "step": 2135 - }, - { - "epoch": 0.2568388144050983, - "flos": 12733799998560.0, - "grad_norm": 2.143034866319944, - "learning_rate": 3.4842377214646543e-06, - "loss": 0.7925, - "num_input_tokens_seen": 45043425, - "step": 2136 - }, - 
{ - "epoch": 0.25695905729573737, - "flos": 15271366844160.0, - "grad_norm": 3.402176126934295, - "learning_rate": 3.483715488895737e-06, - "loss": 0.6617, - "num_input_tokens_seen": 45063475, - "step": 2137 - }, - { - "epoch": 0.2570793001863765, - "flos": 18105538110000.0, - "grad_norm": 2.303144966773927, - "learning_rate": 3.48319303124836e-06, - "loss": 0.7656, - "num_input_tokens_seen": 45083575, - "step": 2138 - }, - { - "epoch": 0.2571995430770156, - "flos": 19726068620640.0, - "grad_norm": 2.837322811022347, - "learning_rate": 3.4826703486017798e-06, - "loss": 0.6601, - "num_input_tokens_seen": 45102920, - "step": 2139 - }, - { - "epoch": 0.25731978596765465, - "flos": 14461038268920.0, - "grad_norm": 2.534891123493293, - "learning_rate": 3.4821474410352867e-06, - "loss": 0.7483, - "num_input_tokens_seen": 45121300, - "step": 2140 - }, - { - "epoch": 0.25744002885829376, - "flos": 52038962502360.0, - "grad_norm": 0.9020178400335043, - "learning_rate": 3.481624308628205e-06, - "loss": 0.6632, - "num_input_tokens_seen": 45182390, - "step": 2141 - }, - { - "epoch": 0.25756027174893287, - "flos": 13162245695400.0, - "grad_norm": 5.231483138892787, - "learning_rate": 3.481100951459893e-06, - "loss": 0.9856, - "num_input_tokens_seen": 45195130, - "step": 2142 - }, - { - "epoch": 0.2576805146395719, - "flos": 16593424996920.0, - "grad_norm": 1.953321035083858, - "learning_rate": 3.4805773696097453e-06, - "loss": 0.7665, - "num_input_tokens_seen": 45215740, - "step": 2143 - }, - { - "epoch": 0.25780075753021103, - "flos": 12003747607800.0, - "grad_norm": 2.403057705615355, - "learning_rate": 3.4800535631571874e-06, - "loss": 0.856, - "num_input_tokens_seen": 45230990, - "step": 2144 - }, - { - "epoch": 0.25792100042085014, - "flos": 16701684094680.0, - "grad_norm": 2.760312425096671, - "learning_rate": 3.4795295321816804e-06, - "loss": 0.7525, - "num_input_tokens_seen": 45249535, - "step": 2145 - }, - { - "epoch": 0.2580412433114892, - "flos": 13650361434120.0, - "grad_norm": 3.421039357333357, - "learning_rate": 3.47900527676272e-06, - "loss": 0.9028, - "num_input_tokens_seen": 45267590, - "step": 2146 - }, - { - "epoch": 0.2581614862021283, - "flos": 10383691996560.0, - "grad_norm": 2.505299863246944, - "learning_rate": 3.478480796979835e-06, - "loss": 0.8713, - "num_input_tokens_seen": 45285195, - "step": 2147 - }, - { - "epoch": 0.25828172909276736, - "flos": 21643995050520.0, - "grad_norm": 1.877043373566037, - "learning_rate": 3.4779560929125894e-06, - "loss": 0.7604, - "num_input_tokens_seen": 45306460, - "step": 2148 - }, - { - "epoch": 0.2584019719834065, - "flos": 49487501425680.0, - "grad_norm": 0.6924915524253239, - "learning_rate": 3.4774311646405783e-06, - "loss": 0.5771, - "num_input_tokens_seen": 45376085, - "step": 2149 - }, - { - "epoch": 0.2585222148740456, - "flos": 16755940283400.0, - "grad_norm": 2.1053978619646996, - "learning_rate": 3.476906012243435e-06, - "loss": 0.8173, - "num_input_tokens_seen": 45394715, - "step": 2150 - }, - { - "epoch": 0.25864245776468464, - "flos": 21207286104120.0, - "grad_norm": 2.850701188949266, - "learning_rate": 3.476380635800824e-06, - "loss": 0.8018, - "num_input_tokens_seen": 45415635, - "step": 2151 - }, - { - "epoch": 0.25876270065532375, - "flos": 10761047500680.0, - "grad_norm": 3.515706108005362, - "learning_rate": 3.475855035392444e-06, - "loss": 0.8371, - "num_input_tokens_seen": 45430675, - "step": 2152 - }, - { - "epoch": 0.25888294354596286, - "flos": 44562562805160.0, - "grad_norm": 2.5886351007578274, - 
"learning_rate": 3.475329211098029e-06, - "loss": 0.6894, - "num_input_tokens_seen": 45453550, - "step": 2153 - }, - { - "epoch": 0.2590031864366019, - "flos": 20424275583000.0, - "grad_norm": 2.2607387073069867, - "learning_rate": 3.4748031629973453e-06, - "loss": 0.8132, - "num_input_tokens_seen": 45474000, - "step": 2154 - }, - { - "epoch": 0.25912342932724103, - "flos": 46017231056280.0, - "grad_norm": 0.9514728673052204, - "learning_rate": 3.4742768911701944e-06, - "loss": 0.566, - "num_input_tokens_seen": 45536415, - "step": 2155 - }, - { - "epoch": 0.25924367221788014, - "flos": 8973379349400.0, - "grad_norm": 4.864155175306192, - "learning_rate": 3.4737503956964113e-06, - "loss": 0.677, - "num_input_tokens_seen": 45548440, - "step": 2156 - }, - { - "epoch": 0.2593639151085192, - "flos": 10599861932520.0, - "grad_norm": 3.0145584354984987, - "learning_rate": 3.473223676655865e-06, - "loss": 0.6486, - "num_input_tokens_seen": 45566160, - "step": 2157 - }, - { - "epoch": 0.2594841579991583, - "flos": 11085001635000.0, - "grad_norm": 3.067535153710628, - "learning_rate": 3.472696734128459e-06, - "loss": 0.7785, - "num_input_tokens_seen": 45583745, - "step": 2158 - }, - { - "epoch": 0.2596044008897974, - "flos": 17296254313440.0, - "grad_norm": 2.3143078722221033, - "learning_rate": 3.4721695681941286e-06, - "loss": 0.7369, - "num_input_tokens_seen": 45602505, - "step": 2159 - }, - { - "epoch": 0.25972464378043647, - "flos": 10005608313360.0, - "grad_norm": 4.5155225695968, - "learning_rate": 3.471642178932845e-06, - "loss": 0.8166, - "num_input_tokens_seen": 45620870, - "step": 2160 - }, - { - "epoch": 0.2598448866710756, - "flos": 14298966221880.0, - "grad_norm": 3.353007114183278, - "learning_rate": 3.471114566424613e-06, - "loss": 0.8904, - "num_input_tokens_seen": 45639050, - "step": 2161 - }, - { - "epoch": 0.25996512956171464, - "flos": 15838049129520.0, - "grad_norm": 3.16403281115508, - "learning_rate": 3.4705867307494715e-06, - "loss": 0.7457, - "num_input_tokens_seen": 45657840, - "step": 2162 - }, - { - "epoch": 0.26008537245235375, - "flos": 13299785764800.0, - "grad_norm": 3.689807622386588, - "learning_rate": 3.470058671987492e-06, - "loss": 0.8096, - "num_input_tokens_seen": 45675825, - "step": 2163 - }, - { - "epoch": 0.26020561534299286, - "flos": 18052484999760.0, - "grad_norm": 2.3090848706930363, - "learning_rate": 3.4695303902187805e-06, - "loss": 0.8199, - "num_input_tokens_seen": 45695100, - "step": 2164 - }, - { - "epoch": 0.2603258582336319, - "flos": 18887092272960.0, - "grad_norm": 2.7148866060016696, - "learning_rate": 3.469001885523478e-06, - "loss": 0.7701, - "num_input_tokens_seen": 45715540, - "step": 2165 - }, - { - "epoch": 0.260446101124271, - "flos": 21101971382640.0, - "grad_norm": 1.8355282591179989, - "learning_rate": 3.4684731579817568e-06, - "loss": 0.7984, - "num_input_tokens_seen": 45736250, - "step": 2166 - }, - { - "epoch": 0.26056634401491013, - "flos": 18808335766560.0, - "grad_norm": 3.2933039556832653, - "learning_rate": 3.4679442076738247e-06, - "loss": 0.7571, - "num_input_tokens_seen": 45755685, - "step": 2167 - }, - { - "epoch": 0.2606865869055492, - "flos": 19834011118800.0, - "grad_norm": 3.527581934528179, - "learning_rate": 3.4674150346799245e-06, - "loss": 0.8231, - "num_input_tokens_seen": 45775105, - "step": 2168 - }, - { - "epoch": 0.2608068297961883, - "flos": 12921702081600.0, - "grad_norm": 4.08469933737574, - "learning_rate": 3.4668856390803295e-06, - "loss": 0.7713, - "num_input_tokens_seen": 45792705, - "step": 2169 - 
}, - { - "epoch": 0.2609270726868274, - "flos": 13542608895720.0, - "grad_norm": 2.945263444810732, - "learning_rate": 3.4663560209553495e-06, - "loss": 0.8866, - "num_input_tokens_seen": 45810490, - "step": 2170 - }, - { - "epoch": 0.26104731557746647, - "flos": 15973088062080.0, - "grad_norm": 2.5836052187517127, - "learning_rate": 3.4658261803853267e-06, - "loss": 0.7658, - "num_input_tokens_seen": 45828135, - "step": 2171 - }, - { - "epoch": 0.2611675584681056, - "flos": 15864987264120.0, - "grad_norm": 2.4905191894416903, - "learning_rate": 3.4652961174506383e-06, - "loss": 0.7978, - "num_input_tokens_seen": 45847725, - "step": 2172 - }, - { - "epoch": 0.2612878013587447, - "flos": 53001774648120.0, - "grad_norm": 1.0650856308696028, - "learning_rate": 3.464765832231694e-06, - "loss": 0.6006, - "num_input_tokens_seen": 45901610, - "step": 2173 - }, - { - "epoch": 0.26140804424938374, - "flos": 14782522926360.0, - "grad_norm": 2.603947551410048, - "learning_rate": 3.4642353248089373e-06, - "loss": 0.6916, - "num_input_tokens_seen": 45920090, - "step": 2174 - }, - { - "epoch": 0.26152828714002285, - "flos": 18727141443240.0, - "grad_norm": 2.4829266242847434, - "learning_rate": 3.463704595262846e-06, - "loss": 0.7887, - "num_input_tokens_seen": 45940690, - "step": 2175 - }, - { - "epoch": 0.26164853003066196, - "flos": 18645155620920.0, - "grad_norm": 2.5879201059641534, - "learning_rate": 3.463173643673931e-06, - "loss": 0.6852, - "num_input_tokens_seen": 45962935, - "step": 2176 - }, - { - "epoch": 0.261768772921301, - "flos": 39741179422440.0, - "grad_norm": 1.0570640632177115, - "learning_rate": 3.4626424701227387e-06, - "loss": 0.6559, - "num_input_tokens_seen": 46017715, - "step": 2177 - }, - { - "epoch": 0.26188901581194013, - "flos": 52131586071240.0, - "grad_norm": 0.9511168227037861, - "learning_rate": 3.4621110746898452e-06, - "loss": 0.6124, - "num_input_tokens_seen": 46085295, - "step": 2178 - }, - { - "epoch": 0.2620092587025792, - "flos": 15406120837080.0, - "grad_norm": 3.113009221961224, - "learning_rate": 3.4615794574558654e-06, - "loss": 0.7292, - "num_input_tokens_seen": 46104025, - "step": 2179 - }, - { - "epoch": 0.2621295015932183, - "flos": 13407949882680.0, - "grad_norm": 3.594154534169437, - "learning_rate": 3.4610476185014436e-06, - "loss": 0.8043, - "num_input_tokens_seen": 46121005, - "step": 2180 - }, - { - "epoch": 0.2622497444838574, - "flos": 17323192448040.0, - "grad_norm": 3.5408759786566977, - "learning_rate": 3.4605155579072597e-06, - "loss": 0.7835, - "num_input_tokens_seen": 46140580, - "step": 2181 - }, - { - "epoch": 0.26236998737449646, - "flos": 16185775402440.0, - "grad_norm": 2.2962944698440415, - "learning_rate": 3.459983275754027e-06, - "loss": 0.7029, - "num_input_tokens_seen": 46159195, - "step": 2182 - }, - { - "epoch": 0.26249023026513557, - "flos": 13056835994040.0, - "grad_norm": 4.346572727249206, - "learning_rate": 3.4594507721224918e-06, - "loss": 0.7728, - "num_input_tokens_seen": 46177565, - "step": 2183 - }, - { - "epoch": 0.2626104731557747, - "flos": 13380790128360.0, - "grad_norm": 3.104153797370467, - "learning_rate": 3.4589180470934353e-06, - "loss": 0.8002, - "num_input_tokens_seen": 46197150, - "step": 2184 - }, - { - "epoch": 0.26273071604641374, - "flos": 14109544460760.0, - "grad_norm": 3.26619514708331, - "learning_rate": 3.4583851007476713e-06, - "loss": 0.7457, - "num_input_tokens_seen": 46215340, - "step": 2185 - }, - { - "epoch": 0.26285095893705285, - "flos": 13375977814440.0, - "grad_norm": 3.012872101528855, - 
"learning_rate": 3.4578519331660464e-06, - "loss": 0.6618, - "num_input_tokens_seen": 46232055, - "step": 2186 - }, - { - "epoch": 0.26297120182769196, - "flos": 14757990948720.0, - "grad_norm": 2.833239909109068, - "learning_rate": 3.4573185444294426e-06, - "loss": 0.8072, - "num_input_tokens_seen": 46250140, - "step": 2187 - }, - { - "epoch": 0.263091444718331, - "flos": 16404193195560.0, - "grad_norm": 2.6264649799132007, - "learning_rate": 3.456784934618774e-06, - "loss": 0.7703, - "num_input_tokens_seen": 46271025, - "step": 2188 - }, - { - "epoch": 0.2632116876089701, - "flos": 13892646345720.0, - "grad_norm": 5.699848226567809, - "learning_rate": 3.4562511038149897e-06, - "loss": 0.7834, - "num_input_tokens_seen": 46286240, - "step": 2189 - }, - { - "epoch": 0.26333193049960923, - "flos": 49632418265760.0, - "grad_norm": 0.8803202804313686, - "learning_rate": 3.4557170520990705e-06, - "loss": 0.595, - "num_input_tokens_seen": 46346635, - "step": 2190 - }, - { - "epoch": 0.2634521733902483, - "flos": 18351337277160.0, - "grad_norm": 2.0530820891426385, - "learning_rate": 3.4551827795520324e-06, - "loss": 0.8495, - "num_input_tokens_seen": 46369240, - "step": 2191 - }, - { - "epoch": 0.2635724162808874, - "flos": 15054848648640.0, - "grad_norm": 2.636576602071115, - "learning_rate": 3.4546482862549226e-06, - "loss": 0.8343, - "num_input_tokens_seen": 46389275, - "step": 2192 - }, - { - "epoch": 0.2636926591715265, - "flos": 14055446571840.0, - "grad_norm": 8.313508166218709, - "learning_rate": 3.4541135722888253e-06, - "loss": 0.7765, - "num_input_tokens_seen": 46405585, - "step": 2193 - }, - { - "epoch": 0.26381290206216557, - "flos": 21130809114840.0, - "grad_norm": 2.919222846928493, - "learning_rate": 3.453578637734854e-06, - "loss": 0.7869, - "num_input_tokens_seen": 46426495, - "step": 2194 - }, - { - "epoch": 0.2639331449528047, - "flos": 18321328126440.0, - "grad_norm": 3.091727934964578, - "learning_rate": 3.4530434826741605e-06, - "loss": 0.7695, - "num_input_tokens_seen": 46447155, - "step": 2195 - }, - { - "epoch": 0.26405338784344373, - "flos": 34252692852600.0, - "grad_norm": 8.068884871912095, - "learning_rate": 3.452508107187926e-06, - "loss": 0.6743, - "num_input_tokens_seen": 46470250, - "step": 2196 - }, - { - "epoch": 0.26417363073408284, - "flos": 15487505120160.0, - "grad_norm": 2.5033781116542637, - "learning_rate": 3.451972511357366e-06, - "loss": 0.7429, - "num_input_tokens_seen": 46489515, - "step": 2197 - }, - { - "epoch": 0.26429387362472195, - "flos": 16594026536160.0, - "grad_norm": 2.5929545825963145, - "learning_rate": 3.45143669526373e-06, - "loss": 0.841, - "num_input_tokens_seen": 46508995, - "step": 2198 - }, - { - "epoch": 0.264414116515361, - "flos": 49535488942320.0, - "grad_norm": 0.740706240548159, - "learning_rate": 3.450900658988302e-06, - "loss": 0.6251, - "num_input_tokens_seen": 46570265, - "step": 2199 - }, - { - "epoch": 0.2645343594060001, - "flos": 18806531148840.0, - "grad_norm": 9.837354538265023, - "learning_rate": 3.450364402612397e-06, - "loss": 0.7557, - "num_input_tokens_seen": 46587140, - "step": 2200 - }, - { - "epoch": 0.26465460229663923, - "flos": 16458576024120.0, - "grad_norm": 2.558870117170915, - "learning_rate": 3.449827926217366e-06, - "loss": 0.8096, - "num_input_tokens_seen": 46606295, - "step": 2201 - }, - { - "epoch": 0.2647748451872783, - "flos": 21562294167840.0, - "grad_norm": 3.243192879934451, - "learning_rate": 3.449291229884591e-06, - "loss": 0.7846, - "num_input_tokens_seen": 46627255, - "step": 2202 - }, 
- { - "epoch": 0.2648950880779174, - "flos": 19645380856680.0, - "grad_norm": 2.8948228202718385, - "learning_rate": 3.4487543136954887e-06, - "loss": 0.8421, - "num_input_tokens_seen": 46646595, - "step": 2203 - }, - { - "epoch": 0.2650153309685565, - "flos": 21157493969760.0, - "grad_norm": 3.2536285391789077, - "learning_rate": 3.448217177731509e-06, - "loss": 0.8922, - "num_input_tokens_seen": 46666800, - "step": 2204 - }, - { - "epoch": 0.26513557385919556, - "flos": 14838963652320.0, - "grad_norm": 3.06907943542239, - "learning_rate": 3.4476798220741348e-06, - "loss": 0.7574, - "num_input_tokens_seen": 46685400, - "step": 2205 - }, - { - "epoch": 0.26525581674983467, - "flos": 12894827266920.0, - "grad_norm": 2.426157358593215, - "learning_rate": 3.4471422468048826e-06, - "loss": 0.7651, - "num_input_tokens_seen": 46703845, - "step": 2206 - }, - { - "epoch": 0.2653760596404738, - "flos": 19671907411800.0, - "grad_norm": 3.163505750110381, - "learning_rate": 3.4466044520053022e-06, - "loss": 0.7171, - "num_input_tokens_seen": 46722570, - "step": 2207 - }, - { - "epoch": 0.26549630253111284, - "flos": 16673226282000.0, - "grad_norm": 2.8841407424509686, - "learning_rate": 3.446066437756977e-06, - "loss": 0.5855, - "num_input_tokens_seen": 46741495, - "step": 2208 - }, - { - "epoch": 0.26561654542175195, - "flos": 17241713185080.0, - "grad_norm": 2.987545546180498, - "learning_rate": 3.4455282041415224e-06, - "loss": 0.7331, - "num_input_tokens_seen": 46760425, - "step": 2209 - }, - { - "epoch": 0.265736788312391, - "flos": 19725878660880.0, - "grad_norm": 2.945213741194897, - "learning_rate": 3.4449897512405894e-06, - "loss": 0.8517, - "num_input_tokens_seen": 46779295, - "step": 2210 - }, - { - "epoch": 0.2658570312030301, - "flos": 17187805255920.0, - "grad_norm": 3.0287902920034875, - "learning_rate": 3.444451079135859e-06, - "loss": 0.727, - "num_input_tokens_seen": 46798525, - "step": 2211 - }, - { - "epoch": 0.2659772740936692, - "flos": 15994992263040.0, - "grad_norm": 2.7978916984195807, - "learning_rate": 3.4439121879090493e-06, - "loss": 0.7235, - "num_input_tokens_seen": 46816025, - "step": 2212 - }, - { - "epoch": 0.2660975169843083, - "flos": 14461513168320.0, - "grad_norm": 2.5802142376627466, - "learning_rate": 3.4433730776419082e-06, - "loss": 0.8156, - "num_input_tokens_seen": 46834670, - "step": 2213 - }, - { - "epoch": 0.2662177598749474, - "flos": 21288575407320.0, - "grad_norm": 2.7994643857107366, - "learning_rate": 3.4428337484162183e-06, - "loss": 0.7801, - "num_input_tokens_seen": 46855200, - "step": 2214 - }, - { - "epoch": 0.2663380027655865, - "flos": 15918926853240.0, - "grad_norm": 2.5654716938414768, - "learning_rate": 3.442294200313797e-06, - "loss": 0.8253, - "num_input_tokens_seen": 46872950, - "step": 2215 - }, - { - "epoch": 0.26645824565622556, - "flos": 49389975054360.0, - "grad_norm": 0.9045000939630109, - "learning_rate": 3.4417544334164916e-06, - "loss": 0.5753, - "num_input_tokens_seen": 46936815, - "step": 2216 - }, - { - "epoch": 0.26657848854686467, - "flos": 18510243328200.0, - "grad_norm": 2.1626548182464354, - "learning_rate": 3.4412144478061854e-06, - "loss": 0.7537, - "num_input_tokens_seen": 46958945, - "step": 2217 - }, - { - "epoch": 0.2666987314375038, - "flos": 17350447182240.0, - "grad_norm": 2.2770874507363086, - "learning_rate": 3.4406742435647925e-06, - "loss": 0.7432, - "num_input_tokens_seen": 46978730, - "step": 2218 - }, - { - "epoch": 0.26681897432814283, - "flos": 19830655163040.0, - "grad_norm": 3.8927182614012135, - 
"learning_rate": 3.440133820774263e-06, - "loss": 0.7667, - "num_input_tokens_seen": 46998260, - "step": 2219 - }, - { - "epoch": 0.26693921721878194, - "flos": 21264613308960.0, - "grad_norm": 3.8011749157801784, - "learning_rate": 3.439593179516578e-06, - "loss": 0.7905, - "num_input_tokens_seen": 47017890, - "step": 2220 - }, - { - "epoch": 0.26705946010942105, - "flos": 15730518210840.0, - "grad_norm": 2.381688587718728, - "learning_rate": 3.4390523198737524e-06, - "loss": 0.7895, - "num_input_tokens_seen": 47036770, - "step": 2221 - }, - { - "epoch": 0.2671797030000601, - "flos": 15703453436400.0, - "grad_norm": 5.005794791148442, - "learning_rate": 3.4385112419278333e-06, - "loss": 0.7209, - "num_input_tokens_seen": 47057715, - "step": 2222 - }, - { - "epoch": 0.2672999458906992, - "flos": 47322414450360.0, - "grad_norm": 0.8243227346191154, - "learning_rate": 3.4379699457609033e-06, - "loss": 0.687, - "num_input_tokens_seen": 47115260, - "step": 2223 - }, - { - "epoch": 0.26742018878133833, - "flos": 12326878583160.0, - "grad_norm": 2.8520032568186693, - "learning_rate": 3.4374284314550755e-06, - "loss": 0.8752, - "num_input_tokens_seen": 47134020, - "step": 2224 - }, - { - "epoch": 0.2675404316719774, - "flos": 15108503298120.0, - "grad_norm": 3.578057434243696, - "learning_rate": 3.436886699092498e-06, - "loss": 0.7855, - "num_input_tokens_seen": 47152255, - "step": 2225 - }, - { - "epoch": 0.2676606745626165, - "flos": 12752031644160.0, - "grad_norm": 3.9754267721377894, - "learning_rate": 3.4363447487553502e-06, - "loss": 0.6915, - "num_input_tokens_seen": 47165290, - "step": 2226 - }, - { - "epoch": 0.26778091745325555, - "flos": 20425605301320.0, - "grad_norm": 2.4147394212680036, - "learning_rate": 3.4358025805258455e-06, - "loss": 0.7682, - "num_input_tokens_seen": 47184715, - "step": 2227 - }, - { - "epoch": 0.26790116034389466, - "flos": 15321475578120.0, - "grad_norm": 2.3039051861791267, - "learning_rate": 3.435260194486232e-06, - "loss": 0.8149, - "num_input_tokens_seen": 47202405, - "step": 2228 - }, - { - "epoch": 0.2680214032345338, - "flos": 13164430232640.0, - "grad_norm": 3.6421701975001928, - "learning_rate": 3.4347175907187875e-06, - "loss": 0.7962, - "num_input_tokens_seen": 47219115, - "step": 2229 - }, - { - "epoch": 0.26814164612517283, - "flos": 16405079674440.0, - "grad_norm": 2.428069333257037, - "learning_rate": 3.4341747693058254e-06, - "loss": 0.8602, - "num_input_tokens_seen": 47237310, - "step": 2230 - }, - { - "epoch": 0.26826188901581194, - "flos": 26179859490360.0, - "grad_norm": 4.193194261275402, - "learning_rate": 3.4336317303296916e-06, - "loss": 0.7547, - "num_input_tokens_seen": 47258005, - "step": 2231 - }, - { - "epoch": 0.26838213190645105, - "flos": 12867825812400.0, - "grad_norm": 3.0487073816978154, - "learning_rate": 3.4330884738727635e-06, - "loss": 0.7339, - "num_input_tokens_seen": 47275900, - "step": 2232 - }, - { - "epoch": 0.2685023747970901, - "flos": 16594089856080.0, - "grad_norm": 2.7837263933815817, - "learning_rate": 3.4325450000174535e-06, - "loss": 0.7013, - "num_input_tokens_seen": 47292260, - "step": 2233 - }, - { - "epoch": 0.2686226176877292, - "flos": 14703703100040.0, - "grad_norm": 4.2306871859724815, - "learning_rate": 3.4320013088462067e-06, - "loss": 0.7243, - "num_input_tokens_seen": 47309340, - "step": 2234 - }, - { - "epoch": 0.2687428605783683, - "flos": 15999962876760.0, - "grad_norm": 2.7018833726607165, - "learning_rate": 3.431457400441499e-06, - "loss": 0.7952, - "num_input_tokens_seen": 47329455, - 
"step": 2235 - }, - { - "epoch": 0.2688631034690074, - "flos": 51578703037080.0, - "grad_norm": 1.0100459700216653, - "learning_rate": 3.4309132748858424e-06, - "loss": 0.6792, - "num_input_tokens_seen": 47390165, - "step": 2236 - }, - { - "epoch": 0.2689833463596465, - "flos": 16728558909360.0, - "grad_norm": 2.066327098924208, - "learning_rate": 3.430368932261779e-06, - "loss": 0.8246, - "num_input_tokens_seen": 47410240, - "step": 2237 - }, - { - "epoch": 0.2691035892502856, - "flos": 12542288680080.0, - "grad_norm": 3.308124171130251, - "learning_rate": 3.429824372651886e-06, - "loss": 0.7342, - "num_input_tokens_seen": 47428110, - "step": 2238 - }, - { - "epoch": 0.26922383214092466, - "flos": 12947278837920.0, - "grad_norm": 2.890247217184099, - "learning_rate": 3.4292795961387732e-06, - "loss": 0.8168, - "num_input_tokens_seen": 47445730, - "step": 2239 - }, - { - "epoch": 0.26934407503156377, - "flos": 11781593939400.0, - "grad_norm": 4.742206403766894, - "learning_rate": 3.4287346028050818e-06, - "loss": 0.861, - "num_input_tokens_seen": 47461520, - "step": 2240 - }, - { - "epoch": 0.2694643179222028, - "flos": 17377132037160.0, - "grad_norm": 1.8986178967828173, - "learning_rate": 3.4281893927334866e-06, - "loss": 0.7796, - "num_input_tokens_seen": 47481150, - "step": 2241 - }, - { - "epoch": 0.26958456081284193, - "flos": 18106297949040.0, - "grad_norm": 3.7364133754322975, - "learning_rate": 3.4276439660066963e-06, - "loss": 0.7346, - "num_input_tokens_seen": 47500570, - "step": 2242 - }, - { - "epoch": 0.26970480370348104, - "flos": 13217515002840.0, - "grad_norm": 5.521132013650661, - "learning_rate": 3.427098322707452e-06, - "loss": 0.8269, - "num_input_tokens_seen": 47516255, - "step": 2243 - }, - { - "epoch": 0.2698250465941201, - "flos": 7818110577720.0, - "grad_norm": 2.711162722683463, - "learning_rate": 3.426552462918526e-06, - "loss": 0.8675, - "num_input_tokens_seen": 47533910, - "step": 2244 - }, - { - "epoch": 0.2699452894847592, - "flos": 12621900005400.0, - "grad_norm": 3.5974740014319777, - "learning_rate": 3.426006386722726e-06, - "loss": 0.7237, - "num_input_tokens_seen": 47551690, - "step": 2245 - }, - { - "epoch": 0.2700655323753983, - "flos": 13192318166040.0, - "grad_norm": 3.0390293933467456, - "learning_rate": 3.4254600942028914e-06, - "loss": 0.9071, - "num_input_tokens_seen": 47569285, - "step": 2246 - }, - { - "epoch": 0.2701857752660374, - "flos": 13272467710680.0, - "grad_norm": 2.869712279249233, - "learning_rate": 3.424913585441893e-06, - "loss": 0.7918, - "num_input_tokens_seen": 47586840, - "step": 2247 - }, - { - "epoch": 0.2703060181566765, - "flos": 11889758057280.0, - "grad_norm": 2.6510578347383222, - "learning_rate": 3.4243668605226374e-06, - "loss": 0.8578, - "num_input_tokens_seen": 47603585, - "step": 2248 - }, - { - "epoch": 0.2704262610473156, - "flos": 14297889783240.0, - "grad_norm": 2.857035028602745, - "learning_rate": 3.423819919528061e-06, - "loss": 0.8154, - "num_input_tokens_seen": 47621390, - "step": 2249 - }, - { - "epoch": 0.27054650393795465, - "flos": 15162696166920.0, - "grad_norm": 2.8078512979840253, - "learning_rate": 3.4232727625411355e-06, - "loss": 0.7694, - "num_input_tokens_seen": 47640215, - "step": 2250 - }, - { - "epoch": 0.27066674682859376, - "flos": 13621175442360.0, - "grad_norm": 2.703646482760173, - "learning_rate": 3.4227253896448626e-06, - "loss": 0.8377, - "num_input_tokens_seen": 47657795, - "step": 2251 - }, - { - "epoch": 0.2707869897192329, - "flos": 16836723027240.0, - "grad_norm": 
3.0580014255737415, - "learning_rate": 3.42217780092228e-06, - "loss": 0.7881, - "num_input_tokens_seen": 47675855, - "step": 2252 - }, - { - "epoch": 0.27090723260987193, - "flos": 42981733884360.0, - "grad_norm": 0.8473053306889133, - "learning_rate": 3.421629996456456e-06, - "loss": 0.6295, - "num_input_tokens_seen": 47734195, - "step": 2253 - }, - { - "epoch": 0.27102747550051104, - "flos": 8682568701840.0, - "grad_norm": 2.5222669786057037, - "learning_rate": 3.421081976330491e-06, - "loss": 0.8013, - "num_input_tokens_seen": 47752430, - "step": 2254 - }, - { - "epoch": 0.27114771839115015, - "flos": 14540617934280.0, - "grad_norm": 3.9353408948888244, - "learning_rate": 3.4205337406275207e-06, - "loss": 0.8529, - "num_input_tokens_seen": 47772270, - "step": 2255 - }, - { - "epoch": 0.2712679612817892, - "flos": 13379935309440.0, - "grad_norm": 3.8278357571690957, - "learning_rate": 3.4199852894307114e-06, - "loss": 0.7249, - "num_input_tokens_seen": 47788740, - "step": 2256 - }, - { - "epoch": 0.2713882041724283, - "flos": 17915134890120.0, - "grad_norm": 2.5626508156659673, - "learning_rate": 3.419436622823262e-06, - "loss": 0.77, - "num_input_tokens_seen": 47809180, - "step": 2257 - }, - { - "epoch": 0.27150844706306737, - "flos": 16863787801680.0, - "grad_norm": 2.4572077507340184, - "learning_rate": 3.4188877408884063e-06, - "loss": 0.7254, - "num_input_tokens_seen": 47829605, - "step": 2258 - }, - { - "epoch": 0.2716286899537065, - "flos": 16511565814440.0, - "grad_norm": 3.219028201626418, - "learning_rate": 3.4183386437094088e-06, - "loss": 0.6333, - "num_input_tokens_seen": 47845990, - "step": 2259 - }, - { - "epoch": 0.2717489328443456, - "flos": 9519138892560.0, - "grad_norm": 2.790556520858511, - "learning_rate": 3.417789331369565e-06, - "loss": 0.793, - "num_input_tokens_seen": 47861500, - "step": 2260 - }, - { - "epoch": 0.27186917573498465, - "flos": 21481384784160.0, - "grad_norm": 2.238863298455523, - "learning_rate": 3.4172398039522088e-06, - "loss": 0.8832, - "num_input_tokens_seen": 47882505, - "step": 2261 - }, - { - "epoch": 0.27198941862562376, - "flos": 19078888531080.0, - "grad_norm": 2.0097295611438737, - "learning_rate": 3.4166900615407e-06, - "loss": 0.7807, - "num_input_tokens_seen": 47900140, - "step": 2262 - }, - { - "epoch": 0.27210966151626287, - "flos": 24073271138400.0, - "grad_norm": 2.6381975562251982, - "learning_rate": 3.416140104218436e-06, - "loss": 0.7269, - "num_input_tokens_seen": 47919225, - "step": 2263 - }, - { - "epoch": 0.2722299044069019, - "flos": 48271106253960.0, - "grad_norm": 0.8516157291119941, - "learning_rate": 3.4155899320688437e-06, - "loss": 0.7365, - "num_input_tokens_seen": 47985020, - "step": 2264 - }, - { - "epoch": 0.27235014729754103, - "flos": 11160497165520.0, - "grad_norm": 2.357864907139212, - "learning_rate": 3.415039545175384e-06, - "loss": 0.7273, - "num_input_tokens_seen": 48000465, - "step": 2265 - }, - { - "epoch": 0.27247039018818014, - "flos": 16000247816400.0, - "grad_norm": 7.761529283750063, - "learning_rate": 3.414488943621551e-06, - "loss": 0.6385, - "num_input_tokens_seen": 48018850, - "step": 2266 - }, - { - "epoch": 0.2725906330788192, - "flos": 13648778436120.0, - "grad_norm": 2.5240471165547076, - "learning_rate": 3.41393812749087e-06, - "loss": 0.7241, - "num_input_tokens_seen": 48036615, - "step": 2267 - }, - { - "epoch": 0.2727108759694583, - "flos": 13048826024160.0, - "grad_norm": 5.6758440910793375, - "learning_rate": 3.4133870968668984e-06, - "loss": 0.713, - "num_input_tokens_seen": 
48051135, - "step": 2268 - }, - { - "epoch": 0.2728311188600974, - "flos": 17916337968600.0, - "grad_norm": 2.59771673375896, - "learning_rate": 3.412835851833229e-06, - "loss": 0.7551, - "num_input_tokens_seen": 48073050, - "step": 2269 - }, - { - "epoch": 0.2729513617507365, - "flos": 22750294846800.0, - "grad_norm": 2.355311855069589, - "learning_rate": 3.4122843924734834e-06, - "loss": 0.7633, - "num_input_tokens_seen": 48095070, - "step": 2270 - }, - { - "epoch": 0.2730716046413756, - "flos": 13943863178280.0, - "grad_norm": 3.1425336665107997, - "learning_rate": 3.411732718871319e-06, - "loss": 0.8702, - "num_input_tokens_seen": 48110630, - "step": 2271 - }, - { - "epoch": 0.27319184753201464, - "flos": 19754114853840.0, - "grad_norm": 2.0500567219027865, - "learning_rate": 3.4111808311104227e-06, - "loss": 0.7652, - "num_input_tokens_seen": 48132665, - "step": 2272 - }, - { - "epoch": 0.27331209042265375, - "flos": 23317610331360.0, - "grad_norm": 3.17574461368013, - "learning_rate": 3.410628729274517e-06, - "loss": 0.6721, - "num_input_tokens_seen": 48153905, - "step": 2273 - }, - { - "epoch": 0.27343233331329286, - "flos": 18861768796320.0, - "grad_norm": 3.8226945000034265, - "learning_rate": 3.4100764134473546e-06, - "loss": 0.8107, - "num_input_tokens_seen": 48172910, - "step": 2274 - }, - { - "epoch": 0.2735525762039319, - "flos": 17862968258760.0, - "grad_norm": 4.283965426065961, - "learning_rate": 3.4095238837127215e-06, - "loss": 0.825, - "num_input_tokens_seen": 48191770, - "step": 2275 - }, - { - "epoch": 0.27367281909457103, - "flos": 10518509309400.0, - "grad_norm": 3.047781159071515, - "learning_rate": 3.4089711401544355e-06, - "loss": 0.771, - "num_input_tokens_seen": 48209085, - "step": 2276 - }, - { - "epoch": 0.27379306198521014, - "flos": 17187678616080.0, - "grad_norm": 3.622402839279073, - "learning_rate": 3.4084181828563486e-06, - "loss": 0.6526, - "num_input_tokens_seen": 48225525, - "step": 2277 - }, - { - "epoch": 0.2739133048758492, - "flos": 12733515058920.0, - "grad_norm": 2.4909002632992157, - "learning_rate": 3.4078650119023428e-06, - "loss": 0.6872, - "num_input_tokens_seen": 48243560, - "step": 2278 - }, - { - "epoch": 0.2740335477664883, - "flos": 14075387855280.0, - "grad_norm": 3.3501006097061934, - "learning_rate": 3.4073116273763337e-06, - "loss": 0.7307, - "num_input_tokens_seen": 48257725, - "step": 2279 - }, - { - "epoch": 0.2741537906571274, - "flos": 19132479860640.0, - "grad_norm": 2.6614048026139248, - "learning_rate": 3.40675802936227e-06, - "loss": 0.7892, - "num_input_tokens_seen": 48278230, - "step": 2280 - }, - { - "epoch": 0.27427403354776647, - "flos": 25096477013760.0, - "grad_norm": 2.6433927037553633, - "learning_rate": 3.4062042179441318e-06, - "loss": 0.6969, - "num_input_tokens_seen": 48298420, - "step": 2281 - }, - { - "epoch": 0.2743942764384056, - "flos": 13701483286800.0, - "grad_norm": 2.1612957506428465, - "learning_rate": 3.4056501932059314e-06, - "loss": 0.7918, - "num_input_tokens_seen": 48316215, - "step": 2282 - }, - { - "epoch": 0.2745145193290447, - "flos": 47848703118120.0, - "grad_norm": 0.8366910443674358, - "learning_rate": 3.405095955231715e-06, - "loss": 0.6075, - "num_input_tokens_seen": 48367590, - "step": 2283 - }, - { - "epoch": 0.27463476221968375, - "flos": 11754624144840.0, - "grad_norm": 4.990952507647588, - "learning_rate": 3.4045415041055585e-06, - "loss": 0.921, - "num_input_tokens_seen": 48382950, - "step": 2284 - }, - { - "epoch": 0.27475500511032286, - "flos": 7492320165720.0, - "grad_norm": 
3.7107713521605095, - "learning_rate": 3.4039868399115728e-06, - "loss": 0.7562, - "num_input_tokens_seen": 48397310, - "step": 2285 - }, - { - "epoch": 0.27487524800096197, - "flos": 12624496122120.0, - "grad_norm": 3.4370974218275787, - "learning_rate": 3.4034319627339003e-06, - "loss": 0.7838, - "num_input_tokens_seen": 48413895, - "step": 2286 - }, - { - "epoch": 0.274995490891601, - "flos": 19883138394000.0, - "grad_norm": 3.132074663979872, - "learning_rate": 3.402876872656715e-06, - "loss": 0.6811, - "num_input_tokens_seen": 48431935, - "step": 2287 - }, - { - "epoch": 0.27511573378224013, - "flos": 17156782986480.0, - "grad_norm": 2.4869581504586216, - "learning_rate": 3.402321569764223e-06, - "loss": 0.8743, - "num_input_tokens_seen": 48450960, - "step": 2288 - }, - { - "epoch": 0.2752359766728792, - "flos": 12187818835680.0, - "grad_norm": 2.362540136733038, - "learning_rate": 3.4017660541406635e-06, - "loss": 0.8136, - "num_input_tokens_seen": 48466745, - "step": 2289 - }, - { - "epoch": 0.2753562195635183, - "flos": 18534648666000.0, - "grad_norm": 2.578077281163497, - "learning_rate": 3.4012103258703092e-06, - "loss": 0.7266, - "num_input_tokens_seen": 48485220, - "step": 2290 - }, - { - "epoch": 0.2754764624541574, - "flos": 20045020481280.0, - "grad_norm": 2.597818699396492, - "learning_rate": 3.4006543850374616e-06, - "loss": 0.8187, - "num_input_tokens_seen": 48499990, - "step": 2291 - }, - { - "epoch": 0.27559670534479647, - "flos": 12570746492760.0, - "grad_norm": 4.022629147406999, - "learning_rate": 3.400098231726458e-06, - "loss": 0.7405, - "num_input_tokens_seen": 48516810, - "step": 2292 - }, - { - "epoch": 0.2757169482354356, - "flos": 16048741892400.0, - "grad_norm": 2.663694026221091, - "learning_rate": 3.3995418660216657e-06, - "loss": 0.8622, - "num_input_tokens_seen": 48533985, - "step": 2293 - }, - { - "epoch": 0.2758371911260747, - "flos": 15244112109960.0, - "grad_norm": 3.6085094388373467, - "learning_rate": 3.3989852880074848e-06, - "loss": 0.7964, - "num_input_tokens_seen": 48555135, - "step": 2294 - }, - { - "epoch": 0.27595743401671374, - "flos": 51082391860080.0, - "grad_norm": 0.768752968014826, - "learning_rate": 3.398428497768348e-06, - "loss": 0.6352, - "num_input_tokens_seen": 48620025, - "step": 2295 - }, - { - "epoch": 0.27607767690735285, - "flos": 15513683415720.0, - "grad_norm": 2.4104781221838243, - "learning_rate": 3.3978714953887205e-06, - "loss": 0.698, - "num_input_tokens_seen": 48639500, - "step": 2296 - }, - { - "epoch": 0.27619791979799196, - "flos": 18185307735120.0, - "grad_norm": 2.445665132976604, - "learning_rate": 3.397314280953098e-06, - "loss": 0.8478, - "num_input_tokens_seen": 48660045, - "step": 2297 - }, - { - "epoch": 0.276318162688631, - "flos": 18132349604760.0, - "grad_norm": 2.739880409136879, - "learning_rate": 3.3967568545460108e-06, - "loss": 0.7803, - "num_input_tokens_seen": 48679305, - "step": 2298 - }, - { - "epoch": 0.27643840557927013, - "flos": 13245339616320.0, - "grad_norm": 2.1321520778459364, - "learning_rate": 3.3961992162520185e-06, - "loss": 0.7738, - "num_input_tokens_seen": 48697650, - "step": 2299 - }, - { - "epoch": 0.27655864846990924, - "flos": 18184072996680.0, - "grad_norm": 2.5598353976737633, - "learning_rate": 3.3956413661557156e-06, - "loss": 0.7058, - "num_input_tokens_seen": 48717545, - "step": 2300 - }, - { - "epoch": 0.2766788913605483, - "flos": 14811803898000.0, - "grad_norm": 3.192964843269723, - "learning_rate": 3.3950833043417273e-06, - "loss": 0.6467, - "num_input_tokens_seen": 
48735410, - "step": 2301 - }, - { - "epoch": 0.2767991342511874, - "flos": 15702566957520.0, - "grad_norm": 5.216120440046257, - "learning_rate": 3.3945250308947105e-06, - "loss": 0.7077, - "num_input_tokens_seen": 48751435, - "step": 2302 - }, - { - "epoch": 0.2769193771418265, - "flos": 48662292669240.0, - "grad_norm": 1.2337172482055592, - "learning_rate": 3.3939665458993556e-06, - "loss": 0.6958, - "num_input_tokens_seen": 48805575, - "step": 2303 - }, - { - "epoch": 0.27703962003246557, - "flos": 15135853012200.0, - "grad_norm": 3.8992620601400287, - "learning_rate": 3.3934078494403843e-06, - "loss": 0.7391, - "num_input_tokens_seen": 48824870, - "step": 2304 - }, - { - "epoch": 0.2771598629231047, - "flos": 16782815098080.0, - "grad_norm": 2.671668775307372, - "learning_rate": 3.3928489416025495e-06, - "loss": 0.7912, - "num_input_tokens_seen": 48845435, - "step": 2305 - }, - { - "epoch": 0.27728010581374374, - "flos": 13407823242840.0, - "grad_norm": 3.189281937910159, - "learning_rate": 3.392289822470638e-06, - "loss": 0.7632, - "num_input_tokens_seen": 48863135, - "step": 2306 - }, - { - "epoch": 0.27740034870438285, - "flos": 14190960403800.0, - "grad_norm": 3.030505216673638, - "learning_rate": 3.3917304921294674e-06, - "loss": 0.741, - "num_input_tokens_seen": 48881020, - "step": 2307 - }, - { - "epoch": 0.27752059159502196, - "flos": 15809306377200.0, - "grad_norm": 2.1824940759812193, - "learning_rate": 3.3911709506638876e-06, - "loss": 0.7882, - "num_input_tokens_seen": 48900050, - "step": 2308 - }, - { - "epoch": 0.277640834485661, - "flos": 19503883292280.0, - "grad_norm": 4.165152974153014, - "learning_rate": 3.390611198158781e-06, - "loss": 0.7935, - "num_input_tokens_seen": 48917645, - "step": 2309 - }, - { - "epoch": 0.2777610773763001, - "flos": 14238093101520.0, - "grad_norm": 4.263809879881191, - "learning_rate": 3.3900512346990612e-06, - "loss": 0.8782, - "num_input_tokens_seen": 48933355, - "step": 2310 - }, - { - "epoch": 0.27788132026693924, - "flos": 28150839030480.0, - "grad_norm": 2.5817937467593466, - "learning_rate": 3.389491060369674e-06, - "loss": 0.6338, - "num_input_tokens_seen": 48958750, - "step": 2311 - }, - { - "epoch": 0.2780015631575783, - "flos": 16377729960360.0, - "grad_norm": 3.419766407847285, - "learning_rate": 3.388930675255598e-06, - "loss": 0.8696, - "num_input_tokens_seen": 48978320, - "step": 2312 - }, - { - "epoch": 0.2781218060482174, - "flos": 8844355809240.0, - "grad_norm": 14.44864689916661, - "learning_rate": 3.388370079441843e-06, - "loss": 0.7587, - "num_input_tokens_seen": 48993555, - "step": 2313 - }, - { - "epoch": 0.2782420489388565, - "flos": 13213145928360.0, - "grad_norm": 2.454747726313502, - "learning_rate": 3.3878092730134505e-06, - "loss": 0.9175, - "num_input_tokens_seen": 49011260, - "step": 2314 - }, - { - "epoch": 0.27836229182949557, - "flos": 13515259181640.0, - "grad_norm": 2.9323659631092367, - "learning_rate": 3.3872482560554947e-06, - "loss": 0.7876, - "num_input_tokens_seen": 49029755, - "step": 2315 - }, - { - "epoch": 0.2784825347201347, - "flos": 49461671389680.0, - "grad_norm": 0.8163970143649071, - "learning_rate": 3.386687028653082e-06, - "loss": 0.5871, - "num_input_tokens_seen": 49092320, - "step": 2316 - }, - { - "epoch": 0.2786027776107738, - "flos": 16560724749600.0, - "grad_norm": 2.7030314170205543, - "learning_rate": 3.386125590891349e-06, - "loss": 0.829, - "num_input_tokens_seen": 49108915, - "step": 2317 - }, - { - "epoch": 0.27872302050141284, - "flos": 11490561672120.0, - "grad_norm": 
2.5321591475310736, - "learning_rate": 3.3855639428554657e-06, - "loss": 0.8092, - "num_input_tokens_seen": 49126165, - "step": 2318 - }, - { - "epoch": 0.27884326339205195, - "flos": 16188561478920.0, - "grad_norm": 2.4979636481933265, - "learning_rate": 3.385002084630635e-06, - "loss": 0.7906, - "num_input_tokens_seen": 49144855, - "step": 2319 - }, - { - "epoch": 0.278963506282691, - "flos": 15028037153880.0, - "grad_norm": 2.3575526393219883, - "learning_rate": 3.384440016302088e-06, - "loss": 0.8236, - "num_input_tokens_seen": 49163250, - "step": 2320 - }, - { - "epoch": 0.2790837491733301, - "flos": 16052129508120.0, - "grad_norm": 3.3301527266662054, - "learning_rate": 3.3838777379550923e-06, - "loss": 0.5983, - "num_input_tokens_seen": 49182415, - "step": 2321 - }, - { - "epoch": 0.27920399206396923, - "flos": 19267170533640.0, - "grad_norm": 2.792050425404765, - "learning_rate": 3.383315249674944e-06, - "loss": 0.7857, - "num_input_tokens_seen": 49200700, - "step": 2322 - }, - { - "epoch": 0.2793242349546083, - "flos": 18610175856480.0, - "grad_norm": 2.7058171443691843, - "learning_rate": 3.3827525515469715e-06, - "loss": 0.8488, - "num_input_tokens_seen": 49215325, - "step": 2323 - }, - { - "epoch": 0.2794444778452474, - "flos": 15243827170320.0, - "grad_norm": 2.5038013168461917, - "learning_rate": 3.3821896436565367e-06, - "loss": 0.6875, - "num_input_tokens_seen": 49234705, - "step": 2324 - }, - { - "epoch": 0.2795647207358865, - "flos": 15780500304960.0, - "grad_norm": 2.7612041765575164, - "learning_rate": 3.381626526089032e-06, - "loss": 0.6869, - "num_input_tokens_seen": 49253990, - "step": 2325 - }, - { - "epoch": 0.27968496362652556, - "flos": 15703865015880.0, - "grad_norm": 2.3923609794637577, - "learning_rate": 3.3810631989298815e-06, - "loss": 0.7715, - "num_input_tokens_seen": 49273320, - "step": 2326 - }, - { - "epoch": 0.2798052065171647, - "flos": 17026208108280.0, - "grad_norm": 3.407836824423877, - "learning_rate": 3.3804996622645423e-06, - "loss": 0.8309, - "num_input_tokens_seen": 49291040, - "step": 2327 - }, - { - "epoch": 0.2799254494078038, - "flos": 15753118930920.0, - "grad_norm": 2.692680245602848, - "learning_rate": 3.3799359161785015e-06, - "loss": 0.8694, - "num_input_tokens_seen": 49310410, - "step": 2328 - }, - { - "epoch": 0.28004569229844284, - "flos": 19346116999800.0, - "grad_norm": 3.552200822574496, - "learning_rate": 3.3793719607572798e-06, - "loss": 0.8381, - "num_input_tokens_seen": 49331095, - "step": 2329 - }, - { - "epoch": 0.28016593518908195, - "flos": 24640744922760.0, - "grad_norm": 3.579876466889392, - "learning_rate": 3.378807796086428e-06, - "loss": 0.749, - "num_input_tokens_seen": 49353675, - "step": 2330 - }, - { - "epoch": 0.28028617807972106, - "flos": 11166385918080.0, - "grad_norm": 2.5058039051793655, - "learning_rate": 3.37824342225153e-06, - "loss": 0.7477, - "num_input_tokens_seen": 49369815, - "step": 2331 - }, - { - "epoch": 0.2804064209703601, - "flos": 18699855049080.0, - "grad_norm": 2.4967705849215, - "learning_rate": 3.3776788393382006e-06, - "loss": 0.7523, - "num_input_tokens_seen": 49389015, - "step": 2332 - }, - { - "epoch": 0.2805266638609992, - "flos": 21535482673080.0, - "grad_norm": 3.958625314216658, - "learning_rate": 3.3771140474320872e-06, - "loss": 0.7502, - "num_input_tokens_seen": 49408685, - "step": 2333 - }, - { - "epoch": 0.28064690675163834, - "flos": 15696804844800.0, - "grad_norm": 2.568072795471931, - "learning_rate": 3.3765490466188664e-06, - "loss": 0.7651, - "num_input_tokens_seen": 
49425805, - "step": 2334 - }, - { - "epoch": 0.2807671496422774, - "flos": 15351263109120.0, - "grad_norm": 3.4743784012342953, - "learning_rate": 3.3759838369842508e-06, - "loss": 0.7072, - "num_input_tokens_seen": 49443600, - "step": 2335 - }, - { - "epoch": 0.2808873925329165, - "flos": 15729410112240.0, - "grad_norm": 3.731597250366179, - "learning_rate": 3.375418418613981e-06, - "loss": 0.7104, - "num_input_tokens_seen": 49462345, - "step": 2336 - }, - { - "epoch": 0.28100763542355556, - "flos": 11706668288160.0, - "grad_norm": 4.235995387318867, - "learning_rate": 3.374852791593831e-06, - "loss": 0.8151, - "num_input_tokens_seen": 49478265, - "step": 2337 - }, - { - "epoch": 0.28112787831419467, - "flos": 13920787558800.0, - "grad_norm": 12.251714469668686, - "learning_rate": 3.374286956009605e-06, - "loss": 0.5083, - "num_input_tokens_seen": 49496550, - "step": 2338 - }, - { - "epoch": 0.2812481212048338, - "flos": 9303538835880.0, - "grad_norm": 3.0740046929526508, - "learning_rate": 3.3737209119471405e-06, - "loss": 0.7481, - "num_input_tokens_seen": 49512780, - "step": 2339 - }, - { - "epoch": 0.28136836409547283, - "flos": 11382714153840.0, - "grad_norm": 3.629752628105027, - "learning_rate": 3.373154659492306e-06, - "loss": 0.6326, - "num_input_tokens_seen": 49530640, - "step": 2340 - }, - { - "epoch": 0.28148860698611194, - "flos": 14564833312320.0, - "grad_norm": 2.1361599876765878, - "learning_rate": 3.3725881987310016e-06, - "loss": 0.8321, - "num_input_tokens_seen": 49547895, - "step": 2341 - }, - { - "epoch": 0.28160884987675106, - "flos": 12732691899960.0, - "grad_norm": 2.4610035720917076, - "learning_rate": 3.372021529749159e-06, - "loss": 0.8522, - "num_input_tokens_seen": 49566675, - "step": 2342 - }, - { - "epoch": 0.2817290927673901, - "flos": 12271704255600.0, - "grad_norm": 2.0795792388824754, - "learning_rate": 3.3714546526327405e-06, - "loss": 0.9018, - "num_input_tokens_seen": 49584395, - "step": 2343 - }, - { - "epoch": 0.2818493356580292, - "flos": 11220642106800.0, - "grad_norm": 2.8825112782478537, - "learning_rate": 3.3708875674677423e-06, - "loss": 0.8485, - "num_input_tokens_seen": 49602090, - "step": 2344 - }, - { - "epoch": 0.28196957854866833, - "flos": 14919746396160.0, - "grad_norm": 3.143955267998076, - "learning_rate": 3.37032027434019e-06, - "loss": 0.8112, - "num_input_tokens_seen": 49621330, - "step": 2345 - }, - { - "epoch": 0.2820898214393074, - "flos": 14594399223600.0, - "grad_norm": 3.6858529760768923, - "learning_rate": 3.369752773336141e-06, - "loss": 0.8184, - "num_input_tokens_seen": 49640530, - "step": 2346 - }, - { - "epoch": 0.2822100643299465, - "flos": 16485577478640.0, - "grad_norm": 2.8262820482693565, - "learning_rate": 3.3691850645416864e-06, - "loss": 0.7605, - "num_input_tokens_seen": 49659960, - "step": 2347 - }, - { - "epoch": 0.2823303072205856, - "flos": 8358519587640.0, - "grad_norm": 2.5286152631887195, - "learning_rate": 3.368617148042945e-06, - "loss": 0.8245, - "num_input_tokens_seen": 49677350, - "step": 2348 - }, - { - "epoch": 0.28245055011122466, - "flos": 13326723899400.0, - "grad_norm": 2.141819864080469, - "learning_rate": 3.368049023926071e-06, - "loss": 0.8314, - "num_input_tokens_seen": 49696065, - "step": 2349 - }, - { - "epoch": 0.2825707930018638, - "flos": 18024787026120.0, - "grad_norm": 2.036646204198923, - "learning_rate": 3.3674806922772476e-06, - "loss": 0.8213, - "num_input_tokens_seen": 49716670, - "step": 2350 - }, - { - "epoch": 0.28269103589250283, - "flos": 18482608674480.0, - "grad_norm": 
2.795013660813797, - "learning_rate": 3.3669121531826904e-06, - "loss": 0.729, - "num_input_tokens_seen": 49737370, - "step": 2351 - }, - { - "epoch": 0.28281127878314194, - "flos": 14082827945880.0, - "grad_norm": 2.991210140436149, - "learning_rate": 3.366343406728647e-06, - "loss": 0.8083, - "num_input_tokens_seen": 49756540, - "step": 2352 - }, - { - "epoch": 0.28293152167378105, - "flos": 17484757935720.0, - "grad_norm": 2.2274045188639437, - "learning_rate": 3.3657744530013946e-06, - "loss": 0.6665, - "num_input_tokens_seen": 49775495, - "step": 2353 - }, - { - "epoch": 0.2830517645644201, - "flos": 32276837678640.0, - "grad_norm": 2.8854442461993273, - "learning_rate": 3.3652052920872437e-06, - "loss": 0.699, - "num_input_tokens_seen": 49798080, - "step": 2354 - }, - { - "epoch": 0.2831720074550592, - "flos": 19537280058720.0, - "grad_norm": 2.675503503348378, - "learning_rate": 3.3646359240725355e-06, - "loss": 0.8386, - "num_input_tokens_seen": 49816990, - "step": 2355 - }, - { - "epoch": 0.2832922503456983, - "flos": 22777454601120.0, - "grad_norm": 2.4186023874114433, - "learning_rate": 3.364066349043643e-06, - "loss": 0.6613, - "num_input_tokens_seen": 49837915, - "step": 2356 - }, - { - "epoch": 0.2834124932363374, - "flos": 14913794323680.0, - "grad_norm": 2.045754744916623, - "learning_rate": 3.363496567086969e-06, - "loss": 0.8083, - "num_input_tokens_seen": 49854730, - "step": 2357 - }, - { - "epoch": 0.2835327361269765, - "flos": 28960566066480.0, - "grad_norm": 3.564599606365171, - "learning_rate": 3.3629265782889506e-06, - "loss": 0.7357, - "num_input_tokens_seen": 49876275, - "step": 2358 - }, - { - "epoch": 0.2836529790176156, - "flos": 22208492798640.0, - "grad_norm": 3.100240045913406, - "learning_rate": 3.362356382736054e-06, - "loss": 0.6874, - "num_input_tokens_seen": 49896600, - "step": 2359 - }, - { - "epoch": 0.28377322190825466, - "flos": 9195627997680.0, - "grad_norm": 2.723438210851125, - "learning_rate": 3.361785980514777e-06, - "loss": 0.8947, - "num_input_tokens_seen": 49912520, - "step": 2360 - }, - { - "epoch": 0.28389346479889377, - "flos": 13353978633600.0, - "grad_norm": 2.715094992449377, - "learning_rate": 3.361215371711649e-06, - "loss": 0.7494, - "num_input_tokens_seen": 49931335, - "step": 2361 - }, - { - "epoch": 0.2840137076895329, - "flos": 14914934082240.0, - "grad_norm": 2.035078929523287, - "learning_rate": 3.3606445564132326e-06, - "loss": 0.8146, - "num_input_tokens_seen": 49948350, - "step": 2362 - }, - { - "epoch": 0.28413395058017193, - "flos": 14650111770480.0, - "grad_norm": 2.4546030050652967, - "learning_rate": 3.360073534706118e-06, - "loss": 0.8024, - "num_input_tokens_seen": 49965225, - "step": 2363 - }, - { - "epoch": 0.28425419347081105, - "flos": 27685830571200.0, - "grad_norm": 4.084505081732096, - "learning_rate": 3.35950230667693e-06, - "loss": 0.7431, - "num_input_tokens_seen": 49986640, - "step": 2364 - }, - { - "epoch": 0.28437443636145016, - "flos": 10059452922600.0, - "grad_norm": 3.480911517852355, - "learning_rate": 3.358930872412323e-06, - "loss": 0.8403, - "num_input_tokens_seen": 50003525, - "step": 2365 - }, - { - "epoch": 0.2844946792520892, - "flos": 16648092765120.0, - "grad_norm": 2.044854251359191, - "learning_rate": 3.3583592319989825e-06, - "loss": 0.7865, - "num_input_tokens_seen": 50022615, - "step": 2366 - }, - { - "epoch": 0.2846149221427283, - "flos": 23803129953360.0, - "grad_norm": 3.8254314054662184, - "learning_rate": 3.357787385523627e-06, - "loss": 0.6674, - "num_input_tokens_seen": 50043740, 
- "step": 2367 - }, - { - "epoch": 0.2847351650333674, - "flos": 20887321124760.0, - "grad_norm": 15.888992990954511, - "learning_rate": 3.3572153330730048e-06, - "loss": 0.8215, - "num_input_tokens_seen": 50064555, - "step": 2368 - }, - { - "epoch": 0.2848554079240065, - "flos": 41077638365640.0, - "grad_norm": 0.801810222218399, - "learning_rate": 3.3566430747338956e-06, - "loss": 0.6614, - "num_input_tokens_seen": 50119480, - "step": 2369 - }, - { - "epoch": 0.2849756508146456, - "flos": 8573043205680.0, - "grad_norm": 3.452186836773707, - "learning_rate": 3.35607061059311e-06, - "loss": 0.8451, - "num_input_tokens_seen": 50134130, - "step": 2370 - }, - { - "epoch": 0.28509589370528465, - "flos": 18429650544120.0, - "grad_norm": 2.413604199012793, - "learning_rate": 3.3554979407374917e-06, - "loss": 0.7304, - "num_input_tokens_seen": 50155960, - "step": 2371 - }, - { - "epoch": 0.28521613659592376, - "flos": 14595887241720.0, - "grad_norm": 2.8049572998169485, - "learning_rate": 3.3549250652539134e-06, - "loss": 0.7197, - "num_input_tokens_seen": 50174775, - "step": 2372 - }, - { - "epoch": 0.2853363794865629, - "flos": 17107054172040.0, - "grad_norm": 2.368304200106212, - "learning_rate": 3.3543519842292794e-06, - "loss": 0.8012, - "num_input_tokens_seen": 50194150, - "step": 2373 - }, - { - "epoch": 0.28545662237720193, - "flos": 14511875181960.0, - "grad_norm": 2.1765227501081355, - "learning_rate": 3.353778697750527e-06, - "loss": 0.8216, - "num_input_tokens_seen": 50212275, - "step": 2374 - }, - { - "epoch": 0.28557686526784104, - "flos": 17485296155040.0, - "grad_norm": 2.2021093529998867, - "learning_rate": 3.353205205904622e-06, - "loss": 0.8709, - "num_input_tokens_seen": 50231105, - "step": 2375 - }, - { - "epoch": 0.28569710815848015, - "flos": 33034493063160.0, - "grad_norm": 9.36538488050336, - "learning_rate": 3.3526315087785637e-06, - "loss": 0.7111, - "num_input_tokens_seen": 50251940, - "step": 2376 - }, - { - "epoch": 0.2858173510491192, - "flos": 19668393156240.0, - "grad_norm": 1.9816194761393322, - "learning_rate": 3.3520576064593805e-06, - "loss": 0.7952, - "num_input_tokens_seen": 50271615, - "step": 2377 - }, - { - "epoch": 0.2859375939397583, - "flos": 16944918805080.0, - "grad_norm": 2.02172814709969, - "learning_rate": 3.3514834990341337e-06, - "loss": 0.8102, - "num_input_tokens_seen": 50291660, - "step": 2378 - }, - { - "epoch": 0.2860578368303974, - "flos": 8789972980680.0, - "grad_norm": 4.297382338656826, - "learning_rate": 3.3509091865899144e-06, - "loss": 0.9232, - "num_input_tokens_seen": 50306570, - "step": 2379 - }, - { - "epoch": 0.2861780797210365, - "flos": 14568632507520.0, - "grad_norm": 2.4895312464757207, - "learning_rate": 3.350334669213846e-06, - "loss": 0.6861, - "num_input_tokens_seen": 50323695, - "step": 2380 - }, - { - "epoch": 0.2862983226116756, - "flos": 20211651562560.0, - "grad_norm": 2.7510562082176193, - "learning_rate": 3.3497599469930816e-06, - "loss": 0.7547, - "num_input_tokens_seen": 50341625, - "step": 2381 - }, - { - "epoch": 0.28641856550231465, - "flos": 16134526909920.0, - "grad_norm": 2.889693321402853, - "learning_rate": 3.349185020014807e-06, - "loss": 0.8145, - "num_input_tokens_seen": 50358610, - "step": 2382 - }, - { - "epoch": 0.28653880839295376, - "flos": 16374405664560.0, - "grad_norm": 3.151832737681878, - "learning_rate": 3.348609888366237e-06, - "loss": 0.7306, - "num_input_tokens_seen": 50377345, - "step": 2383 - }, - { - "epoch": 0.28665905128359287, - "flos": 17107054172040.0, - "grad_norm": 
4.561077575519007, - "learning_rate": 3.348034552134619e-06, - "loss": 0.6107, - "num_input_tokens_seen": 50396470, - "step": 2384 - }, - { - "epoch": 0.2867792941742319, - "flos": 15266301250560.0, - "grad_norm": 3.9620196126484206, - "learning_rate": 3.3474590114072316e-06, - "loss": 0.8329, - "num_input_tokens_seen": 50414190, - "step": 2385 - }, - { - "epoch": 0.28689953706487104, - "flos": 15105368962080.0, - "grad_norm": 2.496210468649706, - "learning_rate": 3.3468832662713836e-06, - "loss": 0.8057, - "num_input_tokens_seen": 50432155, - "step": 2386 - }, - { - "epoch": 0.28701977995551015, - "flos": 9193633420200.0, - "grad_norm": 3.0902182717374482, - "learning_rate": 3.346307316814415e-06, - "loss": 0.8255, - "num_input_tokens_seen": 50447045, - "step": 2387 - }, - { - "epoch": 0.2871400228461492, - "flos": 15541444709280.0, - "grad_norm": 4.417827573589841, - "learning_rate": 3.3457311631236965e-06, - "loss": 0.7222, - "num_input_tokens_seen": 50467750, - "step": 2388 - }, - { - "epoch": 0.2872602657367883, - "flos": 18403123989000.0, - "grad_norm": 2.4548238715177386, - "learning_rate": 3.345154805286631e-06, - "loss": 0.83, - "num_input_tokens_seen": 50487730, - "step": 2389 - }, - { - "epoch": 0.2873805086274274, - "flos": 12131663049360.0, - "grad_norm": 3.97839265388932, - "learning_rate": 3.344578243390651e-06, - "loss": 0.7383, - "num_input_tokens_seen": 50503010, - "step": 2390 - }, - { - "epoch": 0.2875007515180665, - "flos": 12705152226120.0, - "grad_norm": 2.938924890244398, - "learning_rate": 3.3440014775232206e-06, - "loss": 0.7571, - "num_input_tokens_seen": 50520785, - "step": 2391 - }, - { - "epoch": 0.2876209944087056, - "flos": 17155105008600.0, - "grad_norm": 2.6332382627620543, - "learning_rate": 3.343424507771834e-06, - "loss": 0.705, - "num_input_tokens_seen": 50538715, - "step": 2392 - }, - { - "epoch": 0.2877412372993447, - "flos": 9978353579160.0, - "grad_norm": 2.9889574270729407, - "learning_rate": 3.342847334224018e-06, - "loss": 0.8562, - "num_input_tokens_seen": 50555835, - "step": 2393 - }, - { - "epoch": 0.28786148018998375, - "flos": 42803171489520.0, - "grad_norm": 0.876513738516915, - "learning_rate": 3.342269956967329e-06, - "loss": 0.6583, - "num_input_tokens_seen": 50617460, - "step": 2394 - }, - { - "epoch": 0.28798172308062286, - "flos": 17155864847640.0, - "grad_norm": 4.8630659308509, - "learning_rate": 3.341692376089355e-06, - "loss": 0.6928, - "num_input_tokens_seen": 50632735, - "step": 2395 - }, - { - "epoch": 0.288101965971262, - "flos": 18394987379280.0, - "grad_norm": 3.347280189641846, - "learning_rate": 3.3411145916777146e-06, - "loss": 0.8146, - "num_input_tokens_seen": 50646615, - "step": 2396 - }, - { - "epoch": 0.28822220886190103, - "flos": 11841074021520.0, - "grad_norm": 3.285200084706808, - "learning_rate": 3.3405366038200566e-06, - "loss": 0.8839, - "num_input_tokens_seen": 50665270, - "step": 2397 - }, - { - "epoch": 0.28834245175254014, - "flos": 17971227356520.0, - "grad_norm": 3.3691498244892517, - "learning_rate": 3.3399584126040617e-06, - "loss": 0.8241, - "num_input_tokens_seen": 50684490, - "step": 2398 - }, - { - "epoch": 0.2884626946431792, - "flos": 17997627271800.0, - "grad_norm": 3.009196490641166, - "learning_rate": 3.339380018117441e-06, - "loss": 0.8825, - "num_input_tokens_seen": 50705045, - "step": 2399 - }, - { - "epoch": 0.2885829375338183, - "flos": 12057275617440.0, - "grad_norm": 6.468076809117419, - "learning_rate": 3.3388014204479366e-06, - "loss": 0.7558, - "num_input_tokens_seen": 50722570, - 
"step": 2400 - }, - { - "epoch": 0.2887031804244574, - "flos": 17620113467880.0, - "grad_norm": 3.3691702048067445, - "learning_rate": 3.338222619683321e-06, - "loss": 0.896, - "num_input_tokens_seen": 50742255, - "step": 2401 - }, - { - "epoch": 0.2888234233150965, - "flos": 17079356198400.0, - "grad_norm": 47.84081575750721, - "learning_rate": 3.337643615911398e-06, - "loss": 0.7154, - "num_input_tokens_seen": 50761600, - "step": 2402 - }, - { - "epoch": 0.2889436662057356, - "flos": 16296440657160.0, - "grad_norm": 3.804707803221623, - "learning_rate": 3.3370644092200026e-06, - "loss": 0.7689, - "num_input_tokens_seen": 50778595, - "step": 2403 - }, - { - "epoch": 0.2890639090963747, - "flos": 15811205974800.0, - "grad_norm": 9.973349726794364, - "learning_rate": 3.3364849996969985e-06, - "loss": 0.7748, - "num_input_tokens_seen": 50798335, - "step": 2404 - }, - { - "epoch": 0.28918415198701375, - "flos": 20968420468200.0, - "grad_norm": 4.0558165519957114, - "learning_rate": 3.335905387430283e-06, - "loss": 0.8402, - "num_input_tokens_seen": 50819490, - "step": 2405 - }, - { - "epoch": 0.28930439487765286, - "flos": 16053680846160.0, - "grad_norm": 2.885534639850279, - "learning_rate": 3.335325572507782e-06, - "loss": 0.8052, - "num_input_tokens_seen": 50839710, - "step": 2406 - }, - { - "epoch": 0.28942463776829197, - "flos": 14083144545480.0, - "grad_norm": 2.9087336602631146, - "learning_rate": 3.3347455550174537e-06, - "loss": 0.7249, - "num_input_tokens_seen": 50858770, - "step": 2407 - }, - { - "epoch": 0.289544880658931, - "flos": 10651680304320.0, - "grad_norm": 3.313758035403265, - "learning_rate": 3.3341653350472864e-06, - "loss": 0.6696, - "num_input_tokens_seen": 50875320, - "step": 2408 - }, - { - "epoch": 0.28966512354957014, - "flos": 20995295282880.0, - "grad_norm": 4.383878581628035, - "learning_rate": 3.333584912685298e-06, - "loss": 0.6851, - "num_input_tokens_seen": 50893660, - "step": 2409 - }, - { - "epoch": 0.28978536644020925, - "flos": 47710186081320.0, - "grad_norm": 0.8523109716446599, - "learning_rate": 3.3330042880195385e-06, - "loss": 0.5808, - "num_input_tokens_seen": 50947730, - "step": 2410 - }, - { - "epoch": 0.2899056093308483, - "flos": 13596643464720.0, - "grad_norm": 10.355777812131654, - "learning_rate": 3.3324234611380888e-06, - "loss": 0.764, - "num_input_tokens_seen": 50966180, - "step": 2411 - }, - { - "epoch": 0.2900258522214874, - "flos": 16756098583200.0, - "grad_norm": 2.9253516091836747, - "learning_rate": 3.3318424321290596e-06, - "loss": 0.8054, - "num_input_tokens_seen": 50985615, - "step": 2412 - }, - { - "epoch": 0.2901460951121265, - "flos": 52438507147080.0, - "grad_norm": 0.8399639664596104, - "learning_rate": 3.3312612010805917e-06, - "loss": 0.6383, - "num_input_tokens_seen": 51044910, - "step": 2413 - }, - { - "epoch": 0.2902663380027656, - "flos": 23614468031280.0, - "grad_norm": 2.5738551066772306, - "learning_rate": 3.330679768080858e-06, - "loss": 0.6916, - "num_input_tokens_seen": 51068515, - "step": 2414 - }, - { - "epoch": 0.2903865808934047, - "flos": 21535387693200.0, - "grad_norm": 3.23760898362467, - "learning_rate": 3.3300981332180627e-06, - "loss": 0.8241, - "num_input_tokens_seen": 51087440, - "step": 2415 - }, - { - "epoch": 0.29050682378404374, - "flos": 12459732978480.0, - "grad_norm": 3.2836313961175434, - "learning_rate": 3.3295162965804373e-06, - "loss": 0.7932, - "num_input_tokens_seen": 51105655, - "step": 2416 - }, - { - "epoch": 0.29062706667468285, - "flos": 13029422960040.0, - "grad_norm": 
4.359954467325699, - "learning_rate": 3.328934258256247e-06, - "loss": 0.7562, - "num_input_tokens_seen": 51123440, - "step": 2417 - }, - { - "epoch": 0.29074730956532197, - "flos": 17781837255360.0, - "grad_norm": 5.859647454603078, - "learning_rate": 3.3283520183337856e-06, - "loss": 0.6499, - "num_input_tokens_seen": 51142865, - "step": 2418 - }, - { - "epoch": 0.290867552455961, - "flos": 16346232791520.0, - "grad_norm": 3.4901667472550675, - "learning_rate": 3.3277695769013797e-06, - "loss": 0.6841, - "num_input_tokens_seen": 51162030, - "step": 2419 - }, - { - "epoch": 0.29098779534660013, - "flos": 16972585118760.0, - "grad_norm": 4.024494987697942, - "learning_rate": 3.327186934047385e-06, - "loss": 0.762, - "num_input_tokens_seen": 51180445, - "step": 2420 - }, - { - "epoch": 0.29110803823723924, - "flos": 11139257823720.0, - "grad_norm": 11.918470624060856, - "learning_rate": 3.3266040898601877e-06, - "loss": 0.629, - "num_input_tokens_seen": 51198000, - "step": 2421 - }, - { - "epoch": 0.2912282811278783, - "flos": 16534356494280.0, - "grad_norm": 3.1890160938047036, - "learning_rate": 3.3260210444282045e-06, - "loss": 0.758, - "num_input_tokens_seen": 51215675, - "step": 2422 - }, - { - "epoch": 0.2913485240185174, - "flos": 17943117803400.0, - "grad_norm": 4.825422895013727, - "learning_rate": 3.325437797839883e-06, - "loss": 0.7063, - "num_input_tokens_seen": 51233765, - "step": 2423 - }, - { - "epoch": 0.2914687669091565, - "flos": 13083267569280.0, - "grad_norm": 8.791758267134872, - "learning_rate": 3.3248543501837015e-06, - "loss": 0.7262, - "num_input_tokens_seen": 51250690, - "step": 2424 - }, - { - "epoch": 0.2915890097997956, - "flos": 16486463957520.0, - "grad_norm": 3.799139658257424, - "learning_rate": 3.3242707015481684e-06, - "loss": 0.7573, - "num_input_tokens_seen": 51270345, - "step": 2425 - }, - { - "epoch": 0.2917092526904347, - "flos": 10059579562440.0, - "grad_norm": 2.411393155781883, - "learning_rate": 3.323686852021823e-06, - "loss": 0.7938, - "num_input_tokens_seen": 51287575, - "step": 2426 - }, - { - "epoch": 0.2918294955810738, - "flos": 16594121516040.0, - "grad_norm": 3.9762827357000248, - "learning_rate": 3.323102801693235e-06, - "loss": 0.7867, - "num_input_tokens_seen": 51306060, - "step": 2427 - }, - { - "epoch": 0.29194973847171285, - "flos": 17158714244040.0, - "grad_norm": 4.259053404681686, - "learning_rate": 3.322518550651003e-06, - "loss": 0.7774, - "num_input_tokens_seen": 51325090, - "step": 2428 - }, - { - "epoch": 0.29206998136235196, - "flos": 16027439230680.0, - "grad_norm": 4.757990490820908, - "learning_rate": 3.3219340989837586e-06, - "loss": 0.7899, - "num_input_tokens_seen": 51344800, - "step": 2429 - }, - { - "epoch": 0.292190224252991, - "flos": 16993666160760.0, - "grad_norm": 2.886337317017015, - "learning_rate": 3.3213494467801625e-06, - "loss": 0.7897, - "num_input_tokens_seen": 51363695, - "step": 2430 - }, - { - "epoch": 0.2923104671436301, - "flos": 15162759486840.0, - "grad_norm": 4.484570861154478, - "learning_rate": 3.3207645941289063e-06, - "loss": 0.6951, - "num_input_tokens_seen": 51381760, - "step": 2431 - }, - { - "epoch": 0.29243071003426924, - "flos": 26314265223720.0, - "grad_norm": 3.4872973037936394, - "learning_rate": 3.320179541118711e-06, - "loss": 0.7897, - "num_input_tokens_seen": 51403980, - "step": 2432 - }, - { - "epoch": 0.2925509529249083, - "flos": 42804849467400.0, - "grad_norm": 1.0322599955966598, - "learning_rate": 3.3195942878383293e-06, - "loss": 0.6367, - "num_input_tokens_seen": 
51459800, - "step": 2433 - }, - { - "epoch": 0.2926711958155474, - "flos": 15648405748680.0, - "grad_norm": 4.015804064131384, - "learning_rate": 3.319008834376543e-06, - "loss": 0.7618, - "num_input_tokens_seen": 51479210, - "step": 2434 - }, - { - "epoch": 0.2927914387061865, - "flos": 16972268519160.0, - "grad_norm": 3.326108222930296, - "learning_rate": 3.3184231808221654e-06, - "loss": 0.8664, - "num_input_tokens_seen": 51493255, - "step": 2435 - }, - { - "epoch": 0.29291168159682557, - "flos": 16431764529360.0, - "grad_norm": 5.762906061458347, - "learning_rate": 3.3178373272640394e-06, - "loss": 0.5995, - "num_input_tokens_seen": 51512070, - "step": 2436 - }, - { - "epoch": 0.2930319244874647, - "flos": 15479716770000.0, - "grad_norm": 6.5441722890283325, - "learning_rate": 3.3172512737910387e-06, - "loss": 0.8409, - "num_input_tokens_seen": 51529300, - "step": 2437 - }, - { - "epoch": 0.2931521673781038, - "flos": 23020024452360.0, - "grad_norm": 3.154660268347672, - "learning_rate": 3.3166650204920674e-06, - "loss": 0.8598, - "num_input_tokens_seen": 51550190, - "step": 2438 - }, - { - "epoch": 0.29327241026874284, - "flos": 17722673772840.0, - "grad_norm": 3.4000016044164125, - "learning_rate": 3.316078567456059e-06, - "loss": 0.8092, - "num_input_tokens_seen": 51567750, - "step": 2439 - }, - { - "epoch": 0.29339265315938196, - "flos": 17754202601640.0, - "grad_norm": 2.457676837569831, - "learning_rate": 3.3154919147719786e-06, - "loss": 0.7497, - "num_input_tokens_seen": 51588485, - "step": 2440 - }, - { - "epoch": 0.29351289605002107, - "flos": 12354861496440.0, - "grad_norm": 3.7559985186134157, - "learning_rate": 3.31490506252882e-06, - "loss": 0.8556, - "num_input_tokens_seen": 51607585, - "step": 2441 - }, - { - "epoch": 0.2936331389406601, - "flos": 14488071383400.0, - "grad_norm": 10.333736493849473, - "learning_rate": 3.31431801081561e-06, - "loss": 0.8365, - "num_input_tokens_seen": 51626240, - "step": 2442 - }, - { - "epoch": 0.29375338183129923, - "flos": 52671203582160.0, - "grad_norm": 0.9391543732193662, - "learning_rate": 3.313730759721402e-06, - "loss": 0.6795, - "num_input_tokens_seen": 51688890, - "step": 2443 - }, - { - "epoch": 0.29387362472193834, - "flos": 16135001809320.0, - "grad_norm": 3.5071872757204026, - "learning_rate": 3.313143309335282e-06, - "loss": 0.8395, - "num_input_tokens_seen": 51707100, - "step": 2444 - }, - { - "epoch": 0.2939938676125774, - "flos": 16809848212560.0, - "grad_norm": 3.5532361767233662, - "learning_rate": 3.3125556597463665e-06, - "loss": 0.8207, - "num_input_tokens_seen": 51726125, - "step": 2445 - }, - { - "epoch": 0.2941141105032165, - "flos": 23020784291400.0, - "grad_norm": 3.0045354256156305, - "learning_rate": 3.311967811043801e-06, - "loss": 0.6443, - "num_input_tokens_seen": 51747765, - "step": 2446 - }, - { - "epoch": 0.29423435339385556, - "flos": 16999143333840.0, - "grad_norm": 4.402788683011695, - "learning_rate": 3.3113797633167617e-06, - "loss": 0.7849, - "num_input_tokens_seen": 51765780, - "step": 2447 - }, - { - "epoch": 0.2943545962844947, - "flos": 19693463353200.0, - "grad_norm": 3.6613507026636367, - "learning_rate": 3.310791516654455e-06, - "loss": 0.6618, - "num_input_tokens_seen": 51782560, - "step": 2448 - }, - { - "epoch": 0.2944748391751338, - "flos": 14786195481720.0, - "grad_norm": 2.7505463144202817, - "learning_rate": 3.3102030711461177e-06, - "loss": 0.7759, - "num_input_tokens_seen": 51801855, - "step": 2449 - }, - { - "epoch": 0.29459508206577284, - "flos": 11625315665040.0, - 
"grad_norm": 3.0286089216209593, - "learning_rate": 3.3096144268810156e-06, - "loss": 0.6607, - "num_input_tokens_seen": 51820335, - "step": 2450 - }, - { - "epoch": 0.29471532495641195, - "flos": 14920126315680.0, - "grad_norm": 3.2870681769180012, - "learning_rate": 3.3090255839484462e-06, - "loss": 0.7081, - "num_input_tokens_seen": 51838050, - "step": 2451 - }, - { - "epoch": 0.29483556784705106, - "flos": 14893283160960.0, - "grad_norm": 2.246768538243519, - "learning_rate": 3.3084365424377366e-06, - "loss": 0.8356, - "num_input_tokens_seen": 51856535, - "step": 2452 - }, - { - "epoch": 0.2949558107376901, - "flos": 50551571326680.0, - "grad_norm": 0.7857783116535828, - "learning_rate": 3.307847302438245e-06, - "loss": 0.5759, - "num_input_tokens_seen": 51910235, - "step": 2453 - }, - { - "epoch": 0.2950760536283292, - "flos": 11733479782920.0, - "grad_norm": 4.237175835107293, - "learning_rate": 3.3072578640393562e-06, - "loss": 0.7507, - "num_input_tokens_seen": 51927290, - "step": 2454 - }, - { - "epoch": 0.29519629651896834, - "flos": 14972166307200.0, - "grad_norm": 2.4310422597748143, - "learning_rate": 3.3066682273304886e-06, - "loss": 0.7772, - "num_input_tokens_seen": 51944655, - "step": 2455 - }, - { - "epoch": 0.2953165394096074, - "flos": 13812623440920.0, - "grad_norm": 2.740882671774655, - "learning_rate": 3.3060783924010904e-06, - "loss": 0.7487, - "num_input_tokens_seen": 51962300, - "step": 2456 - }, - { - "epoch": 0.2954367823002465, - "flos": 15075676410960.0, - "grad_norm": 3.2016255660615265, - "learning_rate": 3.3054883593406387e-06, - "loss": 0.834, - "num_input_tokens_seen": 51976770, - "step": 2457 - }, - { - "epoch": 0.2955570251908856, - "flos": 22885112159640.0, - "grad_norm": 5.362806995202964, - "learning_rate": 3.3048981282386404e-06, - "loss": 0.6222, - "num_input_tokens_seen": 51997800, - "step": 2458 - }, - { - "epoch": 0.29567726808152467, - "flos": 15835484672760.0, - "grad_norm": 3.4715084071232694, - "learning_rate": 3.304307699184634e-06, - "loss": 0.8063, - "num_input_tokens_seen": 52016110, - "step": 2459 - }, - { - "epoch": 0.2957975109721638, - "flos": 17755310700240.0, - "grad_norm": 2.380910045156566, - "learning_rate": 3.3037170722681866e-06, - "loss": 0.7819, - "num_input_tokens_seen": 52036665, - "step": 2460 - }, - { - "epoch": 0.29591775386280283, - "flos": 9788995137960.0, - "grad_norm": 3.8144842518651263, - "learning_rate": 3.3031262475788956e-06, - "loss": 0.6671, - "num_input_tokens_seen": 52053325, - "step": 2461 - }, - { - "epoch": 0.29603799675344195, - "flos": 12949305075360.0, - "grad_norm": 2.4538610046743132, - "learning_rate": 3.3025352252063897e-06, - "loss": 0.719, - "num_input_tokens_seen": 52071740, - "step": 2462 - }, - { - "epoch": 0.29615823964408106, - "flos": 16674777620040.0, - "grad_norm": 3.0321539091537533, - "learning_rate": 3.3019440052403252e-06, - "loss": 0.7287, - "num_input_tokens_seen": 52091325, - "step": 2463 - }, - { - "epoch": 0.2962784825347201, - "flos": 17215313269800.0, - "grad_norm": 2.9061300601615727, - "learning_rate": 3.30135258777039e-06, - "loss": 0.6916, - "num_input_tokens_seen": 52110415, - "step": 2464 - }, - { - "epoch": 0.2963987254253592, - "flos": 11922743244240.0, - "grad_norm": 3.127396999404189, - "learning_rate": 3.3007609728863024e-06, - "loss": 0.6823, - "num_input_tokens_seen": 52128225, - "step": 2465 - }, - { - "epoch": 0.29651896831599833, - "flos": 24584589136440.0, - "grad_norm": 3.160721133073066, - "learning_rate": 3.300169160677809e-06, - "loss": 0.7167, - 
"num_input_tokens_seen": 52151860, - "step": 2466 - }, - { - "epoch": 0.2966392112066374, - "flos": 17430691706760.0, - "grad_norm": 3.406573638939797, - "learning_rate": 3.2995771512346878e-06, - "loss": 0.732, - "num_input_tokens_seen": 52169930, - "step": 2467 - }, - { - "epoch": 0.2967594540972765, - "flos": 14569012427040.0, - "grad_norm": 3.52486348530363, - "learning_rate": 3.298984944646746e-06, - "loss": 0.7013, - "num_input_tokens_seen": 52188330, - "step": 2468 - }, - { - "epoch": 0.2968796969879156, - "flos": 17377068717240.0, - "grad_norm": 2.465402385684707, - "learning_rate": 3.298392541003822e-06, - "loss": 0.801, - "num_input_tokens_seen": 52207455, - "step": 2469 - }, - { - "epoch": 0.29699993987855466, - "flos": 16756066923240.0, - "grad_norm": 2.2648994073151125, - "learning_rate": 3.2977999403957806e-06, - "loss": 0.8738, - "num_input_tokens_seen": 52225935, - "step": 2470 - }, - { - "epoch": 0.2971201827691938, - "flos": 24850582866720.0, - "grad_norm": 3.294433557564936, - "learning_rate": 3.2972071429125207e-06, - "loss": 0.6495, - "num_input_tokens_seen": 52246875, - "step": 2471 - }, - { - "epoch": 0.2972404256598329, - "flos": 16134875169480.0, - "grad_norm": 2.564719936555707, - "learning_rate": 3.2966141486439682e-06, - "loss": 0.8639, - "num_input_tokens_seen": 52265785, - "step": 2472 - }, - { - "epoch": 0.29736066855047194, - "flos": 23479492418640.0, - "grad_norm": 3.2953387835030075, - "learning_rate": 3.29602095768008e-06, - "loss": 0.6371, - "num_input_tokens_seen": 52286020, - "step": 2473 - }, - { - "epoch": 0.29748091144111105, - "flos": 24613046949120.0, - "grad_norm": 3.201510287557716, - "learning_rate": 3.2954275701108437e-06, - "loss": 0.6291, - "num_input_tokens_seen": 52306920, - "step": 2474 - }, - { - "epoch": 0.29760115433175016, - "flos": 30365781460080.0, - "grad_norm": 2.4715960551400658, - "learning_rate": 3.294833986026275e-06, - "loss": 0.6731, - "num_input_tokens_seen": 52329880, - "step": 2475 - }, - { - "epoch": 0.2977213972223892, - "flos": 17939096988480.0, - "grad_norm": 2.721791374945333, - "learning_rate": 3.29424020551642e-06, - "loss": 0.8343, - "num_input_tokens_seen": 52348235, - "step": 2476 - }, - { - "epoch": 0.2978416401130283, - "flos": 15565153527960.0, - "grad_norm": 3.1760963949857848, - "learning_rate": 3.2936462286713546e-06, - "loss": 0.6932, - "num_input_tokens_seen": 52366305, - "step": 2477 - }, - { - "epoch": 0.2979618830036674, - "flos": 18885762554640.0, - "grad_norm": 2.8137106938711614, - "learning_rate": 3.2930520555811846e-06, - "loss": 0.7559, - "num_input_tokens_seen": 52385650, - "step": 2478 - }, - { - "epoch": 0.2980821258943065, - "flos": 17187678616080.0, - "grad_norm": 3.0490226805716985, - "learning_rate": 3.292457686336046e-06, - "loss": 0.7793, - "num_input_tokens_seen": 52404690, - "step": 2479 - }, - { - "epoch": 0.2982023687849456, - "flos": 51441294098880.0, - "grad_norm": 0.9338938657652086, - "learning_rate": 3.291863121026105e-06, - "loss": 0.6431, - "num_input_tokens_seen": 52468190, - "step": 2480 - }, - { - "epoch": 0.29832261167558466, - "flos": 21886374942000.0, - "grad_norm": 2.7721268256708167, - "learning_rate": 3.2912683597415547e-06, - "loss": 0.7443, - "num_input_tokens_seen": 52491995, - "step": 2481 - }, - { - "epoch": 0.29844285456622377, - "flos": 24908669910600.0, - "grad_norm": 2.8108007596033495, - "learning_rate": 3.2906734025726213e-06, - "loss": 0.7559, - "num_input_tokens_seen": 52510980, - "step": 2482 - }, - { - "epoch": 0.2985630974568629, - "flos": 
17483459877360.0, - "grad_norm": 2.7889613712900534, - "learning_rate": 3.290078249609559e-06, - "loss": 0.852, - "num_input_tokens_seen": 52530120, - "step": 2483 - }, - { - "epoch": 0.29868334034750194, - "flos": 15945991627680.0, - "grad_norm": 2.771630786687217, - "learning_rate": 3.2894829009426514e-06, - "loss": 0.8486, - "num_input_tokens_seen": 52547675, - "step": 2484 - }, - { - "epoch": 0.29880358323814105, - "flos": 18808240786680.0, - "grad_norm": 2.6818610182046303, - "learning_rate": 3.288887356662213e-06, - "loss": 0.7555, - "num_input_tokens_seen": 52568730, - "step": 2485 - }, - { - "epoch": 0.29892382612878016, - "flos": 52365671053200.0, - "grad_norm": 0.7979393003820959, - "learning_rate": 3.288291616858588e-06, - "loss": 0.6048, - "num_input_tokens_seen": 52623840, - "step": 2486 - }, - { - "epoch": 0.2990440690194192, - "flos": 18670510757520.0, - "grad_norm": 2.724962874693347, - "learning_rate": 3.287695681622149e-06, - "loss": 0.7599, - "num_input_tokens_seen": 52642910, - "step": 2487 - }, - { - "epoch": 0.2991643119100583, - "flos": 17376657137760.0, - "grad_norm": 2.921461433068598, - "learning_rate": 3.2870995510432982e-06, - "loss": 0.7963, - "num_input_tokens_seen": 52661110, - "step": 2488 - }, - { - "epoch": 0.29928455480069743, - "flos": 20103962344080.0, - "grad_norm": 4.717511753538659, - "learning_rate": 3.2865032252124697e-06, - "loss": 0.7429, - "num_input_tokens_seen": 52681345, - "step": 2489 - }, - { - "epoch": 0.2994047976913365, - "flos": 24748149201600.0, - "grad_norm": 2.811559647078178, - "learning_rate": 3.2859067042201243e-06, - "loss": 0.7626, - "num_input_tokens_seen": 52703105, - "step": 2490 - }, - { - "epoch": 0.2995250405819756, - "flos": 12219632604120.0, - "grad_norm": 3.227330003393775, - "learning_rate": 3.2853099881567544e-06, - "loss": 0.7529, - "num_input_tokens_seen": 52721225, - "step": 2491 - }, - { - "epoch": 0.29964528347261465, - "flos": 16807695335280.0, - "grad_norm": 2.408011052128468, - "learning_rate": 3.284713077112881e-06, - "loss": 0.7716, - "num_input_tokens_seen": 52740375, - "step": 2492 - }, - { - "epoch": 0.29976552636325376, - "flos": 12347263106040.0, - "grad_norm": 3.0672956015240698, - "learning_rate": 3.284115971179056e-06, - "loss": 0.8267, - "num_input_tokens_seen": 52754125, - "step": 2493 - }, - { - "epoch": 0.2998857692538929, - "flos": 12436182459600.0, - "grad_norm": 3.032285061331845, - "learning_rate": 3.283518670445859e-06, - "loss": 0.778, - "num_input_tokens_seen": 52771755, - "step": 2494 - }, - { - "epoch": 0.30000601214453193, - "flos": 50758627685520.0, - "grad_norm": 1.9583798328587652, - "learning_rate": 3.2829211750038995e-06, - "loss": 0.575, - "num_input_tokens_seen": 52840105, - "step": 2495 - }, - { - "epoch": 0.30012625503517104, - "flos": 12840792697920.0, - "grad_norm": 2.3099782442331716, - "learning_rate": 3.2823234849438183e-06, - "loss": 0.8619, - "num_input_tokens_seen": 52857860, - "step": 2496 - }, - { - "epoch": 0.30024649792581015, - "flos": 15541634669040.0, - "grad_norm": 3.865488746603988, - "learning_rate": 3.2817256003562836e-06, - "loss": 0.7219, - "num_input_tokens_seen": 52877955, - "step": 2497 - }, - { - "epoch": 0.3003667408164492, - "flos": 16837166266680.0, - "grad_norm": 2.4940385312160736, - "learning_rate": 3.281127521331995e-06, - "loss": 0.6452, - "num_input_tokens_seen": 52898855, - "step": 2498 - }, - { - "epoch": 0.3004869837070883, - "flos": 47354639798280.0, - "grad_norm": 0.9045327128287971, - "learning_rate": 3.2805292479616798e-06, - "loss": 
0.6365, - "num_input_tokens_seen": 52957440, - "step": 2499 - }, - { - "epoch": 0.30060722659772743, - "flos": 19238712720960.0, - "grad_norm": 5.16678038851371, - "learning_rate": 3.2799307803360955e-06, - "loss": 0.8852, - "num_input_tokens_seen": 52973090, - "step": 2500 - }, - { - "epoch": 0.3007274694883665, - "flos": 18292965293640.0, - "grad_norm": 2.5329135090985586, - "learning_rate": 3.27933211854603e-06, - "loss": 0.8029, - "num_input_tokens_seen": 52991865, - "step": 2501 - }, - { - "epoch": 0.3008477123790056, - "flos": 12435644240280.0, - "grad_norm": 2.1393443265382635, - "learning_rate": 3.278733262682299e-06, - "loss": 0.8566, - "num_input_tokens_seen": 53009440, - "step": 2502 - }, - { - "epoch": 0.3009679552696447, - "flos": 15729251812440.0, - "grad_norm": 4.18817884721682, - "learning_rate": 3.2781342128357484e-06, - "loss": 0.7968, - "num_input_tokens_seen": 53028515, - "step": 2503 - }, - { - "epoch": 0.30108819816028376, - "flos": 15452588675640.0, - "grad_norm": 3.871728192641223, - "learning_rate": 3.2775349690972547e-06, - "loss": 0.7823, - "num_input_tokens_seen": 53042385, - "step": 2504 - }, - { - "epoch": 0.30120844105092287, - "flos": 52456490004360.0, - "grad_norm": 0.767728873245092, - "learning_rate": 3.276935531557722e-06, - "loss": 0.5556, - "num_input_tokens_seen": 53107325, - "step": 2505 - }, - { - "epoch": 0.301328683941562, - "flos": 14810030940240.0, - "grad_norm": 3.702983740836557, - "learning_rate": 3.2763359003080837e-06, - "loss": 0.7833, - "num_input_tokens_seen": 53124000, - "step": 2506 - }, - { - "epoch": 0.30144892683220104, - "flos": 52102020159960.0, - "grad_norm": 0.9344237400981206, - "learning_rate": 3.2757360754393047e-06, - "loss": 0.6693, - "num_input_tokens_seen": 53187790, - "step": 2507 - }, - { - "epoch": 0.30156916972284015, - "flos": 16701905714400.0, - "grad_norm": 3.4118522280750527, - "learning_rate": 3.2751360570423767e-06, - "loss": 0.6052, - "num_input_tokens_seen": 53205895, - "step": 2508 - }, - { - "epoch": 0.3016894126134792, - "flos": 21940631130720.0, - "grad_norm": 2.8647656931774788, - "learning_rate": 3.2745358452083236e-06, - "loss": 0.7473, - "num_input_tokens_seen": 53228515, - "step": 2509 - }, - { - "epoch": 0.3018096555041183, - "flos": 15759292623120.0, - "grad_norm": 1.5675103536747574, - "learning_rate": 3.2739354400281955e-06, - "loss": 0.8071, - "num_input_tokens_seen": 53249455, - "step": 2510 - }, - { - "epoch": 0.3019298983947574, - "flos": 43584915612240.0, - "grad_norm": 0.8955025875558962, - "learning_rate": 3.2733348415930744e-06, - "loss": 0.683, - "num_input_tokens_seen": 53311045, - "step": 2511 - }, - { - "epoch": 0.3020501412853965, - "flos": 25288558211520.0, - "grad_norm": 3.1483181535615046, - "learning_rate": 3.27273404999407e-06, - "loss": 0.7879, - "num_input_tokens_seen": 53332985, - "step": 2512 - }, - { - "epoch": 0.3021703841760356, - "flos": 52367823930480.0, - "grad_norm": 0.7971024656085117, - "learning_rate": 3.272133065322322e-06, - "loss": 0.6183, - "num_input_tokens_seen": 53390975, - "step": 2513 - }, - { - "epoch": 0.3022906270666747, - "flos": 15732892707840.0, - "grad_norm": 1.9108509758413341, - "learning_rate": 3.271531887669e-06, - "loss": 0.7745, - "num_input_tokens_seen": 53410755, - "step": 2514 - }, - { - "epoch": 0.30241086995731375, - "flos": 22851905352960.0, - "grad_norm": 3.5760562696787495, - "learning_rate": 3.2709305171253015e-06, - "loss": 0.6286, - "num_input_tokens_seen": 53430595, - "step": 2515 - }, - { - "epoch": 0.30253111284795287, - "flos": 
17212970432760.0, - "grad_norm": 2.168049248927793, - "learning_rate": 3.2703289537824536e-06, - "loss": 0.7665, - "num_input_tokens_seen": 53450115, - "step": 2516 - }, - { - "epoch": 0.302651355738592, - "flos": 13669954458000.0, - "grad_norm": 3.4191111372150407, - "learning_rate": 3.269727197731714e-06, - "loss": 0.7719, - "num_input_tokens_seen": 53462600, - "step": 2517 - }, - { - "epoch": 0.30277159862923103, - "flos": 16404604775040.0, - "grad_norm": 2.351315608957255, - "learning_rate": 3.269125249064367e-06, - "loss": 0.7643, - "num_input_tokens_seen": 53482015, - "step": 2518 - }, - { - "epoch": 0.30289184151987014, - "flos": 16296757256760.0, - "grad_norm": 2.0861977404156775, - "learning_rate": 3.2685231078717297e-06, - "loss": 0.8108, - "num_input_tokens_seen": 53501925, - "step": 2519 - }, - { - "epoch": 0.30301208441050925, - "flos": 18481373936040.0, - "grad_norm": 2.8491389596831658, - "learning_rate": 3.267920774245145e-06, - "loss": 0.7131, - "num_input_tokens_seen": 53521050, - "step": 2520 - }, - { - "epoch": 0.3031323273011483, - "flos": 16863724481760.0, - "grad_norm": 2.4504147275897443, - "learning_rate": 3.2673182482759876e-06, - "loss": 0.8307, - "num_input_tokens_seen": 53539885, - "step": 2521 - }, - { - "epoch": 0.3032525701917874, - "flos": 13782709270080.0, - "grad_norm": 3.777608962743872, - "learning_rate": 3.266715530055659e-06, - "loss": 0.635, - "num_input_tokens_seen": 53557755, - "step": 2522 - }, - { - "epoch": 0.30337281308242653, - "flos": 12973108873920.0, - "grad_norm": 2.498610873525977, - "learning_rate": 3.2661126196755927e-06, - "loss": 0.7907, - "num_input_tokens_seen": 53576585, - "step": 2523 - }, - { - "epoch": 0.3034930559730656, - "flos": 42614161307880.0, - "grad_norm": 0.8424311326176344, - "learning_rate": 3.265509517227248e-06, - "loss": 0.5998, - "num_input_tokens_seen": 53633120, - "step": 2524 - }, - { - "epoch": 0.3036132988637047, - "flos": 10733064587400.0, - "grad_norm": 2.4512131159379216, - "learning_rate": 3.264906222802115e-06, - "loss": 0.7889, - "num_input_tokens_seen": 53650690, - "step": 2525 - }, - { - "epoch": 0.30373354175434375, - "flos": 15379214362440.0, - "grad_norm": 3.0448570415844687, - "learning_rate": 3.264302736491715e-06, - "loss": 0.7531, - "num_input_tokens_seen": 53670530, - "step": 2526 - }, - { - "epoch": 0.30385378464498286, - "flos": 15460725285360.0, - "grad_norm": 2.3198050512422466, - "learning_rate": 3.263699058387594e-06, - "loss": 0.8494, - "num_input_tokens_seen": 53687685, - "step": 2527 - }, - { - "epoch": 0.30397402753562197, - "flos": 15079887185640.0, - "grad_norm": 2.9469040028832754, - "learning_rate": 3.2630951885813315e-06, - "loss": 0.8925, - "num_input_tokens_seen": 53704800, - "step": 2528 - }, - { - "epoch": 0.304094270426261, - "flos": 10977597356160.0, - "grad_norm": 2.7371343150689604, - "learning_rate": 3.262491127164533e-06, - "loss": 0.7566, - "num_input_tokens_seen": 53723335, - "step": 2529 - }, - { - "epoch": 0.30421451331690014, - "flos": 10059579562440.0, - "grad_norm": 5.470218657424416, - "learning_rate": 3.2618868742288337e-06, - "loss": 0.7875, - "num_input_tokens_seen": 53739980, - "step": 2530 - }, - { - "epoch": 0.30433475620753925, - "flos": 12678657330960.0, - "grad_norm": 2.7775336306619236, - "learning_rate": 3.261282429865899e-06, - "loss": 0.7095, - "num_input_tokens_seen": 53757705, - "step": 2531 - }, - { - "epoch": 0.3044549990981783, - "flos": 13813035020400.0, - "grad_norm": 2.4151262328523475, - "learning_rate": 3.2606777941674225e-06, - "loss": 
0.7128, - "num_input_tokens_seen": 53776080, - "step": 2532 - }, - { - "epoch": 0.3045752419888174, - "flos": 15405107718360.0, - "grad_norm": 4.644964071243666, - "learning_rate": 3.2600729672251276e-06, - "loss": 0.8247, - "num_input_tokens_seen": 53793515, - "step": 2533 - }, - { - "epoch": 0.3046954848794565, - "flos": 21346504151400.0, - "grad_norm": 6.294595234699462, - "learning_rate": 3.259467949130765e-06, - "loss": 0.6306, - "num_input_tokens_seen": 53814645, - "step": 2534 - }, - { - "epoch": 0.3048157277700956, - "flos": 14831998461120.0, - "grad_norm": 4.040351234124674, - "learning_rate": 3.2588627399761164e-06, - "loss": 0.817, - "num_input_tokens_seen": 53830360, - "step": 2535 - }, - { - "epoch": 0.3049359706607347, - "flos": 16641159233880.0, - "grad_norm": 2.090994772832933, - "learning_rate": 3.2582573398529903e-06, - "loss": 0.6999, - "num_input_tokens_seen": 53847435, - "step": 2536 - }, - { - "epoch": 0.3050562135513738, - "flos": 13454955940560.0, - "grad_norm": 3.408013263649718, - "learning_rate": 3.2576517488532265e-06, - "loss": 0.7287, - "num_input_tokens_seen": 53863505, - "step": 2537 - }, - { - "epoch": 0.30517645644201286, - "flos": 14887774327920.0, - "grad_norm": 2.4738909153368764, - "learning_rate": 3.257045967068692e-06, - "loss": 0.8476, - "num_input_tokens_seen": 53882480, - "step": 2538 - }, - { - "epoch": 0.30529669933265197, - "flos": 16054060765680.0, - "grad_norm": 2.6297761064675647, - "learning_rate": 3.2564399945912848e-06, - "loss": 0.8009, - "num_input_tokens_seen": 53901990, - "step": 2539 - }, - { - "epoch": 0.305416942223291, - "flos": 15972834782400.0, - "grad_norm": 2.6534578795251877, - "learning_rate": 3.2558338315129287e-06, - "loss": 0.8055, - "num_input_tokens_seen": 53919855, - "step": 2540 - }, - { - "epoch": 0.30553718511393013, - "flos": 24910126268760.0, - "grad_norm": 2.844682812968167, - "learning_rate": 3.2552274779255785e-06, - "loss": 0.7389, - "num_input_tokens_seen": 53940505, - "step": 2541 - }, - { - "epoch": 0.30565742800456924, - "flos": 16292704781880.0, - "grad_norm": 3.6365669128648195, - "learning_rate": 3.2546209339212184e-06, - "loss": 0.75, - "num_input_tokens_seen": 53959245, - "step": 2542 - }, - { - "epoch": 0.3057776708952083, - "flos": 16512800552880.0, - "grad_norm": 2.170953019760577, - "learning_rate": 3.25401419959186e-06, - "loss": 0.7517, - "num_input_tokens_seen": 53979575, - "step": 2543 - }, - { - "epoch": 0.3058979137858474, - "flos": 15946086607560.0, - "grad_norm": 2.8515730348157287, - "learning_rate": 3.253407275029545e-06, - "loss": 0.751, - "num_input_tokens_seen": 53998200, - "step": 2544 - }, - { - "epoch": 0.3060181566764865, - "flos": 19779691610160.0, - "grad_norm": 3.5947155526098036, - "learning_rate": 3.2528001603263425e-06, - "loss": 0.7871, - "num_input_tokens_seen": 54019990, - "step": 2545 - }, - { - "epoch": 0.3061383995671256, - "flos": 14514661258440.0, - "grad_norm": 2.41190832126412, - "learning_rate": 3.2521928555743514e-06, - "loss": 0.7957, - "num_input_tokens_seen": 54037055, - "step": 2546 - }, - { - "epoch": 0.3062586424577647, - "flos": 16189036378320.0, - "grad_norm": 2.465741606107749, - "learning_rate": 3.2515853608657e-06, - "loss": 0.6508, - "num_input_tokens_seen": 54054775, - "step": 2547 - }, - { - "epoch": 0.3063788853484038, - "flos": 15239553075720.0, - "grad_norm": 3.273371220102035, - "learning_rate": 3.250977676292545e-06, - "loss": 0.7125, - "num_input_tokens_seen": 54072735, - "step": 2548 - }, - { - "epoch": 0.30649912823904285, - "flos": 
11808468754080.0, - "grad_norm": 2.292864353591317, - "learning_rate": 3.2503698019470712e-06, - "loss": 0.7798, - "num_input_tokens_seen": 54088225, - "step": 2549 - }, - { - "epoch": 0.30661937112968196, - "flos": 13590628072320.0, - "grad_norm": 3.051707343332677, - "learning_rate": 3.249761737921492e-06, - "loss": 0.7623, - "num_input_tokens_seen": 54104475, - "step": 2550 - }, - { - "epoch": 0.30673961402032107, - "flos": 23043923230800.0, - "grad_norm": 4.971653311994219, - "learning_rate": 3.249153484308051e-06, - "loss": 0.7295, - "num_input_tokens_seen": 54122810, - "step": 2551 - }, - { - "epoch": 0.3068598569109601, - "flos": 14784549163800.0, - "grad_norm": 2.522091060491934, - "learning_rate": 3.2485450411990194e-06, - "loss": 0.7558, - "num_input_tokens_seen": 54141885, - "step": 2552 - }, - { - "epoch": 0.30698009980159924, - "flos": 21719933820480.0, - "grad_norm": 2.648454514170321, - "learning_rate": 3.2479364086866983e-06, - "loss": 0.7989, - "num_input_tokens_seen": 54161860, - "step": 2553 - }, - { - "epoch": 0.30710034269223835, - "flos": 17458199720640.0, - "grad_norm": 2.3456957052273886, - "learning_rate": 3.247327586863416e-06, - "loss": 0.8025, - "num_input_tokens_seen": 54182460, - "step": 2554 - }, - { - "epoch": 0.3072205855828774, - "flos": 18969742954440.0, - "grad_norm": 3.3718077079273487, - "learning_rate": 3.2467185758215304e-06, - "loss": 0.7586, - "num_input_tokens_seen": 54201920, - "step": 2555 - }, - { - "epoch": 0.3073408284735165, - "flos": 16269534182520.0, - "grad_norm": 3.744364000004055, - "learning_rate": 3.246109375653428e-06, - "loss": 0.8305, - "num_input_tokens_seen": 54218405, - "step": 2556 - }, - { - "epoch": 0.30746107136415557, - "flos": 14244678373200.0, - "grad_norm": 2.5520541650052424, - "learning_rate": 3.2454999864515243e-06, - "loss": 0.7756, - "num_input_tokens_seen": 54237500, - "step": 2557 - }, - { - "epoch": 0.3075813142547947, - "flos": 15890817300120.0, - "grad_norm": 2.390409073396746, - "learning_rate": 3.244890408308263e-06, - "loss": 0.6766, - "num_input_tokens_seen": 54257925, - "step": 2558 - }, - { - "epoch": 0.3077015571454338, - "flos": 17647336542120.0, - "grad_norm": 2.941845540632024, - "learning_rate": 3.2442806413161165e-06, - "loss": 0.591, - "num_input_tokens_seen": 54277290, - "step": 2559 - }, - { - "epoch": 0.30782180003607285, - "flos": 13434413117880.0, - "grad_norm": 2.992722266793581, - "learning_rate": 3.243670685567586e-06, - "loss": 0.7615, - "num_input_tokens_seen": 54294410, - "step": 2560 - }, - { - "epoch": 0.30794204292671196, - "flos": 17484884575560.0, - "grad_norm": 2.6049573588038872, - "learning_rate": 3.2430605411552012e-06, - "loss": 0.7764, - "num_input_tokens_seen": 54314245, - "step": 2561 - }, - { - "epoch": 0.30806228581735107, - "flos": 50090999753160.0, - "grad_norm": 0.9015254484129663, - "learning_rate": 3.2424502081715205e-06, - "loss": 0.7109, - "num_input_tokens_seen": 54377080, - "step": 2562 - }, - { - "epoch": 0.3081825287079901, - "flos": 17458896239760.0, - "grad_norm": 3.1013298812228163, - "learning_rate": 3.241839686709132e-06, - "loss": 0.764, - "num_input_tokens_seen": 54397735, - "step": 2563 - }, - { - "epoch": 0.30830277159862923, - "flos": 11808690373800.0, - "grad_norm": 2.888206336092307, - "learning_rate": 3.2412289768606495e-06, - "loss": 0.7966, - "num_input_tokens_seen": 54414025, - "step": 2564 - }, - { - "epoch": 0.30842301448926834, - "flos": 21533488095600.0, - "grad_norm": 1.9429605506076677, - "learning_rate": 3.240618078718718e-06, - "loss": 
0.8007, - "num_input_tokens_seen": 54435205, - "step": 2565 - }, - { - "epoch": 0.3085432573799074, - "flos": 16054124085600.0, - "grad_norm": 2.7887916754015643, - "learning_rate": 3.240006992376011e-06, - "loss": 0.7198, - "num_input_tokens_seen": 54454550, - "step": 2566 - }, - { - "epoch": 0.3086635002705465, - "flos": 16135191769080.0, - "grad_norm": 2.79581955756154, - "learning_rate": 3.2393957179252284e-06, - "loss": 0.7453, - "num_input_tokens_seen": 54470805, - "step": 2567 - }, - { - "epoch": 0.3087837431611856, - "flos": 23987327821080.0, - "grad_norm": 2.7194157804650314, - "learning_rate": 3.2387842554591016e-06, - "loss": 0.7939, - "num_input_tokens_seen": 54491340, - "step": 2568 - }, - { - "epoch": 0.3089039860518247, - "flos": 12837246782400.0, - "grad_norm": 6.328676552931197, - "learning_rate": 3.238172605070388e-06, - "loss": 0.8476, - "num_input_tokens_seen": 54506475, - "step": 2569 - }, - { - "epoch": 0.3090242289424638, - "flos": 10456971329880.0, - "grad_norm": 3.2146927530418767, - "learning_rate": 3.2375607668518745e-06, - "loss": 0.7584, - "num_input_tokens_seen": 54519230, - "step": 2570 - }, - { - "epoch": 0.30914447183310284, - "flos": 11704737030600.0, - "grad_norm": 2.5426540853945574, - "learning_rate": 3.236948740896377e-06, - "loss": 0.8837, - "num_input_tokens_seen": 54533750, - "step": 2571 - }, - { - "epoch": 0.30926471472374195, - "flos": 23665178304480.0, - "grad_norm": 2.012327113337138, - "learning_rate": 3.2363365272967384e-06, - "loss": 0.8245, - "num_input_tokens_seen": 54556040, - "step": 2572 - }, - { - "epoch": 0.30938495761438106, - "flos": 14888122587480.0, - "grad_norm": 2.485180418346112, - "learning_rate": 3.235724126145832e-06, - "loss": 0.803, - "num_input_tokens_seen": 54571795, - "step": 2573 - }, - { - "epoch": 0.3095052005050201, - "flos": 17620050147960.0, - "grad_norm": 2.15711205982899, - "learning_rate": 3.235111537536558e-06, - "loss": 0.7518, - "num_input_tokens_seen": 54592330, - "step": 2574 - }, - { - "epoch": 0.30962544339565923, - "flos": 17131491169800.0, - "grad_norm": 2.2523258691475574, - "learning_rate": 3.2344987615618456e-06, - "loss": 0.8219, - "num_input_tokens_seen": 54611885, - "step": 2575 - }, - { - "epoch": 0.30974568628629834, - "flos": 24827032347840.0, - "grad_norm": 1.9914761937677357, - "learning_rate": 3.2338857983146533e-06, - "loss": 0.7561, - "num_input_tokens_seen": 54633105, - "step": 2576 - }, - { - "epoch": 0.3098659291769374, - "flos": 14784359204040.0, - "grad_norm": 2.378470545363628, - "learning_rate": 3.233272647887966e-06, - "loss": 0.7469, - "num_input_tokens_seen": 54651715, - "step": 2577 - }, - { - "epoch": 0.3099861720675765, - "flos": 18127948870320.0, - "grad_norm": 2.0838078403511764, - "learning_rate": 3.2326593103747985e-06, - "loss": 0.8759, - "num_input_tokens_seen": 54670450, - "step": 2578 - }, - { - "epoch": 0.3101064149582156, - "flos": 8520433334880.0, - "grad_norm": 2.9016238894770434, - "learning_rate": 3.2320457858681936e-06, - "loss": 0.8315, - "num_input_tokens_seen": 54688560, - "step": 2579 - }, - { - "epoch": 0.31022665784885467, - "flos": 16858437268440.0, - "grad_norm": 2.7122502295470863, - "learning_rate": 3.2314320744612228e-06, - "loss": 0.8452, - "num_input_tokens_seen": 54703580, - "step": 2580 - }, - { - "epoch": 0.3103469007394938, - "flos": 11868202115880.0, - "grad_norm": 2.0027216036474735, - "learning_rate": 3.2308181762469854e-06, - "loss": 0.7538, - "num_input_tokens_seen": 54721245, - "step": 2581 - }, - { - "epoch": 0.3104671436301329, - 
"flos": 22395983302200.0, - "grad_norm": 2.761471317759095, - "learning_rate": 3.230204091318609e-06, - "loss": 0.768, - "num_input_tokens_seen": 54741505, - "step": 2582 - }, - { - "epoch": 0.31058738652077195, - "flos": 14649415251360.0, - "grad_norm": 3.0608350938195965, - "learning_rate": 3.2295898197692503e-06, - "loss": 0.8343, - "num_input_tokens_seen": 54760185, - "step": 2583 - }, - { - "epoch": 0.31070762941141106, - "flos": 20590305125040.0, - "grad_norm": 2.3800618288680275, - "learning_rate": 3.228975361692094e-06, - "loss": 0.7737, - "num_input_tokens_seen": 54780925, - "step": 2584 - }, - { - "epoch": 0.31082787230205017, - "flos": 15000750759720.0, - "grad_norm": 2.9623828269223798, - "learning_rate": 3.228360717180352e-06, - "loss": 0.784, - "num_input_tokens_seen": 54798170, - "step": 2585 - }, - { - "epoch": 0.3109481151926892, - "flos": 46032771605280.0, - "grad_norm": 0.8584558600096454, - "learning_rate": 3.227745886327266e-06, - "loss": 0.6356, - "num_input_tokens_seen": 54856585, - "step": 2586 - }, - { - "epoch": 0.31106835808332833, - "flos": 32933678547360.0, - "grad_norm": 0.8333912511407996, - "learning_rate": 3.227130869226105e-06, - "loss": 0.5809, - "num_input_tokens_seen": 54913465, - "step": 2587 - }, - { - "epoch": 0.3111886009739674, - "flos": 17132820888120.0, - "grad_norm": 7.247233948365663, - "learning_rate": 3.226515665970167e-06, - "loss": 0.8007, - "num_input_tokens_seen": 54930725, - "step": 2588 - }, - { - "epoch": 0.3113088438646065, - "flos": 12787169708400.0, - "grad_norm": 4.7879848543467745, - "learning_rate": 3.225900276652777e-06, - "loss": 0.8347, - "num_input_tokens_seen": 54947220, - "step": 2589 - }, - { - "epoch": 0.3114290867552456, - "flos": 20805683562000.0, - "grad_norm": 1.8511162715110783, - "learning_rate": 3.2252847013672906e-06, - "loss": 0.7414, - "num_input_tokens_seen": 54969600, - "step": 2590 - }, - { - "epoch": 0.31154932964588467, - "flos": 20075282911680.0, - "grad_norm": 2.7813148062367197, - "learning_rate": 3.224668940207089e-06, - "loss": 0.7427, - "num_input_tokens_seen": 54988305, - "step": 2591 - }, - { - "epoch": 0.3116695725365238, - "flos": 19455895775640.0, - "grad_norm": 2.135783491184399, - "learning_rate": 3.2240529932655828e-06, - "loss": 0.8474, - "num_input_tokens_seen": 55007290, - "step": 2592 - }, - { - "epoch": 0.3117898154271629, - "flos": 15485858802240.0, - "grad_norm": 4.451946972746254, - "learning_rate": 3.223436860636211e-06, - "loss": 0.8511, - "num_input_tokens_seen": 55022645, - "step": 2593 - }, - { - "epoch": 0.31191005831780194, - "flos": 19996684705080.0, - "grad_norm": 1.678894139490698, - "learning_rate": 3.2228205424124403e-06, - "loss": 0.725, - "num_input_tokens_seen": 55045520, - "step": 2594 - }, - { - "epoch": 0.31203030120844105, - "flos": 9406194120720.0, - "grad_norm": 3.128844223645895, - "learning_rate": 3.222204038687765e-06, - "loss": 0.7381, - "num_input_tokens_seen": 55058375, - "step": 2595 - }, - { - "epoch": 0.31215054409908016, - "flos": 20210670103800.0, - "grad_norm": 2.0125934365895706, - "learning_rate": 3.221587349555709e-06, - "loss": 0.8709, - "num_input_tokens_seen": 55078355, - "step": 2596 - }, - { - "epoch": 0.3122707869897192, - "flos": 15729188492520.0, - "grad_norm": 2.326881332532292, - "learning_rate": 3.2209704751098236e-06, - "loss": 0.6757, - "num_input_tokens_seen": 55097105, - "step": 2597 - }, - { - "epoch": 0.31239102988035833, - "flos": 11051446568760.0, - "grad_norm": 2.3561537017166945, - "learning_rate": 3.2203534154436875e-06, - 
"loss": 0.8004, - "num_input_tokens_seen": 55111180, - "step": 2598 - }, - { - "epoch": 0.31251127277099744, - "flos": 16134305290200.0, - "grad_norm": 3.0132272309497194, - "learning_rate": 3.2197361706509084e-06, - "loss": 0.7539, - "num_input_tokens_seen": 55131655, - "step": 2599 - }, - { - "epoch": 0.3126315156616365, - "flos": 11057588601000.0, - "grad_norm": 4.7451092487778315, - "learning_rate": 3.2191187408251228e-06, - "loss": 0.8111, - "num_input_tokens_seen": 55148535, - "step": 2600 - }, - { - "epoch": 0.3127517585522756, - "flos": 13241223821520.0, - "grad_norm": 6.688382669826623, - "learning_rate": 3.218501126059993e-06, - "loss": 0.755, - "num_input_tokens_seen": 55163650, - "step": 2601 - }, - { - "epoch": 0.31287200144291466, - "flos": 16027059311160.0, - "grad_norm": 2.308433772448542, - "learning_rate": 3.2178833264492116e-06, - "loss": 0.7946, - "num_input_tokens_seen": 55182075, - "step": 2602 - }, - { - "epoch": 0.31299224433355377, - "flos": 21938699873160.0, - "grad_norm": 2.4222242534212857, - "learning_rate": 3.217265342086498e-06, - "loss": 0.7449, - "num_input_tokens_seen": 55202285, - "step": 2603 - }, - { - "epoch": 0.3131124872241929, - "flos": 8439302331480.0, - "grad_norm": 2.5544637879578636, - "learning_rate": 3.216647173065599e-06, - "loss": 0.716, - "num_input_tokens_seen": 55217470, - "step": 2604 - }, - { - "epoch": 0.31323273011483194, - "flos": 36703524881880.0, - "grad_norm": 3.717342462953533, - "learning_rate": 3.216028819480292e-06, - "loss": 0.7191, - "num_input_tokens_seen": 55238530, - "step": 2605 - }, - { - "epoch": 0.31335297300547105, - "flos": 16539580387680.0, - "grad_norm": 3.2551220366165383, - "learning_rate": 3.2154102814243793e-06, - "loss": 0.7447, - "num_input_tokens_seen": 55257390, - "step": 2606 - }, - { - "epoch": 0.31347321589611016, - "flos": 25500960612240.0, - "grad_norm": 3.164777058106092, - "learning_rate": 3.2147915589916937e-06, - "loss": 0.6617, - "num_input_tokens_seen": 55278670, - "step": 2607 - }, - { - "epoch": 0.3135934587867492, - "flos": 14433783534720.0, - "grad_norm": 4.4081920916039365, - "learning_rate": 3.2141726522760938e-06, - "loss": 0.8125, - "num_input_tokens_seen": 55296450, - "step": 2608 - }, - { - "epoch": 0.3137137016773883, - "flos": 48528334666680.0, - "grad_norm": 0.781268348478381, - "learning_rate": 3.213553561371469e-06, - "loss": 0.5513, - "num_input_tokens_seen": 55359905, - "step": 2609 - }, - { - "epoch": 0.31383394456802743, - "flos": 11841232321320.0, - "grad_norm": 4.0121499547650865, - "learning_rate": 3.212934286371733e-06, - "loss": 0.9554, - "num_input_tokens_seen": 55376335, - "step": 2610 - }, - { - "epoch": 0.3139541874586665, - "flos": 28523635500360.0, - "grad_norm": 3.7825156371505044, - "learning_rate": 3.2123148273708304e-06, - "loss": 0.7991, - "num_input_tokens_seen": 55396245, - "step": 2611 - }, - { - "epoch": 0.3140744303493056, - "flos": 18349152739920.0, - "grad_norm": 2.4487285701481922, - "learning_rate": 3.211695184462733e-06, - "loss": 0.7596, - "num_input_tokens_seen": 55417140, - "step": 2612 - }, - { - "epoch": 0.3141946732399447, - "flos": 53474946885720.0, - "grad_norm": 0.8954170205985521, - "learning_rate": 3.2110753577414383e-06, - "loss": 0.6375, - "num_input_tokens_seen": 55478440, - "step": 2613 - }, - { - "epoch": 0.31431491613058377, - "flos": 14051172477240.0, - "grad_norm": 2.555570677237991, - "learning_rate": 3.2104553473009757e-06, - "loss": 0.7741, - "num_input_tokens_seen": 55496280, - "step": 2614 - }, - { - "epoch": 
0.3144351590212229, - "flos": 26611407863280.0, - "grad_norm": 2.4782083619998216, - "learning_rate": 3.209835153235399e-06, - "loss": 0.6645, - "num_input_tokens_seen": 55517555, - "step": 2615 - }, - { - "epoch": 0.314555401911862, - "flos": 13542703875600.0, - "grad_norm": 2.877578242718823, - "learning_rate": 3.2092147756387916e-06, - "loss": 0.6698, - "num_input_tokens_seen": 55537600, - "step": 2616 - }, - { - "epoch": 0.31467564480250104, - "flos": 11922838224120.0, - "grad_norm": 3.247024347653989, - "learning_rate": 3.208594214605264e-06, - "loss": 0.8243, - "num_input_tokens_seen": 55555865, - "step": 2617 - }, - { - "epoch": 0.31479588769314015, - "flos": 15837289290480.0, - "grad_norm": 4.223312962058138, - "learning_rate": 3.2079734702289553e-06, - "loss": 0.7614, - "num_input_tokens_seen": 55574480, - "step": 2618 - }, - { - "epoch": 0.3149161305837792, - "flos": 48699836890440.0, - "grad_norm": 0.8159759701762236, - "learning_rate": 3.207352542604031e-06, - "loss": 0.6353, - "num_input_tokens_seen": 55636535, - "step": 2619 - }, - { - "epoch": 0.3150363734744183, - "flos": 21265721407560.0, - "grad_norm": 2.196940731188482, - "learning_rate": 3.2067314318246864e-06, - "loss": 0.7649, - "num_input_tokens_seen": 55656970, - "step": 2620 - }, - { - "epoch": 0.31515661636505743, - "flos": 20266097711040.0, - "grad_norm": 2.2760466714508643, - "learning_rate": 3.206110137985143e-06, - "loss": 0.7591, - "num_input_tokens_seen": 55676895, - "step": 2621 - }, - { - "epoch": 0.3152768592556965, - "flos": 18022602488880.0, - "grad_norm": 2.150861336921007, - "learning_rate": 3.2054886611796505e-06, - "loss": 0.9039, - "num_input_tokens_seen": 55695610, - "step": 2622 - }, - { - "epoch": 0.3153971021463356, - "flos": 50492471164080.0, - "grad_norm": 0.946079951468408, - "learning_rate": 3.204867001502487e-06, - "loss": 0.6842, - "num_input_tokens_seen": 55753985, - "step": 2623 - }, - { - "epoch": 0.3155173450369747, - "flos": 18754269537600.0, - "grad_norm": 2.8315926047689706, - "learning_rate": 3.2042451590479567e-06, - "loss": 0.7839, - "num_input_tokens_seen": 55774220, - "step": 2624 - }, - { - "epoch": 0.31563758792761376, - "flos": 17802696677640.0, - "grad_norm": 1.982821960667134, - "learning_rate": 3.203623133910394e-06, - "loss": 0.8418, - "num_input_tokens_seen": 55792245, - "step": 2625 - }, - { - "epoch": 0.31575783081825287, - "flos": 23423938171560.0, - "grad_norm": 3.9444147580481825, - "learning_rate": 3.203000926184158e-06, - "loss": 0.7533, - "num_input_tokens_seen": 55810890, - "step": 2626 - }, - { - "epoch": 0.315878073708892, - "flos": 22615350894120.0, - "grad_norm": 2.513714935953526, - "learning_rate": 3.202378535963639e-06, - "loss": 0.7584, - "num_input_tokens_seen": 55831525, - "step": 2627 - }, - { - "epoch": 0.31599831659953104, - "flos": 16242849327600.0, - "grad_norm": 2.5642077640202574, - "learning_rate": 3.2017559633432516e-06, - "loss": 0.8252, - "num_input_tokens_seen": 55850875, - "step": 2628 - }, - { - "epoch": 0.31611855949017015, - "flos": 18754047917880.0, - "grad_norm": 2.420261362322401, - "learning_rate": 3.2011332084174398e-06, - "loss": 0.6436, - "num_input_tokens_seen": 55871465, - "step": 2629 - }, - { - "epoch": 0.31623880238080926, - "flos": 15054057149640.0, - "grad_norm": 2.003354359081118, - "learning_rate": 3.2005102712806756e-06, - "loss": 0.8748, - "num_input_tokens_seen": 55890015, - "step": 2630 - }, - { - "epoch": 0.3163590452714483, - "flos": 9274574463840.0, - "grad_norm": 5.309649072750179, - "learning_rate": 
3.1998871520274575e-06, - "loss": 0.7054, - "num_input_tokens_seen": 55905070, - "step": 2631 - }, - { - "epoch": 0.3164792881620874, - "flos": 16865529099480.0, - "grad_norm": 5.463095046396495, - "learning_rate": 3.199263850752312e-06, - "loss": 0.8216, - "num_input_tokens_seen": 55925625, - "step": 2632 - }, - { - "epoch": 0.31659953105272653, - "flos": 13353630374040.0, - "grad_norm": 3.4620070344092064, - "learning_rate": 3.198640367549795e-06, - "loss": 0.833, - "num_input_tokens_seen": 55944240, - "step": 2633 - }, - { - "epoch": 0.3167197739433656, - "flos": 18835495520880.0, - "grad_norm": 2.606646819059449, - "learning_rate": 3.198016702514487e-06, - "loss": 0.8446, - "num_input_tokens_seen": 55964240, - "step": 2634 - }, - { - "epoch": 0.3168400168340047, - "flos": 17238198929520.0, - "grad_norm": 2.0789290858766694, - "learning_rate": 3.1973928557409972e-06, - "loss": 0.8325, - "num_input_tokens_seen": 55982000, - "step": 2635 - }, - { - "epoch": 0.31696025972464376, - "flos": 20806253441280.0, - "grad_norm": 2.7991685041410412, - "learning_rate": 3.1967688273239636e-06, - "loss": 0.6894, - "num_input_tokens_seen": 56001525, - "step": 2636 - }, - { - "epoch": 0.31708050261528287, - "flos": 11949618058920.0, - "grad_norm": 2.7688686509036557, - "learning_rate": 3.1961446173580503e-06, - "loss": 0.8071, - "num_input_tokens_seen": 56018185, - "step": 2637 - }, - { - "epoch": 0.317200745505922, - "flos": 19211869566240.0, - "grad_norm": 2.174108660231061, - "learning_rate": 3.1955202259379502e-06, - "loss": 0.7537, - "num_input_tokens_seen": 56039635, - "step": 2638 - }, - { - "epoch": 0.31732098839656103, - "flos": 23013882420120.0, - "grad_norm": 2.209855316915114, - "learning_rate": 3.194895653158381e-06, - "loss": 0.8049, - "num_input_tokens_seen": 56058295, - "step": 2639 - }, - { - "epoch": 0.31744123128720014, - "flos": 43475738375640.0, - "grad_norm": 0.7803833923401847, - "learning_rate": 3.194270899114093e-06, - "loss": 0.5874, - "num_input_tokens_seen": 56123810, - "step": 2640 - }, - { - "epoch": 0.31756147417783925, - "flos": 12702904368960.0, - "grad_norm": 2.842036475079726, - "learning_rate": 3.193645963899858e-06, - "loss": 0.8111, - "num_input_tokens_seen": 56141145, - "step": 2641 - }, - { - "epoch": 0.3176817170684783, - "flos": 18670985656920.0, - "grad_norm": 2.2612759645910336, - "learning_rate": 3.193020847610479e-06, - "loss": 0.8159, - "num_input_tokens_seen": 56161185, - "step": 2642 - }, - { - "epoch": 0.3178019599591174, - "flos": 18294484971720.0, - "grad_norm": 3.370390959115152, - "learning_rate": 3.192395550340787e-06, - "loss": 0.6944, - "num_input_tokens_seen": 56178855, - "step": 2643 - }, - { - "epoch": 0.31792220284975653, - "flos": 9006459516240.0, - "grad_norm": 3.044453915892979, - "learning_rate": 3.191770072185638e-06, - "loss": 0.7543, - "num_input_tokens_seen": 56195570, - "step": 2644 - }, - { - "epoch": 0.3180424457403956, - "flos": 11273505257280.0, - "grad_norm": 3.957875168368587, - "learning_rate": 3.191144413239916e-06, - "loss": 0.7077, - "num_input_tokens_seen": 56211860, - "step": 2645 - }, - { - "epoch": 0.3181626886310347, - "flos": 19184076612720.0, - "grad_norm": 2.801180035705978, - "learning_rate": 3.190518573598534e-06, - "loss": 0.8613, - "num_input_tokens_seen": 56228185, - "step": 2646 - }, - { - "epoch": 0.3182829315216738, - "flos": 18672252055320.0, - "grad_norm": 1.8709933387397184, - "learning_rate": 3.1898925533564308e-06, - "loss": 0.7702, - "num_input_tokens_seen": 56249375, - "step": 2647 - }, - { - "epoch": 
0.31840317441231286, - "flos": 13218496461600.0, - "grad_norm": 3.197526309432296, - "learning_rate": 3.1892663526085733e-06, - "loss": 0.6293, - "num_input_tokens_seen": 56267470, - "step": 2648 - }, - { - "epoch": 0.31852341730295197, - "flos": 47731615382880.0, - "grad_norm": 0.7606634802006281, - "learning_rate": 3.188639971449956e-06, - "loss": 0.5937, - "num_input_tokens_seen": 56333240, - "step": 2649 - }, - { - "epoch": 0.318643660193591, - "flos": 15109009857480.0, - "grad_norm": 3.4205977612384437, - "learning_rate": 3.1880134099756e-06, - "loss": 0.7135, - "num_input_tokens_seen": 56352595, - "step": 2650 - }, - { - "epoch": 0.31876390308423014, - "flos": 19753038415200.0, - "grad_norm": 2.0911578173707692, - "learning_rate": 3.1873866682805535e-06, - "loss": 0.6922, - "num_input_tokens_seen": 56373010, - "step": 2651 - }, - { - "epoch": 0.31888414597486925, - "flos": 13165538331240.0, - "grad_norm": 2.3684975804128725, - "learning_rate": 3.186759746459894e-06, - "loss": 0.8645, - "num_input_tokens_seen": 56391840, - "step": 2652 - }, - { - "epoch": 0.3190043888655083, - "flos": 18424458310680.0, - "grad_norm": 2.8537241646077174, - "learning_rate": 3.1861326446087246e-06, - "loss": 0.7874, - "num_input_tokens_seen": 56410300, - "step": 2653 - }, - { - "epoch": 0.3191246317561474, - "flos": 16134463590000.0, - "grad_norm": 3.4991815154949233, - "learning_rate": 3.1855053628221763e-06, - "loss": 0.6981, - "num_input_tokens_seen": 56429275, - "step": 2654 - }, - { - "epoch": 0.3192448746467865, - "flos": 10841323685160.0, - "grad_norm": 5.106551670609004, - "learning_rate": 3.184877901195407e-06, - "loss": 0.868, - "num_input_tokens_seen": 56445690, - "step": 2655 - }, - { - "epoch": 0.3193651175374256, - "flos": 49578193737000.0, - "grad_norm": 0.8402851098286303, - "learning_rate": 3.184250259823602e-06, - "loss": 0.6616, - "num_input_tokens_seen": 56507940, - "step": 2656 - }, - { - "epoch": 0.3194853604280647, - "flos": 8865531831120.0, - "grad_norm": 2.8382834407462987, - "learning_rate": 3.183622438801974e-06, - "loss": 0.7889, - "num_input_tokens_seen": 56522950, - "step": 2657 - }, - { - "epoch": 0.3196056033187038, - "flos": 10869749837880.0, - "grad_norm": 3.2353985774674077, - "learning_rate": 3.1829944382257637e-06, - "loss": 0.7471, - "num_input_tokens_seen": 56540800, - "step": 2658 - }, - { - "epoch": 0.31972584620934286, - "flos": 17403690252240.0, - "grad_norm": 2.5923808517224467, - "learning_rate": 3.1823662581902373e-06, - "loss": 0.7998, - "num_input_tokens_seen": 56558205, - "step": 2659 - }, - { - "epoch": 0.31984608909998197, - "flos": 15540716530200.0, - "grad_norm": 2.4182419630264347, - "learning_rate": 3.1817378987906896e-06, - "loss": 0.7198, - "num_input_tokens_seen": 56577430, - "step": 2660 - }, - { - "epoch": 0.3199663319906211, - "flos": 13353598714080.0, - "grad_norm": 2.9122416762368033, - "learning_rate": 3.181109360122442e-06, - "loss": 0.7922, - "num_input_tokens_seen": 56594740, - "step": 2661 - }, - { - "epoch": 0.32008657488126013, - "flos": 13677236248800.0, - "grad_norm": 2.861265797636018, - "learning_rate": 3.1804806422808445e-06, - "loss": 0.7679, - "num_input_tokens_seen": 56611595, - "step": 2662 - }, - { - "epoch": 0.32020681777189924, - "flos": 15055165248240.0, - "grad_norm": 3.466124219514145, - "learning_rate": 3.1798517453612714e-06, - "loss": 0.7087, - "num_input_tokens_seen": 56631120, - "step": 2663 - }, - { - "epoch": 0.32032706066253835, - "flos": 25909116766080.0, - "grad_norm": 2.4845495582531196, - "learning_rate": 
3.1792226694591265e-06, - "loss": 0.7422, - "num_input_tokens_seen": 56652225, - "step": 2664 - }, - { - "epoch": 0.3204473035531774, - "flos": 11139289483680.0, - "grad_norm": 2.4189320064543702, - "learning_rate": 3.178593414669841e-06, - "loss": 0.7898, - "num_input_tokens_seen": 56670530, - "step": 2665 - }, - { - "epoch": 0.3205675464438165, - "flos": 17917129467600.0, - "grad_norm": 5.88288966479106, - "learning_rate": 3.1779639810888707e-06, - "loss": 0.6893, - "num_input_tokens_seen": 56689845, - "step": 2666 - }, - { - "epoch": 0.3206877893344556, - "flos": 16432366068600.0, - "grad_norm": 2.7276186933137305, - "learning_rate": 3.1773343688117013e-06, - "loss": 0.7463, - "num_input_tokens_seen": 56710475, - "step": 2667 - }, - { - "epoch": 0.3208080322250947, - "flos": 14919556436400.0, - "grad_norm": 3.0875284850007323, - "learning_rate": 3.1767045779338445e-06, - "loss": 0.8217, - "num_input_tokens_seen": 56727855, - "step": 2668 - }, - { - "epoch": 0.3209282751157338, - "flos": 15918546933720.0, - "grad_norm": 5.609140205250987, - "learning_rate": 3.176074608550839e-06, - "loss": 0.8865, - "num_input_tokens_seen": 56743395, - "step": 2669 - }, - { - "epoch": 0.32104851800637285, - "flos": 16135476708720.0, - "grad_norm": 2.3043308822675455, - "learning_rate": 3.17544446075825e-06, - "loss": 0.8118, - "num_input_tokens_seen": 56762280, - "step": 2670 - }, - { - "epoch": 0.32116876089701196, - "flos": 27204585043800.0, - "grad_norm": 1.7264293084187268, - "learning_rate": 3.174814134651671e-06, - "loss": 0.6994, - "num_input_tokens_seen": 56784550, - "step": 2671 - }, - { - "epoch": 0.3212890037876511, - "flos": 16078972662840.0, - "grad_norm": 2.573976542955056, - "learning_rate": 3.1741836303267215e-06, - "loss": 0.7967, - "num_input_tokens_seen": 56803805, - "step": 2672 - }, - { - "epoch": 0.32140924667829013, - "flos": 7467186648840.0, - "grad_norm": 4.607024029691646, - "learning_rate": 3.1735529478790496e-06, - "loss": 0.7292, - "num_input_tokens_seen": 56821515, - "step": 2673 - }, - { - "epoch": 0.32152948956892924, - "flos": 37406892417720.0, - "grad_norm": 2.579157304721125, - "learning_rate": 3.172922087404328e-06, - "loss": 0.7817, - "num_input_tokens_seen": 56843495, - "step": 2674 - }, - { - "epoch": 0.32164973245956835, - "flos": 47080889377800.0, - "grad_norm": 0.9253902655907155, - "learning_rate": 3.1722910489982586e-06, - "loss": 0.5774, - "num_input_tokens_seen": 56903575, - "step": 2675 - }, - { - "epoch": 0.3217699753502074, - "flos": 17215091650080.0, - "grad_norm": 2.0587602870107435, - "learning_rate": 3.1716598327565694e-06, - "loss": 0.7948, - "num_input_tokens_seen": 56922935, - "step": 2676 - }, - { - "epoch": 0.3218902182408465, - "flos": 13920724238880.0, - "grad_norm": 2.0122638445366197, - "learning_rate": 3.171028438775015e-06, - "loss": 0.8266, - "num_input_tokens_seen": 56941850, - "step": 2677 - }, - { - "epoch": 0.3220104611314856, - "flos": 14892396682080.0, - "grad_norm": 2.782474719649888, - "learning_rate": 3.170396867149377e-06, - "loss": 0.8265, - "num_input_tokens_seen": 56959575, - "step": 2678 - }, - { - "epoch": 0.3221307040221247, - "flos": 14700600423960.0, - "grad_norm": 2.191612954684209, - "learning_rate": 3.1697651179754653e-06, - "loss": 0.8362, - "num_input_tokens_seen": 56977955, - "step": 2679 - }, - { - "epoch": 0.3222509469127638, - "flos": 17565698979360.0, - "grad_norm": 3.5867619782113067, - "learning_rate": 3.1691331913491153e-06, - "loss": 0.7292, - "num_input_tokens_seen": 57000245, - "step": 2680 - }, - { - 
"epoch": 0.32237118980340285, - "flos": 12894352367520.0, - "grad_norm": 2.9367467036113064, - "learning_rate": 3.1685010873661898e-06, - "loss": 0.8278, - "num_input_tokens_seen": 57019140, - "step": 2681 - }, - { - "epoch": 0.32249143269404196, - "flos": 16944095646120.0, - "grad_norm": 2.2275519063362728, - "learning_rate": 3.167868806122578e-06, - "loss": 0.777, - "num_input_tokens_seen": 57037910, - "step": 2682 - }, - { - "epoch": 0.32261167558468107, - "flos": 17886803717280.0, - "grad_norm": 2.6217515918664653, - "learning_rate": 3.1672363477141968e-06, - "loss": 0.656, - "num_input_tokens_seen": 57056925, - "step": 2683 - }, - { - "epoch": 0.3227319184753201, - "flos": 22286077886520.0, - "grad_norm": 2.8632491085231297, - "learning_rate": 3.1666037122369903e-06, - "loss": 0.8254, - "num_input_tokens_seen": 57077305, - "step": 2684 - }, - { - "epoch": 0.32285216136595923, - "flos": 12354449916960.0, - "grad_norm": 3.6319988693201934, - "learning_rate": 3.165970899786928e-06, - "loss": 0.8396, - "num_input_tokens_seen": 57094940, - "step": 2685 - }, - { - "epoch": 0.32297240425659834, - "flos": 16080998900280.0, - "grad_norm": 2.7490532719295815, - "learning_rate": 3.1653379104600067e-06, - "loss": 0.7184, - "num_input_tokens_seen": 57114805, - "step": 2686 - }, - { - "epoch": 0.3230926471472374, - "flos": 16648314384840.0, - "grad_norm": 2.240629392483927, - "learning_rate": 3.164704744352251e-06, - "loss": 0.6796, - "num_input_tokens_seen": 57135330, - "step": 2687 - }, - { - "epoch": 0.3232128900378765, - "flos": 12351632180520.0, - "grad_norm": 2.554272445886249, - "learning_rate": 3.164071401559713e-06, - "loss": 0.794, - "num_input_tokens_seen": 57152450, - "step": 2688 - }, - { - "epoch": 0.3233331329285156, - "flos": 17592225534480.0, - "grad_norm": 4.156171296027023, - "learning_rate": 3.1634378821784674e-06, - "loss": 0.6953, - "num_input_tokens_seen": 57172385, - "step": 2689 - }, - { - "epoch": 0.3234533758191547, - "flos": 13218623101440.0, - "grad_norm": 2.816570219010082, - "learning_rate": 3.1628041863046208e-06, - "loss": 0.717, - "num_input_tokens_seen": 57189520, - "step": 2690 - }, - { - "epoch": 0.3235736187097938, - "flos": 12354418257000.0, - "grad_norm": 3.9053307023730346, - "learning_rate": 3.162170314034304e-06, - "loss": 0.8999, - "num_input_tokens_seen": 57206655, - "step": 2691 - }, - { - "epoch": 0.3236938616004329, - "flos": 16188719778720.0, - "grad_norm": 2.0505942995144535, - "learning_rate": 3.1615362654636738e-06, - "loss": 0.7896, - "num_input_tokens_seen": 57227115, - "step": 2692 - }, - { - "epoch": 0.32381410449107195, - "flos": 12516047064600.0, - "grad_norm": 2.231374259501982, - "learning_rate": 3.1609020406889163e-06, - "loss": 0.864, - "num_input_tokens_seen": 57244270, - "step": 2693 - }, - { - "epoch": 0.32393434738171106, - "flos": 12081617635320.0, - "grad_norm": 3.005341225544615, - "learning_rate": 3.1602676398062416e-06, - "loss": 0.8352, - "num_input_tokens_seen": 57262900, - "step": 2694 - }, - { - "epoch": 0.3240545902723502, - "flos": 18672695294760.0, - "grad_norm": 3.4750029419811064, - "learning_rate": 3.1596330629118886e-06, - "loss": 0.5985, - "num_input_tokens_seen": 57282590, - "step": 2695 - }, - { - "epoch": 0.32417483316298923, - "flos": 26341488297960.0, - "grad_norm": 2.3905394295730122, - "learning_rate": 3.1589983101021223e-06, - "loss": 0.7188, - "num_input_tokens_seen": 57300940, - "step": 2696 - }, - { - "epoch": 0.32429507605362834, - "flos": 22074656944560.0, - "grad_norm": 10.440659461696717, - 
"learning_rate": 3.1583633814732337e-06, - "loss": 0.8155, - "num_input_tokens_seen": 57320180, - "step": 2697 - }, - { - "epoch": 0.3244153189442674, - "flos": 13299595805040.0, - "grad_norm": 3.456216248387134, - "learning_rate": 3.157728277121541e-06, - "loss": 0.6913, - "num_input_tokens_seen": 57338075, - "step": 2698 - }, - { - "epoch": 0.3245355618349065, - "flos": 12919675844160.0, - "grad_norm": 3.4931637894883876, - "learning_rate": 3.1570929971433897e-06, - "loss": 0.7648, - "num_input_tokens_seen": 57353580, - "step": 2699 - }, - { - "epoch": 0.3246558047255456, - "flos": 17160930441240.0, - "grad_norm": 2.7930328575192904, - "learning_rate": 3.1564575416351504e-06, - "loss": 0.8104, - "num_input_tokens_seen": 57372000, - "step": 2700 - }, - { - "epoch": 0.32477604761618467, - "flos": 15917438835120.0, - "grad_norm": 2.610406089410159, - "learning_rate": 3.155821910693221e-06, - "loss": 0.7215, - "num_input_tokens_seen": 57391135, - "step": 2701 - }, - { - "epoch": 0.3248962905068238, - "flos": 14487406524240.0, - "grad_norm": 3.348076846253743, - "learning_rate": 3.1551861044140275e-06, - "loss": 0.836, - "num_input_tokens_seen": 57410490, - "step": 2702 - }, - { - "epoch": 0.3250165333974629, - "flos": 17535974768280.0, - "grad_norm": 2.3780602264605974, - "learning_rate": 3.15455012289402e-06, - "loss": 0.7507, - "num_input_tokens_seen": 57429855, - "step": 2703 - }, - { - "epoch": 0.32513677628810195, - "flos": 17567060357640.0, - "grad_norm": 2.0993048016735423, - "learning_rate": 3.153913966229677e-06, - "loss": 0.8233, - "num_input_tokens_seen": 57448695, - "step": 2704 - }, - { - "epoch": 0.32525701917874106, - "flos": 52108700411520.0, - "grad_norm": 0.6430979426246516, - "learning_rate": 3.1532776345175027e-06, - "loss": 0.5082, - "num_input_tokens_seen": 57513560, - "step": 2705 - }, - { - "epoch": 0.32537726206938017, - "flos": 14379622325880.0, - "grad_norm": 2.4993024806657678, - "learning_rate": 3.1526411278540285e-06, - "loss": 0.7624, - "num_input_tokens_seen": 57531710, - "step": 2706 - }, - { - "epoch": 0.3254975049600192, - "flos": 21097285708560.0, - "grad_norm": 2.4229348109564426, - "learning_rate": 3.1520044463358116e-06, - "loss": 0.7943, - "num_input_tokens_seen": 57548160, - "step": 2707 - }, - { - "epoch": 0.32561774785065833, - "flos": 13783310809320.0, - "grad_norm": 1.9745605945693276, - "learning_rate": 3.151367590059436e-06, - "loss": 0.783, - "num_input_tokens_seen": 57566305, - "step": 2708 - }, - { - "epoch": 0.32573799074129745, - "flos": 16917632410920.0, - "grad_norm": 2.1477488020808204, - "learning_rate": 3.1507305591215117e-06, - "loss": 0.8481, - "num_input_tokens_seen": 57583935, - "step": 2709 - }, - { - "epoch": 0.3258582336319365, - "flos": 52539045705960.0, - "grad_norm": 0.7482013194355245, - "learning_rate": 3.150093353618677e-06, - "loss": 0.5762, - "num_input_tokens_seen": 57648385, - "step": 2710 - }, - { - "epoch": 0.3259784765225756, - "flos": 16432176108840.0, - "grad_norm": 2.9158221702510994, - "learning_rate": 3.149455973647596e-06, - "loss": 0.8566, - "num_input_tokens_seen": 57666165, - "step": 2711 - }, - { - "epoch": 0.32609871941321467, - "flos": 14973211085880.0, - "grad_norm": 2.5733493847749913, - "learning_rate": 3.1488184193049563e-06, - "loss": 0.7438, - "num_input_tokens_seen": 57685420, - "step": 2712 - }, - { - "epoch": 0.3262189623038538, - "flos": 16402768497360.0, - "grad_norm": 2.0099172284748006, - "learning_rate": 3.1481806906874767e-06, - "loss": 0.7104, - "num_input_tokens_seen": 57706450, - 
"step": 2713 - }, - { - "epoch": 0.3263392051944929, - "flos": 15297988379160.0, - "grad_norm": 2.0563458034842, - "learning_rate": 3.147542787891899e-06, - "loss": 0.8641, - "num_input_tokens_seen": 57725515, - "step": 2714 - }, - { - "epoch": 0.32645944808513194, - "flos": 17593491932880.0, - "grad_norm": 2.7183920964528596, - "learning_rate": 3.1469047110149926e-06, - "loss": 0.7252, - "num_input_tokens_seen": 57743975, - "step": 2715 - }, - { - "epoch": 0.32657969097577105, - "flos": 15378517843320.0, - "grad_norm": 3.2310533651881777, - "learning_rate": 3.146266460153554e-06, - "loss": 0.8354, - "num_input_tokens_seen": 57763405, - "step": 2716 - }, - { - "epoch": 0.32669993386641016, - "flos": 16620173171760.0, - "grad_norm": 2.72357416209485, - "learning_rate": 3.145628035404404e-06, - "loss": 0.786, - "num_input_tokens_seen": 57782325, - "step": 2717 - }, - { - "epoch": 0.3268201767570492, - "flos": 53180400362880.0, - "grad_norm": 0.8584693992195426, - "learning_rate": 3.1449894368643922e-06, - "loss": 0.5935, - "num_input_tokens_seen": 57844360, - "step": 2718 - }, - { - "epoch": 0.32694041964768833, - "flos": 17970815777040.0, - "grad_norm": 2.02791756390542, - "learning_rate": 3.1443506646303934e-06, - "loss": 0.7076, - "num_input_tokens_seen": 57865380, - "step": 2719 - }, - { - "epoch": 0.32706066253832744, - "flos": 24370888677360.0, - "grad_norm": 2.594753025481352, - "learning_rate": 3.1437117187993086e-06, - "loss": 0.6563, - "num_input_tokens_seen": 57887420, - "step": 2720 - }, - { - "epoch": 0.3271809054289665, - "flos": 17620461727440.0, - "grad_norm": 1.9789795101082333, - "learning_rate": 3.143072599468065e-06, - "loss": 0.7866, - "num_input_tokens_seen": 57906965, - "step": 2721 - }, - { - "epoch": 0.3273011483196056, - "flos": 28123489316400.0, - "grad_norm": 1.6349369082395657, - "learning_rate": 3.1424333067336174e-06, - "loss": 0.7436, - "num_input_tokens_seen": 57929450, - "step": 2722 - }, - { - "epoch": 0.3274213912102447, - "flos": 21314627063040.0, - "grad_norm": 2.0943443251924503, - "learning_rate": 3.141793840692945e-06, - "loss": 0.7571, - "num_input_tokens_seen": 57949920, - "step": 2723 - }, - { - "epoch": 0.32754163410088377, - "flos": 21373822205520.0, - "grad_norm": 3.477128229295179, - "learning_rate": 3.1411542014430553e-06, - "loss": 0.5903, - "num_input_tokens_seen": 57970720, - "step": 2724 - }, - { - "epoch": 0.3276618769915229, - "flos": 15081501843600.0, - "grad_norm": 1.9958887760157342, - "learning_rate": 3.1405143890809804e-06, - "loss": 0.8053, - "num_input_tokens_seen": 57989735, - "step": 2725 - }, - { - "epoch": 0.327782119882162, - "flos": 13620383943360.0, - "grad_norm": 2.1691304161783673, - "learning_rate": 3.1398744037037796e-06, - "loss": 0.6909, - "num_input_tokens_seen": 58008790, - "step": 2726 - }, - { - "epoch": 0.32790236277280105, - "flos": 15943078911360.0, - "grad_norm": 2.277115196942769, - "learning_rate": 3.139234245408538e-06, - "loss": 0.8227, - "num_input_tokens_seen": 58027390, - "step": 2727 - }, - { - "epoch": 0.32802260566344016, - "flos": 17079989397600.0, - "grad_norm": 1.8133591555879218, - "learning_rate": 3.1385939142923666e-06, - "loss": 0.7492, - "num_input_tokens_seen": 58049500, - "step": 2728 - }, - { - "epoch": 0.3281428485540792, - "flos": 17727960986160.0, - "grad_norm": 2.810994911137673, - "learning_rate": 3.137953410452405e-06, - "loss": 0.76, - "num_input_tokens_seen": 58069490, - "step": 2729 - }, - { - "epoch": 0.3282630914447183, - "flos": 25070583657840.0, - "grad_norm": 
2.2409963600643206, - "learning_rate": 3.1373127339858146e-06, - "loss": 0.7312, - "num_input_tokens_seen": 58091810, - "step": 2730 - }, - { - "epoch": 0.32838333433535744, - "flos": 20239634475840.0, - "grad_norm": 2.7116445009732013, - "learning_rate": 3.136671884989787e-06, - "loss": 0.7183, - "num_input_tokens_seen": 58111440, - "step": 2731 - }, - { - "epoch": 0.3285035772259965, - "flos": 8952678226920.0, - "grad_norm": 3.991552134379217, - "learning_rate": 3.1360308635615383e-06, - "loss": 0.8441, - "num_input_tokens_seen": 58129700, - "step": 2732 - }, - { - "epoch": 0.3286238201166356, - "flos": 17808648750120.0, - "grad_norm": 2.2659267882247933, - "learning_rate": 3.135389669798311e-06, - "loss": 0.773, - "num_input_tokens_seen": 58147480, - "step": 2733 - }, - { - "epoch": 0.3287440630072747, - "flos": 15644606553480.0, - "grad_norm": 3.1146272552012984, - "learning_rate": 3.134748303797373e-06, - "loss": 0.7891, - "num_input_tokens_seen": 58164570, - "step": 2734 - }, - { - "epoch": 0.32886430589791377, - "flos": 17376815437560.0, - "grad_norm": 3.294425162619362, - "learning_rate": 3.1341067656560203e-06, - "loss": 0.7939, - "num_input_tokens_seen": 58182135, - "step": 2735 - }, - { - "epoch": 0.3289845487885529, - "flos": 16404446475240.0, - "grad_norm": 3.1127402339283012, - "learning_rate": 3.133465055471572e-06, - "loss": 0.833, - "num_input_tokens_seen": 58201640, - "step": 2736 - }, - { - "epoch": 0.329104791679192, - "flos": 14379590665920.0, - "grad_norm": 2.816291796174745, - "learning_rate": 3.1328231733413767e-06, - "loss": 0.6405, - "num_input_tokens_seen": 58218000, - "step": 2737 - }, - { - "epoch": 0.32922503456983104, - "flos": 11652127159800.0, - "grad_norm": 3.0969215859182118, - "learning_rate": 3.1321811193628067e-06, - "loss": 0.8839, - "num_input_tokens_seen": 58235865, - "step": 2738 - }, - { - "epoch": 0.32934527746047015, - "flos": 19672192351440.0, - "grad_norm": 3.2214671397566264, - "learning_rate": 3.131538893633261e-06, - "loss": 0.6841, - "num_input_tokens_seen": 58255145, - "step": 2739 - }, - { - "epoch": 0.32946552035110926, - "flos": 17133454087320.0, - "grad_norm": 4.7980347265884635, - "learning_rate": 3.130896496250165e-06, - "loss": 0.758, - "num_input_tokens_seen": 58274690, - "step": 2740 - }, - { - "epoch": 0.3295857632417483, - "flos": 10301611194360.0, - "grad_norm": 3.2332146816178478, - "learning_rate": 3.1302539273109693e-06, - "loss": 0.8422, - "num_input_tokens_seen": 58291235, - "step": 2741 - }, - { - "epoch": 0.32970600613238743, - "flos": 16239714991560.0, - "grad_norm": 4.460638620816259, - "learning_rate": 3.1296111869131513e-06, - "loss": 0.7957, - "num_input_tokens_seen": 58308380, - "step": 2742 - }, - { - "epoch": 0.32982624902302654, - "flos": 16134368610120.0, - "grad_norm": 2.4769820089660923, - "learning_rate": 3.1289682751542153e-06, - "loss": 0.8351, - "num_input_tokens_seen": 58327660, - "step": 2743 - }, - { - "epoch": 0.3299464919136656, - "flos": 13624088158680.0, - "grad_norm": 2.7869458969279552, - "learning_rate": 3.1283251921316883e-06, - "loss": 0.7014, - "num_input_tokens_seen": 58345125, - "step": 2744 - }, - { - "epoch": 0.3300667348043047, - "flos": 9735530448240.0, - "grad_norm": 2.8607812977373657, - "learning_rate": 3.1276819379431277e-06, - "loss": 0.8051, - "num_input_tokens_seen": 58362935, - "step": 2745 - }, - { - "epoch": 0.33018697769494376, - "flos": 11463750177360.0, - "grad_norm": 2.9903523766315203, - "learning_rate": 3.1270385126861134e-06, - "loss": 0.7328, - 
"num_input_tokens_seen": 58380640, - "step": 2746 - }, - { - "epoch": 0.3303072205855829, - "flos": 13325584140840.0, - "grad_norm": 3.15493278589004, - "learning_rate": 3.1263949164582533e-06, - "loss": 0.8039, - "num_input_tokens_seen": 58400010, - "step": 2747 - }, - { - "epoch": 0.330427463476222, - "flos": 12948988475760.0, - "grad_norm": 2.932908710899425, - "learning_rate": 3.1257511493571797e-06, - "loss": 0.7519, - "num_input_tokens_seen": 58418235, - "step": 2748 - }, - { - "epoch": 0.33054770636686104, - "flos": 19915332081960.0, - "grad_norm": 4.58167511969867, - "learning_rate": 3.125107211480552e-06, - "loss": 0.7721, - "num_input_tokens_seen": 58437890, - "step": 2749 - }, - { - "epoch": 0.33066794925750015, - "flos": 14700758723760.0, - "grad_norm": 2.051981256983116, - "learning_rate": 3.124463102926054e-06, - "loss": 0.775, - "num_input_tokens_seen": 58456945, - "step": 2750 - }, - { - "epoch": 0.33078819214813926, - "flos": 52097081206200.0, - "grad_norm": 0.7583136720169608, - "learning_rate": 3.1238188237913984e-06, - "loss": 0.62, - "num_input_tokens_seen": 58521205, - "step": 2751 - }, - { - "epoch": 0.3309084350387783, - "flos": 15459585526800.0, - "grad_norm": 4.302635742386873, - "learning_rate": 3.1231743741743202e-06, - "loss": 0.7406, - "num_input_tokens_seen": 58540430, - "step": 2752 - }, - { - "epoch": 0.3310286779294174, - "flos": 10328960908440.0, - "grad_norm": 2.489794595817899, - "learning_rate": 3.122529754172582e-06, - "loss": 0.81, - "num_input_tokens_seen": 58557035, - "step": 2753 - }, - { - "epoch": 0.33114892082005654, - "flos": 15190489120440.0, - "grad_norm": 2.702310163692295, - "learning_rate": 3.1218849638839736e-06, - "loss": 0.7105, - "num_input_tokens_seen": 58576015, - "step": 2754 - }, - { - "epoch": 0.3312691637106956, - "flos": 12461252656560.0, - "grad_norm": 2.074805340196287, - "learning_rate": 3.121240003406307e-06, - "loss": 0.7665, - "num_input_tokens_seen": 58594585, - "step": 2755 - }, - { - "epoch": 0.3313894066013347, - "flos": 21612086302200.0, - "grad_norm": 4.440321978673618, - "learning_rate": 3.120594872837425e-06, - "loss": 0.7041, - "num_input_tokens_seen": 58612975, - "step": 2756 - }, - { - "epoch": 0.3315096494919738, - "flos": 45272235164400.0, - "grad_norm": 0.8374637681326133, - "learning_rate": 3.1199495722751906e-06, - "loss": 0.6407, - "num_input_tokens_seen": 58672225, - "step": 2757 - }, - { - "epoch": 0.33162989238261287, - "flos": 15837890829720.0, - "grad_norm": 4.923326260185628, - "learning_rate": 3.1193041018174972e-06, - "loss": 0.8308, - "num_input_tokens_seen": 58692660, - "step": 2758 - }, - { - "epoch": 0.331750135273252, - "flos": 16594058196120.0, - "grad_norm": 4.380733330010741, - "learning_rate": 3.118658461562261e-06, - "loss": 0.9244, - "num_input_tokens_seen": 58708480, - "step": 2759 - }, - { - "epoch": 0.33187037816389103, - "flos": 16647301266120.0, - "grad_norm": 1.5392393213929856, - "learning_rate": 3.118012651607426e-06, - "loss": 0.8356, - "num_input_tokens_seen": 58729805, - "step": 2760 - }, - { - "epoch": 0.33199062105453014, - "flos": 14024582602200.0, - "grad_norm": 3.396169322111771, - "learning_rate": 3.1173666720509603e-06, - "loss": 0.8181, - "num_input_tokens_seen": 58746460, - "step": 2761 - }, - { - "epoch": 0.33211086394516925, - "flos": 23182254799200.0, - "grad_norm": 2.1887707969945422, - "learning_rate": 3.116720522990859e-06, - "loss": 0.6662, - "num_input_tokens_seen": 58767055, - "step": 2762 - }, - { - "epoch": 0.3322311068358083, - "flos": 13084565627640.0, - 
"grad_norm": 2.8976674663607396, - "learning_rate": 3.116074204525142e-06, - "loss": 0.6026, - "num_input_tokens_seen": 58784950, - "step": 2763 - }, - { - "epoch": 0.3323513497264474, - "flos": 23694934175520.0, - "grad_norm": 1.6909992530049591, - "learning_rate": 3.1154277167518553e-06, - "loss": 0.813, - "num_input_tokens_seen": 58806285, - "step": 2764 - }, - { - "epoch": 0.33247159261708653, - "flos": 38797078313040.0, - "grad_norm": 0.8488128084924136, - "learning_rate": 3.114781059769072e-06, - "loss": 0.6186, - "num_input_tokens_seen": 58857330, - "step": 2765 - }, - { - "epoch": 0.3325918355077256, - "flos": 19887570788400.0, - "grad_norm": 3.6137437679061537, - "learning_rate": 3.1141342336748874e-06, - "loss": 0.6625, - "num_input_tokens_seen": 58876610, - "step": 2766 - }, - { - "epoch": 0.3327120783983647, - "flos": 17327086623120.0, - "grad_norm": 1.8524739400289354, - "learning_rate": 3.1134872385674253e-06, - "loss": 0.8004, - "num_input_tokens_seen": 58900485, - "step": 2767 - }, - { - "epoch": 0.3328323212890038, - "flos": 14001190383120.0, - "grad_norm": 2.3715248784323215, - "learning_rate": 3.1128400745448353e-06, - "loss": 0.8505, - "num_input_tokens_seen": 58919585, - "step": 2768 - }, - { - "epoch": 0.33295256417964286, - "flos": 27718340858760.0, - "grad_norm": 2.5380424445881524, - "learning_rate": 3.11219274170529e-06, - "loss": 0.6164, - "num_input_tokens_seen": 58941115, - "step": 2769 - }, - { - "epoch": 0.333072807070282, - "flos": 19429432540440.0, - "grad_norm": 3.373364935514564, - "learning_rate": 3.1115452401469903e-06, - "loss": 0.8039, - "num_input_tokens_seen": 58961235, - "step": 2770 - }, - { - "epoch": 0.3331930499609211, - "flos": 15673032706200.0, - "grad_norm": 1.871321630438367, - "learning_rate": 3.1108975699681613e-06, - "loss": 0.8519, - "num_input_tokens_seen": 58978350, - "step": 2771 - }, - { - "epoch": 0.33331329285156014, - "flos": 15101791386600.0, - "grad_norm": 1.8450890543878433, - "learning_rate": 3.1102497312670542e-06, - "loss": 0.7021, - "num_input_tokens_seen": 58996075, - "step": 2772 - }, - { - "epoch": 0.33343353574219925, - "flos": 20536238896080.0, - "grad_norm": 2.7741890762018326, - "learning_rate": 3.109601724141946e-06, - "loss": 0.7783, - "num_input_tokens_seen": 59014790, - "step": 2773 - }, - { - "epoch": 0.33355377863283836, - "flos": 17400239316600.0, - "grad_norm": 2.061355504469839, - "learning_rate": 3.108953548691138e-06, - "loss": 0.678, - "num_input_tokens_seen": 59034595, - "step": 2774 - }, - { - "epoch": 0.3336740215234774, - "flos": 27772660367400.0, - "grad_norm": 3.759538091735208, - "learning_rate": 3.108305205012959e-06, - "loss": 0.7001, - "num_input_tokens_seen": 59055010, - "step": 2775 - }, - { - "epoch": 0.3337942644141165, - "flos": 18698778610440.0, - "grad_norm": 2.5200933731279576, - "learning_rate": 3.107656693205761e-06, - "loss": 0.8689, - "num_input_tokens_seen": 59074170, - "step": 2776 - }, - { - "epoch": 0.3339145073047556, - "flos": 19046694843120.0, - "grad_norm": 2.602406774311413, - "learning_rate": 3.107008013367924e-06, - "loss": 0.6723, - "num_input_tokens_seen": 59092685, - "step": 2777 - }, - { - "epoch": 0.3340347501953947, - "flos": 13920755898840.0, - "grad_norm": 4.906168748705194, - "learning_rate": 3.1063591655978507e-06, - "loss": 0.8421, - "num_input_tokens_seen": 59108355, - "step": 2778 - }, - { - "epoch": 0.3341549930860338, - "flos": 13214982206040.0, - "grad_norm": 3.1333342234292356, - "learning_rate": 3.105710149993972e-06, - "loss": 0.7747, - 
"num_input_tokens_seen": 59127405, - "step": 2779 - }, - { - "epoch": 0.33427523597667286, - "flos": 16594216495920.0, - "grad_norm": 2.5959018834973553, - "learning_rate": 3.1050609666547427e-06, - "loss": 0.8359, - "num_input_tokens_seen": 59146685, - "step": 2780 - }, - { - "epoch": 0.33439547886731197, - "flos": 16567088401560.0, - "grad_norm": 2.3562494252657, - "learning_rate": 3.104411615678644e-06, - "loss": 0.7623, - "num_input_tokens_seen": 59165255, - "step": 2781 - }, - { - "epoch": 0.3345157217579511, - "flos": 17645943503880.0, - "grad_norm": 3.889336430062452, - "learning_rate": 3.1037620971641803e-06, - "loss": 0.7119, - "num_input_tokens_seen": 59184765, - "step": 2782 - }, - { - "epoch": 0.33463596464859013, - "flos": 13866658009920.0, - "grad_norm": 4.400256686959616, - "learning_rate": 3.1031124112098844e-06, - "loss": 0.6313, - "num_input_tokens_seen": 59202695, - "step": 2783 - }, - { - "epoch": 0.33475620753922924, - "flos": 14892428342040.0, - "grad_norm": 2.402334619520251, - "learning_rate": 3.1024625579143127e-06, - "loss": 0.7068, - "num_input_tokens_seen": 59219935, - "step": 2784 - }, - { - "epoch": 0.33487645042986836, - "flos": 13268890135200.0, - "grad_norm": 2.5258036288288586, - "learning_rate": 3.101812537376048e-06, - "loss": 0.711, - "num_input_tokens_seen": 59238675, - "step": 2785 - }, - { - "epoch": 0.3349966933205074, - "flos": 18943026439560.0, - "grad_norm": 2.2000343883601094, - "learning_rate": 3.1011623496936973e-06, - "loss": 0.8198, - "num_input_tokens_seen": 59256690, - "step": 2786 - }, - { - "epoch": 0.3351169362111465, - "flos": 20617591519200.0, - "grad_norm": 2.474861788503023, - "learning_rate": 3.100511994965893e-06, - "loss": 0.6781, - "num_input_tokens_seen": 59276365, - "step": 2787 - }, - { - "epoch": 0.33523717910178563, - "flos": 16592760137760.0, - "grad_norm": 1.9021338752418693, - "learning_rate": 3.0998614732912947e-06, - "loss": 0.8447, - "num_input_tokens_seen": 59295460, - "step": 2788 - }, - { - "epoch": 0.3353574219924247, - "flos": 11409937228080.0, - "grad_norm": 3.1033547553590877, - "learning_rate": 3.0992107847685855e-06, - "loss": 0.6563, - "num_input_tokens_seen": 59312895, - "step": 2789 - }, - { - "epoch": 0.3354776648830638, - "flos": 18159889278600.0, - "grad_norm": 2.9691279120587755, - "learning_rate": 3.0985599294964736e-06, - "loss": 0.7805, - "num_input_tokens_seen": 59332170, - "step": 2790 - }, - { - "epoch": 0.33559790777370285, - "flos": 21049298191920.0, - "grad_norm": 3.2184092517223886, - "learning_rate": 3.097908907573695e-06, - "loss": 0.6953, - "num_input_tokens_seen": 59349870, - "step": 2791 - }, - { - "epoch": 0.33571815066434196, - "flos": 16268995963200.0, - "grad_norm": 4.428296498145566, - "learning_rate": 3.0972577190990067e-06, - "loss": 0.8783, - "num_input_tokens_seen": 59368070, - "step": 2792 - }, - { - "epoch": 0.3358383935549811, - "flos": 17458073080800.0, - "grad_norm": 2.7721360282604617, - "learning_rate": 3.096606364171196e-06, - "loss": 0.7849, - "num_input_tokens_seen": 59387580, - "step": 2793 - }, - { - "epoch": 0.33595863644562013, - "flos": 16291628343240.0, - "grad_norm": 3.1560795804084365, - "learning_rate": 3.0959548428890703e-06, - "loss": 0.8308, - "num_input_tokens_seen": 59406170, - "step": 2794 - }, - { - "epoch": 0.33607887933625924, - "flos": 14703038240880.0, - "grad_norm": 2.7110090878403614, - "learning_rate": 3.095303155351468e-06, - "loss": 0.8179, - "num_input_tokens_seen": 59426095, - "step": 2795 - }, - { - "epoch": 0.33619912222689835, - "flos": 
14109892720320.0, - "grad_norm": 3.2022571411482037, - "learning_rate": 3.0946513016572464e-06, - "loss": 0.7677, - "num_input_tokens_seen": 59444720, - "step": 2796 - }, - { - "epoch": 0.3363193651175374, - "flos": 12246824018400.0, - "grad_norm": 2.694948033555462, - "learning_rate": 3.0939992819052938e-06, - "loss": 0.7461, - "num_input_tokens_seen": 59461950, - "step": 2797 - }, - { - "epoch": 0.3364396080081765, - "flos": 17241966464760.0, - "grad_norm": 2.360719209446996, - "learning_rate": 3.0933470961945193e-06, - "loss": 0.7918, - "num_input_tokens_seen": 59479965, - "step": 2798 - }, - { - "epoch": 0.3365598508988156, - "flos": 20563082050800.0, - "grad_norm": 2.4859422262879334, - "learning_rate": 3.0926947446238597e-06, - "loss": 0.6727, - "num_input_tokens_seen": 59499255, - "step": 2799 - }, - { - "epoch": 0.3366800937894547, - "flos": 12381546351360.0, - "grad_norm": 2.743635876482772, - "learning_rate": 3.092042227292276e-06, - "loss": 0.8088, - "num_input_tokens_seen": 59515810, - "step": 2800 - }, - { - "epoch": 0.3368003366800938, - "flos": 17512139309760.0, - "grad_norm": 1.9323821616424721, - "learning_rate": 3.0913895442987557e-06, - "loss": 0.8676, - "num_input_tokens_seen": 59536495, - "step": 2801 - }, - { - "epoch": 0.3369205795707329, - "flos": 18160395837960.0, - "grad_norm": 1.9152441620114702, - "learning_rate": 3.090736695742308e-06, - "loss": 0.8392, - "num_input_tokens_seen": 59557345, - "step": 2802 - }, - { - "epoch": 0.33704082246137196, - "flos": 13083330889200.0, - "grad_norm": 3.65179876657619, - "learning_rate": 3.0900836817219713e-06, - "loss": 0.489, - "num_input_tokens_seen": 59573495, - "step": 2803 - }, - { - "epoch": 0.33716106535201107, - "flos": 15566388266400.0, - "grad_norm": 2.2180627107620947, - "learning_rate": 3.089430502336807e-06, - "loss": 0.8269, - "num_input_tokens_seen": 59593185, - "step": 2804 - }, - { - "epoch": 0.3372813082426502, - "flos": 13432196920680.0, - "grad_norm": 3.916263776037178, - "learning_rate": 3.088777157685902e-06, - "loss": 0.88, - "num_input_tokens_seen": 59608495, - "step": 2805 - }, - { - "epoch": 0.33740155113328923, - "flos": 12543301798800.0, - "grad_norm": 2.423588497295618, - "learning_rate": 3.088123647868367e-06, - "loss": 0.8416, - "num_input_tokens_seen": 59624765, - "step": 2806 - }, - { - "epoch": 0.33752179402392835, - "flos": 21318647877960.0, - "grad_norm": 3.1775915187618784, - "learning_rate": 3.0874699729833405e-06, - "loss": 0.7968, - "num_input_tokens_seen": 59645855, - "step": 2807 - }, - { - "epoch": 0.3376420369145674, - "flos": 18374317916760.0, - "grad_norm": 2.608517625857019, - "learning_rate": 3.086816133129983e-06, - "loss": 0.7856, - "num_input_tokens_seen": 59665835, - "step": 2808 - }, - { - "epoch": 0.3377622798052065, - "flos": 20158060233000.0, - "grad_norm": 2.3427184823171, - "learning_rate": 3.0861621284074826e-06, - "loss": 0.7535, - "num_input_tokens_seen": 59686080, - "step": 2809 - }, - { - "epoch": 0.3378825226958456, - "flos": 16075426747320.0, - "grad_norm": 2.3712888131511893, - "learning_rate": 3.085507958915051e-06, - "loss": 0.7139, - "num_input_tokens_seen": 59704230, - "step": 2810 - }, - { - "epoch": 0.3380027655864847, - "flos": 31283229374520.0, - "grad_norm": 2.3669193732869553, - "learning_rate": 3.084853624751925e-06, - "loss": 0.6931, - "num_input_tokens_seen": 59725535, - "step": 2811 - }, - { - "epoch": 0.3381230084771238, - "flos": 19591599567360.0, - "grad_norm": 3.1773923796549823, - "learning_rate": 3.0841991260173668e-06, - "loss": 0.831, - 
"num_input_tokens_seen": 59745160, - "step": 2812 - }, - { - "epoch": 0.3382432513677629, - "flos": 16620268151640.0, - "grad_norm": 2.840641712437516, - "learning_rate": 3.0835444628106634e-06, - "loss": 0.7849, - "num_input_tokens_seen": 59763860, - "step": 2813 - }, - { - "epoch": 0.33836349425840195, - "flos": 16184540664000.0, - "grad_norm": 2.375302446080849, - "learning_rate": 3.082889635231126e-06, - "loss": 0.8298, - "num_input_tokens_seen": 59782240, - "step": 2814 - }, - { - "epoch": 0.33848373714904106, - "flos": 20022957980520.0, - "grad_norm": 3.6272887535288736, - "learning_rate": 3.0822346433780925e-06, - "loss": 0.7289, - "num_input_tokens_seen": 59802685, - "step": 2815 - }, - { - "epoch": 0.3386039800396802, - "flos": 18942931459680.0, - "grad_norm": 2.0595118522034426, - "learning_rate": 3.0815794873509237e-06, - "loss": 0.8292, - "num_input_tokens_seen": 59820690, - "step": 2816 - }, - { - "epoch": 0.33872422293031923, - "flos": 13786191865680.0, - "grad_norm": 3.236261969139388, - "learning_rate": 3.0809241672490066e-06, - "loss": 0.7088, - "num_input_tokens_seen": 59838580, - "step": 2817 - }, - { - "epoch": 0.33884446582095834, - "flos": 16943209167240.0, - "grad_norm": 2.4581498115270852, - "learning_rate": 3.080268683171753e-06, - "loss": 0.8332, - "num_input_tokens_seen": 59858590, - "step": 2818 - }, - { - "epoch": 0.33896470871159745, - "flos": 11652222139680.0, - "grad_norm": 3.4339418896583402, - "learning_rate": 3.0796130352185985e-06, - "loss": 0.8663, - "num_input_tokens_seen": 59875165, - "step": 2819 - }, - { - "epoch": 0.3390849516022365, - "flos": 25342307840880.0, - "grad_norm": 2.185841503857627, - "learning_rate": 3.0789572234890057e-06, - "loss": 0.665, - "num_input_tokens_seen": 59896525, - "step": 2820 - }, - { - "epoch": 0.3392051944928756, - "flos": 11787704311680.0, - "grad_norm": 2.22037382208955, - "learning_rate": 3.0783012480824596e-06, - "loss": 0.7625, - "num_input_tokens_seen": 59915390, - "step": 2821 - }, - { - "epoch": 0.33932543738351467, - "flos": 12457643421120.0, - "grad_norm": 2.8966728658430565, - "learning_rate": 3.077645109098471e-06, - "loss": 0.7288, - "num_input_tokens_seen": 59931380, - "step": 2822 - }, - { - "epoch": 0.3394456802741538, - "flos": 16188561478920.0, - "grad_norm": 2.548333060757523, - "learning_rate": 3.076988806636577e-06, - "loss": 0.7016, - "num_input_tokens_seen": 59948860, - "step": 2823 - }, - { - "epoch": 0.3395659231647929, - "flos": 18482513694600.0, - "grad_norm": 2.1279618434568444, - "learning_rate": 3.0763323407963377e-06, - "loss": 0.8644, - "num_input_tokens_seen": 59968190, - "step": 2824 - }, - { - "epoch": 0.33968616605543195, - "flos": 21346567471320.0, - "grad_norm": 2.3100614319926813, - "learning_rate": 3.075675711677337e-06, - "loss": 0.7751, - "num_input_tokens_seen": 59988005, - "step": 2825 - }, - { - "epoch": 0.33980640894607106, - "flos": 15675723802800.0, - "grad_norm": 9.888616131891572, - "learning_rate": 3.0750189193791865e-06, - "loss": 0.7624, - "num_input_tokens_seen": 60007310, - "step": 2826 - }, - { - "epoch": 0.33992665183671017, - "flos": 23858082661200.0, - "grad_norm": 3.874808645138233, - "learning_rate": 3.0743619640015203e-06, - "loss": 0.6921, - "num_input_tokens_seen": 60027280, - "step": 2827 - }, - { - "epoch": 0.3400468947273492, - "flos": 12435264320760.0, - "grad_norm": 2.3160706867242458, - "learning_rate": 3.073704845643999e-06, - "loss": 0.9082, - "num_input_tokens_seen": 60044125, - "step": 2828 - }, - { - "epoch": 0.34016713761798834, - "flos": 
12300257048160.0, - "grad_norm": 3.343480036880592, - "learning_rate": 3.0730475644063063e-06, - "loss": 0.7625, - "num_input_tokens_seen": 60058945, - "step": 2829 - }, - { - "epoch": 0.34028738050862745, - "flos": 16025761252800.0, - "grad_norm": 2.3541835232921695, - "learning_rate": 3.072390120388151e-06, - "loss": 0.6457, - "num_input_tokens_seen": 60076990, - "step": 2830 - }, - { - "epoch": 0.3404076233992665, - "flos": 16646541427080.0, - "grad_norm": 3.0272868883268793, - "learning_rate": 3.071732513689267e-06, - "loss": 0.6826, - "num_input_tokens_seen": 60095245, - "step": 2831 - }, - { - "epoch": 0.3405278662899056, - "flos": 12432573224160.0, - "grad_norm": 3.4913311277388264, - "learning_rate": 3.0710747444094134e-06, - "loss": 0.6585, - "num_input_tokens_seen": 60112995, - "step": 2832 - }, - { - "epoch": 0.3406481091805447, - "flos": 31497531372840.0, - "grad_norm": 2.3780031792095677, - "learning_rate": 3.070416812648372e-06, - "loss": 0.6431, - "num_input_tokens_seen": 60136165, - "step": 2833 - }, - { - "epoch": 0.3407683520711838, - "flos": 19618506042000.0, - "grad_norm": 2.4331452031329346, - "learning_rate": 3.069758718505951e-06, - "loss": 0.6342, - "num_input_tokens_seen": 60157625, - "step": 2834 - }, - { - "epoch": 0.3408885949618229, - "flos": 21103142801160.0, - "grad_norm": 2.7585531218245074, - "learning_rate": 3.0691004620819836e-06, - "loss": 0.7825, - "num_input_tokens_seen": 60177475, - "step": 2835 - }, - { - "epoch": 0.341008837852462, - "flos": 46869183496200.0, - "grad_norm": 0.8052253037046572, - "learning_rate": 3.0684420434763254e-06, - "loss": 0.6274, - "num_input_tokens_seen": 60243380, - "step": 2836 - }, - { - "epoch": 0.34112908074310105, - "flos": 15215337697680.0, - "grad_norm": 2.8251391081192105, - "learning_rate": 3.06778346278886e-06, - "loss": 0.7488, - "num_input_tokens_seen": 60261935, - "step": 2837 - }, - { - "epoch": 0.34124932363374016, - "flos": 18297872587440.0, - "grad_norm": 2.5625392631686177, - "learning_rate": 3.0671247201194906e-06, - "loss": 0.7783, - "num_input_tokens_seen": 60283790, - "step": 2838 - }, - { - "epoch": 0.3413695665243792, - "flos": 20833286555760.0, - "grad_norm": 4.152672167015955, - "learning_rate": 3.066465815568151e-06, - "loss": 0.7295, - "num_input_tokens_seen": 60304340, - "step": 2839 - }, - { - "epoch": 0.34148980941501833, - "flos": 18538416201240.0, - "grad_norm": 2.3339893803882, - "learning_rate": 3.0658067492347947e-06, - "loss": 0.6753, - "num_input_tokens_seen": 60326700, - "step": 2840 - }, - { - "epoch": 0.34161005230565744, - "flos": 12786599829120.0, - "grad_norm": 3.137937392184635, - "learning_rate": 3.065147521219402e-06, - "loss": 0.6586, - "num_input_tokens_seen": 60345675, - "step": 2841 - }, - { - "epoch": 0.3417302951962965, - "flos": 32116633569240.0, - "grad_norm": 1.88243903417261, - "learning_rate": 3.064488131621977e-06, - "loss": 0.7386, - "num_input_tokens_seen": 60368720, - "step": 2842 - }, - { - "epoch": 0.3418505380869356, - "flos": 22667834125080.0, - "grad_norm": 2.400822848087795, - "learning_rate": 3.063828580542549e-06, - "loss": 0.7208, - "num_input_tokens_seen": 60389635, - "step": 2843 - }, - { - "epoch": 0.3419707809775747, - "flos": 14217740238600.0, - "grad_norm": 2.3979598075991775, - "learning_rate": 3.0631688680811706e-06, - "loss": 0.7022, - "num_input_tokens_seen": 60408980, - "step": 2844 - }, - { - "epoch": 0.3420910238682138, - "flos": 21073196970360.0, - "grad_norm": 2.6074355907293754, - "learning_rate": 3.062508994337921e-06, - "loss": 0.7452, 
- "num_input_tokens_seen": 60428305, - "step": 2845 - }, - { - "epoch": 0.3422112667588529, - "flos": 15648532388520.0, - "grad_norm": 3.101784186524025, - "learning_rate": 3.0618489594129013e-06, - "loss": 0.7644, - "num_input_tokens_seen": 60446165, - "step": 2846 - }, - { - "epoch": 0.342331509649492, - "flos": 10086359397240.0, - "grad_norm": 2.736019141581986, - "learning_rate": 3.061188763406239e-06, - "loss": 0.696, - "num_input_tokens_seen": 60462030, - "step": 2847 - }, - { - "epoch": 0.34245175254013105, - "flos": 20994915363360.0, - "grad_norm": 13.011367522841445, - "learning_rate": 3.060528406418085e-06, - "loss": 0.8039, - "num_input_tokens_seen": 60481600, - "step": 2848 - }, - { - "epoch": 0.34257199543077016, - "flos": 25068905679960.0, - "grad_norm": 2.201495711191381, - "learning_rate": 3.0598678885486145e-06, - "loss": 0.6002, - "num_input_tokens_seen": 60503860, - "step": 2849 - }, - { - "epoch": 0.34269223832140927, - "flos": 14595190722600.0, - "grad_norm": 2.6223823574797023, - "learning_rate": 3.0592072098980282e-06, - "loss": 0.7272, - "num_input_tokens_seen": 60523240, - "step": 2850 - }, - { - "epoch": 0.3428124812120483, - "flos": 19969429970880.0, - "grad_norm": 4.363430092372342, - "learning_rate": 3.0585463705665514e-06, - "loss": 0.7076, - "num_input_tokens_seen": 60543335, - "step": 2851 - }, - { - "epoch": 0.34293272410268744, - "flos": 17997120712440.0, - "grad_norm": 3.5637841838052196, - "learning_rate": 3.0578853706544304e-06, - "loss": 0.6858, - "num_input_tokens_seen": 60560445, - "step": 2852 - }, - { - "epoch": 0.34305296699332655, - "flos": 15729663391920.0, - "grad_norm": 2.602334306116298, - "learning_rate": 3.0572242102619404e-06, - "loss": 0.6356, - "num_input_tokens_seen": 60577320, - "step": 2853 - }, - { - "epoch": 0.3431732098839656, - "flos": 17616092652960.0, - "grad_norm": 6.2159472719997595, - "learning_rate": 3.0565628894893784e-06, - "loss": 0.7992, - "num_input_tokens_seen": 60597675, - "step": 2854 - }, - { - "epoch": 0.3432934527746047, - "flos": 12247013978160.0, - "grad_norm": 2.3685591849819696, - "learning_rate": 3.0559014084370655e-06, - "loss": 0.7331, - "num_input_tokens_seen": 60615920, - "step": 2855 - }, - { - "epoch": 0.34341369566524377, - "flos": 17159980642440.0, - "grad_norm": 2.3303283915131963, - "learning_rate": 3.055239767205349e-06, - "loss": 0.7707, - "num_input_tokens_seen": 60637390, - "step": 2856 - }, - { - "epoch": 0.3435339385558829, - "flos": 12591225995520.0, - "grad_norm": 2.405170743595383, - "learning_rate": 3.054577965894599e-06, - "loss": 0.7574, - "num_input_tokens_seen": 60653255, - "step": 2857 - }, - { - "epoch": 0.343654181446522, - "flos": 16242057828600.0, - "grad_norm": 2.8108850717028653, - "learning_rate": 3.0539160046052094e-06, - "loss": 0.7056, - "num_input_tokens_seen": 60672675, - "step": 2858 - }, - { - "epoch": 0.34377442433716104, - "flos": 14541852672720.0, - "grad_norm": 2.761311070676487, - "learning_rate": 3.0532538834376003e-06, - "loss": 0.6859, - "num_input_tokens_seen": 60691955, - "step": 2859 - }, - { - "epoch": 0.34389466722780015, - "flos": 16240569810480.0, - "grad_norm": 3.1980201795560417, - "learning_rate": 3.0525916024922143e-06, - "loss": 0.7622, - "num_input_tokens_seen": 60710860, - "step": 2860 - }, - { - "epoch": 0.34401491011843927, - "flos": 13596580144800.0, - "grad_norm": 5.596202802733847, - "learning_rate": 3.0519291618695193e-06, - "loss": 0.8089, - "num_input_tokens_seen": 60727980, - "step": 2861 - }, - { - "epoch": 0.3441351530090783, - "flos": 
13029391300080.0, - "grad_norm": 3.0091440399370017, - "learning_rate": 3.0512665616700065e-06, - "loss": 0.7323, - "num_input_tokens_seen": 60746765, - "step": 2862 - }, - { - "epoch": 0.34425539589971743, - "flos": 16918202290200.0, - "grad_norm": 3.327390777835951, - "learning_rate": 3.0506038019941933e-06, - "loss": 0.8786, - "num_input_tokens_seen": 60766495, - "step": 2863 - }, - { - "epoch": 0.34437563879035654, - "flos": 16026236152200.0, - "grad_norm": 13.243455530181041, - "learning_rate": 3.049940882942617e-06, - "loss": 0.6514, - "num_input_tokens_seen": 60785000, - "step": 2864 - }, - { - "epoch": 0.3444958816809956, - "flos": 16891770714960.0, - "grad_norm": 2.507906998134401, - "learning_rate": 3.0492778046158448e-06, - "loss": 0.776, - "num_input_tokens_seen": 60806140, - "step": 2865 - }, - { - "epoch": 0.3446161245716347, - "flos": 16027059311160.0, - "grad_norm": 2.6507867616865406, - "learning_rate": 3.0486145671144633e-06, - "loss": 0.7555, - "num_input_tokens_seen": 60825650, - "step": 2866 - }, - { - "epoch": 0.3447363674622738, - "flos": 18397456856160.0, - "grad_norm": 2.7693378935170117, - "learning_rate": 3.047951170539086e-06, - "loss": 0.7282, - "num_input_tokens_seen": 60844995, - "step": 2867 - }, - { - "epoch": 0.3448566103529129, - "flos": 8682695341680.0, - "grad_norm": 6.694948700643497, - "learning_rate": 3.047287614990349e-06, - "loss": 0.8205, - "num_input_tokens_seen": 60862635, - "step": 2868 - }, - { - "epoch": 0.344976853243552, - "flos": 29554914665520.0, - "grad_norm": 3.9294102231560477, - "learning_rate": 3.046623900568914e-06, - "loss": 0.5782, - "num_input_tokens_seen": 60884920, - "step": 2869 - }, - { - "epoch": 0.34509709613419104, - "flos": 21069524415000.0, - "grad_norm": 5.862224567507015, - "learning_rate": 3.045960027375465e-06, - "loss": 0.6779, - "num_input_tokens_seen": 60902475, - "step": 2870 - }, - { - "epoch": 0.34521733902483015, - "flos": 21990613224840.0, - "grad_norm": 4.082895579845016, - "learning_rate": 3.045295995510711e-06, - "loss": 0.7992, - "num_input_tokens_seen": 60919165, - "step": 2871 - }, - { - "epoch": 0.34533758191546926, - "flos": 20477328693240.0, - "grad_norm": 2.4643455521799935, - "learning_rate": 3.0446318050753865e-06, - "loss": 0.7213, - "num_input_tokens_seen": 60939365, - "step": 2872 - }, - { - "epoch": 0.3454578248061083, - "flos": 19888140667680.0, - "grad_norm": 2.6998286793361745, - "learning_rate": 3.0439674561702474e-06, - "loss": 0.7624, - "num_input_tokens_seen": 60958585, - "step": 2873 - }, - { - "epoch": 0.3455780676967474, - "flos": 13892709665640.0, - "grad_norm": 2.743336493890857, - "learning_rate": 3.043302948896076e-06, - "loss": 0.8629, - "num_input_tokens_seen": 60976910, - "step": 2874 - }, - { - "epoch": 0.34569831058738654, - "flos": 25342529460600.0, - "grad_norm": 4.483073720305016, - "learning_rate": 3.0426382833536756e-06, - "loss": 0.582, - "num_input_tokens_seen": 60999985, - "step": 2875 - }, - { - "epoch": 0.3458185534780256, - "flos": 22857794105520.0, - "grad_norm": 3.389405483901529, - "learning_rate": 3.041973459643877e-06, - "loss": 0.7704, - "num_input_tokens_seen": 61019160, - "step": 2876 - }, - { - "epoch": 0.3459387963686647, - "flos": 23830163067840.0, - "grad_norm": 2.4800294502109392, - "learning_rate": 3.0413084778675334e-06, - "loss": 0.6464, - "num_input_tokens_seen": 61040130, - "step": 2877 - }, - { - "epoch": 0.3460590392593038, - "flos": 18074009281200.0, - "grad_norm": 2.873093677920108, - "learning_rate": 3.0406433381255214e-06, - "loss": 0.8174, 
- "num_input_tokens_seen": 61057885, - "step": 2878 - }, - { - "epoch": 0.34617928214994287, - "flos": 13731872357040.0, - "grad_norm": 3.53644443035436, - "learning_rate": 3.0399780405187425e-06, - "loss": 0.807, - "num_input_tokens_seen": 61076600, - "step": 2879 - }, - { - "epoch": 0.346299525040582, - "flos": 18208636634280.0, - "grad_norm": 2.953452117402623, - "learning_rate": 3.0393125851481216e-06, - "loss": 0.7661, - "num_input_tokens_seen": 61096195, - "step": 2880 - }, - { - "epoch": 0.3464197679312211, - "flos": 11976017974200.0, - "grad_norm": 2.6190650695780895, - "learning_rate": 3.038646972114608e-06, - "loss": 0.8415, - "num_input_tokens_seen": 61112240, - "step": 2881 - }, - { - "epoch": 0.34654001082186014, - "flos": 16377666640440.0, - "grad_norm": 3.1414030259577617, - "learning_rate": 3.037981201519174e-06, - "loss": 0.6635, - "num_input_tokens_seen": 61132560, - "step": 2882 - }, - { - "epoch": 0.34666025371249926, - "flos": 14299251161520.0, - "grad_norm": 3.326346195153139, - "learning_rate": 3.0373152734628175e-06, - "loss": 0.6817, - "num_input_tokens_seen": 61150560, - "step": 2883 - }, - { - "epoch": 0.34678049660313837, - "flos": 11112193049280.0, - "grad_norm": 2.6030867410071594, - "learning_rate": 3.0366491880465584e-06, - "loss": 0.7358, - "num_input_tokens_seen": 61168300, - "step": 2884 - }, - { - "epoch": 0.3469007394937774, - "flos": 15489341397840.0, - "grad_norm": 2.3902135756763268, - "learning_rate": 3.035982945371443e-06, - "loss": 0.8081, - "num_input_tokens_seen": 61189715, - "step": 2885 - }, - { - "epoch": 0.34702098238441653, - "flos": 16371492948240.0, - "grad_norm": 2.9513285991020943, - "learning_rate": 3.035316545538537e-06, - "loss": 0.8427, - "num_input_tokens_seen": 61208230, - "step": 2886 - }, - { - "epoch": 0.3471412252750556, - "flos": 16782403518600.0, - "grad_norm": 2.929793292360728, - "learning_rate": 3.034649988648935e-06, - "loss": 0.7647, - "num_input_tokens_seen": 61227715, - "step": 2887 - }, - { - "epoch": 0.3472614681656947, - "flos": 15594497819520.0, - "grad_norm": 2.7766981273149542, - "learning_rate": 3.033983274803752e-06, - "loss": 0.799, - "num_input_tokens_seen": 61247225, - "step": 2888 - }, - { - "epoch": 0.3473817110563338, - "flos": 17186570517480.0, - "grad_norm": 3.7500474291027333, - "learning_rate": 3.0333164041041283e-06, - "loss": 0.7021, - "num_input_tokens_seen": 61263260, - "step": 2889 - }, - { - "epoch": 0.34750195394697286, - "flos": 16350696845880.0, - "grad_norm": 4.854235319817235, - "learning_rate": 3.032649376651228e-06, - "loss": 0.7071, - "num_input_tokens_seen": 61282400, - "step": 2890 - }, - { - "epoch": 0.347622196837612, - "flos": 21345269412960.0, - "grad_norm": 2.7284202040177723, - "learning_rate": 3.031982192546238e-06, - "loss": 0.7406, - "num_input_tokens_seen": 61305215, - "step": 2891 - }, - { - "epoch": 0.3477424397282511, - "flos": 16431922829160.0, - "grad_norm": 3.6167714351357168, - "learning_rate": 3.0313148518903696e-06, - "loss": 0.9287, - "num_input_tokens_seen": 61324760, - "step": 2892 - }, - { - "epoch": 0.34786268261889014, - "flos": 11490530012160.0, - "grad_norm": 3.041269163172469, - "learning_rate": 3.030647354784859e-06, - "loss": 0.7851, - "num_input_tokens_seen": 61341520, - "step": 2893 - }, - { - "epoch": 0.34798292550952925, - "flos": 15190045881000.0, - "grad_norm": 2.157245572007688, - "learning_rate": 3.029979701330964e-06, - "loss": 0.7585, - "num_input_tokens_seen": 61360665, - "step": 2894 - }, - { - "epoch": 0.34810316840016836, - "flos": 
14568442547760.0, - "grad_norm": 2.7815093873184615, - "learning_rate": 3.029311891629966e-06, - "loss": 0.7794, - "num_input_tokens_seen": 61378840, - "step": 2895 - }, - { - "epoch": 0.3482234112908074, - "flos": 17296001033760.0, - "grad_norm": 2.4605925787547696, - "learning_rate": 3.0286439257831744e-06, - "loss": 0.723, - "num_input_tokens_seen": 61398030, - "step": 2896 - }, - { - "epoch": 0.3483436541814465, - "flos": 17564749180560.0, - "grad_norm": 3.169494336829818, - "learning_rate": 3.0279758038919156e-06, - "loss": 0.6939, - "num_input_tokens_seen": 61415975, - "step": 2897 - }, - { - "epoch": 0.34846389707208564, - "flos": 16567088401560.0, - "grad_norm": 2.4143262799290013, - "learning_rate": 3.0273075260575455e-06, - "loss": 0.7578, - "num_input_tokens_seen": 61434595, - "step": 2898 - }, - { - "epoch": 0.3485841399627247, - "flos": 15940482794640.0, - "grad_norm": 2.6382304513438917, - "learning_rate": 3.0266390923814396e-06, - "loss": 0.7788, - "num_input_tokens_seen": 61452375, - "step": 2899 - }, - { - "epoch": 0.3487043828533638, - "flos": 12408357846120.0, - "grad_norm": 3.282064282333474, - "learning_rate": 3.0259705029650008e-06, - "loss": 0.8067, - "num_input_tokens_seen": 61470025, - "step": 2900 - }, - { - "epoch": 0.34882462574400286, - "flos": 16539453747840.0, - "grad_norm": 2.740661196322848, - "learning_rate": 3.025301757909652e-06, - "loss": 0.7136, - "num_input_tokens_seen": 61489940, - "step": 2901 - }, - { - "epoch": 0.34894486863464197, - "flos": 21912584897520.0, - "grad_norm": 2.314799840525478, - "learning_rate": 3.024632857316842e-06, - "loss": 0.7957, - "num_input_tokens_seen": 61510975, - "step": 2902 - }, - { - "epoch": 0.3490651115252811, - "flos": 16184414024160.0, - "grad_norm": 3.4342390371940335, - "learning_rate": 3.0239638012880412e-06, - "loss": 0.7523, - "num_input_tokens_seen": 61530590, - "step": 2903 - }, - { - "epoch": 0.34918535441592014, - "flos": 9194646538920.0, - "grad_norm": 3.9099702743219855, - "learning_rate": 3.0232945899247466e-06, - "loss": 0.7994, - "num_input_tokens_seen": 61547245, - "step": 2904 - }, - { - "epoch": 0.34930559730655925, - "flos": 16972110219360.0, - "grad_norm": 3.9007858511654843, - "learning_rate": 3.022625223328476e-06, - "loss": 0.7589, - "num_input_tokens_seen": 61568705, - "step": 2905 - }, - { - "epoch": 0.34942584019719836, - "flos": 16727134211160.0, - "grad_norm": 2.008569953032651, - "learning_rate": 3.0219557016007723e-06, - "loss": 0.6826, - "num_input_tokens_seen": 61588555, - "step": 2906 - }, - { - "epoch": 0.3495460830878374, - "flos": 17890096353120.0, - "grad_norm": 2.0955136969884873, - "learning_rate": 3.021286024843202e-06, - "loss": 0.684, - "num_input_tokens_seen": 61606470, - "step": 2907 - }, - { - "epoch": 0.3496663259784765, - "flos": 51626057354520.0, - "grad_norm": 1.1071624182805704, - "learning_rate": 3.0206161931573526e-06, - "loss": 0.6966, - "num_input_tokens_seen": 61658740, - "step": 2908 - }, - { - "epoch": 0.34978656886911563, - "flos": 21047493574200.0, - "grad_norm": 2.087082337952743, - "learning_rate": 3.0199462066448388e-06, - "loss": 0.9218, - "num_input_tokens_seen": 61680655, - "step": 2909 - }, - { - "epoch": 0.3499068117597547, - "flos": 15459775486560.0, - "grad_norm": 3.116511603865188, - "learning_rate": 3.019276065407296e-06, - "loss": 0.6744, - "num_input_tokens_seen": 61699495, - "step": 2910 - }, - { - "epoch": 0.3500270546503938, - "flos": 16673701181400.0, - "grad_norm": 2.57483180709709, - "learning_rate": 3.018605769546385e-06, - "loss": 
0.7953, - "num_input_tokens_seen": 61719770, - "step": 2911 - }, - { - "epoch": 0.3501472975410329, - "flos": 16756383522840.0, - "grad_norm": 2.108795877713992, - "learning_rate": 3.017935319163788e-06, - "loss": 0.7849, - "num_input_tokens_seen": 61738450, - "step": 2912 - }, - { - "epoch": 0.35026754043167196, - "flos": 18645218940840.0, - "grad_norm": 4.929820208569311, - "learning_rate": 3.017264714361213e-06, - "loss": 0.6979, - "num_input_tokens_seen": 61757820, - "step": 2913 - }, - { - "epoch": 0.3503877833223111, - "flos": 14299124521680.0, - "grad_norm": 2.621086109224146, - "learning_rate": 3.016593955240389e-06, - "loss": 0.804, - "num_input_tokens_seen": 61776230, - "step": 2914 - }, - { - "epoch": 0.3505080262129502, - "flos": 47976179811600.0, - "grad_norm": 0.8076530919056626, - "learning_rate": 3.015923041903071e-06, - "loss": 0.649, - "num_input_tokens_seen": 61842075, - "step": 2915 - }, - { - "epoch": 0.35062826910358924, - "flos": 21507848019360.0, - "grad_norm": 3.922151472217832, - "learning_rate": 3.0152519744510347e-06, - "loss": 0.8158, - "num_input_tokens_seen": 61861595, - "step": 2916 - }, - { - "epoch": 0.35074851199422835, - "flos": 17565635659440.0, - "grad_norm": 3.5708087601292813, - "learning_rate": 3.014580752986081e-06, - "loss": 0.8144, - "num_input_tokens_seen": 61880190, - "step": 2917 - }, - { - "epoch": 0.3508687548848674, - "flos": 11079271182240.0, - "grad_norm": 2.3265476900119944, - "learning_rate": 3.0139093776100345e-06, - "loss": 0.7661, - "num_input_tokens_seen": 61896500, - "step": 2918 - }, - { - "epoch": 0.3509889977755065, - "flos": 15622385752920.0, - "grad_norm": 2.6141135311674986, - "learning_rate": 3.013237848424741e-06, - "loss": 0.7372, - "num_input_tokens_seen": 61915605, - "step": 2919 - }, - { - "epoch": 0.35110924066614563, - "flos": 13974600508080.0, - "grad_norm": 5.658037609224805, - "learning_rate": 3.012566165532072e-06, - "loss": 0.7407, - "num_input_tokens_seen": 61934115, - "step": 2920 - }, - { - "epoch": 0.3512294835567847, - "flos": 16080429021000.0, - "grad_norm": 3.375010596364659, - "learning_rate": 3.0118943290339207e-06, - "loss": 0.7408, - "num_input_tokens_seen": 61954045, - "step": 2921 - }, - { - "epoch": 0.3513497264474238, - "flos": 12998495670480.0, - "grad_norm": 2.8182233432607857, - "learning_rate": 3.011222339032204e-06, - "loss": 0.6533, - "num_input_tokens_seen": 61971915, - "step": 2922 - }, - { - "epoch": 0.3514699693380629, - "flos": 19753101735120.0, - "grad_norm": 2.3868725816065663, - "learning_rate": 3.0105501956288626e-06, - "loss": 0.6713, - "num_input_tokens_seen": 61992105, - "step": 2923 - }, - { - "epoch": 0.35159021222870196, - "flos": 11112129729360.0, - "grad_norm": 4.297534572957118, - "learning_rate": 3.0098778989258602e-06, - "loss": 0.7087, - "num_input_tokens_seen": 62010435, - "step": 2924 - }, - { - "epoch": 0.35171045511934107, - "flos": 10161411688320.0, - "grad_norm": 2.9127601802020373, - "learning_rate": 3.009205449025183e-06, - "loss": 0.857, - "num_input_tokens_seen": 62026350, - "step": 2925 - }, - { - "epoch": 0.3518306980099802, - "flos": 10383945276240.0, - "grad_norm": 4.65503671699245, - "learning_rate": 3.008532846028842e-06, - "loss": 0.6099, - "num_input_tokens_seen": 62042830, - "step": 2926 - }, - { - "epoch": 0.35195094090061924, - "flos": 19834106098680.0, - "grad_norm": 4.642099062893509, - "learning_rate": 3.0078600900388694e-06, - "loss": 0.6883, - "num_input_tokens_seen": 62062855, - "step": 2927 - }, - { - "epoch": 0.35207118379125835, - "flos": 
18780447833160.0, - "grad_norm": 2.5236897534401197, - "learning_rate": 3.007187181157323e-06, - "loss": 0.726, - "num_input_tokens_seen": 62082585, - "step": 2928 - }, - { - "epoch": 0.35219142668189746, - "flos": 13137903677520.0, - "grad_norm": 3.1619272067662583, - "learning_rate": 3.006514119486282e-06, - "loss": 0.6625, - "num_input_tokens_seen": 62099135, - "step": 2929 - }, - { - "epoch": 0.3523116695725365, - "flos": 10194998414520.0, - "grad_norm": 2.6477997948890883, - "learning_rate": 3.005840905127849e-06, - "loss": 0.6821, - "num_input_tokens_seen": 62115760, - "step": 2930 - }, - { - "epoch": 0.3524319124631756, - "flos": 15675723802800.0, - "grad_norm": 2.7960509039811505, - "learning_rate": 3.0051675381841516e-06, - "loss": 0.8458, - "num_input_tokens_seen": 62132790, - "step": 2931 - }, - { - "epoch": 0.3525521553538147, - "flos": 19293728748720.0, - "grad_norm": 3.1143891817306266, - "learning_rate": 3.0044940187573363e-06, - "loss": 0.7505, - "num_input_tokens_seen": 62153520, - "step": 2932 - }, - { - "epoch": 0.3526723982444538, - "flos": 15756633186480.0, - "grad_norm": 2.781672584918998, - "learning_rate": 3.003820346949578e-06, - "loss": 0.6568, - "num_input_tokens_seen": 62171320, - "step": 2933 - }, - { - "epoch": 0.3527926411350929, - "flos": 17377796896320.0, - "grad_norm": 3.370931504324605, - "learning_rate": 3.003146522863071e-06, - "loss": 0.7755, - "num_input_tokens_seen": 62191925, - "step": 2934 - }, - { - "epoch": 0.35291288402573195, - "flos": 22344766469640.0, - "grad_norm": 3.2539770294954278, - "learning_rate": 3.0024725466000345e-06, - "loss": 0.8316, - "num_input_tokens_seen": 62211600, - "step": 2935 - }, - { - "epoch": 0.35303312691637107, - "flos": 16917695730840.0, - "grad_norm": 2.868058490726015, - "learning_rate": 3.0017984182627087e-06, - "loss": 0.7736, - "num_input_tokens_seen": 62230645, - "step": 2936 - }, - { - "epoch": 0.3531533698070102, - "flos": 15973088062080.0, - "grad_norm": 4.172576273866119, - "learning_rate": 3.00112413795336e-06, - "loss": 0.7968, - "num_input_tokens_seen": 62250200, - "step": 2937 - }, - { - "epoch": 0.35327361269764923, - "flos": 11490688311960.0, - "grad_norm": 2.7815329589920785, - "learning_rate": 3.000449705774275e-06, - "loss": 0.7935, - "num_input_tokens_seen": 62268160, - "step": 2938 - }, - { - "epoch": 0.35339385558828834, - "flos": 16161496704480.0, - "grad_norm": 3.866009157007131, - "learning_rate": 2.9997751218277654e-06, - "loss": 0.6951, - "num_input_tokens_seen": 62286035, - "step": 2939 - }, - { - "epoch": 0.35351409847892745, - "flos": 17696115557760.0, - "grad_norm": 3.6743493366983633, - "learning_rate": 2.999100386216166e-06, - "loss": 0.757, - "num_input_tokens_seen": 62304695, - "step": 2940 - }, - { - "epoch": 0.3536343413695665, - "flos": 19833599539320.0, - "grad_norm": 2.759844498745732, - "learning_rate": 2.998425499041831e-06, - "loss": 0.7213, - "num_input_tokens_seen": 62324930, - "step": 2941 - }, - { - "epoch": 0.3537545842602056, - "flos": 48657263226960.0, - "grad_norm": 1.030368659513995, - "learning_rate": 2.997750460407142e-06, - "loss": 0.6219, - "num_input_tokens_seen": 62386005, - "step": 2942 - }, - { - "epoch": 0.35387482715084473, - "flos": 13456950518040.0, - "grad_norm": 11.574579970316755, - "learning_rate": 2.997075270414501e-06, - "loss": 0.6856, - "num_input_tokens_seen": 62402940, - "step": 2943 - }, - { - "epoch": 0.3539950700414838, - "flos": 48359234108520.0, - "grad_norm": 0.7025044139709815, - "learning_rate": 2.9963999291663347e-06, - "loss": 
0.5933, - "num_input_tokens_seen": 62468440, - "step": 2944 - }, - { - "epoch": 0.3541153129321229, - "flos": 15000529140000.0, - "grad_norm": 4.277070615922639, - "learning_rate": 2.9957244367650915e-06, - "loss": 0.7281, - "num_input_tokens_seen": 62484405, - "step": 2945 - }, - { - "epoch": 0.354235555822762, - "flos": 14298966221880.0, - "grad_norm": 3.1745610632071264, - "learning_rate": 2.9950487933132425e-06, - "loss": 0.8216, - "num_input_tokens_seen": 62501540, - "step": 2946 - }, - { - "epoch": 0.35435579871340106, - "flos": 15189729281400.0, - "grad_norm": 4.229229154579278, - "learning_rate": 2.994372998913283e-06, - "loss": 0.6933, - "num_input_tokens_seen": 62519765, - "step": 2947 - }, - { - "epoch": 0.35447604160404017, - "flos": 17539140764280.0, - "grad_norm": 7.751121685932735, - "learning_rate": 2.99369705366773e-06, - "loss": 0.6111, - "num_input_tokens_seen": 62539730, - "step": 2948 - }, - { - "epoch": 0.3545962844946792, - "flos": 17156403066960.0, - "grad_norm": 2.866487755478319, - "learning_rate": 2.9930209576791244e-06, - "loss": 0.7932, - "num_input_tokens_seen": 62557925, - "step": 2949 - }, - { - "epoch": 0.35471652738531834, - "flos": 16156842690360.0, - "grad_norm": 3.0135187096528426, - "learning_rate": 2.9923447110500285e-06, - "loss": 0.6336, - "num_input_tokens_seen": 62576390, - "step": 2950 - }, - { - "epoch": 0.35483677027595745, - "flos": 20047584938040.0, - "grad_norm": 2.07107668034654, - "learning_rate": 2.9916683138830295e-06, - "loss": 0.7353, - "num_input_tokens_seen": 62596775, - "step": 2951 - }, - { - "epoch": 0.3549570131665965, - "flos": 9815964932520.0, - "grad_norm": 9.709581766913491, - "learning_rate": 2.9909917662807353e-06, - "loss": 0.7863, - "num_input_tokens_seen": 62614295, - "step": 2952 - }, - { - "epoch": 0.3550772560572356, - "flos": 15271461824040.0, - "grad_norm": 4.739593349348391, - "learning_rate": 2.9903150683457783e-06, - "loss": 0.6619, - "num_input_tokens_seen": 62632560, - "step": 2953 - }, - { - "epoch": 0.3551974989478747, - "flos": 14758180908480.0, - "grad_norm": 3.624919101207006, - "learning_rate": 2.9896382201808126e-06, - "loss": 0.6261, - "num_input_tokens_seen": 62649680, - "step": 2954 - }, - { - "epoch": 0.3553177418385138, - "flos": 14487849763680.0, - "grad_norm": 2.979085178639059, - "learning_rate": 2.988961221888516e-06, - "loss": 0.7839, - "num_input_tokens_seen": 62666075, - "step": 2955 - }, - { - "epoch": 0.3554379847291529, - "flos": 10787574055800.0, - "grad_norm": 3.977901091046499, - "learning_rate": 2.988284073571589e-06, - "loss": 0.7768, - "num_input_tokens_seen": 62681880, - "step": 2956 - }, - { - "epoch": 0.355558227619792, - "flos": 14973685985280.0, - "grad_norm": 5.917958114161235, - "learning_rate": 2.9876067753327528e-06, - "loss": 0.7025, - "num_input_tokens_seen": 62699330, - "step": 2957 - }, - { - "epoch": 0.35567847051043106, - "flos": 27685830571200.0, - "grad_norm": 2.9671885349297993, - "learning_rate": 2.986929327274754e-06, - "loss": 0.7776, - "num_input_tokens_seen": 62719630, - "step": 2958 - }, - { - "epoch": 0.35579871340107017, - "flos": 19752785135520.0, - "grad_norm": 2.7013017095005756, - "learning_rate": 2.9862517295003617e-06, - "loss": 0.764, - "num_input_tokens_seen": 62739765, - "step": 2959 - }, - { - "epoch": 0.3559189562917093, - "flos": 20752123892400.0, - "grad_norm": 6.648889044412032, - "learning_rate": 2.9855739821123654e-06, - "loss": 0.7151, - "num_input_tokens_seen": 62761065, - "step": 2960 - }, - { - "epoch": 0.35603919918234833, - "flos": 
18805961269560.0, - "grad_norm": 2.5419629242216883, - "learning_rate": 2.98489608521358e-06, - "loss": 0.8036, - "num_input_tokens_seen": 62780725, - "step": 2961 - }, - { - "epoch": 0.35615944207298744, - "flos": 16835045049360.0, - "grad_norm": 3.4316686934146636, - "learning_rate": 2.9842180389068425e-06, - "loss": 0.7768, - "num_input_tokens_seen": 62797755, - "step": 2962 - }, - { - "epoch": 0.35627968496362655, - "flos": 50329707089280.0, - "grad_norm": 0.7798080870923961, - "learning_rate": 2.98353984329501e-06, - "loss": 0.6146, - "num_input_tokens_seen": 62861820, - "step": 2963 - }, - { - "epoch": 0.3563999278542656, - "flos": 16570064437800.0, - "grad_norm": 4.224434195108125, - "learning_rate": 2.982861498480965e-06, - "loss": 0.6943, - "num_input_tokens_seen": 62883920, - "step": 2964 - }, - { - "epoch": 0.3565201707449047, - "flos": 19018965209520.0, - "grad_norm": 2.399184318849427, - "learning_rate": 2.9821830045676122e-06, - "loss": 0.8047, - "num_input_tokens_seen": 62903340, - "step": 2965 - }, - { - "epoch": 0.3566404136355438, - "flos": 20887099505040.0, - "grad_norm": 2.6117790249445645, - "learning_rate": 2.9815043616578793e-06, - "loss": 0.6963, - "num_input_tokens_seen": 62923855, - "step": 2966 - }, - { - "epoch": 0.3567606565261829, - "flos": 28204398700080.0, - "grad_norm": 4.170367593640742, - "learning_rate": 2.9808255698547145e-06, - "loss": 0.7619, - "num_input_tokens_seen": 62946375, - "step": 2967 - }, - { - "epoch": 0.356880899416822, - "flos": 16080555660840.0, - "grad_norm": 4.509293813628896, - "learning_rate": 2.9801466292610913e-06, - "loss": 0.767, - "num_input_tokens_seen": 62965980, - "step": 2968 - }, - { - "epoch": 0.35700114230746105, - "flos": 13866341410320.0, - "grad_norm": 3.443221168968473, - "learning_rate": 2.979467539980003e-06, - "loss": 0.7868, - "num_input_tokens_seen": 62982490, - "step": 2969 - }, - { - "epoch": 0.35712138519810016, - "flos": 14434068474360.0, - "grad_norm": 3.119392704914395, - "learning_rate": 2.978788302114468e-06, - "loss": 0.7485, - "num_input_tokens_seen": 62999325, - "step": 2970 - }, - { - "epoch": 0.35724162808873927, - "flos": 25850079923400.0, - "grad_norm": 3.679679724812645, - "learning_rate": 2.9781089157675255e-06, - "loss": 0.7996, - "num_input_tokens_seen": 63017505, - "step": 2971 - }, - { - "epoch": 0.3573618709793783, - "flos": 18726824843640.0, - "grad_norm": 1.8392779539264659, - "learning_rate": 2.977429381042238e-06, - "loss": 0.8681, - "num_input_tokens_seen": 63037900, - "step": 2972 - }, - { - "epoch": 0.35748211387001744, - "flos": 21373030706520.0, - "grad_norm": 3.7634263327240536, - "learning_rate": 2.9767496980416913e-06, - "loss": 0.8713, - "num_input_tokens_seen": 63056915, - "step": 2973 - }, - { - "epoch": 0.35760235676065655, - "flos": 10140140686560.0, - "grad_norm": 4.347785666733978, - "learning_rate": 2.9760698668689914e-06, - "loss": 0.7912, - "num_input_tokens_seen": 63072860, - "step": 2974 - }, - { - "epoch": 0.3577225996512956, - "flos": 32389940750280.0, - "grad_norm": 2.6026352512126034, - "learning_rate": 2.975389887627269e-06, - "loss": 0.699, - "num_input_tokens_seen": 63095180, - "step": 2975 - }, - { - "epoch": 0.3578428425419347, - "flos": 12435074361000.0, - "grad_norm": 4.508867216434684, - "learning_rate": 2.9747097604196764e-06, - "loss": 0.8834, - "num_input_tokens_seen": 63111545, - "step": 2976 - }, - { - "epoch": 0.3579630854325738, - "flos": 52864012959000.0, - "grad_norm": 0.6908763859505277, - "learning_rate": 2.9740294853493875e-06, - "loss": 0.59, 
- "num_input_tokens_seen": 63182825, - "step": 2977 - }, - { - "epoch": 0.3580833283232129, - "flos": 18349437679560.0, - "grad_norm": 3.4887413413508583, - "learning_rate": 2.9733490625196008e-06, - "loss": 0.6608, - "num_input_tokens_seen": 63202405, - "step": 2978 - }, - { - "epoch": 0.358203571213852, - "flos": 10139222547720.0, - "grad_norm": 5.676215746552981, - "learning_rate": 2.9726684920335353e-06, - "loss": 0.7124, - "num_input_tokens_seen": 63219990, - "step": 2979 - }, - { - "epoch": 0.35832381410449105, - "flos": 14838267133200.0, - "grad_norm": 3.1038307332459496, - "learning_rate": 2.971987773994432e-06, - "loss": 0.8048, - "num_input_tokens_seen": 63235895, - "step": 2980 - }, - { - "epoch": 0.35844405699513016, - "flos": 12381957930840.0, - "grad_norm": 2.5482444591838096, - "learning_rate": 2.9713069085055566e-06, - "loss": 0.8046, - "num_input_tokens_seen": 63253925, - "step": 2981 - }, - { - "epoch": 0.35856429988576927, - "flos": 16994489319720.0, - "grad_norm": 3.011369618169428, - "learning_rate": 2.9706258956701958e-06, - "loss": 0.7756, - "num_input_tokens_seen": 63273635, - "step": 2982 - }, - { - "epoch": 0.3586845427764083, - "flos": 16859608686960.0, - "grad_norm": 3.378029911421522, - "learning_rate": 2.9699447355916575e-06, - "loss": 0.7479, - "num_input_tokens_seen": 63292165, - "step": 2983 - }, - { - "epoch": 0.35880478566704743, - "flos": 14622350476920.0, - "grad_norm": 3.723815801422622, - "learning_rate": 2.969263428373275e-06, - "loss": 0.7198, - "num_input_tokens_seen": 63310235, - "step": 2984 - }, - { - "epoch": 0.35892502855768654, - "flos": 9951415444560.0, - "grad_norm": 4.064055863799969, - "learning_rate": 2.9685819741184007e-06, - "loss": 0.7605, - "num_input_tokens_seen": 63328395, - "step": 2985 - }, - { - "epoch": 0.3590452714483256, - "flos": 13218844721160.0, - "grad_norm": 4.145769486183278, - "learning_rate": 2.967900372930411e-06, - "loss": 0.6613, - "num_input_tokens_seen": 63346625, - "step": 2986 - }, - { - "epoch": 0.3591655143389647, - "flos": 12948925155840.0, - "grad_norm": 4.132365500031972, - "learning_rate": 2.9672186249127046e-06, - "loss": 0.7687, - "num_input_tokens_seen": 63365810, - "step": 2987 - }, - { - "epoch": 0.3592857572296038, - "flos": 18480329157360.0, - "grad_norm": 4.14353708033762, - "learning_rate": 2.9665367301687014e-06, - "loss": 0.7724, - "num_input_tokens_seen": 63383775, - "step": 2988 - }, - { - "epoch": 0.3594060001202429, - "flos": 21559096511880.0, - "grad_norm": 2.801110156550588, - "learning_rate": 2.965854688801845e-06, - "loss": 0.7441, - "num_input_tokens_seen": 63405555, - "step": 2989 - }, - { - "epoch": 0.359526243010882, - "flos": 12433048123560.0, - "grad_norm": 2.7066621955433767, - "learning_rate": 2.9651725009156005e-06, - "loss": 0.7463, - "num_input_tokens_seen": 63423020, - "step": 2990 - }, - { - "epoch": 0.3596464859015211, - "flos": 16808676794040.0, - "grad_norm": 2.5577559868611344, - "learning_rate": 2.964490166613454e-06, - "loss": 0.7278, - "num_input_tokens_seen": 63442665, - "step": 2991 - }, - { - "epoch": 0.35976672879216015, - "flos": 40328341210560.0, - "grad_norm": 0.8320776339930948, - "learning_rate": 2.963807685998917e-06, - "loss": 0.5934, - "num_input_tokens_seen": 63498250, - "step": 2992 - }, - { - "epoch": 0.35988697168279926, - "flos": 31738359926280.0, - "grad_norm": 2.102688512526972, - "learning_rate": 2.9631250591755196e-06, - "loss": 0.7694, - "num_input_tokens_seen": 63520685, - "step": 2993 - }, - { - "epoch": 0.36000721457343837, - "flos": 
26341266678240.0, - "grad_norm": 4.69156460331988, - "learning_rate": 2.962442286246817e-06, - "loss": 0.5698, - "num_input_tokens_seen": 63543235, - "step": 2994 - }, - { - "epoch": 0.3601274574640774, - "flos": 13349071339800.0, - "grad_norm": 2.118637545591757, - "learning_rate": 2.9617593673163853e-06, - "loss": 0.6859, - "num_input_tokens_seen": 63561775, - "step": 2995 - }, - { - "epoch": 0.36024770035471654, - "flos": 9681147619680.0, - "grad_norm": 2.566262994539163, - "learning_rate": 2.9610763024878216e-06, - "loss": 0.7519, - "num_input_tokens_seen": 63577000, - "step": 2996 - }, - { - "epoch": 0.3603679432453556, - "flos": 14812183817520.0, - "grad_norm": 2.482273580592246, - "learning_rate": 2.960393091864747e-06, - "loss": 0.8975, - "num_input_tokens_seen": 63595100, - "step": 2997 - }, - { - "epoch": 0.3604881861359947, - "flos": 16427870354280.0, - "grad_norm": 3.1740235817218907, - "learning_rate": 2.959709735550804e-06, - "loss": 0.726, - "num_input_tokens_seen": 63614415, - "step": 2998 - }, - { - "epoch": 0.3606084290266338, - "flos": 16134780189600.0, - "grad_norm": 3.6141151909258857, - "learning_rate": 2.9590262336496575e-06, - "loss": 0.7469, - "num_input_tokens_seen": 63633865, - "step": 2999 - }, - { - "epoch": 0.36072867191727287, - "flos": 11382587514000.0, - "grad_norm": 3.4454100718273457, - "learning_rate": 2.9583425862649936e-06, - "loss": 0.8362, - "num_input_tokens_seen": 63651720, - "step": 3000 - }, - { - "epoch": 0.360848914807912, - "flos": 14374651712160.0, - "grad_norm": 3.3630731143858705, - "learning_rate": 2.9576587935005215e-06, - "loss": 0.7285, - "num_input_tokens_seen": 63669520, - "step": 3001 - }, - { - "epoch": 0.3609691576985511, - "flos": 13783690728840.0, - "grad_norm": 4.439591465618407, - "learning_rate": 2.9569748554599713e-06, - "loss": 0.6974, - "num_input_tokens_seen": 63684850, - "step": 3002 - }, - { - "epoch": 0.36108940058919015, - "flos": 31060854086400.0, - "grad_norm": 4.459332339344056, - "learning_rate": 2.956290772247097e-06, - "loss": 0.7159, - "num_input_tokens_seen": 63703245, - "step": 3003 - }, - { - "epoch": 0.36120964347982926, - "flos": 16971698639880.0, - "grad_norm": 3.0487727588462556, - "learning_rate": 2.9556065439656724e-06, - "loss": 0.725, - "num_input_tokens_seen": 63722015, - "step": 3004 - }, - { - "epoch": 0.36132988637046837, - "flos": 13217926582320.0, - "grad_norm": 2.4751722603393227, - "learning_rate": 2.9549221707194952e-06, - "loss": 0.8059, - "num_input_tokens_seen": 63740585, - "step": 3005 - }, - { - "epoch": 0.3614501292611074, - "flos": 20395881090240.0, - "grad_norm": 4.565509796255729, - "learning_rate": 2.954237652612384e-06, - "loss": 0.7257, - "num_input_tokens_seen": 63759355, - "step": 3006 - }, - { - "epoch": 0.36157037215174653, - "flos": 16564080705360.0, - "grad_norm": 3.3583366741103853, - "learning_rate": 2.9535529897481796e-06, - "loss": 0.8348, - "num_input_tokens_seen": 63776620, - "step": 3007 - }, - { - "epoch": 0.36169061504238564, - "flos": 9138110833080.0, - "grad_norm": 4.193461714954026, - "learning_rate": 2.9528681822307446e-06, - "loss": 0.7485, - "num_input_tokens_seen": 63793190, - "step": 3008 - }, - { - "epoch": 0.3618108579330247, - "flos": 19561115517240.0, - "grad_norm": 7.521110325271286, - "learning_rate": 2.952183230163964e-06, - "loss": 0.8067, - "num_input_tokens_seen": 63812485, - "step": 3009 - }, - { - "epoch": 0.3619311008236638, - "flos": 16698803038320.0, - "grad_norm": 2.732146221269234, - "learning_rate": 2.9514981336517448e-06, - "loss": 0.7219, 
- "num_input_tokens_seen": 63831975, - "step": 3010 - }, - { - "epoch": 0.36205134371430286, - "flos": 18995098091040.0, - "grad_norm": 2.589199624720982, - "learning_rate": 2.950812892798015e-06, - "loss": 0.793, - "num_input_tokens_seen": 63852590, - "step": 3011 - }, - { - "epoch": 0.362171586604942, - "flos": 19158056616960.0, - "grad_norm": 5.010150333973877, - "learning_rate": 2.9501275077067256e-06, - "loss": 0.8574, - "num_input_tokens_seen": 63872930, - "step": 3012 - }, - { - "epoch": 0.3622918294955811, - "flos": 20589925205520.0, - "grad_norm": 2.3018465797809733, - "learning_rate": 2.949441978481848e-06, - "loss": 0.8721, - "num_input_tokens_seen": 63893550, - "step": 3013 - }, - { - "epoch": 0.36241207238622014, - "flos": 14487121584600.0, - "grad_norm": 3.552404588656989, - "learning_rate": 2.9487563052273778e-06, - "loss": 0.7862, - "num_input_tokens_seen": 63910030, - "step": 3014 - }, - { - "epoch": 0.36253231527685925, - "flos": 15648437408640.0, - "grad_norm": 3.385160340606662, - "learning_rate": 2.94807048804733e-06, - "loss": 0.8509, - "num_input_tokens_seen": 63929370, - "step": 3015 - }, - { - "epoch": 0.36265255816749836, - "flos": 13402029470160.0, - "grad_norm": 2.643526368118076, - "learning_rate": 2.9473845270457434e-06, - "loss": 0.8823, - "num_input_tokens_seen": 63945905, - "step": 3016 - }, - { - "epoch": 0.3627728010581374, - "flos": 13622695120440.0, - "grad_norm": 2.743389333599146, - "learning_rate": 2.946698422326677e-06, - "loss": 0.6652, - "num_input_tokens_seen": 63963085, - "step": 3017 - }, - { - "epoch": 0.36289304394877653, - "flos": 20182560550680.0, - "grad_norm": 3.3398628814327784, - "learning_rate": 2.946012173994213e-06, - "loss": 0.7836, - "num_input_tokens_seen": 63982590, - "step": 3018 - }, - { - "epoch": 0.36301328683941564, - "flos": 25370195774280.0, - "grad_norm": 2.0118354837566597, - "learning_rate": 2.945325782152454e-06, - "loss": 0.6712, - "num_input_tokens_seen": 64005345, - "step": 3019 - }, - { - "epoch": 0.3631335297300547, - "flos": 13893659464440.0, - "grad_norm": 2.7655105998198715, - "learning_rate": 2.9446392469055257e-06, - "loss": 0.7798, - "num_input_tokens_seen": 64023100, - "step": 3020 - }, - { - "epoch": 0.3632537726206938, - "flos": 14271679827720.0, - "grad_norm": 2.3913454453702454, - "learning_rate": 2.9439525683575745e-06, - "loss": 0.7827, - "num_input_tokens_seen": 64041740, - "step": 3021 - }, - { - "epoch": 0.3633740155113329, - "flos": 15405266018160.0, - "grad_norm": 3.167321906980559, - "learning_rate": 2.9432657466127694e-06, - "loss": 0.7281, - "num_input_tokens_seen": 64061030, - "step": 3022 - }, - { - "epoch": 0.36349425840197197, - "flos": 14834974497360.0, - "grad_norm": 2.3329700005438347, - "learning_rate": 2.9425787817753007e-06, - "loss": 0.7575, - "num_input_tokens_seen": 64079410, - "step": 3023 - }, - { - "epoch": 0.3636145012926111, - "flos": 21805623858120.0, - "grad_norm": 2.4330920931582227, - "learning_rate": 2.94189167394938e-06, - "loss": 0.689, - "num_input_tokens_seen": 64101565, - "step": 3024 - }, - { - "epoch": 0.3637347441832502, - "flos": 15673855865160.0, - "grad_norm": 3.1136592314854323, - "learning_rate": 2.941204423239241e-06, - "loss": 0.7978, - "num_input_tokens_seen": 64120160, - "step": 3025 - }, - { - "epoch": 0.36385498707388925, - "flos": 21670268325960.0, - "grad_norm": 2.3587124127828343, - "learning_rate": 2.9405170297491395e-06, - "loss": 0.7491, - "num_input_tokens_seen": 64139875, - "step": 3026 - }, - { - "epoch": 0.36397522996452836, - "flos": 
16269692482320.0, - "grad_norm": 2.877137757674412, - "learning_rate": 2.939829493583353e-06, - "loss": 0.7876, - "num_input_tokens_seen": 64156240, - "step": 3027 - }, - { - "epoch": 0.3640954728551674, - "flos": 15729346792320.0, - "grad_norm": 2.7765887343455864, - "learning_rate": 2.939141814846179e-06, - "loss": 0.8019, - "num_input_tokens_seen": 64173375, - "step": 3028 - }, - { - "epoch": 0.3642157157458065, - "flos": 12921923701320.0, - "grad_norm": 2.1845264387776933, - "learning_rate": 2.938453993641938e-06, - "loss": 0.812, - "num_input_tokens_seen": 64191470, - "step": 3029 - }, - { - "epoch": 0.36433595863644563, - "flos": 12867604192680.0, - "grad_norm": 5.852130286830854, - "learning_rate": 2.937766030074973e-06, - "loss": 0.6759, - "num_input_tokens_seen": 64208445, - "step": 3030 - }, - { - "epoch": 0.3644562015270847, - "flos": 19671495832320.0, - "grad_norm": 2.3098317810834357, - "learning_rate": 2.937077924249646e-06, - "loss": 0.7994, - "num_input_tokens_seen": 64230755, - "step": 3031 - }, - { - "epoch": 0.3645764444177238, - "flos": 10383312077040.0, - "grad_norm": 4.2585773306043535, - "learning_rate": 2.9363896762703443e-06, - "loss": 0.7328, - "num_input_tokens_seen": 64247540, - "step": 3032 - }, - { - "epoch": 0.3646966873083629, - "flos": 15108439978200.0, - "grad_norm": 1.910462586971771, - "learning_rate": 2.9357012862414725e-06, - "loss": 0.8288, - "num_input_tokens_seen": 64266620, - "step": 3033 - }, - { - "epoch": 0.36481693019900197, - "flos": 20374166849040.0, - "grad_norm": 4.945584209866402, - "learning_rate": 2.9350127542674593e-06, - "loss": 0.7038, - "num_input_tokens_seen": 64288550, - "step": 3034 - }, - { - "epoch": 0.3649371730896411, - "flos": 14400861667680.0, - "grad_norm": 2.773029909943131, - "learning_rate": 2.934324080452755e-06, - "loss": 0.7517, - "num_input_tokens_seen": 64306060, - "step": 3035 - }, - { - "epoch": 0.3650574159802802, - "flos": 18129658508160.0, - "grad_norm": 2.276138850624403, - "learning_rate": 2.9336352649018307e-06, - "loss": 0.7675, - "num_input_tokens_seen": 64325850, - "step": 3036 - }, - { - "epoch": 0.36517765887091924, - "flos": 24126862467960.0, - "grad_norm": 3.9907769923289926, - "learning_rate": 2.9329463077191783e-06, - "loss": 0.6868, - "num_input_tokens_seen": 64348945, - "step": 3037 - }, - { - "epoch": 0.36529790176155835, - "flos": 14703513140280.0, - "grad_norm": 5.938310748512445, - "learning_rate": 2.9322572090093135e-06, - "loss": 0.6267, - "num_input_tokens_seen": 64367370, - "step": 3038 - }, - { - "epoch": 0.36541814465219746, - "flos": 12867635852640.0, - "grad_norm": 4.22497754514246, - "learning_rate": 2.9315679688767713e-06, - "loss": 0.7264, - "num_input_tokens_seen": 64385100, - "step": 3039 - }, - { - "epoch": 0.3655383875428365, - "flos": 16593678276600.0, - "grad_norm": 2.5887575014780237, - "learning_rate": 2.9308785874261085e-06, - "loss": 0.6508, - "num_input_tokens_seen": 64405010, - "step": 3040 - }, - { - "epoch": 0.36565863043347563, - "flos": 16080682300680.0, - "grad_norm": 4.614077894144444, - "learning_rate": 2.9301890647619045e-06, - "loss": 0.791, - "num_input_tokens_seen": 64424025, - "step": 3041 - }, - { - "epoch": 0.36577887332411474, - "flos": 18187333972560.0, - "grad_norm": 2.750897261153997, - "learning_rate": 2.929499400988759e-06, - "loss": 0.77, - "num_input_tokens_seen": 64444905, - "step": 3042 - }, - { - "epoch": 0.3658991162147538, - "flos": 20751997252560.0, - "grad_norm": 2.4178808932758873, - "learning_rate": 2.9288095962112927e-06, - "loss": 0.6265, 
- "num_input_tokens_seen": 64465330, - "step": 3043 - }, - { - "epoch": 0.3660193591053929, - "flos": 12975673330680.0, - "grad_norm": 2.7335507267665005, - "learning_rate": 2.9281196505341503e-06, - "loss": 0.8281, - "num_input_tokens_seen": 64482220, - "step": 3044 - }, - { - "epoch": 0.36613960199603196, - "flos": 7467566568360.0, - "grad_norm": 3.1503208481403964, - "learning_rate": 2.9274295640619946e-06, - "loss": 0.7767, - "num_input_tokens_seen": 64499070, - "step": 3045 - }, - { - "epoch": 0.36625984488667107, - "flos": 14433625234920.0, - "grad_norm": 2.686070705304092, - "learning_rate": 2.9267393368995103e-06, - "loss": 0.7733, - "num_input_tokens_seen": 64518020, - "step": 3046 - }, - { - "epoch": 0.3663800877773102, - "flos": 12893054309160.0, - "grad_norm": 4.195565043827506, - "learning_rate": 2.926048969151407e-06, - "loss": 0.7203, - "num_input_tokens_seen": 64535025, - "step": 3047 - }, - { - "epoch": 0.36650033066794924, - "flos": 14839280251920.0, - "grad_norm": 3.0458212570876437, - "learning_rate": 2.92535846092241e-06, - "loss": 0.6614, - "num_input_tokens_seen": 64553760, - "step": 3048 - }, - { - "epoch": 0.36662057355858835, - "flos": 18104619971160.0, - "grad_norm": 3.770062250083788, - "learning_rate": 2.9246678123172704e-06, - "loss": 0.8061, - "num_input_tokens_seen": 64573570, - "step": 3049 - }, - { - "epoch": 0.36674081644922746, - "flos": 8979046482240.0, - "grad_norm": 4.52947058104, - "learning_rate": 2.9239770234407596e-06, - "loss": 0.7059, - "num_input_tokens_seen": 64591595, - "step": 3050 - }, - { - "epoch": 0.3668610593398665, - "flos": 15432995651760.0, - "grad_norm": 2.3020135400010986, - "learning_rate": 2.9232860943976686e-06, - "loss": 0.6646, - "num_input_tokens_seen": 64612050, - "step": 3051 - }, - { - "epoch": 0.3669813022305056, - "flos": 19615530005760.0, - "grad_norm": 2.3912510270367267, - "learning_rate": 2.9225950252928115e-06, - "loss": 0.8237, - "num_input_tokens_seen": 64632620, - "step": 3052 - }, - { - "epoch": 0.36710154512114473, - "flos": 13948168932840.0, - "grad_norm": 3.4706990033212723, - "learning_rate": 2.9219038162310217e-06, - "loss": 0.7863, - "num_input_tokens_seen": 64650540, - "step": 3053 - }, - { - "epoch": 0.3672217880117838, - "flos": 15215211057840.0, - "grad_norm": 3.3376291885804013, - "learning_rate": 2.921212467317157e-06, - "loss": 0.8072, - "num_input_tokens_seen": 64669705, - "step": 3054 - }, - { - "epoch": 0.3673420309024229, - "flos": 9871075940160.0, - "grad_norm": 4.190551556904439, - "learning_rate": 2.920520978656093e-06, - "loss": 0.7882, - "num_input_tokens_seen": 64686390, - "step": 3055 - }, - { - "epoch": 0.367462273793062, - "flos": 21265436467920.0, - "grad_norm": 2.796825216692064, - "learning_rate": 2.919829350352729e-06, - "loss": 0.7486, - "num_input_tokens_seen": 64707715, - "step": 3056 - }, - { - "epoch": 0.36758251668370107, - "flos": 46175657716560.0, - "grad_norm": 0.7543935041204577, - "learning_rate": 2.919137582511983e-06, - "loss": 0.6105, - "num_input_tokens_seen": 64763875, - "step": 3057 - }, - { - "epoch": 0.3677027595743402, - "flos": 9222281192640.0, - "grad_norm": 2.6499534027033427, - "learning_rate": 2.918445675238797e-06, - "loss": 0.6161, - "num_input_tokens_seen": 64780520, - "step": 3058 - }, - { - "epoch": 0.36782300246497923, - "flos": 18349374359640.0, - "grad_norm": 3.588136617817465, - "learning_rate": 2.917753628638132e-06, - "loss": 0.6676, - "num_input_tokens_seen": 64800545, - "step": 3059 - }, - { - "epoch": 0.36794324535561834, - "flos": 
12704614006800.0, - "grad_norm": 6.256401445840386, - "learning_rate": 2.9170614428149716e-06, - "loss": 0.6781, - "num_input_tokens_seen": 64818600, - "step": 3060 - }, - { - "epoch": 0.36806348824625745, - "flos": 17639548191960.0, - "grad_norm": 4.161948062629493, - "learning_rate": 2.9163691178743195e-06, - "loss": 0.847, - "num_input_tokens_seen": 64836970, - "step": 3061 - }, - { - "epoch": 0.3681837311368965, - "flos": 15000719099760.0, - "grad_norm": 2.9516862866528437, - "learning_rate": 2.9156766539212006e-06, - "loss": 0.7684, - "num_input_tokens_seen": 64854335, - "step": 3062 - }, - { - "epoch": 0.3683039740275356, - "flos": 15699369301560.0, - "grad_norm": 2.9353499965167327, - "learning_rate": 2.9149840510606614e-06, - "loss": 0.6946, - "num_input_tokens_seen": 64872710, - "step": 3063 - }, - { - "epoch": 0.36842421691817473, - "flos": 51902593851480.0, - "grad_norm": 1.0765407919944339, - "learning_rate": 2.914291309397769e-06, - "loss": 0.6768, - "num_input_tokens_seen": 64929900, - "step": 3064 - }, - { - "epoch": 0.3685444598088138, - "flos": 17079957737640.0, - "grad_norm": 3.200045859485082, - "learning_rate": 2.9135984290376117e-06, - "loss": 0.7696, - "num_input_tokens_seen": 64948485, - "step": 3065 - }, - { - "epoch": 0.3686647026994529, - "flos": 16885913622360.0, - "grad_norm": 2.2700036192541635, - "learning_rate": 2.9129054100853e-06, - "loss": 0.8176, - "num_input_tokens_seen": 64967045, - "step": 3066 - }, - { - "epoch": 0.368784945590092, - "flos": 18403693868280.0, - "grad_norm": 3.0025317317610374, - "learning_rate": 2.912212252645963e-06, - "loss": 0.7392, - "num_input_tokens_seen": 64989350, - "step": 3067 - }, - { - "epoch": 0.36890518848073106, - "flos": 13461699512040.0, - "grad_norm": 3.415752490178074, - "learning_rate": 2.9115189568247523e-06, - "loss": 0.7415, - "num_input_tokens_seen": 65006630, - "step": 3068 - }, - { - "epoch": 0.36902543137137017, - "flos": 11922458304600.0, - "grad_norm": 2.5968254883380832, - "learning_rate": 2.910825522726841e-06, - "loss": 0.8929, - "num_input_tokens_seen": 65023875, - "step": 3069 - }, - { - "epoch": 0.3691456742620093, - "flos": 8897852158920.0, - "grad_norm": 3.6537709757372108, - "learning_rate": 2.9101319504574215e-06, - "loss": 0.7597, - "num_input_tokens_seen": 65040035, - "step": 3070 - }, - { - "epoch": 0.36926591715264834, - "flos": 12976433169720.0, - "grad_norm": 2.365399078506235, - "learning_rate": 2.909438240121709e-06, - "loss": 0.746, - "num_input_tokens_seen": 65060030, - "step": 3071 - }, - { - "epoch": 0.36938616004328745, - "flos": 21236598735720.0, - "grad_norm": 2.9142519810003815, - "learning_rate": 2.908744391824939e-06, - "loss": 0.6887, - "num_input_tokens_seen": 65080770, - "step": 3072 - }, - { - "epoch": 0.36950640293392656, - "flos": 21427096935480.0, - "grad_norm": 2.444795499023069, - "learning_rate": 2.908050405672367e-06, - "loss": 0.7645, - "num_input_tokens_seen": 65100035, - "step": 3073 - }, - { - "epoch": 0.3696266458245656, - "flos": 18187460612400.0, - "grad_norm": 2.698858698060113, - "learning_rate": 2.9073562817692703e-06, - "loss": 0.7781, - "num_input_tokens_seen": 65118440, - "step": 3074 - }, - { - "epoch": 0.3697468887152047, - "flos": 44138522334120.0, - "grad_norm": 0.7896232123524545, - "learning_rate": 2.9066620202209468e-06, - "loss": 0.6029, - "num_input_tokens_seen": 65180650, - "step": 3075 - }, - { - "epoch": 0.3698671316058438, - "flos": 19156283659200.0, - "grad_norm": 2.902085304305226, - "learning_rate": 2.905967621132716e-06, - "loss": 0.7731, 
- "num_input_tokens_seen": 65197980, - "step": 3076 - }, - { - "epoch": 0.3699873744964829, - "flos": 18024217146840.0, - "grad_norm": 2.55775533436329, - "learning_rate": 2.9052730846099172e-06, - "loss": 0.7287, - "num_input_tokens_seen": 65219045, - "step": 3077 - }, - { - "epoch": 0.370107617387122, - "flos": 47837373343800.0, - "grad_norm": 1.6553963674623648, - "learning_rate": 2.9045784107579123e-06, - "loss": 0.6412, - "num_input_tokens_seen": 65278870, - "step": 3078 - }, - { - "epoch": 0.37022786027776106, - "flos": 11625948864240.0, - "grad_norm": 3.0205894284055086, - "learning_rate": 2.9038835996820807e-06, - "loss": 0.6611, - "num_input_tokens_seen": 65296200, - "step": 3079 - }, - { - "epoch": 0.37034810316840017, - "flos": 13538461440960.0, - "grad_norm": 2.3876162276013235, - "learning_rate": 2.903188651487826e-06, - "loss": 0.7752, - "num_input_tokens_seen": 65314475, - "step": 3080 - }, - { - "epoch": 0.3704683460590393, - "flos": 13002579805320.0, - "grad_norm": 3.7068416438437426, - "learning_rate": 2.902493566280571e-06, - "loss": 0.851, - "num_input_tokens_seen": 65332300, - "step": 3081 - }, - { - "epoch": 0.37058858894967833, - "flos": 10272931761960.0, - "grad_norm": 2.3878463853344574, - "learning_rate": 2.9017983441657595e-06, - "loss": 0.7962, - "num_input_tokens_seen": 65349350, - "step": 3082 - }, - { - "epoch": 0.37070883184031744, - "flos": 10140425626200.0, - "grad_norm": 2.906058520500362, - "learning_rate": 2.9011029852488564e-06, - "loss": 0.7465, - "num_input_tokens_seen": 65366305, - "step": 3083 - }, - { - "epoch": 0.37082907473095655, - "flos": 38532124870080.0, - "grad_norm": 1.0004766639492981, - "learning_rate": 2.9004074896353465e-06, - "loss": 0.67, - "num_input_tokens_seen": 65420025, - "step": 3084 - }, - { - "epoch": 0.3709493176215956, - "flos": 11652886998840.0, - "grad_norm": 2.303883661293289, - "learning_rate": 2.8997118574307362e-06, - "loss": 0.7962, - "num_input_tokens_seen": 65436700, - "step": 3085 - }, - { - "epoch": 0.3710695605122347, - "flos": 15324863193840.0, - "grad_norm": 2.734391915405268, - "learning_rate": 2.899016088740553e-06, - "loss": 0.7307, - "num_input_tokens_seen": 65454530, - "step": 3086 - }, - { - "epoch": 0.37118980340287383, - "flos": 10437220006200.0, - "grad_norm": 2.723192816120359, - "learning_rate": 2.898320183670344e-06, - "loss": 0.7721, - "num_input_tokens_seen": 65471665, - "step": 3087 - }, - { - "epoch": 0.3713100462935129, - "flos": 18970091214000.0, - "grad_norm": 2.446977042729262, - "learning_rate": 2.8976241423256767e-06, - "loss": 0.8701, - "num_input_tokens_seen": 65491480, - "step": 3088 - }, - { - "epoch": 0.371430289184152, - "flos": 22399212618120.0, - "grad_norm": 2.4077710385128746, - "learning_rate": 2.896927964812142e-06, - "loss": 0.6663, - "num_input_tokens_seen": 65511765, - "step": 3089 - }, - { - "epoch": 0.37155053207479105, - "flos": 11463465237720.0, - "grad_norm": 4.7190972004665435, - "learning_rate": 2.8962316512353465e-06, - "loss": 0.7163, - "num_input_tokens_seen": 65529030, - "step": 3090 - }, - { - "epoch": 0.37167077496543016, - "flos": 17133707367000.0, - "grad_norm": 3.31704544000377, - "learning_rate": 2.8955352017009233e-06, - "loss": 0.7332, - "num_input_tokens_seen": 65547995, - "step": 3091 - }, - { - "epoch": 0.3717910178560693, - "flos": 16159945366440.0, - "grad_norm": 2.6379912294093075, - "learning_rate": 2.8948386163145212e-06, - "loss": 0.7533, - "num_input_tokens_seen": 65566925, - "step": 3092 - }, - { - "epoch": 0.3719112607467083, - "flos": 
19750062378960.0, - "grad_norm": 2.624933332777402, - "learning_rate": 2.8941418951818135e-06, - "loss": 0.7732, - "num_input_tokens_seen": 65586205, - "step": 3093 - }, - { - "epoch": 0.37203150363734744, - "flos": 8817164394960.0, - "grad_norm": 3.2835741803541127, - "learning_rate": 2.8934450384084903e-06, - "loss": 0.6999, - "num_input_tokens_seen": 65603440, - "step": 3094 - }, - { - "epoch": 0.37215174652798655, - "flos": 17351175361320.0, - "grad_norm": 5.735336846700217, - "learning_rate": 2.8927480461002653e-06, - "loss": 0.6911, - "num_input_tokens_seen": 65623130, - "step": 3095 - }, - { - "epoch": 0.3722719894186256, - "flos": 13050282382320.0, - "grad_norm": 38.34063315062266, - "learning_rate": 2.892050918362872e-06, - "loss": 0.8205, - "num_input_tokens_seen": 65637905, - "step": 3096 - }, - { - "epoch": 0.3723922323092647, - "flos": 46009818134280.0, - "grad_norm": 0.9000697691683281, - "learning_rate": 2.8913536553020626e-06, - "loss": 0.6031, - "num_input_tokens_seen": 65691680, - "step": 3097 - }, - { - "epoch": 0.3725124751999038, - "flos": 16864167721200.0, - "grad_norm": 2.036775907322445, - "learning_rate": 2.8906562570236137e-06, - "loss": 0.8394, - "num_input_tokens_seen": 65709310, - "step": 3098 - }, - { - "epoch": 0.3726327180905429, - "flos": 15295392262440.0, - "grad_norm": 1.9453278812888084, - "learning_rate": 2.889958723633318e-06, - "loss": 0.7514, - "num_input_tokens_seen": 65727970, - "step": 3099 - }, - { - "epoch": 0.372752960981182, - "flos": 22453437146880.0, - "grad_norm": 2.7818827391538012, - "learning_rate": 2.889261055236992e-06, - "loss": 0.7227, - "num_input_tokens_seen": 65749905, - "step": 3100 - }, - { - "epoch": 0.3728732038718211, - "flos": 18400907791800.0, - "grad_norm": 2.2544813034084386, - "learning_rate": 2.8885632519404704e-06, - "loss": 0.8124, - "num_input_tokens_seen": 65769895, - "step": 3101 - }, - { - "epoch": 0.37299344676246016, - "flos": 18538447861200.0, - "grad_norm": 2.4894315495103023, - "learning_rate": 2.8878653138496107e-06, - "loss": 0.741, - "num_input_tokens_seen": 65790110, - "step": 3102 - }, - { - "epoch": 0.37311368965309927, - "flos": 17458294700520.0, - "grad_norm": 2.9345868863924647, - "learning_rate": 2.8871672410702878e-06, - "loss": 0.7363, - "num_input_tokens_seen": 65807190, - "step": 3103 - }, - { - "epoch": 0.3732339325437384, - "flos": 18915676725480.0, - "grad_norm": 2.1011630107567134, - "learning_rate": 2.8864690337084008e-06, - "loss": 0.8045, - "num_input_tokens_seen": 65826185, - "step": 3104 - }, - { - "epoch": 0.37335417543437743, - "flos": 19208418630600.0, - "grad_norm": 2.633313139906448, - "learning_rate": 2.885770691869866e-06, - "loss": 0.7668, - "num_input_tokens_seen": 65846785, - "step": 3105 - }, - { - "epoch": 0.37347441832501654, - "flos": 17592352174320.0, - "grad_norm": 2.608619169309599, - "learning_rate": 2.8850722156606207e-06, - "loss": 0.7184, - "num_input_tokens_seen": 65864895, - "step": 3106 - }, - { - "epoch": 0.3735946612156556, - "flos": 14402919565080.0, - "grad_norm": 2.4870288997024788, - "learning_rate": 2.8843736051866252e-06, - "loss": 0.646, - "num_input_tokens_seen": 65883540, - "step": 3107 - }, - { - "epoch": 0.3737149041062947, - "flos": 16864041081360.0, - "grad_norm": 2.0730318455989605, - "learning_rate": 2.8836748605538557e-06, - "loss": 0.6832, - "num_input_tokens_seen": 65904900, - "step": 3108 - }, - { - "epoch": 0.3738351469969338, - "flos": 25477251793560.0, - "grad_norm": 4.948438093584686, - "learning_rate": 2.882975981868313e-06, - "loss": 
0.616, - "num_input_tokens_seen": 65925005, - "step": 3109 - }, - { - "epoch": 0.3739553898875729, - "flos": 32011983706920.0, - "grad_norm": 3.015873557802182, - "learning_rate": 2.882276969236016e-06, - "loss": 0.6548, - "num_input_tokens_seen": 65946085, - "step": 3110 - }, - { - "epoch": 0.374075632778212, - "flos": 9327342634440.0, - "grad_norm": 3.4288710054579643, - "learning_rate": 2.881577822763005e-06, - "loss": 0.731, - "num_input_tokens_seen": 65963755, - "step": 3111 - }, - { - "epoch": 0.3741958756688511, - "flos": 19071670060200.0, - "grad_norm": 2.532600325228723, - "learning_rate": 2.880878542555338e-06, - "loss": 0.8661, - "num_input_tokens_seen": 65981240, - "step": 3112 - }, - { - "epoch": 0.37431611855949015, - "flos": 15675660482880.0, - "grad_norm": 2.8794698383535455, - "learning_rate": 2.8801791287190976e-06, - "loss": 0.7907, - "num_input_tokens_seen": 65998955, - "step": 3113 - }, - { - "epoch": 0.37443636145012926, - "flos": 17728055966040.0, - "grad_norm": 3.7203019694339887, - "learning_rate": 2.8794795813603817e-06, - "loss": 0.8313, - "num_input_tokens_seen": 66014140, - "step": 3114 - }, - { - "epoch": 0.3745566043407684, - "flos": 11194527131160.0, - "grad_norm": 2.9165484994838176, - "learning_rate": 2.878779900585314e-06, - "loss": 0.7946, - "num_input_tokens_seen": 66031700, - "step": 3115 - }, - { - "epoch": 0.37467684723140743, - "flos": 18024882006000.0, - "grad_norm": 2.245123452953143, - "learning_rate": 2.8780800865000336e-06, - "loss": 0.7506, - "num_input_tokens_seen": 66052730, - "step": 3116 - }, - { - "epoch": 0.37479709012204654, - "flos": 47461949097240.0, - "grad_norm": 1.0338737172056662, - "learning_rate": 2.877380139210702e-06, - "loss": 0.6506, - "num_input_tokens_seen": 66111120, - "step": 3117 - }, - { - "epoch": 0.37491733301268565, - "flos": 17429456968320.0, - "grad_norm": 4.721917712298031, - "learning_rate": 2.876680058823501e-06, - "loss": 0.7315, - "num_input_tokens_seen": 66131240, - "step": 3118 - }, - { - "epoch": 0.3750375759033247, - "flos": 23614436371320.0, - "grad_norm": 3.299367820990633, - "learning_rate": 2.8759798454446314e-06, - "loss": 0.6515, - "num_input_tokens_seen": 66154125, - "step": 3119 - }, - { - "epoch": 0.3751578187939638, - "flos": 17106895872240.0, - "grad_norm": 2.3169990282842834, - "learning_rate": 2.8752794991803173e-06, - "loss": 0.7974, - "num_input_tokens_seen": 66171530, - "step": 3120 - }, - { - "epoch": 0.37527806168460287, - "flos": 10621607833680.0, - "grad_norm": 3.477307186307292, - "learning_rate": 2.8745790201367976e-06, - "loss": 0.7414, - "num_input_tokens_seen": 66187005, - "step": 3121 - }, - { - "epoch": 0.375398304575242, - "flos": 19343394243240.0, - "grad_norm": 3.0834258469457767, - "learning_rate": 2.8738784084203373e-06, - "loss": 0.8449, - "num_input_tokens_seen": 66206800, - "step": 3122 - }, - { - "epoch": 0.3755185474658811, - "flos": 16269724142280.0, - "grad_norm": 3.4557358205808364, - "learning_rate": 2.873177664137216e-06, - "loss": 0.7708, - "num_input_tokens_seen": 66227450, - "step": 3123 - }, - { - "epoch": 0.37563879035652015, - "flos": 22617092191920.0, - "grad_norm": 2.3780223287908857, - "learning_rate": 2.8724767873937384e-06, - "loss": 0.6718, - "num_input_tokens_seen": 66251290, - "step": 3124 - }, - { - "epoch": 0.37575903324715926, - "flos": 15186436645560.0, - "grad_norm": 3.7555201273594516, - "learning_rate": 2.871775778296225e-06, - "loss": 0.8545, - "num_input_tokens_seen": 66268100, - "step": 3125 - }, - { - "epoch": 0.37587927613779837, - 
"flos": 13650709693680.0, - "grad_norm": 5.6469077839240125, - "learning_rate": 2.8710746369510196e-06, - "loss": 0.7693, - "num_input_tokens_seen": 66285805, - "step": 3126 - }, - { - "epoch": 0.3759995190284374, - "flos": 9896241117000.0, - "grad_norm": 4.256865173785527, - "learning_rate": 2.8703733634644846e-06, - "loss": 0.8191, - "num_input_tokens_seen": 66300280, - "step": 3127 - }, - { - "epoch": 0.37611976191907653, - "flos": 14973242745840.0, - "grad_norm": 2.959312939421124, - "learning_rate": 2.869671957943002e-06, - "loss": 0.7862, - "num_input_tokens_seen": 66319155, - "step": 3128 - }, - { - "epoch": 0.37624000480971564, - "flos": 15459458886960.0, - "grad_norm": 3.356430537591135, - "learning_rate": 2.8689704204929747e-06, - "loss": 0.7341, - "num_input_tokens_seen": 66338055, - "step": 3129 - }, - { - "epoch": 0.3763602477003547, - "flos": 16512293993520.0, - "grad_norm": 2.810780317355243, - "learning_rate": 2.8682687512208253e-06, - "loss": 0.7918, - "num_input_tokens_seen": 66356785, - "step": 3130 - }, - { - "epoch": 0.3764804905909938, - "flos": 20184840067800.0, - "grad_norm": 3.2924058462481036, - "learning_rate": 2.8675669502329972e-06, - "loss": 0.7886, - "num_input_tokens_seen": 66378035, - "step": 3131 - }, - { - "epoch": 0.3766007334816329, - "flos": 16485387518880.0, - "grad_norm": 6.083261393639896, - "learning_rate": 2.866865017635952e-06, - "loss": 0.8225, - "num_input_tokens_seen": 66395575, - "step": 3132 - }, - { - "epoch": 0.376720976372272, - "flos": 19023049344360.0, - "grad_norm": 2.0064260245136634, - "learning_rate": 2.866162953536174e-06, - "loss": 0.7788, - "num_input_tokens_seen": 66416265, - "step": 3133 - }, - { - "epoch": 0.3768412192629111, - "flos": 13164873472080.0, - "grad_norm": 2.029048842104053, - "learning_rate": 2.8654607580401634e-06, - "loss": 0.736, - "num_input_tokens_seen": 66435720, - "step": 3134 - }, - { - "epoch": 0.3769614621535502, - "flos": 46434500787240.0, - "grad_norm": 0.9092229879948845, - "learning_rate": 2.8647584312544446e-06, - "loss": 0.6764, - "num_input_tokens_seen": 66500645, - "step": 3135 - }, - { - "epoch": 0.37708170504418925, - "flos": 17324680466160.0, - "grad_norm": 1.9345245278884615, - "learning_rate": 2.864055973285559e-06, - "loss": 0.8397, - "num_input_tokens_seen": 66522365, - "step": 3136 - }, - { - "epoch": 0.37720194793482836, - "flos": 17887563556320.0, - "grad_norm": 2.792094487293824, - "learning_rate": 2.8633533842400698e-06, - "loss": 0.8593, - "num_input_tokens_seen": 66542285, - "step": 3137 - }, - { - "epoch": 0.3773221908254674, - "flos": 15352022948160.0, - "grad_norm": 3.3948044573689327, - "learning_rate": 2.862650664224558e-06, - "loss": 0.7614, - "num_input_tokens_seen": 66560855, - "step": 3138 - }, - { - "epoch": 0.37744243371610653, - "flos": 27662881591560.0, - "grad_norm": 2.3301807518459148, - "learning_rate": 2.861947813345627e-06, - "loss": 0.6853, - "num_input_tokens_seen": 66583275, - "step": 3139 - }, - { - "epoch": 0.37756267660674564, - "flos": 19158879775920.0, - "grad_norm": 2.803106600935993, - "learning_rate": 2.8612448317098974e-06, - "loss": 0.6991, - "num_input_tokens_seen": 66603330, - "step": 3140 - }, - { - "epoch": 0.3776829194973847, - "flos": 14191182023520.0, - "grad_norm": 3.0897895817885406, - "learning_rate": 2.8605417194240114e-06, - "loss": 0.8245, - "num_input_tokens_seen": 66621410, - "step": 3141 - }, - { - "epoch": 0.3778031623880238, - "flos": 12676979353080.0, - "grad_norm": 3.6128718623654605, - "learning_rate": 2.8598384765946315e-06, - 
"loss": 0.7702, - "num_input_tokens_seen": 66639785, - "step": 3142 - }, - { - "epoch": 0.3779234052786629, - "flos": 19888109007720.0, - "grad_norm": 2.595534572482028, - "learning_rate": 2.8591351033284377e-06, - "loss": 0.6993, - "num_input_tokens_seen": 66659235, - "step": 3143 - }, - { - "epoch": 0.37804364816930197, - "flos": 14379622325880.0, - "grad_norm": 3.023689200399941, - "learning_rate": 2.8584315997321325e-06, - "loss": 0.8106, - "num_input_tokens_seen": 66677960, - "step": 3144 - }, - { - "epoch": 0.3781638910599411, - "flos": 16613682879960.0, - "grad_norm": 3.614404386255474, - "learning_rate": 2.8577279659124356e-06, - "loss": 0.7617, - "num_input_tokens_seen": 66695355, - "step": 3145 - }, - { - "epoch": 0.3782841339505802, - "flos": 10653358282200.0, - "grad_norm": 7.905733476133274, - "learning_rate": 2.857024201976089e-06, - "loss": 0.8051, - "num_input_tokens_seen": 66712635, - "step": 3146 - }, - { - "epoch": 0.37840437684121925, - "flos": 24100937452080.0, - "grad_norm": 4.914051243901059, - "learning_rate": 2.8563203080298516e-06, - "loss": 0.7131, - "num_input_tokens_seen": 66733130, - "step": 3147 - }, - { - "epoch": 0.37852461973185836, - "flos": 13407601623120.0, - "grad_norm": 7.100366405649952, - "learning_rate": 2.855616284180505e-06, - "loss": 0.8692, - "num_input_tokens_seen": 66749900, - "step": 3148 - }, - { - "epoch": 0.37864486262249747, - "flos": 43854518577960.0, - "grad_norm": 0.9354139006530602, - "learning_rate": 2.8549121305348477e-06, - "loss": 0.7195, - "num_input_tokens_seen": 66809405, - "step": 3149 - }, - { - "epoch": 0.3787651055131365, - "flos": 17102685097560.0, - "grad_norm": 3.300927442213743, - "learning_rate": 2.8542078471997006e-06, - "loss": 0.8007, - "num_input_tokens_seen": 66826740, - "step": 3150 - }, - { - "epoch": 0.37888534840377563, - "flos": 18018866613600.0, - "grad_norm": 2.848290476140839, - "learning_rate": 2.8535034342819013e-06, - "loss": 0.7428, - "num_input_tokens_seen": 66843870, - "step": 3151 - }, - { - "epoch": 0.37900559129441475, - "flos": 17564907480360.0, - "grad_norm": 4.401442915402724, - "learning_rate": 2.85279889188831e-06, - "loss": 0.7063, - "num_input_tokens_seen": 66863965, - "step": 3152 - }, - { - "epoch": 0.3791258341850538, - "flos": 18051851800560.0, - "grad_norm": 4.672717372518117, - "learning_rate": 2.852094220125805e-06, - "loss": 0.7848, - "num_input_tokens_seen": 66883195, - "step": 3153 - }, - { - "epoch": 0.3792460770756929, - "flos": 12705785425320.0, - "grad_norm": 2.9702873666373213, - "learning_rate": 2.8513894191012846e-06, - "loss": 0.6899, - "num_input_tokens_seen": 66901895, - "step": 3154 - }, - { - "epoch": 0.37936631996633197, - "flos": 17727581066640.0, - "grad_norm": 3.541341624584412, - "learning_rate": 2.8506844889216664e-06, - "loss": 0.7677, - "num_input_tokens_seen": 66921000, - "step": 3155 - }, - { - "epoch": 0.3794865628569711, - "flos": 51844131379440.0, - "grad_norm": 0.929752091159505, - "learning_rate": 2.849979429693887e-06, - "loss": 0.6623, - "num_input_tokens_seen": 66981705, - "step": 3156 - }, - { - "epoch": 0.3796068057476102, - "flos": 11491036571520.0, - "grad_norm": 3.8013846534931814, - "learning_rate": 2.8492742415249042e-06, - "loss": 0.7264, - "num_input_tokens_seen": 66999070, - "step": 3157 - }, - { - "epoch": 0.37972704863824924, - "flos": 18456651998640.0, - "grad_norm": 2.3088819707918393, - "learning_rate": 2.848568924521694e-06, - "loss": 0.743, - "num_input_tokens_seen": 67019570, - "step": 3158 - }, - { - "epoch": 0.37984729152888835, - 
"flos": 19210128268440.0, - "grad_norm": 2.4409219246160934, - "learning_rate": 2.8478634787912526e-06, - "loss": 0.7157, - "num_input_tokens_seen": 67037345, - "step": 3159 - }, - { - "epoch": 0.37996753441952746, - "flos": 18780479493120.0, - "grad_norm": 2.606651392336661, - "learning_rate": 2.847157904440596e-06, - "loss": 0.7583, - "num_input_tokens_seen": 67056795, - "step": 3160 - }, - { - "epoch": 0.3800877773101665, - "flos": 14701835162400.0, - "grad_norm": 2.891340507413399, - "learning_rate": 2.846452201576759e-06, - "loss": 0.7378, - "num_input_tokens_seen": 67075890, - "step": 3161 - }, - { - "epoch": 0.38020802020080563, - "flos": 46480715346120.0, - "grad_norm": 0.9173193074905756, - "learning_rate": 2.845746370306795e-06, - "loss": 0.6491, - "num_input_tokens_seen": 67140800, - "step": 3162 - }, - { - "epoch": 0.38032826309144474, - "flos": 15568066244280.0, - "grad_norm": 2.7198724293244556, - "learning_rate": 2.84504041073778e-06, - "loss": 0.7645, - "num_input_tokens_seen": 67158935, - "step": 3163 - }, - { - "epoch": 0.3804485059820838, - "flos": 13840638014160.0, - "grad_norm": 2.821349568838892, - "learning_rate": 2.844334322976806e-06, - "loss": 0.7863, - "num_input_tokens_seen": 67178870, - "step": 3164 - }, - { - "epoch": 0.3805687488727229, - "flos": 15971378424240.0, - "grad_norm": 2.6176265519089594, - "learning_rate": 2.8436281071309866e-06, - "loss": 0.8149, - "num_input_tokens_seen": 67197130, - "step": 3165 - }, - { - "epoch": 0.380688991763362, - "flos": 43147320186960.0, - "grad_norm": 4.332464253255566, - "learning_rate": 2.842921763307455e-06, - "loss": 0.5473, - "num_input_tokens_seen": 67259660, - "step": 3166 - }, - { - "epoch": 0.38080923465400107, - "flos": 17425531133280.0, - "grad_norm": 2.119788117526846, - "learning_rate": 2.842215291613361e-06, - "loss": 0.8137, - "num_input_tokens_seen": 67277760, - "step": 3167 - }, - { - "epoch": 0.3809294775446402, - "flos": 40500481124880.0, - "grad_norm": 0.8393126471815691, - "learning_rate": 2.8415086921558774e-06, - "loss": 0.6333, - "num_input_tokens_seen": 67340905, - "step": 3168 - }, - { - "epoch": 0.38104972043527924, - "flos": 18050996981640.0, - "grad_norm": 1.8775139168377186, - "learning_rate": 2.840801965042194e-06, - "loss": 0.7649, - "num_input_tokens_seen": 67360085, - "step": 3169 - }, - { - "epoch": 0.38116996332591835, - "flos": 16728685549200.0, - "grad_norm": 3.0501001748065555, - "learning_rate": 2.840095110379521e-06, - "loss": 0.8187, - "num_input_tokens_seen": 67379325, - "step": 3170 - }, - { - "epoch": 0.38129020621655746, - "flos": 44840813431320.0, - "grad_norm": 0.769483806871017, - "learning_rate": 2.8393881282750884e-06, - "loss": 0.5468, - "num_input_tokens_seen": 67441875, - "step": 3171 - }, - { - "epoch": 0.3814104491071965, - "flos": 15833521755240.0, - "grad_norm": 3.2202225066839265, - "learning_rate": 2.838681018836144e-06, - "loss": 0.7697, - "num_input_tokens_seen": 67458915, - "step": 3172 - }, - { - "epoch": 0.3815306919978356, - "flos": 13947789013320.0, - "grad_norm": 2.7880327476865925, - "learning_rate": 2.837973782169955e-06, - "loss": 0.7596, - "num_input_tokens_seen": 67477010, - "step": 3173 - }, - { - "epoch": 0.38165093488847474, - "flos": 49451698502280.0, - "grad_norm": 0.8571382022912923, - "learning_rate": 2.8372664183838096e-06, - "loss": 0.6187, - "num_input_tokens_seen": 67539750, - "step": 3174 - }, - { - "epoch": 0.3817711777791138, - "flos": 16346359431360.0, - "grad_norm": 3.280860223780251, - "learning_rate": 2.836558927585015e-06, - 
"loss": 0.6683, - "num_input_tokens_seen": 67556440, - "step": 3175 - }, - { - "epoch": 0.3818914206697529, - "flos": 16701969034320.0, - "grad_norm": 5.859550453102283, - "learning_rate": 2.8358513098808957e-06, - "loss": 0.8071, - "num_input_tokens_seen": 67576475, - "step": 3176 - }, - { - "epoch": 0.382011663560392, - "flos": 17860023882480.0, - "grad_norm": 3.8387532825014965, - "learning_rate": 2.835143565378798e-06, - "loss": 0.7497, - "num_input_tokens_seen": 67596660, - "step": 3177 - }, - { - "epoch": 0.38213190645103107, - "flos": 16081030560240.0, - "grad_norm": 2.38426159430276, - "learning_rate": 2.8344356941860847e-06, - "loss": 0.7603, - "num_input_tokens_seen": 67616010, - "step": 3178 - }, - { - "epoch": 0.3822521493416702, - "flos": 26098000307880.0, - "grad_norm": 9.754116265385212, - "learning_rate": 2.8337276964101403e-06, - "loss": 0.643, - "num_input_tokens_seen": 67636170, - "step": 3179 - }, - { - "epoch": 0.3823723922323093, - "flos": 15406405776720.0, - "grad_norm": 2.7807645909911636, - "learning_rate": 2.833019572158367e-06, - "loss": 0.7389, - "num_input_tokens_seen": 67654325, - "step": 3180 - }, - { - "epoch": 0.38249263512294834, - "flos": 14458030572720.0, - "grad_norm": 2.39794091666184, - "learning_rate": 2.8323113215381872e-06, - "loss": 0.8005, - "num_input_tokens_seen": 67672390, - "step": 3181 - }, - { - "epoch": 0.38261287801358745, - "flos": 15675692142840.0, - "grad_norm": 2.471119672920907, - "learning_rate": 2.831602944657042e-06, - "loss": 0.7389, - "num_input_tokens_seen": 67690190, - "step": 3182 - }, - { - "epoch": 0.38273312090422656, - "flos": 16081030560240.0, - "grad_norm": 3.2596298747198302, - "learning_rate": 2.830894441622391e-06, - "loss": 0.7307, - "num_input_tokens_seen": 67706560, - "step": 3183 - }, - { - "epoch": 0.3828533637948656, - "flos": 18160110898320.0, - "grad_norm": 2.8966083358609938, - "learning_rate": 2.8301858125417134e-06, - "loss": 0.7799, - "num_input_tokens_seen": 67726120, - "step": 3184 - }, - { - "epoch": 0.38297360668550473, - "flos": 16431669549480.0, - "grad_norm": 2.690790247079485, - "learning_rate": 2.8294770575225082e-06, - "loss": 0.7305, - "num_input_tokens_seen": 67745970, - "step": 3185 - }, - { - "epoch": 0.3830938495761438, - "flos": 18238075905720.0, - "grad_norm": 2.3623952166213384, - "learning_rate": 2.828768176672293e-06, - "loss": 0.8268, - "num_input_tokens_seen": 67764805, - "step": 3186 - }, - { - "epoch": 0.3832140924667829, - "flos": 24262154680200.0, - "grad_norm": 2.6054520956762515, - "learning_rate": 2.8280591700986044e-06, - "loss": 0.7082, - "num_input_tokens_seen": 67786390, - "step": 3187 - }, - { - "epoch": 0.383334335357422, - "flos": 23423811531720.0, - "grad_norm": 2.663509725459471, - "learning_rate": 2.827350037908999e-06, - "loss": 0.7328, - "num_input_tokens_seen": 67805550, - "step": 3188 - }, - { - "epoch": 0.38345457824806106, - "flos": 14460468389640.0, - "grad_norm": 3.4486653109664016, - "learning_rate": 2.8266407802110496e-06, - "loss": 0.7733, - "num_input_tokens_seen": 67823525, - "step": 3189 - }, - { - "epoch": 0.3835748211387002, - "flos": 16405048014480.0, - "grad_norm": 2.598137878283274, - "learning_rate": 2.8259313971123515e-06, - "loss": 0.7329, - "num_input_tokens_seen": 67844365, - "step": 3190 - }, - { - "epoch": 0.3836950640293393, - "flos": 18402395809920.0, - "grad_norm": 2.47140350667716, - "learning_rate": 2.8252218887205166e-06, - "loss": 0.76, - "num_input_tokens_seen": 67864775, - "step": 3191 - }, - { - "epoch": 0.38381530691997834, - 
"flos": 15946086607560.0, - "grad_norm": 2.5581172586785716, - "learning_rate": 2.824512255143178e-06, - "loss": 0.7903, - "num_input_tokens_seen": 67883730, - "step": 3192 - }, - { - "epoch": 0.38393554981061745, - "flos": 15541729648920.0, - "grad_norm": 6.003091333663243, - "learning_rate": 2.8238024964879855e-06, - "loss": 0.7774, - "num_input_tokens_seen": 67904345, - "step": 3193 - }, - { - "epoch": 0.38405579270125656, - "flos": 12408642785760.0, - "grad_norm": 3.4360206888839415, - "learning_rate": 2.8230926128626095e-06, - "loss": 0.7451, - "num_input_tokens_seen": 67922560, - "step": 3194 - }, - { - "epoch": 0.3841760355918956, - "flos": 15972803122440.0, - "grad_norm": 2.5533538043477253, - "learning_rate": 2.822382604374738e-06, - "loss": 0.7685, - "num_input_tokens_seen": 67941205, - "step": 3195 - }, - { - "epoch": 0.3842962784825347, - "flos": 18991140596040.0, - "grad_norm": 3.713471328618504, - "learning_rate": 2.8216724711320793e-06, - "loss": 0.6385, - "num_input_tokens_seen": 67960050, - "step": 3196 - }, - { - "epoch": 0.38441652137317384, - "flos": 18564531176880.0, - "grad_norm": 2.154425028415694, - "learning_rate": 2.820962213242361e-06, - "loss": 0.7844, - "num_input_tokens_seen": 67979100, - "step": 3197 - }, - { - "epoch": 0.3845367642638129, - "flos": 13218306501840.0, - "grad_norm": 3.596958486216167, - "learning_rate": 2.8202518308133264e-06, - "loss": 0.8403, - "num_input_tokens_seen": 67996095, - "step": 3198 - }, - { - "epoch": 0.384657007154452, - "flos": 18483843412920.0, - "grad_norm": 2.8813573788309315, - "learning_rate": 2.8195413239527426e-06, - "loss": 0.7257, - "num_input_tokens_seen": 68015555, - "step": 3199 - }, - { - "epoch": 0.38477725004509106, - "flos": 14515326117600.0, - "grad_norm": 2.3290260158738434, - "learning_rate": 2.8188306927683906e-06, - "loss": 0.7964, - "num_input_tokens_seen": 68034745, - "step": 3200 - }, - { - "epoch": 0.38489749293573017, - "flos": 13326280659960.0, - "grad_norm": 4.182829344483617, - "learning_rate": 2.818119937368074e-06, - "loss": 0.7234, - "num_input_tokens_seen": 68053100, - "step": 3201 - }, - { - "epoch": 0.3850177358263693, - "flos": 17862746639040.0, - "grad_norm": 4.343477528760415, - "learning_rate": 2.817409057859613e-06, - "loss": 0.6372, - "num_input_tokens_seen": 68071810, - "step": 3202 - }, - { - "epoch": 0.38513797871700833, - "flos": 12890648152200.0, - "grad_norm": 2.406656404334227, - "learning_rate": 2.8166980543508482e-06, - "loss": 0.7668, - "num_input_tokens_seen": 68087420, - "step": 3203 - }, - { - "epoch": 0.38525822160764744, - "flos": 18862212035760.0, - "grad_norm": 23.25733225066815, - "learning_rate": 2.815986926949638e-06, - "loss": 0.7693, - "num_input_tokens_seen": 68105640, - "step": 3204 - }, - { - "epoch": 0.38537846449828655, - "flos": 14757832648920.0, - "grad_norm": 3.3097528546313293, - "learning_rate": 2.8152756757638597e-06, - "loss": 0.7818, - "num_input_tokens_seen": 68123860, - "step": 3205 - }, - { - "epoch": 0.3854987073889256, - "flos": 16863819461640.0, - "grad_norm": 3.2113506116359347, - "learning_rate": 2.8145643009014093e-06, - "loss": 0.8211, - "num_input_tokens_seen": 68142075, - "step": 3206 - }, - { - "epoch": 0.3856189502795647, - "flos": 14755173212280.0, - "grad_norm": 3.0073771372216354, - "learning_rate": 2.813852802470202e-06, - "loss": 0.776, - "num_input_tokens_seen": 68159690, - "step": 3207 - }, - { - "epoch": 0.38573919317020383, - "flos": 18834799001760.0, - "grad_norm": 4.4634119977184445, - "learning_rate": 2.8131411805781717e-06, - 
"loss": 0.7017, - "num_input_tokens_seen": 68179535, - "step": 3208 - }, - { - "epoch": 0.3858594360608429, - "flos": 21882385787040.0, - "grad_norm": 6.025834414369872, - "learning_rate": 2.8124294353332707e-06, - "loss": 0.6168, - "num_input_tokens_seen": 68197930, - "step": 3209 - }, - { - "epoch": 0.385979678951482, - "flos": 18159699318840.0, - "grad_norm": 2.3593720695022675, - "learning_rate": 2.8117175668434713e-06, - "loss": 0.7531, - "num_input_tokens_seen": 68217310, - "step": 3210 - }, - { - "epoch": 0.3860999218421211, - "flos": 15567591344880.0, - "grad_norm": 4.578512858863596, - "learning_rate": 2.811005575216762e-06, - "loss": 0.6569, - "num_input_tokens_seen": 68235745, - "step": 3211 - }, - { - "epoch": 0.38622016473276016, - "flos": 17972177155320.0, - "grad_norm": 2.369700391639516, - "learning_rate": 2.8102934605611513e-06, - "loss": 0.7719, - "num_input_tokens_seen": 68257100, - "step": 3212 - }, - { - "epoch": 0.3863404076233993, - "flos": 15027593914440.0, - "grad_norm": 4.398171669226023, - "learning_rate": 2.8095812229846665e-06, - "loss": 0.6539, - "num_input_tokens_seen": 68276780, - "step": 3213 - }, - { - "epoch": 0.3864606505140384, - "flos": 16350918465600.0, - "grad_norm": 5.472048740036046, - "learning_rate": 2.808868862595355e-06, - "loss": 0.6748, - "num_input_tokens_seen": 68296745, - "step": 3214 - }, - { - "epoch": 0.38658089340467744, - "flos": 18835653820680.0, - "grad_norm": 4.025197771793429, - "learning_rate": 2.8081563795012795e-06, - "loss": 0.7664, - "num_input_tokens_seen": 68316090, - "step": 3215 - }, - { - "epoch": 0.38670113629531655, - "flos": 24829280205000.0, - "grad_norm": 2.83793246296801, - "learning_rate": 2.807443773810524e-06, - "loss": 0.7211, - "num_input_tokens_seen": 68337070, - "step": 3216 - }, - { - "epoch": 0.3868213791859556, - "flos": 17080084377480.0, - "grad_norm": 11.296969549223975, - "learning_rate": 2.80673104563119e-06, - "loss": 0.8737, - "num_input_tokens_seen": 68357415, - "step": 3217 - }, - { - "epoch": 0.3869416220765947, - "flos": 13461034652880.0, - "grad_norm": 2.0630087947389875, - "learning_rate": 2.8060181950713976e-06, - "loss": 0.7656, - "num_input_tokens_seen": 68373925, - "step": 3218 - }, - { - "epoch": 0.3870618649672338, - "flos": 11382650833920.0, - "grad_norm": 3.4380197305518148, - "learning_rate": 2.805305222239286e-06, - "loss": 0.7886, - "num_input_tokens_seen": 68390900, - "step": 3219 - }, - { - "epoch": 0.3871821078578729, - "flos": 17214965010240.0, - "grad_norm": 4.348395847637828, - "learning_rate": 2.8045921272430118e-06, - "loss": 0.7178, - "num_input_tokens_seen": 68410300, - "step": 3220 - }, - { - "epoch": 0.387302350748512, - "flos": 12969246358800.0, - "grad_norm": 9.098166530477918, - "learning_rate": 2.803878910190753e-06, - "loss": 0.7546, - "num_input_tokens_seen": 68426940, - "step": 3221 - }, - { - "epoch": 0.3874225936391511, - "flos": 8325534400680.0, - "grad_norm": 7.149409727260159, - "learning_rate": 2.8031655711907017e-06, - "loss": 0.8065, - "num_input_tokens_seen": 68440365, - "step": 3222 - }, - { - "epoch": 0.38754283652979016, - "flos": 16054092425640.0, - "grad_norm": 3.6792278286151237, - "learning_rate": 2.8024521103510723e-06, - "loss": 0.7839, - "num_input_tokens_seen": 68456855, - "step": 3223 - }, - { - "epoch": 0.38766307942042927, - "flos": 15484434104040.0, - "grad_norm": 2.979664385285361, - "learning_rate": 2.8017385277800952e-06, - "loss": 0.7388, - "num_input_tokens_seen": 68474930, - "step": 3224 - }, - { - "epoch": 0.3877833223110684, - 
"flos": 20103740724360.0, - "grad_norm": 3.97771684421159, - "learning_rate": 2.8010248235860213e-06, - "loss": 0.7195, - "num_input_tokens_seen": 68494765, - "step": 3225 - }, - { - "epoch": 0.38790356520170743, - "flos": 47554477686240.0, - "grad_norm": 0.9318304608227468, - "learning_rate": 2.8003109978771192e-06, - "loss": 0.6606, - "num_input_tokens_seen": 68555650, - "step": 3226 - }, - { - "epoch": 0.38802380809234654, - "flos": 16350158626560.0, - "grad_norm": 7.377533041228217, - "learning_rate": 2.799597050761674e-06, - "loss": 0.778, - "num_input_tokens_seen": 68571575, - "step": 3227 - }, - { - "epoch": 0.38814405098298566, - "flos": 18507742191360.0, - "grad_norm": 5.8404531715879475, - "learning_rate": 2.7988829823479924e-06, - "loss": 0.7787, - "num_input_tokens_seen": 68589685, - "step": 3228 - }, - { - "epoch": 0.3882642938736247, - "flos": 13756562634480.0, - "grad_norm": 3.6453774012720657, - "learning_rate": 2.7981687927443976e-06, - "loss": 0.6314, - "num_input_tokens_seen": 68606205, - "step": 3229 - }, - { - "epoch": 0.3883845367642638, - "flos": 15837669210000.0, - "grad_norm": 4.864048712662157, - "learning_rate": 2.797454482059231e-06, - "loss": 0.8365, - "num_input_tokens_seen": 68626080, - "step": 3230 - }, - { - "epoch": 0.3885047796549029, - "flos": 15026992375200.0, - "grad_norm": 3.117261536753917, - "learning_rate": 2.7967400504008537e-06, - "loss": 0.8263, - "num_input_tokens_seen": 68645100, - "step": 3231 - }, - { - "epoch": 0.388625022545542, - "flos": 47421270540000.0, - "grad_norm": 0.8414785752518478, - "learning_rate": 2.7960254978776456e-06, - "loss": 0.6169, - "num_input_tokens_seen": 68706910, - "step": 3232 - }, - { - "epoch": 0.3887452654361811, - "flos": 13218654761400.0, - "grad_norm": 2.825869677876234, - "learning_rate": 2.7953108245980006e-06, - "loss": 0.7835, - "num_input_tokens_seen": 68725145, - "step": 3233 - }, - { - "epoch": 0.38886550832682015, - "flos": 18296827808760.0, - "grad_norm": 2.389930474614781, - "learning_rate": 2.7945960306703365e-06, - "loss": 0.7236, - "num_input_tokens_seen": 68747850, - "step": 3234 - }, - { - "epoch": 0.38898575121745926, - "flos": 19942713456000.0, - "grad_norm": 5.387213027993638, - "learning_rate": 2.7938811162030865e-06, - "loss": 0.639, - "num_input_tokens_seen": 68767835, - "step": 3235 - }, - { - "epoch": 0.3891059941080984, - "flos": 21099691865520.0, - "grad_norm": 2.198489676414198, - "learning_rate": 2.793166081304702e-06, - "loss": 0.8077, - "num_input_tokens_seen": 68788050, - "step": 3236 - }, - { - "epoch": 0.38922623699873743, - "flos": 16755845303520.0, - "grad_norm": 5.167804326147104, - "learning_rate": 2.7924509260836543e-06, - "loss": 0.7958, - "num_input_tokens_seen": 68806895, - "step": 3237 - }, - { - "epoch": 0.38934647988937654, - "flos": 14460911629080.0, - "grad_norm": 2.3670900028911257, - "learning_rate": 2.791735650648431e-06, - "loss": 0.6636, - "num_input_tokens_seen": 68825735, - "step": 3238 - }, - { - "epoch": 0.38946672278001565, - "flos": 14023094584080.0, - "grad_norm": 2.5846286008089967, - "learning_rate": 2.791020255107538e-06, - "loss": 0.718, - "num_input_tokens_seen": 68842825, - "step": 3239 - }, - { - "epoch": 0.3895869656706547, - "flos": 18267800116800.0, - "grad_norm": 1.9824456875878944, - "learning_rate": 2.7903047395695023e-06, - "loss": 0.7887, - "num_input_tokens_seen": 68862445, - "step": 3240 - }, - { - "epoch": 0.3897072085612938, - "flos": 17673609817560.0, - "grad_norm": 8.992138289939197, - "learning_rate": 2.789589104142865e-06, - 
"loss": 0.8841, - "num_input_tokens_seen": 68879790, - "step": 3241 - }, - { - "epoch": 0.3898274514519329, - "flos": 12517566742680.0, - "grad_norm": 13.532758738780178, - "learning_rate": 2.7888733489361895e-06, - "loss": 0.7526, - "num_input_tokens_seen": 68897925, - "step": 3242 - }, - { - "epoch": 0.389947694342572, - "flos": 48715599059160.0, - "grad_norm": 0.7921957749154654, - "learning_rate": 2.788157474058054e-06, - "loss": 0.6423, - "num_input_tokens_seen": 68959920, - "step": 3243 - }, - { - "epoch": 0.3900679372332111, - "flos": 18863383454280.0, - "grad_norm": 5.9856503873081355, - "learning_rate": 2.7874414796170555e-06, - "loss": 0.6873, - "num_input_tokens_seen": 68981130, - "step": 3244 - }, - { - "epoch": 0.3901881801238502, - "flos": 8547118189800.0, - "grad_norm": 5.220171158129397, - "learning_rate": 2.7867253657218113e-06, - "loss": 0.8105, - "num_input_tokens_seen": 68994740, - "step": 3245 - }, - { - "epoch": 0.39030842301448926, - "flos": 20023717819560.0, - "grad_norm": 4.516830758542803, - "learning_rate": 2.7860091324809544e-06, - "loss": 0.7105, - "num_input_tokens_seen": 69015520, - "step": 3246 - }, - { - "epoch": 0.39042866590512837, - "flos": 19915996941120.0, - "grad_norm": 2.982073790416947, - "learning_rate": 2.7852927800031377e-06, - "loss": 0.7991, - "num_input_tokens_seen": 69035405, - "step": 3247 - }, - { - "epoch": 0.3905489087957674, - "flos": 21805053978840.0, - "grad_norm": 3.1446331505228375, - "learning_rate": 2.7845763083970298e-06, - "loss": 0.8171, - "num_input_tokens_seen": 69055525, - "step": 3248 - }, - { - "epoch": 0.39066915168640653, - "flos": 17943751002600.0, - "grad_norm": 2.8385660059640756, - "learning_rate": 2.7838597177713205e-06, - "loss": 0.7967, - "num_input_tokens_seen": 69076335, - "step": 3249 - }, - { - "epoch": 0.39078939457704565, - "flos": 15027910514040.0, - "grad_norm": 2.719154941612666, - "learning_rate": 2.7831430082347143e-06, - "loss": 0.7181, - "num_input_tokens_seen": 69095260, - "step": 3250 - }, - { - "epoch": 0.3909096374676847, - "flos": 16674334380600.0, - "grad_norm": 2.712490549378698, - "learning_rate": 2.7824261798959373e-06, - "loss": 0.8049, - "num_input_tokens_seen": 69113160, - "step": 3251 - }, - { - "epoch": 0.3910298803583238, - "flos": 16837039626840.0, - "grad_norm": 3.144039139239495, - "learning_rate": 2.78170923286373e-06, - "loss": 0.7798, - "num_input_tokens_seen": 69132480, - "step": 3252 - }, - { - "epoch": 0.3911501232489629, - "flos": 17808807049920.0, - "grad_norm": 14.700174260566289, - "learning_rate": 2.780992167246854e-06, - "loss": 0.8276, - "num_input_tokens_seen": 69149725, - "step": 3253 - }, - { - "epoch": 0.391270366139602, - "flos": 44864648889840.0, - "grad_norm": 1.0143764744511088, - "learning_rate": 2.7802749831540883e-06, - "loss": 0.7726, - "num_input_tokens_seen": 69208345, - "step": 3254 - }, - { - "epoch": 0.3913906090302411, - "flos": 15756791486280.0, - "grad_norm": 6.353046089128876, - "learning_rate": 2.7795576806942268e-06, - "loss": 0.8035, - "num_input_tokens_seen": 69226870, - "step": 3255 - }, - { - "epoch": 0.3915108519208802, - "flos": 36700901596560.0, - "grad_norm": 0.802972762822867, - "learning_rate": 2.778840259976085e-06, - "loss": 0.5694, - "num_input_tokens_seen": 69281820, - "step": 3256 - }, - { - "epoch": 0.39163109481151925, - "flos": 12028754484840.0, - "grad_norm": 3.2849434546680114, - "learning_rate": 2.778122721108495e-06, - "loss": 0.7535, - "num_input_tokens_seen": 69299770, - "step": 3257 - }, - { - "epoch": 0.39175133770215836, - 
"flos": 19104813546960.0, - "grad_norm": 2.9529370444898926, - "learning_rate": 2.7774050642003076e-06, - "loss": 0.8666, - "num_input_tokens_seen": 69320300, - "step": 3258 - }, - { - "epoch": 0.3918715805927975, - "flos": 15999994536720.0, - "grad_norm": 4.07580874790058, - "learning_rate": 2.7766872893603896e-06, - "loss": 0.9246, - "num_input_tokens_seen": 69339995, - "step": 3259 - }, - { - "epoch": 0.39199182348343653, - "flos": 14892934901400.0, - "grad_norm": 4.11468279110742, - "learning_rate": 2.7759693966976275e-06, - "loss": 0.713, - "num_input_tokens_seen": 69358220, - "step": 3260 - }, - { - "epoch": 0.39211206637407564, - "flos": 15859605070920.0, - "grad_norm": 2.266809403843219, - "learning_rate": 2.7752513863209242e-06, - "loss": 0.825, - "num_input_tokens_seen": 69376520, - "step": 3261 - }, - { - "epoch": 0.39223230926471475, - "flos": 15403208120760.0, - "grad_norm": 5.903033960305788, - "learning_rate": 2.774533258339203e-06, - "loss": 0.8229, - "num_input_tokens_seen": 69393700, - "step": 3262 - }, - { - "epoch": 0.3923525521553538, - "flos": 12840571078200.0, - "grad_norm": 5.629710450391656, - "learning_rate": 2.7738150128614014e-06, - "loss": 0.7724, - "num_input_tokens_seen": 69410825, - "step": 3263 - }, - { - "epoch": 0.3924727950459929, - "flos": 15027657234360.0, - "grad_norm": 3.0460878578084043, - "learning_rate": 2.7730966499964777e-06, - "loss": 0.8842, - "num_input_tokens_seen": 69427495, - "step": 3264 - }, - { - "epoch": 0.39259303793663197, - "flos": 11814104226960.0, - "grad_norm": 4.104056626343785, - "learning_rate": 2.772378169853408e-06, - "loss": 0.7856, - "num_input_tokens_seen": 69444785, - "step": 3265 - }, - { - "epoch": 0.3927132808272711, - "flos": 12243594702480.0, - "grad_norm": 2.7468908246553383, - "learning_rate": 2.771659572541183e-06, - "loss": 0.7292, - "num_input_tokens_seen": 69462435, - "step": 3266 - }, - { - "epoch": 0.3928335237179102, - "flos": 14812373777280.0, - "grad_norm": 2.978806978578001, - "learning_rate": 2.7709408581688143e-06, - "loss": 0.8539, - "num_input_tokens_seen": 69482140, - "step": 3267 - }, - { - "epoch": 0.39295376660854925, - "flos": 18295054851000.0, - "grad_norm": 4.401330893980327, - "learning_rate": 2.7702220268453307e-06, - "loss": 0.865, - "num_input_tokens_seen": 69502220, - "step": 3268 - }, - { - "epoch": 0.39307400949918836, - "flos": 13650804673560.0, - "grad_norm": 5.141278164538575, - "learning_rate": 2.7695030786797785e-06, - "loss": 0.82, - "num_input_tokens_seen": 69517835, - "step": 3269 - }, - { - "epoch": 0.39319425238982747, - "flos": 16401597078840.0, - "grad_norm": 2.5618505818007615, - "learning_rate": 2.7687840137812206e-06, - "loss": 0.7264, - "num_input_tokens_seen": 69535640, - "step": 3270 - }, - { - "epoch": 0.3933144952804665, - "flos": 48804265133040.0, - "grad_norm": 0.7996069508838667, - "learning_rate": 2.7680648322587395e-06, - "loss": 0.6264, - "num_input_tokens_seen": 69600235, - "step": 3271 - }, - { - "epoch": 0.39343473817110564, - "flos": 11274739995720.0, - "grad_norm": 10.801046843479527, - "learning_rate": 2.7673455342214334e-06, - "loss": 0.7975, - "num_input_tokens_seen": 69616945, - "step": 3272 - }, - { - "epoch": 0.39355498106174475, - "flos": 15595067698800.0, - "grad_norm": 3.082323802605574, - "learning_rate": 2.7666261197784198e-06, - "loss": 0.7368, - "num_input_tokens_seen": 69635480, - "step": 3273 - }, - { - "epoch": 0.3936752239523838, - "flos": 9653164706400.0, - "grad_norm": 4.112329385575025, - "learning_rate": 2.7659065890388336e-06, - 
"loss": 0.7533, - "num_input_tokens_seen": 69651200, - "step": 3274 - }, - { - "epoch": 0.3937954668430229, - "flos": 12246380778960.0, - "grad_norm": 3.0893940170292384, - "learning_rate": 2.7651869421118266e-06, - "loss": 0.8174, - "num_input_tokens_seen": 69667530, - "step": 3275 - }, - { - "epoch": 0.393915709733662, - "flos": 15401403503040.0, - "grad_norm": 2.3649064458041478, - "learning_rate": 2.76446717910657e-06, - "loss": 0.8083, - "num_input_tokens_seen": 69687955, - "step": 3276 - }, - { - "epoch": 0.3940359526243011, - "flos": 12516616943880.0, - "grad_norm": 3.766102234289921, - "learning_rate": 2.763747300132249e-06, - "loss": 0.7407, - "num_input_tokens_seen": 69705115, - "step": 3277 - }, - { - "epoch": 0.3941561955149402, - "flos": 15351453068880.0, - "grad_norm": 1.947229505565528, - "learning_rate": 2.7630273052980704e-06, - "loss": 0.8508, - "num_input_tokens_seen": 69725425, - "step": 3278 - }, - { - "epoch": 0.39427643840557924, - "flos": 13758525552000.0, - "grad_norm": 3.9115507247856063, - "learning_rate": 2.7623071947132554e-06, - "loss": 0.6575, - "num_input_tokens_seen": 69742175, - "step": 3279 - }, - { - "epoch": 0.39439668129621835, - "flos": 17026176448320.0, - "grad_norm": 4.1498129193525095, - "learning_rate": 2.7615869684870458e-06, - "loss": 0.7705, - "num_input_tokens_seen": 69761205, - "step": 3280 - }, - { - "epoch": 0.39451692418685746, - "flos": 19537754958120.0, - "grad_norm": 3.2197680298550484, - "learning_rate": 2.7608666267286986e-06, - "loss": 0.8303, - "num_input_tokens_seen": 69781155, - "step": 3281 - }, - { - "epoch": 0.3946371670774965, - "flos": 13325679120720.0, - "grad_norm": 2.9989803488743587, - "learning_rate": 2.760146169547489e-06, - "loss": 0.8314, - "num_input_tokens_seen": 69797640, - "step": 3282 - }, - { - "epoch": 0.39475740996813563, - "flos": 17727201147120.0, - "grad_norm": 1.6923778113430505, - "learning_rate": 2.75942559705271e-06, - "loss": 0.7477, - "num_input_tokens_seen": 69817095, - "step": 3283 - }, - { - "epoch": 0.39487765285877474, - "flos": 14109671100600.0, - "grad_norm": 2.5716985889990904, - "learning_rate": 2.7587049093536713e-06, - "loss": 0.8665, - "num_input_tokens_seen": 69833145, - "step": 3284 - }, - { - "epoch": 0.3949978957494138, - "flos": 12625097661360.0, - "grad_norm": 2.9991404573596068, - "learning_rate": 2.757984106559701e-06, - "loss": 0.7863, - "num_input_tokens_seen": 69851850, - "step": 3285 - }, - { - "epoch": 0.3951181386400529, - "flos": 26690480969280.0, - "grad_norm": 3.0309079950628877, - "learning_rate": 2.7572631887801446e-06, - "loss": 0.6979, - "num_input_tokens_seen": 69873195, - "step": 3286 - }, - { - "epoch": 0.395238381530692, - "flos": 16916081072880.0, - "grad_norm": 2.3496718806627754, - "learning_rate": 2.7565421561243654e-06, - "loss": 0.7367, - "num_input_tokens_seen": 69891080, - "step": 3287 - }, - { - "epoch": 0.3953586244213311, - "flos": 17831281130160.0, - "grad_norm": 4.153023062230683, - "learning_rate": 2.7558210087017413e-06, - "loss": 0.8023, - "num_input_tokens_seen": 69910735, - "step": 3288 - }, - { - "epoch": 0.3954788673119702, - "flos": 17160297242040.0, - "grad_norm": 2.6586854428451256, - "learning_rate": 2.7550997466216724e-06, - "loss": 0.7311, - "num_input_tokens_seen": 69928250, - "step": 3289 - }, - { - "epoch": 0.3955991102026093, - "flos": 12760009954080.0, - "grad_norm": 2.991798475190116, - "learning_rate": 2.7543783699935714e-06, - "loss": 0.787, - "num_input_tokens_seen": 69946000, - "step": 3290 - }, - { - "epoch": 
0.39571935309324835, - "flos": 13297886167200.0, - "grad_norm": 3.614825076650021, - "learning_rate": 2.753656878926872e-06, - "loss": 0.8374, - "num_input_tokens_seen": 69961600, - "step": 3291 - }, - { - "epoch": 0.39583959598388746, - "flos": 12948450256440.0, - "grad_norm": 2.6414489363544242, - "learning_rate": 2.752935273531023e-06, - "loss": 0.7255, - "num_input_tokens_seen": 69979470, - "step": 3292 - }, - { - "epoch": 0.39595983887452657, - "flos": 14134931257320.0, - "grad_norm": 2.6025979670573602, - "learning_rate": 2.752213553915492e-06, - "loss": 0.7688, - "num_input_tokens_seen": 69997545, - "step": 3293 - }, - { - "epoch": 0.3960800817651656, - "flos": 44725810762080.0, - "grad_norm": 0.8013785390958703, - "learning_rate": 2.751491720189762e-06, - "loss": 0.6752, - "num_input_tokens_seen": 70055375, - "step": 3294 - }, - { - "epoch": 0.39620032465580474, - "flos": 12273445553400.0, - "grad_norm": 3.4177332508155853, - "learning_rate": 2.7507697724633364e-06, - "loss": 0.897, - "num_input_tokens_seen": 70071855, - "step": 3295 - }, - { - "epoch": 0.3963205675464438, - "flos": 50933992424400.0, - "grad_norm": 0.767163657383138, - "learning_rate": 2.7500477108457327e-06, - "loss": 0.5578, - "num_input_tokens_seen": 70123585, - "step": 3296 - }, - { - "epoch": 0.3964408104370829, - "flos": 18808937305800.0, - "grad_norm": 3.1070256391191164, - "learning_rate": 2.7493255354464877e-06, - "loss": 0.7939, - "num_input_tokens_seen": 70141115, - "step": 3297 - }, - { - "epoch": 0.396561053327722, - "flos": 17779779357960.0, - "grad_norm": 3.3767866421241695, - "learning_rate": 2.748603246375156e-06, - "loss": 0.7408, - "num_input_tokens_seen": 70158850, - "step": 3298 - }, - { - "epoch": 0.39668129621836107, - "flos": 15001099019280.0, - "grad_norm": 2.912170122972605, - "learning_rate": 2.7478808437413055e-06, - "loss": 0.6688, - "num_input_tokens_seen": 70177980, - "step": 3299 - }, - { - "epoch": 0.3968015391090002, - "flos": 19833852819000.0, - "grad_norm": 3.288413230620109, - "learning_rate": 2.7471583276545263e-06, - "loss": 0.6415, - "num_input_tokens_seen": 70198360, - "step": 3300 - }, - { - "epoch": 0.3969217819996393, - "flos": 9087590519640.0, - "grad_norm": 2.8791326591665745, - "learning_rate": 2.7464356982244224e-06, - "loss": 0.6821, - "num_input_tokens_seen": 70216080, - "step": 3301 - }, - { - "epoch": 0.39704202489027834, - "flos": 48841809354240.0, - "grad_norm": 0.8247330493786621, - "learning_rate": 2.745712955560617e-06, - "loss": 0.6381, - "num_input_tokens_seen": 70272005, - "step": 3302 - }, - { - "epoch": 0.39716226778091746, - "flos": 12381736311120.0, - "grad_norm": 5.129353604704945, - "learning_rate": 2.7449900997727496e-06, - "loss": 0.7551, - "num_input_tokens_seen": 70289835, - "step": 3303 - }, - { - "epoch": 0.39728251067155657, - "flos": 17187805255920.0, - "grad_norm": 2.699578665601586, - "learning_rate": 2.744267130970476e-06, - "loss": 0.8279, - "num_input_tokens_seen": 70309280, - "step": 3304 - }, - { - "epoch": 0.3974027535621956, - "flos": 15135853012200.0, - "grad_norm": 3.105554274814852, - "learning_rate": 2.7435440492634697e-06, - "loss": 0.7476, - "num_input_tokens_seen": 70328325, - "step": 3305 - }, - { - "epoch": 0.39752299645283473, - "flos": 15756601526520.0, - "grad_norm": 3.3066968648310944, - "learning_rate": 2.7428208547614228e-06, - "loss": 0.6482, - "num_input_tokens_seen": 70347540, - "step": 3306 - }, - { - "epoch": 0.39764323934347384, - "flos": 14028571757160.0, - "grad_norm": 2.715969373774717, - "learning_rate": 
2.742097547574043e-06, - "loss": 0.7385, - "num_input_tokens_seen": 70365485, - "step": 3307 - }, - { - "epoch": 0.3977634822341129, - "flos": 15243700530480.0, - "grad_norm": 2.92982454106567, - "learning_rate": 2.7413741278110544e-06, - "loss": 0.7546, - "num_input_tokens_seen": 70383895, - "step": 3308 - }, - { - "epoch": 0.397883725124752, - "flos": 29257455426360.0, - "grad_norm": 3.214230225099224, - "learning_rate": 2.7406505955822016e-06, - "loss": 0.6649, - "num_input_tokens_seen": 70404640, - "step": 3309 - }, - { - "epoch": 0.39800396801539106, - "flos": 12674509876200.0, - "grad_norm": 4.693504048696129, - "learning_rate": 2.7399269509972415e-06, - "loss": 0.6453, - "num_input_tokens_seen": 70418515, - "step": 3310 - }, - { - "epoch": 0.3981242109060302, - "flos": 14023696123320.0, - "grad_norm": 9.284328374036196, - "learning_rate": 2.7392031941659514e-06, - "loss": 0.8258, - "num_input_tokens_seen": 70436080, - "step": 3311 - }, - { - "epoch": 0.3982444537966693, - "flos": 17992055118840.0, - "grad_norm": 2.457021934600783, - "learning_rate": 2.7384793251981244e-06, - "loss": 0.8447, - "num_input_tokens_seen": 70454785, - "step": 3312 - }, - { - "epoch": 0.39836469668730834, - "flos": 19213199284560.0, - "grad_norm": 2.3834348772553935, - "learning_rate": 2.737755344203571e-06, - "loss": 0.79, - "num_input_tokens_seen": 70474455, - "step": 3313 - }, - { - "epoch": 0.39848493957794745, - "flos": 20265686131560.0, - "grad_norm": 2.942107949014581, - "learning_rate": 2.7370312512921186e-06, - "loss": 0.7813, - "num_input_tokens_seen": 70495955, - "step": 3314 - }, - { - "epoch": 0.39860518246858656, - "flos": 8871040664160.0, - "grad_norm": 4.19685889359577, - "learning_rate": 2.736307046573611e-06, - "loss": 0.745, - "num_input_tokens_seen": 70511545, - "step": 3315 - }, - { - "epoch": 0.3987254253592256, - "flos": 16374722264160.0, - "grad_norm": 2.11581725651434, - "learning_rate": 2.73558273015791e-06, - "loss": 0.8109, - "num_input_tokens_seen": 70531095, - "step": 3316 - }, - { - "epoch": 0.3988456682498647, - "flos": 17215154970000.0, - "grad_norm": 37.17042346892721, - "learning_rate": 2.734858302154894e-06, - "loss": 0.6856, - "num_input_tokens_seen": 70552315, - "step": 3317 - }, - { - "epoch": 0.39896591114050384, - "flos": 14028983336640.0, - "grad_norm": 4.670097497743247, - "learning_rate": 2.734133762674457e-06, - "loss": 0.7345, - "num_input_tokens_seen": 70571625, - "step": 3318 - }, - { - "epoch": 0.3990861540311429, - "flos": 20831576917920.0, - "grad_norm": 2.581222892415688, - "learning_rate": 2.7334091118265124e-06, - "loss": 0.689, - "num_input_tokens_seen": 70593240, - "step": 3319 - }, - { - "epoch": 0.399206396921782, - "flos": 45524904542880.0, - "grad_norm": 0.6590063474708352, - "learning_rate": 2.732684349720989e-06, - "loss": 0.5798, - "num_input_tokens_seen": 70660920, - "step": 3320 - }, - { - "epoch": 0.3993266398124211, - "flos": 20590241805120.0, - "grad_norm": 3.1728733539860317, - "learning_rate": 2.7319594764678318e-06, - "loss": 0.7262, - "num_input_tokens_seen": 70682740, - "step": 3321 - }, - { - "epoch": 0.39944688270306017, - "flos": 17001739450560.0, - "grad_norm": 1.9906276205815143, - "learning_rate": 2.7312344921770044e-06, - "loss": 0.812, - "num_input_tokens_seen": 70704160, - "step": 3322 - }, - { - "epoch": 0.3995671255936993, - "flos": 14163325750080.0, - "grad_norm": 2.5098320265717433, - "learning_rate": 2.7305093969584857e-06, - "loss": 0.7676, - "num_input_tokens_seen": 70722705, - "step": 3323 - }, - { - "epoch": 
0.3996873684843384, - "flos": 17458389680400.0, - "grad_norm": 2.7345519090372, - "learning_rate": 2.729784190922272e-06, - "loss": 0.7772, - "num_input_tokens_seen": 70743860, - "step": 3324 - }, - { - "epoch": 0.39980761137497745, - "flos": 49091091117000.0, - "grad_norm": 0.7742958315327572, - "learning_rate": 2.729058874178378e-06, - "loss": 0.5847, - "num_input_tokens_seen": 70814260, - "step": 3325 - }, - { - "epoch": 0.39992785426561656, - "flos": 20941513993560.0, - "grad_norm": 2.520789111881839, - "learning_rate": 2.7283334468368315e-06, - "loss": 0.6819, - "num_input_tokens_seen": 70835260, - "step": 3326 - }, - { - "epoch": 0.4000480971562556, - "flos": 10950785861400.0, - "grad_norm": 3.2285862226665687, - "learning_rate": 2.72760790900768e-06, - "loss": 0.7137, - "num_input_tokens_seen": 70851565, - "step": 3327 - }, - { - "epoch": 0.4001683400468947, - "flos": 17512392589440.0, - "grad_norm": 2.8219226245937956, - "learning_rate": 2.7268822608009875e-06, - "loss": 0.7841, - "num_input_tokens_seen": 70870660, - "step": 3328 - }, - { - "epoch": 0.40028858293753383, - "flos": 17835745184520.0, - "grad_norm": 2.8391234941916488, - "learning_rate": 2.726156502326834e-06, - "loss": 0.7572, - "num_input_tokens_seen": 70891680, - "step": 3329 - }, - { - "epoch": 0.4004088258281729, - "flos": 49245374813880.0, - "grad_norm": 0.7077154989911975, - "learning_rate": 2.725430633695316e-06, - "loss": 0.6221, - "num_input_tokens_seen": 70954480, - "step": 3330 - }, - { - "epoch": 0.400529068718812, - "flos": 43182991979160.0, - "grad_norm": 0.932822862883854, - "learning_rate": 2.7247046550165485e-06, - "loss": 0.6163, - "num_input_tokens_seen": 71006325, - "step": 3331 - }, - { - "epoch": 0.4006493116094511, - "flos": 18594603647520.0, - "grad_norm": 2.7851636974775515, - "learning_rate": 2.7239785664006606e-06, - "loss": 0.7465, - "num_input_tokens_seen": 71029585, - "step": 3332 - }, - { - "epoch": 0.40076955450009016, - "flos": 44428604802600.0, - "grad_norm": 0.8132452114889485, - "learning_rate": 2.7232523679578002e-06, - "loss": 0.6598, - "num_input_tokens_seen": 71092385, - "step": 3333 - }, - { - "epoch": 0.4008897973907293, - "flos": 12110961926880.0, - "grad_norm": 4.326452930698293, - "learning_rate": 2.7225260597981295e-06, - "loss": 0.7832, - "num_input_tokens_seen": 71109810, - "step": 3334 - }, - { - "epoch": 0.4010100402813684, - "flos": 11193419032560.0, - "grad_norm": 3.813944558551307, - "learning_rate": 2.721799642031831e-06, - "loss": 0.7704, - "num_input_tokens_seen": 71125700, - "step": 3335 - }, - { - "epoch": 0.40113028317200744, - "flos": 9655349243640.0, - "grad_norm": 2.4194472419361515, - "learning_rate": 2.721073114769101e-06, - "loss": 0.7546, - "num_input_tokens_seen": 71143095, - "step": 3336 - }, - { - "epoch": 0.40125052606264655, - "flos": 15108914877600.0, - "grad_norm": 2.4768610151629096, - "learning_rate": 2.7203464781201523e-06, - "loss": 0.7362, - "num_input_tokens_seen": 71162130, - "step": 3337 - }, - { - "epoch": 0.40137076895328566, - "flos": 18024407106600.0, - "grad_norm": 4.67735229715688, - "learning_rate": 2.719619732195215e-06, - "loss": 0.7561, - "num_input_tokens_seen": 71183490, - "step": 3338 - }, - { - "epoch": 0.4014910118439247, - "flos": 17727612726600.0, - "grad_norm": 1.955491364333442, - "learning_rate": 2.7188928771045377e-06, - "loss": 0.7182, - "num_input_tokens_seen": 71204530, - "step": 3339 - }, - { - "epoch": 0.4016112547345638, - "flos": 19591599567360.0, - "grad_norm": 2.4129395079636367, - "learning_rate": 
2.7181659129583815e-06, - "loss": 0.7857, - "num_input_tokens_seen": 71223840, - "step": 3340 - }, - { - "epoch": 0.4017314976252029, - "flos": 15567717984720.0, - "grad_norm": 3.017497375790546, - "learning_rate": 2.7174388398670276e-06, - "loss": 0.7514, - "num_input_tokens_seen": 71242740, - "step": 3341 - }, - { - "epoch": 0.401851740515842, - "flos": 18673645093560.0, - "grad_norm": 2.4248650582119224, - "learning_rate": 2.716711657940773e-06, - "loss": 0.8986, - "num_input_tokens_seen": 71263470, - "step": 3342 - }, - { - "epoch": 0.4019719834064811, - "flos": 40813607552880.0, - "grad_norm": 0.8563613763476401, - "learning_rate": 2.7159843672899284e-06, - "loss": 0.5948, - "num_input_tokens_seen": 71327390, - "step": 3343 - }, - { - "epoch": 0.40209222629712016, - "flos": 13266705597960.0, - "grad_norm": 4.45150588198079, - "learning_rate": 2.715256968024825e-06, - "loss": 0.7938, - "num_input_tokens_seen": 71344185, - "step": 3344 - }, - { - "epoch": 0.40221246918775927, - "flos": 19026436960080.0, - "grad_norm": 2.766168659460552, - "learning_rate": 2.7145294602558083e-06, - "loss": 0.82, - "num_input_tokens_seen": 71364615, - "step": 3345 - }, - { - "epoch": 0.4023327120783984, - "flos": 24856186679640.0, - "grad_norm": 3.8717430948198457, - "learning_rate": 2.713801844093241e-06, - "loss": 0.6808, - "num_input_tokens_seen": 71385485, - "step": 3346 - }, - { - "epoch": 0.40245295496903744, - "flos": 19720211528040.0, - "grad_norm": 3.1154526397871685, - "learning_rate": 2.7130741196475014e-06, - "loss": 0.8504, - "num_input_tokens_seen": 71403335, - "step": 3347 - }, - { - "epoch": 0.40257319785967655, - "flos": 27123485700360.0, - "grad_norm": 3.0438002501927963, - "learning_rate": 2.7123462870289848e-06, - "loss": 0.7782, - "num_input_tokens_seen": 71423105, - "step": 3348 - }, - { - "epoch": 0.40269344075031566, - "flos": 17836188423960.0, - "grad_norm": 2.721593838348231, - "learning_rate": 2.711618346348102e-06, - "loss": 0.7899, - "num_input_tokens_seen": 71443350, - "step": 3349 - }, - { - "epoch": 0.4028136836409547, - "flos": 10462100243400.0, - "grad_norm": 2.2747181434836494, - "learning_rate": 2.7108902977152825e-06, - "loss": 0.6172, - "num_input_tokens_seen": 71460970, - "step": 3350 - }, - { - "epoch": 0.4029339265315938, - "flos": 19154922280920.0, - "grad_norm": 5.602996685135348, - "learning_rate": 2.7101621412409704e-06, - "loss": 0.7348, - "num_input_tokens_seen": 71480175, - "step": 3351 - }, - { - "epoch": 0.40305416942223293, - "flos": 17024118550920.0, - "grad_norm": 4.360470432485252, - "learning_rate": 2.7094338770356256e-06, - "loss": 0.8401, - "num_input_tokens_seen": 71498980, - "step": 3352 - }, - { - "epoch": 0.403174412312872, - "flos": 19861360832880.0, - "grad_norm": 2.7618818221447143, - "learning_rate": 2.708705505209726e-06, - "loss": 0.6223, - "num_input_tokens_seen": 71519475, - "step": 3353 - }, - { - "epoch": 0.4032946552035111, - "flos": 15913449680160.0, - "grad_norm": 3.180687899290861, - "learning_rate": 2.7079770258737646e-06, - "loss": 0.897, - "num_input_tokens_seen": 71537105, - "step": 3354 - }, - { - "epoch": 0.4034148980941502, - "flos": 12648331580640.0, - "grad_norm": 3.47772238595527, - "learning_rate": 2.707248439138251e-06, - "loss": 0.7266, - "num_input_tokens_seen": 71553060, - "step": 3355 - }, - { - "epoch": 0.40353514098478926, - "flos": 16107842055000.0, - "grad_norm": 6.145223434272257, - "learning_rate": 2.7065197451137114e-06, - "loss": 0.6341, - "num_input_tokens_seen": 71574160, - "step": 3356 - }, - { - "epoch": 
0.4036553838754284, - "flos": 10356500582280.0, - "grad_norm": 5.99001212091764, - "learning_rate": 2.7057909439106894e-06, - "loss": 0.6648, - "num_input_tokens_seen": 71591735, - "step": 3357 - }, - { - "epoch": 0.40377562676606743, - "flos": 18159889278600.0, - "grad_norm": 2.8800266937502275, - "learning_rate": 2.7050620356397417e-06, - "loss": 0.769, - "num_input_tokens_seen": 71610405, - "step": 3358 - }, - { - "epoch": 0.40389586965670654, - "flos": 17620113467880.0, - "grad_norm": 2.4528798752307486, - "learning_rate": 2.7043330204114437e-06, - "loss": 0.7024, - "num_input_tokens_seen": 71628835, - "step": 3359 - }, - { - "epoch": 0.40401611254734565, - "flos": 11950061298360.0, - "grad_norm": 3.6642200948501196, - "learning_rate": 2.7036038983363862e-06, - "loss": 0.8351, - "num_input_tokens_seen": 71645160, - "step": 3360 - }, - { - "epoch": 0.4041363554379847, - "flos": 17566078898880.0, - "grad_norm": 2.603894262689695, - "learning_rate": 2.702874669525177e-06, - "loss": 0.8266, - "num_input_tokens_seen": 71663360, - "step": 3361 - }, - { - "epoch": 0.4042565983286238, - "flos": 20832400076880.0, - "grad_norm": 3.3706730550250747, - "learning_rate": 2.7021453340884394e-06, - "loss": 0.6739, - "num_input_tokens_seen": 71680805, - "step": 3362 - }, - { - "epoch": 0.40437684121926293, - "flos": 12920245723440.0, - "grad_norm": 3.389029835200179, - "learning_rate": 2.7014158921368125e-06, - "loss": 0.7068, - "num_input_tokens_seen": 71698850, - "step": 3363 - }, - { - "epoch": 0.404497084109902, - "flos": 17588014759800.0, - "grad_norm": 2.711154896047315, - "learning_rate": 2.700686343780953e-06, - "loss": 0.8476, - "num_input_tokens_seen": 71718440, - "step": 3364 - }, - { - "epoch": 0.4046173270005411, - "flos": 16782783438120.0, - "grad_norm": 3.478986876328933, - "learning_rate": 2.699956689131532e-06, - "loss": 0.856, - "num_input_tokens_seen": 71738145, - "step": 3365 - }, - { - "epoch": 0.4047375698911802, - "flos": 15109009857480.0, - "grad_norm": 3.180942447087745, - "learning_rate": 2.699226928299238e-06, - "loss": 0.8229, - "num_input_tokens_seen": 71755885, - "step": 3366 - }, - { - "epoch": 0.40485781278181926, - "flos": 21210515420040.0, - "grad_norm": 29.58921539143052, - "learning_rate": 2.698497061394774e-06, - "loss": 0.7646, - "num_input_tokens_seen": 71774090, - "step": 3367 - }, - { - "epoch": 0.40497805567245837, - "flos": 16944443905680.0, - "grad_norm": 1.9989702434758148, - "learning_rate": 2.6977670885288627e-06, - "loss": 0.7937, - "num_input_tokens_seen": 71795210, - "step": 3368 - }, - { - "epoch": 0.4050982985630975, - "flos": 11868392075640.0, - "grad_norm": 2.5805528710767316, - "learning_rate": 2.6970370098122378e-06, - "loss": 0.7395, - "num_input_tokens_seen": 71811915, - "step": 3369 - }, - { - "epoch": 0.40521854145373654, - "flos": 25315496346120.0, - "grad_norm": 2.4548883599965166, - "learning_rate": 2.6963068253556535e-06, - "loss": 0.8434, - "num_input_tokens_seen": 71833020, - "step": 3370 - }, - { - "epoch": 0.40533878434437565, - "flos": 18558895704000.0, - "grad_norm": 2.8397490473807028, - "learning_rate": 2.6955765352698763e-06, - "loss": 0.8349, - "num_input_tokens_seen": 71852885, - "step": 3371 - }, - { - "epoch": 0.40545902723501476, - "flos": 10923024567840.0, - "grad_norm": 5.219311164334942, - "learning_rate": 2.6948461396656923e-06, - "loss": 0.7152, - "num_input_tokens_seen": 71870015, - "step": 3372 - }, - { - "epoch": 0.4055792701256538, - "flos": 18700583228160.0, - "grad_norm": 2.8541939912773535, - "learning_rate": 
2.6941156386539013e-06, - "loss": 0.7295, - "num_input_tokens_seen": 71889685, - "step": 3373 - }, - { - "epoch": 0.4056995130162929, - "flos": 14299852700760.0, - "grad_norm": 3.0469100558880755, - "learning_rate": 2.6933850323453203e-06, - "loss": 0.7721, - "num_input_tokens_seen": 71907850, - "step": 3374 - }, - { - "epoch": 0.405819755906932, - "flos": 11220388827120.0, - "grad_norm": 3.2836036596663756, - "learning_rate": 2.6926543208507806e-06, - "loss": 0.7354, - "num_input_tokens_seen": 71926250, - "step": 3375 - }, - { - "epoch": 0.4059399987975711, - "flos": 15675692142840.0, - "grad_norm": 3.5860718014236443, - "learning_rate": 2.6919235042811316e-06, - "loss": 0.7675, - "num_input_tokens_seen": 71944755, - "step": 3376 - }, - { - "epoch": 0.4060602416882102, - "flos": 18727014803400.0, - "grad_norm": 3.736061258152817, - "learning_rate": 2.691192582747237e-06, - "loss": 0.737, - "num_input_tokens_seen": 71964105, - "step": 3377 - }, - { - "epoch": 0.40618048457884925, - "flos": 17401822314600.0, - "grad_norm": 3.678369500679984, - "learning_rate": 2.6904615563599765e-06, - "loss": 0.7209, - "num_input_tokens_seen": 71983625, - "step": 3378 - }, - { - "epoch": 0.40630072746948837, - "flos": 12867730832520.0, - "grad_norm": 3.1203047002046356, - "learning_rate": 2.6897304252302477e-06, - "loss": 0.8115, - "num_input_tokens_seen": 72000665, - "step": 3379 - }, - { - "epoch": 0.4064209703601275, - "flos": 44840718451440.0, - "grad_norm": 0.8222185062490582, - "learning_rate": 2.688999189468962e-06, - "loss": 0.5621, - "num_input_tokens_seen": 72056815, - "step": 3380 - }, - { - "epoch": 0.40654121325076653, - "flos": 17592763753800.0, - "grad_norm": 3.7314649919331884, - "learning_rate": 2.6882678491870464e-06, - "loss": 0.7335, - "num_input_tokens_seen": 72076970, - "step": 3381 - }, - { - "epoch": 0.40666145614140564, - "flos": 20049674495400.0, - "grad_norm": 7.315578497372624, - "learning_rate": 2.6875364044954453e-06, - "loss": 0.6936, - "num_input_tokens_seen": 72096920, - "step": 3382 - }, - { - "epoch": 0.40678169903204475, - "flos": 19185627950760.0, - "grad_norm": 3.0468113036711384, - "learning_rate": 2.6868048555051185e-06, - "loss": 0.8043, - "num_input_tokens_seen": 72118170, - "step": 3383 - }, - { - "epoch": 0.4069019419226838, - "flos": 20995770182280.0, - "grad_norm": 4.86241775874629, - "learning_rate": 2.686073202327041e-06, - "loss": 0.8307, - "num_input_tokens_seen": 72136890, - "step": 3384 - }, - { - "epoch": 0.4070221848133229, - "flos": 18484381632240.0, - "grad_norm": 3.4818789175980784, - "learning_rate": 2.6853414450722043e-06, - "loss": 0.7203, - "num_input_tokens_seen": 72156275, - "step": 3385 - }, - { - "epoch": 0.40714242770396203, - "flos": 13434191498160.0, - "grad_norm": 2.6556497232171066, - "learning_rate": 2.684609583851616e-06, - "loss": 0.831, - "num_input_tokens_seen": 72174170, - "step": 3386 - }, - { - "epoch": 0.4072626705946011, - "flos": 22184847299880.0, - "grad_norm": 3.5849247584833375, - "learning_rate": 2.683877618776297e-06, - "loss": 0.7903, - "num_input_tokens_seen": 72196145, - "step": 3387 - }, - { - "epoch": 0.4073829134852402, - "flos": 15972074943360.0, - "grad_norm": 5.166791278607954, - "learning_rate": 2.6831455499572876e-06, - "loss": 0.7257, - "num_input_tokens_seen": 72213800, - "step": 3388 - }, - { - "epoch": 0.40750315637587925, - "flos": 18506729072640.0, - "grad_norm": 2.5575733741832423, - "learning_rate": 2.682413377505641e-06, - "loss": 0.7563, - "num_input_tokens_seen": 72232325, - "step": 3389 - }, - { - 
"epoch": 0.40762339926651836, - "flos": 14401019967480.0, - "grad_norm": 3.5937576957288524, - "learning_rate": 2.6816811015324284e-06, - "loss": 0.751, - "num_input_tokens_seen": 72250095, - "step": 3390 - }, - { - "epoch": 0.40774364215715747, - "flos": 52694659121160.0, - "grad_norm": 0.8039508892451798, - "learning_rate": 2.6809487221487343e-06, - "loss": 0.6122, - "num_input_tokens_seen": 72309300, - "step": 3391 - }, - { - "epoch": 0.4078638850477965, - "flos": 10976235977880.0, - "grad_norm": 14.914688632744525, - "learning_rate": 2.6802162394656605e-06, - "loss": 0.7981, - "num_input_tokens_seen": 72325730, - "step": 3392 - }, - { - "epoch": 0.40798412793843564, - "flos": 17458611300120.0, - "grad_norm": 4.211877721447963, - "learning_rate": 2.679483653594324e-06, - "loss": 0.7045, - "num_input_tokens_seen": 72347220, - "step": 3393 - }, - { - "epoch": 0.40810437082907475, - "flos": 15402764881320.0, - "grad_norm": 3.164747526635993, - "learning_rate": 2.678750964645857e-06, - "loss": 0.7355, - "num_input_tokens_seen": 72366020, - "step": 3394 - }, - { - "epoch": 0.4082246137197138, - "flos": 8190875387640.0, - "grad_norm": 3.051731718638443, - "learning_rate": 2.6780181727314094e-06, - "loss": 0.8295, - "num_input_tokens_seen": 72380645, - "step": 3395 - }, - { - "epoch": 0.4083448566103529, - "flos": 13920755898840.0, - "grad_norm": 3.5162149570100754, - "learning_rate": 2.6772852779621435e-06, - "loss": 0.7637, - "num_input_tokens_seen": 72398225, - "step": 3396 - }, - { - "epoch": 0.408465099500992, - "flos": 17241934804800.0, - "grad_norm": 3.5615641385287455, - "learning_rate": 2.676552280449239e-06, - "loss": 0.8282, - "num_input_tokens_seen": 72417830, - "step": 3397 - }, - { - "epoch": 0.4085853423916311, - "flos": 9248966047560.0, - "grad_norm": 3.0562502870774084, - "learning_rate": 2.6758191803038917e-06, - "loss": 0.7211, - "num_input_tokens_seen": 72436045, - "step": 3398 - }, - { - "epoch": 0.4087055852822702, - "flos": 17836093444080.0, - "grad_norm": 1.9796707983599573, - "learning_rate": 2.6750859776373125e-06, - "loss": 0.8115, - "num_input_tokens_seen": 72455220, - "step": 3399 - }, - { - "epoch": 0.4088258281729093, - "flos": 48950353391640.0, - "grad_norm": 0.7820124529581185, - "learning_rate": 2.674352672560727e-06, - "loss": 0.6072, - "num_input_tokens_seen": 72516385, - "step": 3400 - }, - { - "epoch": 0.40894607106354836, - "flos": 14947032790320.0, - "grad_norm": 3.44072752201294, - "learning_rate": 2.673619265185377e-06, - "loss": 0.7493, - "num_input_tokens_seen": 72535945, - "step": 3401 - }, - { - "epoch": 0.40906631395418747, - "flos": 19834770957840.0, - "grad_norm": 2.119908460044274, - "learning_rate": 2.672885755622521e-06, - "loss": 0.7561, - "num_input_tokens_seen": 72558080, - "step": 3402 - }, - { - "epoch": 0.4091865568448266, - "flos": 18673170194160.0, - "grad_norm": 3.710029442765451, - "learning_rate": 2.67215214398343e-06, - "loss": 0.6784, - "num_input_tokens_seen": 72577815, - "step": 3403 - }, - { - "epoch": 0.40930679973546563, - "flos": 21021853497960.0, - "grad_norm": 7.958400572913824, - "learning_rate": 2.671418430379393e-06, - "loss": 0.754, - "num_input_tokens_seen": 72596220, - "step": 3404 - }, - { - "epoch": 0.40942704262610474, - "flos": 15270353725440.0, - "grad_norm": 2.183613840191569, - "learning_rate": 2.670684614921715e-06, - "loss": 0.8012, - "num_input_tokens_seen": 72614915, - "step": 3405 - }, - { - "epoch": 0.4095472855167438, - "flos": 15812219093520.0, - "grad_norm": 2.773578736913228, - "learning_rate": 
2.6699506977217128e-06, - "loss": 0.6676, - "num_input_tokens_seen": 72634810, - "step": 3406 - }, - { - "epoch": 0.4096675284073829, - "flos": 20480019789840.0, - "grad_norm": 3.5505372352812543, - "learning_rate": 2.6692166788907233e-06, - "loss": 0.6725, - "num_input_tokens_seen": 72654725, - "step": 3407 - }, - { - "epoch": 0.409787771298022, - "flos": 14027083739040.0, - "grad_norm": 2.941922531311254, - "learning_rate": 2.6684825585400957e-06, - "loss": 0.7513, - "num_input_tokens_seen": 72673390, - "step": 3408 - }, - { - "epoch": 0.4099080141886611, - "flos": 43681460524800.0, - "grad_norm": 0.8850298354405367, - "learning_rate": 2.6677483367811947e-06, - "loss": 0.7125, - "num_input_tokens_seen": 72733150, - "step": 3409 - }, - { - "epoch": 0.4100282570793002, - "flos": 16025128053600.0, - "grad_norm": 4.3592651790882515, - "learning_rate": 2.6670140137254028e-06, - "loss": 0.7444, - "num_input_tokens_seen": 72752345, - "step": 3410 - }, - { - "epoch": 0.4101484999699393, - "flos": 13542577235760.0, - "grad_norm": 3.7236082254794085, - "learning_rate": 2.666279589484115e-06, - "loss": 0.853, - "num_input_tokens_seen": 72769965, - "step": 3411 - }, - { - "epoch": 0.41026874286057835, - "flos": 13943768198400.0, - "grad_norm": 4.1158458977740695, - "learning_rate": 2.6655450641687435e-06, - "loss": 0.7807, - "num_input_tokens_seen": 72787250, - "step": 3412 - }, - { - "epoch": 0.41038898575121746, - "flos": 22909960736880.0, - "grad_norm": 4.242232686007622, - "learning_rate": 2.664810437890715e-06, - "loss": 0.6774, - "num_input_tokens_seen": 72808640, - "step": 3413 - }, - { - "epoch": 0.41050922864185657, - "flos": 10437473285880.0, - "grad_norm": 2.8265498711940187, - "learning_rate": 2.6640757107614714e-06, - "loss": 0.7894, - "num_input_tokens_seen": 72826455, - "step": 3414 - }, - { - "epoch": 0.4106294715324956, - "flos": 22722818492880.0, - "grad_norm": 5.533884401725001, - "learning_rate": 2.6633408828924697e-06, - "loss": 0.6909, - "num_input_tokens_seen": 72845040, - "step": 3415 - }, - { - "epoch": 0.41074971442313474, - "flos": 17912507113440.0, - "grad_norm": 4.286826852165739, - "learning_rate": 2.662605954395185e-06, - "loss": 0.6856, - "num_input_tokens_seen": 72864720, - "step": 3416 - }, - { - "epoch": 0.41086995731377385, - "flos": 15756348246840.0, - "grad_norm": 2.9485868269601707, - "learning_rate": 2.6618709253811027e-06, - "loss": 0.8239, - "num_input_tokens_seen": 72883895, - "step": 3417 - }, - { - "epoch": 0.4109902002044129, - "flos": 15134364994080.0, - "grad_norm": 2.161424923067901, - "learning_rate": 2.6611357959617277e-06, - "loss": 0.8678, - "num_input_tokens_seen": 72903235, - "step": 3418 - }, - { - "epoch": 0.411110443095052, - "flos": 13266610618080.0, - "grad_norm": 4.38040241744353, - "learning_rate": 2.660400566248578e-06, - "loss": 0.888, - "num_input_tokens_seen": 72921080, - "step": 3419 - }, - { - "epoch": 0.41123068598569107, - "flos": 10599893592480.0, - "grad_norm": 3.0442383924615233, - "learning_rate": 2.6596652363531876e-06, - "loss": 0.6555, - "num_input_tokens_seen": 72936675, - "step": 3420 - }, - { - "epoch": 0.4113509288763302, - "flos": 15486207061800.0, - "grad_norm": 2.061570391533611, - "learning_rate": 2.6589298063871055e-06, - "loss": 0.7701, - "num_input_tokens_seen": 72956570, - "step": 3421 - }, - { - "epoch": 0.4114711717669693, - "flos": 13461636192120.0, - "grad_norm": 3.0665975134419323, - "learning_rate": 2.658194276461895e-06, - "loss": 0.6823, - "num_input_tokens_seen": 72974215, - "step": 3422 - }, - { - 
"epoch": 0.41159141465760835, - "flos": 19967435393400.0, - "grad_norm": 3.3981714604920015, - "learning_rate": 2.6574586466891368e-06, - "loss": 0.6597, - "num_input_tokens_seen": 72994410, - "step": 3423 - }, - { - "epoch": 0.41171165754824746, - "flos": 14618994521160.0, - "grad_norm": 2.5809638836952384, - "learning_rate": 2.6567229171804247e-06, - "loss": 0.6158, - "num_input_tokens_seen": 73012015, - "step": 3424 - }, - { - "epoch": 0.41183190043888657, - "flos": 13137555417960.0, - "grad_norm": 3.1362777152876093, - "learning_rate": 2.655987088047368e-06, - "loss": 0.8424, - "num_input_tokens_seen": 73030080, - "step": 3425 - }, - { - "epoch": 0.4119521433295256, - "flos": 19916028601080.0, - "grad_norm": 5.535668973253848, - "learning_rate": 2.6552511594015912e-06, - "loss": 0.7667, - "num_input_tokens_seen": 73050190, - "step": 3426 - }, - { - "epoch": 0.41207238622016473, - "flos": 11004472170840.0, - "grad_norm": 6.9668195984525685, - "learning_rate": 2.654515131354735e-06, - "loss": 0.8431, - "num_input_tokens_seen": 73068175, - "step": 3427 - }, - { - "epoch": 0.41219262911080384, - "flos": 19834106098680.0, - "grad_norm": 3.746804465297312, - "learning_rate": 2.653779004018453e-06, - "loss": 0.8423, - "num_input_tokens_seen": 73088460, - "step": 3428 - }, - { - "epoch": 0.4123128720014429, - "flos": 18083443949280.0, - "grad_norm": 2.265377794130679, - "learning_rate": 2.653042777504417e-06, - "loss": 0.803, - "num_input_tokens_seen": 73110770, - "step": 3429 - }, - { - "epoch": 0.412433114892082, - "flos": 19235293445280.0, - "grad_norm": 2.948674152946355, - "learning_rate": 2.6523064519243105e-06, - "loss": 0.7829, - "num_input_tokens_seen": 73130060, - "step": 3430 - }, - { - "epoch": 0.4125533577827211, - "flos": 15621752553720.0, - "grad_norm": 4.452325260561056, - "learning_rate": 2.6515700273898333e-06, - "loss": 0.7663, - "num_input_tokens_seen": 73147655, - "step": 3431 - }, - { - "epoch": 0.4126736006733602, - "flos": 19104876866880.0, - "grad_norm": 4.007560261007371, - "learning_rate": 2.6508335040127018e-06, - "loss": 0.6704, - "num_input_tokens_seen": 73167070, - "step": 3432 - }, - { - "epoch": 0.4127938435639993, - "flos": 18807797547240.0, - "grad_norm": 2.8705491152749096, - "learning_rate": 2.6500968819046446e-06, - "loss": 0.7532, - "num_input_tokens_seen": 73187090, - "step": 3433 - }, - { - "epoch": 0.4129140864546384, - "flos": 13130146987320.0, - "grad_norm": 4.061265258948255, - "learning_rate": 2.649360161177408e-06, - "loss": 0.5657, - "num_input_tokens_seen": 73201870, - "step": 3434 - }, - { - "epoch": 0.41303432934527745, - "flos": 17377132037160.0, - "grad_norm": 4.690733544544295, - "learning_rate": 2.6486233419427504e-06, - "loss": 0.7119, - "num_input_tokens_seen": 73221405, - "step": 3435 - }, - { - "epoch": 0.41315457223591656, - "flos": 14433340295280.0, - "grad_norm": 2.916814328043488, - "learning_rate": 2.6478864243124484e-06, - "loss": 0.7373, - "num_input_tokens_seen": 73240790, - "step": 3436 - }, - { - "epoch": 0.4132748151265556, - "flos": 15297830079360.0, - "grad_norm": 2.2913667193546123, - "learning_rate": 2.6471494083982903e-06, - "loss": 0.8384, - "num_input_tokens_seen": 73259895, - "step": 3437 - }, - { - "epoch": 0.4133950580171947, - "flos": 23667204541920.0, - "grad_norm": 2.4260607572213626, - "learning_rate": 2.6464122943120818e-06, - "loss": 0.7361, - "num_input_tokens_seen": 73279840, - "step": 3438 - }, - { - "epoch": 0.41351530090783384, - "flos": 17050835065800.0, - "grad_norm": 6.991317541463567, - 
"learning_rate": 2.645675082165642e-06, - "loss": 0.8048, - "num_input_tokens_seen": 73295770, - "step": 3439 - }, - { - "epoch": 0.4136355437984729, - "flos": 18754142897760.0, - "grad_norm": 3.843972300482276, - "learning_rate": 2.644937772070806e-06, - "loss": 0.7451, - "num_input_tokens_seen": 73313935, - "step": 3440 - }, - { - "epoch": 0.413755786689112, - "flos": 14487659803920.0, - "grad_norm": 4.374859616888348, - "learning_rate": 2.6442003641394225e-06, - "loss": 0.8139, - "num_input_tokens_seen": 73331250, - "step": 3441 - }, - { - "epoch": 0.4138760295797511, - "flos": 19699098826080.0, - "grad_norm": 2.0541369494699553, - "learning_rate": 2.643462858483356e-06, - "loss": 0.8212, - "num_input_tokens_seen": 73351255, - "step": 3442 - }, - { - "epoch": 0.41399627247039017, - "flos": 11949744698760.0, - "grad_norm": 3.01653564618214, - "learning_rate": 2.6427252552144856e-06, - "loss": 0.7249, - "num_input_tokens_seen": 73369625, - "step": 3443 - }, - { - "epoch": 0.4141165153610293, - "flos": 16783100037720.0, - "grad_norm": 2.539872260448496, - "learning_rate": 2.6419875544447044e-06, - "loss": 0.7351, - "num_input_tokens_seen": 73390745, - "step": 3444 - }, - { - "epoch": 0.4142367582516684, - "flos": 18457411837680.0, - "grad_norm": 2.296047554706592, - "learning_rate": 2.6412497562859218e-06, - "loss": 0.6953, - "num_input_tokens_seen": 73411745, - "step": 3445 - }, - { - "epoch": 0.41435700114230745, - "flos": 15865715443200.0, - "grad_norm": 3.918497158332311, - "learning_rate": 2.6405118608500617e-06, - "loss": 0.7463, - "num_input_tokens_seen": 73430290, - "step": 3446 - }, - { - "epoch": 0.41447724403294656, - "flos": 19051380517200.0, - "grad_norm": 3.1596099796317696, - "learning_rate": 2.6397738682490613e-06, - "loss": 0.789, - "num_input_tokens_seen": 73450910, - "step": 3447 - }, - { - "epoch": 0.41459748692358567, - "flos": 13326533939640.0, - "grad_norm": 2.6300647557920365, - "learning_rate": 2.6390357785948734e-06, - "loss": 0.7389, - "num_input_tokens_seen": 73467745, - "step": 3448 - }, - { - "epoch": 0.4147177298142247, - "flos": 17699154913920.0, - "grad_norm": 2.6366504801636337, - "learning_rate": 2.6382975919994667e-06, - "loss": 0.778, - "num_input_tokens_seen": 73488040, - "step": 3449 - }, - { - "epoch": 0.41483797270486383, - "flos": 14677239864840.0, - "grad_norm": 2.235886282770754, - "learning_rate": 2.637559308574822e-06, - "loss": 0.7047, - "num_input_tokens_seen": 73507505, - "step": 3450 - }, - { - "epoch": 0.4149582155955029, - "flos": 22075131843960.0, - "grad_norm": 3.09701904824114, - "learning_rate": 2.6368209284329376e-06, - "loss": 0.7026, - "num_input_tokens_seen": 73527855, - "step": 3451 - }, - { - "epoch": 0.415078458486142, - "flos": 12220012523640.0, - "grad_norm": 4.715526768373505, - "learning_rate": 2.636082451685825e-06, - "loss": 0.7343, - "num_input_tokens_seen": 73545775, - "step": 3452 - }, - { - "epoch": 0.4151987013767811, - "flos": 19080186589440.0, - "grad_norm": 2.244246862966192, - "learning_rate": 2.6353438784455094e-06, - "loss": 0.8409, - "num_input_tokens_seen": 73568780, - "step": 3453 - }, - { - "epoch": 0.41531894426742016, - "flos": 18024692046240.0, - "grad_norm": 3.818900442203669, - "learning_rate": 2.6346052088240326e-06, - "loss": 0.6882, - "num_input_tokens_seen": 73588020, - "step": 3454 - }, - { - "epoch": 0.4154391871580593, - "flos": 10896213073080.0, - "grad_norm": 2.4952428379259897, - "learning_rate": 2.63386644293345e-06, - "loss": 0.7545, - "num_input_tokens_seen": 73604085, - "step": 3455 - }, 
- { - "epoch": 0.4155594300486984, - "flos": 10652978362680.0, - "grad_norm": 4.708822354377779, - "learning_rate": 2.633127580885833e-06, - "loss": 0.817, - "num_input_tokens_seen": 73618305, - "step": 3456 - }, - { - "epoch": 0.41567967293933744, - "flos": 21643108571640.0, - "grad_norm": 4.535479025644968, - "learning_rate": 2.632388622793265e-06, - "loss": 0.6184, - "num_input_tokens_seen": 73637180, - "step": 3457 - }, - { - "epoch": 0.41579991582997655, - "flos": 14049937738800.0, - "grad_norm": 3.979150076360553, - "learning_rate": 2.6316495687678457e-06, - "loss": 0.6702, - "num_input_tokens_seen": 73655550, - "step": 3458 - }, - { - "epoch": 0.41592015872061566, - "flos": 17916686228160.0, - "grad_norm": 2.734355991842414, - "learning_rate": 2.6309104189216887e-06, - "loss": 0.7393, - "num_input_tokens_seen": 73672835, - "step": 3459 - }, - { - "epoch": 0.4160404016112547, - "flos": 15188304583200.0, - "grad_norm": 3.064576982240473, - "learning_rate": 2.630171173366923e-06, - "loss": 0.7284, - "num_input_tokens_seen": 73688355, - "step": 3460 - }, - { - "epoch": 0.41616064450189383, - "flos": 10112062793400.0, - "grad_norm": 3.831764204109323, - "learning_rate": 2.629431832215691e-06, - "loss": 0.7204, - "num_input_tokens_seen": 73702880, - "step": 3461 - }, - { - "epoch": 0.41628088739253294, - "flos": 14622477116760.0, - "grad_norm": 3.4643232262087515, - "learning_rate": 2.628692395580151e-06, - "loss": 0.8474, - "num_input_tokens_seen": 73722690, - "step": 3462 - }, - { - "epoch": 0.416401130283172, - "flos": 21400000501080.0, - "grad_norm": 2.6832078770123244, - "learning_rate": 2.6279528635724747e-06, - "loss": 0.7784, - "num_input_tokens_seen": 73742565, - "step": 3463 - }, - { - "epoch": 0.4165213731738111, - "flos": 11836008427920.0, - "grad_norm": 4.816827982980807, - "learning_rate": 2.627213236304848e-06, - "loss": 0.7586, - "num_input_tokens_seen": 73759085, - "step": 3464 - }, - { - "epoch": 0.4166416160644502, - "flos": 24801740531160.0, - "grad_norm": 2.746161556356366, - "learning_rate": 2.626473513889472e-06, - "loss": 0.6929, - "num_input_tokens_seen": 73781185, - "step": 3465 - }, - { - "epoch": 0.41676185895508927, - "flos": 15292701165840.0, - "grad_norm": 4.542198266697981, - "learning_rate": 2.625733696438562e-06, - "loss": 0.8078, - "num_input_tokens_seen": 73798410, - "step": 3466 - }, - { - "epoch": 0.4168821018457284, - "flos": 13434982997160.0, - "grad_norm": 3.0566421850035956, - "learning_rate": 2.6249937840643476e-06, - "loss": 0.738, - "num_input_tokens_seen": 73816435, - "step": 3467 - }, - { - "epoch": 0.41700234473636744, - "flos": 13651342892880.0, - "grad_norm": 2.373425614388134, - "learning_rate": 2.6242537768790733e-06, - "loss": 0.6538, - "num_input_tokens_seen": 73835310, - "step": 3468 - }, - { - "epoch": 0.41712258762700655, - "flos": 22780367317440.0, - "grad_norm": 2.6366294111022235, - "learning_rate": 2.6235136749949975e-06, - "loss": 0.6746, - "num_input_tokens_seen": 73858480, - "step": 3469 - }, - { - "epoch": 0.41724283051764566, - "flos": 26391913631520.0, - "grad_norm": 2.923138513572569, - "learning_rate": 2.6227734785243924e-06, - "loss": 0.5931, - "num_input_tokens_seen": 73878160, - "step": 3470 - }, - { - "epoch": 0.4173630734082847, - "flos": 18561143561160.0, - "grad_norm": 2.515428997483697, - "learning_rate": 2.6220331875795466e-06, - "loss": 0.7793, - "num_input_tokens_seen": 73897230, - "step": 3471 - }, - { - "epoch": 0.4174833162989238, - "flos": 19561780376400.0, - "grad_norm": 2.365331948596387, - 
"learning_rate": 2.62129280227276e-06, - "loss": 0.744, - "num_input_tokens_seen": 73916950, - "step": 3472 - }, - { - "epoch": 0.41760355918956293, - "flos": 55314460577400.0, - "grad_norm": 3.798456812267725, - "learning_rate": 2.62055232271635e-06, - "loss": 0.6621, - "num_input_tokens_seen": 73943855, - "step": 3473 - }, - { - "epoch": 0.417723802080202, - "flos": 10408825513440.0, - "grad_norm": 3.2545871640405335, - "learning_rate": 2.619811749022646e-06, - "loss": 0.8712, - "num_input_tokens_seen": 73958885, - "step": 3474 - }, - { - "epoch": 0.4178440449708411, - "flos": 10650002326440.0, - "grad_norm": 5.016310220181529, - "learning_rate": 2.6190710813039917e-06, - "loss": 0.6997, - "num_input_tokens_seen": 73971730, - "step": 3475 - }, - { - "epoch": 0.4179642878614802, - "flos": 15730043311440.0, - "grad_norm": 4.940575330027345, - "learning_rate": 2.618330319672747e-06, - "loss": 0.8078, - "num_input_tokens_seen": 73990870, - "step": 3476 - }, - { - "epoch": 0.41808453075211927, - "flos": 13461446232360.0, - "grad_norm": 3.3449382631452154, - "learning_rate": 2.617589464241284e-06, - "loss": 0.9034, - "num_input_tokens_seen": 74004990, - "step": 3477 - }, - { - "epoch": 0.4182047736427584, - "flos": 14837000734800.0, - "grad_norm": 2.9292929421813176, - "learning_rate": 2.6168485151219914e-06, - "loss": 0.7179, - "num_input_tokens_seen": 74024330, - "step": 3478 - }, - { - "epoch": 0.4183250165333975, - "flos": 13782645950160.0, - "grad_norm": 3.44284391304554, - "learning_rate": 2.616107472427269e-06, - "loss": 0.6996, - "num_input_tokens_seen": 74038745, - "step": 3479 - }, - { - "epoch": 0.41844525942403654, - "flos": 12940725226200.0, - "grad_norm": 4.84154061412155, - "learning_rate": 2.615366336269533e-06, - "loss": 0.7479, - "num_input_tokens_seen": 74052130, - "step": 3480 - }, - { - "epoch": 0.41856550231467565, - "flos": 13400699751840.0, - "grad_norm": 4.9385925243145765, - "learning_rate": 2.6146251067612126e-06, - "loss": 0.7724, - "num_input_tokens_seen": 74067325, - "step": 3481 - }, - { - "epoch": 0.41868574520531476, - "flos": 16672909682400.0, - "grad_norm": 2.5937647072248695, - "learning_rate": 2.6138837840147525e-06, - "loss": 0.806, - "num_input_tokens_seen": 74086080, - "step": 3482 - }, - { - "epoch": 0.4188059880959538, - "flos": 9951478764480.0, - "grad_norm": 4.481686094976561, - "learning_rate": 2.6131423681426103e-06, - "loss": 0.7544, - "num_input_tokens_seen": 74101715, - "step": 3483 - }, - { - "epoch": 0.41892623098659293, - "flos": 27801529759560.0, - "grad_norm": 2.399728831230717, - "learning_rate": 2.6124008592572587e-06, - "loss": 0.7103, - "num_input_tokens_seen": 74125420, - "step": 3484 - }, - { - "epoch": 0.419046473877232, - "flos": 17026366408080.0, - "grad_norm": 5.131590722465647, - "learning_rate": 2.6116592574711835e-06, - "loss": 0.7898, - "num_input_tokens_seen": 74143440, - "step": 3485 - }, - { - "epoch": 0.4191667167678711, - "flos": 15162949446600.0, - "grad_norm": 2.7262329951774373, - "learning_rate": 2.6109175628968853e-06, - "loss": 0.8164, - "num_input_tokens_seen": 74162925, - "step": 3486 - }, - { - "epoch": 0.4192869596585102, - "flos": 17268651319680.0, - "grad_norm": 5.558605759715593, - "learning_rate": 2.610175775646878e-06, - "loss": 0.8174, - "num_input_tokens_seen": 74181225, - "step": 3487 - }, - { - "epoch": 0.41940720254914926, - "flos": 18374792816160.0, - "grad_norm": 4.773328015059877, - "learning_rate": 2.6094338958336907e-06, - "loss": 0.7278, - "num_input_tokens_seen": 74199615, - "step": 3488 - }, - 
{ - "epoch": 0.41952744543978837, - "flos": 11323455691440.0, - "grad_norm": 2.8288462019612495, - "learning_rate": 2.608691923569867e-06, - "loss": 0.7973, - "num_input_tokens_seen": 74216210, - "step": 3489 - }, - { - "epoch": 0.4196476883304275, - "flos": 18051946780440.0, - "grad_norm": 3.8352634435932424, - "learning_rate": 2.6079498589679616e-06, - "loss": 0.7479, - "num_input_tokens_seen": 74237020, - "step": 3490 - }, - { - "epoch": 0.41976793122106654, - "flos": 17967428161320.0, - "grad_norm": 4.0012996469579845, - "learning_rate": 2.6072077021405465e-06, - "loss": 0.7379, - "num_input_tokens_seen": 74255575, - "step": 3491 - }, - { - "epoch": 0.41988817411170565, - "flos": 15483832564800.0, - "grad_norm": 3.213556785280017, - "learning_rate": 2.6064654532002054e-06, - "loss": 0.6787, - "num_input_tokens_seen": 74274305, - "step": 3492 - }, - { - "epoch": 0.42000841700234476, - "flos": 23235751148880.0, - "grad_norm": 2.9880836970034577, - "learning_rate": 2.6057231122595375e-06, - "loss": 0.7412, - "num_input_tokens_seen": 74295335, - "step": 3493 - }, - { - "epoch": 0.4201286598929838, - "flos": 15561987531960.0, - "grad_norm": 2.8460846424611517, - "learning_rate": 2.604980679431154e-06, - "loss": 0.7203, - "num_input_tokens_seen": 74313295, - "step": 3494 - }, - { - "epoch": 0.4202489027836229, - "flos": 13538113181400.0, - "grad_norm": 3.146575452834923, - "learning_rate": 2.604238154827684e-06, - "loss": 0.7445, - "num_input_tokens_seen": 74329640, - "step": 3495 - }, - { - "epoch": 0.42036914567426203, - "flos": 14109481140840.0, - "grad_norm": 4.164659525314404, - "learning_rate": 2.6034955385617656e-06, - "loss": 0.705, - "num_input_tokens_seen": 74347690, - "step": 3496 - }, - { - "epoch": 0.4204893885649011, - "flos": 50026517397360.0, - "grad_norm": 0.7635878885043024, - "learning_rate": 2.6027528307460544e-06, - "loss": 0.6551, - "num_input_tokens_seen": 74411415, - "step": 3497 - }, - { - "epoch": 0.4206096314555402, - "flos": 16027185951000.0, - "grad_norm": 2.221375000570133, - "learning_rate": 2.602010031493217e-06, - "loss": 0.8421, - "num_input_tokens_seen": 74429365, - "step": 3498 - }, - { - "epoch": 0.42072987434617926, - "flos": 21480941544720.0, - "grad_norm": 15.299326184962107, - "learning_rate": 2.6012671409159367e-06, - "loss": 0.8452, - "num_input_tokens_seen": 74450420, - "step": 3499 - }, - { - "epoch": 0.42085011723681837, - "flos": 20239634475840.0, - "grad_norm": 2.5871118374167277, - "learning_rate": 2.6005241591269097e-06, - "loss": 0.8141, - "num_input_tokens_seen": 74469510, - "step": 3500 - }, - { - "epoch": 0.4209703601274575, - "flos": 20400756724080.0, - "grad_norm": 1.848738371610664, - "learning_rate": 2.5997810862388454e-06, - "loss": 0.787, - "num_input_tokens_seen": 74489070, - "step": 3501 - }, - { - "epoch": 0.42109060301809653, - "flos": 20183668649280.0, - "grad_norm": 2.733643962685665, - "learning_rate": 2.599037922364467e-06, - "loss": 0.7453, - "num_input_tokens_seen": 74507690, - "step": 3502 - }, - { - "epoch": 0.42121084590873564, - "flos": 21507594739680.0, - "grad_norm": 3.183111722998247, - "learning_rate": 2.5982946676165112e-06, - "loss": 0.7444, - "num_input_tokens_seen": 74527180, - "step": 3503 - }, - { - "epoch": 0.42133108879937475, - "flos": 49696959450120.0, - "grad_norm": 0.7604456116554164, - "learning_rate": 2.5975513221077313e-06, - "loss": 0.594, - "num_input_tokens_seen": 74590870, - "step": 3504 - }, - { - "epoch": 0.4214513316900138, - "flos": 16912281877680.0, - "grad_norm": 2.793089451882675, - 
"learning_rate": 2.5968078859508897e-06, - "loss": 0.8656, - "num_input_tokens_seen": 74607790, - "step": 3505 - }, - { - "epoch": 0.4215715745806529, - "flos": 11162301783240.0, - "grad_norm": 3.5130622169464516, - "learning_rate": 2.5960643592587673e-06, - "loss": 0.7787, - "num_input_tokens_seen": 74624920, - "step": 3506 - }, - { - "epoch": 0.42169181747129203, - "flos": 16188814758600.0, - "grad_norm": 3.782445040931976, - "learning_rate": 2.5953207421441553e-06, - "loss": 0.8082, - "num_input_tokens_seen": 74643240, - "step": 3507 - }, - { - "epoch": 0.4218120603619311, - "flos": 16560376490040.0, - "grad_norm": 3.0324658973581444, - "learning_rate": 2.5945770347198603e-06, - "loss": 0.7273, - "num_input_tokens_seen": 74661115, - "step": 3508 - }, - { - "epoch": 0.4219323032525702, - "flos": 14379780625680.0, - "grad_norm": 2.6962180565227087, - "learning_rate": 2.593833237098701e-06, - "loss": 0.8168, - "num_input_tokens_seen": 74678435, - "step": 3509 - }, - { - "epoch": 0.4220525461432093, - "flos": 22156357827240.0, - "grad_norm": 2.5374316802951618, - "learning_rate": 2.593089349393512e-06, - "loss": 0.6162, - "num_input_tokens_seen": 74698645, - "step": 3510 - }, - { - "epoch": 0.42217278903384836, - "flos": 17808522110280.0, - "grad_norm": 3.076542326404307, - "learning_rate": 2.592345371717141e-06, - "loss": 0.8198, - "num_input_tokens_seen": 74717895, - "step": 3511 - }, - { - "epoch": 0.42229303192448747, - "flos": 12462772334640.0, - "grad_norm": 3.583757124096403, - "learning_rate": 2.591601304182448e-06, - "loss": 0.6902, - "num_input_tokens_seen": 74735585, - "step": 3512 - }, - { - "epoch": 0.4224132748151266, - "flos": 16675315839360.0, - "grad_norm": 2.7214552475744997, - "learning_rate": 2.5908571469023067e-06, - "loss": 0.7789, - "num_input_tokens_seen": 74754790, - "step": 3513 - }, - { - "epoch": 0.42253351770576564, - "flos": 13000078668480.0, - "grad_norm": 4.897507964162305, - "learning_rate": 2.5901128999896067e-06, - "loss": 0.7422, - "num_input_tokens_seen": 74769940, - "step": 3514 - }, - { - "epoch": 0.42265376059640475, - "flos": 20914259259360.0, - "grad_norm": 2.0511904353687975, - "learning_rate": 2.5893685635572487e-06, - "loss": 0.6718, - "num_input_tokens_seen": 74790510, - "step": 3515 - }, - { - "epoch": 0.4227740034870438, - "flos": 11841770540640.0, - "grad_norm": 2.9562174434361177, - "learning_rate": 2.5886241377181483e-06, - "loss": 0.6759, - "num_input_tokens_seen": 74809100, - "step": 3516 - }, - { - "epoch": 0.4228942463776829, - "flos": 18532844048280.0, - "grad_norm": 2.2888515752798146, - "learning_rate": 2.587879622585234e-06, - "loss": 0.7905, - "num_input_tokens_seen": 74827420, - "step": 3517 - }, - { - "epoch": 0.423014489268322, - "flos": 19347763317720.0, - "grad_norm": 3.074581061877323, - "learning_rate": 2.5871350182714486e-06, - "loss": 0.7365, - "num_input_tokens_seen": 74848020, - "step": 3518 - }, - { - "epoch": 0.4231347321589611, - "flos": 12596988108240.0, - "grad_norm": 3.5878875682570674, - "learning_rate": 2.586390324889748e-06, - "loss": 0.7823, - "num_input_tokens_seen": 74863640, - "step": 3519 - }, - { - "epoch": 0.4232549750496002, - "flos": 16833968610720.0, - "grad_norm": 2.2632727678460642, - "learning_rate": 2.5856455425531003e-06, - "loss": 0.6526, - "num_input_tokens_seen": 74884835, - "step": 3520 - }, - { - "epoch": 0.4233752179402393, - "flos": 15537898793760.0, - "grad_norm": 2.4773600644837885, - "learning_rate": 2.5849006713744902e-06, - "loss": 0.7948, - "num_input_tokens_seen": 74903350, - "step": 
3521 - }, - { - "epoch": 0.42349546083087836, - "flos": 15135726372360.0, - "grad_norm": 3.6137705538629312, - "learning_rate": 2.5841557114669135e-06, - "loss": 0.7131, - "num_input_tokens_seen": 74919930, - "step": 3522 - }, - { - "epoch": 0.42361570372151747, - "flos": 13566919253640.0, - "grad_norm": 4.231829451717407, - "learning_rate": 2.58341066294338e-06, - "loss": 0.6413, - "num_input_tokens_seen": 74936315, - "step": 3523 - }, - { - "epoch": 0.4237359466121566, - "flos": 15324546594240.0, - "grad_norm": 4.046823862113556, - "learning_rate": 2.5826655259169124e-06, - "loss": 0.8358, - "num_input_tokens_seen": 74954690, - "step": 3524 - }, - { - "epoch": 0.42385618950279563, - "flos": 13162245695400.0, - "grad_norm": 2.7319327646753657, - "learning_rate": 2.5819203005005475e-06, - "loss": 0.8803, - "num_input_tokens_seen": 74971745, - "step": 3525 - }, - { - "epoch": 0.42397643239343474, - "flos": 17403911871960.0, - "grad_norm": 2.6895376066127117, - "learning_rate": 2.581174986807336e-06, - "loss": 0.7735, - "num_input_tokens_seen": 74991700, - "step": 3526 - }, - { - "epoch": 0.42409667528407385, - "flos": 12057655536960.0, - "grad_norm": 2.8343339910768512, - "learning_rate": 2.580429584950341e-06, - "loss": 0.9014, - "num_input_tokens_seen": 75007170, - "step": 3527 - }, - { - "epoch": 0.4242169181747129, - "flos": 11679540193800.0, - "grad_norm": 2.6263984406547087, - "learning_rate": 2.5796840950426397e-06, - "loss": 0.6375, - "num_input_tokens_seen": 75023975, - "step": 3528 - }, - { - "epoch": 0.424337161065352, - "flos": 14676669985560.0, - "grad_norm": 3.4206709463503744, - "learning_rate": 2.578938517197322e-06, - "loss": 0.6411, - "num_input_tokens_seen": 75041790, - "step": 3529 - }, - { - "epoch": 0.4244574039559911, - "flos": 17484726275760.0, - "grad_norm": 3.0788412277452797, - "learning_rate": 2.5781928515274916e-06, - "loss": 0.6062, - "num_input_tokens_seen": 75060230, - "step": 3530 - }, - { - "epoch": 0.4245776468466302, - "flos": 12813031404360.0, - "grad_norm": 3.0057818010518775, - "learning_rate": 2.577447098146265e-06, - "loss": 0.6641, - "num_input_tokens_seen": 75077125, - "step": 3531 - }, - { - "epoch": 0.4246978897372693, - "flos": 20368563036120.0, - "grad_norm": 2.331748242354362, - "learning_rate": 2.5767012571667724e-06, - "loss": 0.7698, - "num_input_tokens_seen": 75096325, - "step": 3532 - }, - { - "epoch": 0.42481813262790835, - "flos": 11355744359280.0, - "grad_norm": 12.145211589378805, - "learning_rate": 2.5759553287021587e-06, - "loss": 0.6617, - "num_input_tokens_seen": 75114375, - "step": 3533 - }, - { - "epoch": 0.42493837551854746, - "flos": 17538507565080.0, - "grad_norm": 3.1042882054919367, - "learning_rate": 2.5752093128655786e-06, - "loss": 0.7513, - "num_input_tokens_seen": 75132340, - "step": 3534 - }, - { - "epoch": 0.4250586184091866, - "flos": 15216477456240.0, - "grad_norm": 2.486454986668031, - "learning_rate": 2.574463209770204e-06, - "loss": 0.7161, - "num_input_tokens_seen": 75151375, - "step": 3535 - }, - { - "epoch": 0.42517886129982563, - "flos": 22289465502240.0, - "grad_norm": 2.633344194735891, - "learning_rate": 2.5737170195292165e-06, - "loss": 0.7764, - "num_input_tokens_seen": 75174430, - "step": 3536 - }, - { - "epoch": 0.42529910419046474, - "flos": 14673693949320.0, - "grad_norm": 2.7529153109659195, - "learning_rate": 2.572970742255814e-06, - "loss": 0.7617, - "num_input_tokens_seen": 75192640, - "step": 3537 - }, - { - "epoch": 0.42541934708110385, - "flos": 16561896168120.0, - "grad_norm": 
3.7669005706764804, - "learning_rate": 2.5722243780632046e-06, - "loss": 0.8013, - "num_input_tokens_seen": 75210625, - "step": 3538 - }, - { - "epoch": 0.4255395899717429, - "flos": 48811072024440.0, - "grad_norm": 0.8352680556107817, - "learning_rate": 2.5714779270646125e-06, - "loss": 0.6511, - "num_input_tokens_seen": 75271115, - "step": 3539 - }, - { - "epoch": 0.425659832862382, - "flos": 13083964088400.0, - "grad_norm": 6.955736814147409, - "learning_rate": 2.5707313893732735e-06, - "loss": 0.7516, - "num_input_tokens_seen": 75289375, - "step": 3540 - }, - { - "epoch": 0.4257800757530211, - "flos": 17591180755800.0, - "grad_norm": 2.620820487455373, - "learning_rate": 2.5699847651024364e-06, - "loss": 0.7505, - "num_input_tokens_seen": 75309735, - "step": 3541 - }, - { - "epoch": 0.4259003186436602, - "flos": 17349782323080.0, - "grad_norm": 3.0926844756012533, - "learning_rate": 2.5692380543653627e-06, - "loss": 0.7628, - "num_input_tokens_seen": 75327610, - "step": 3542 - }, - { - "epoch": 0.4260205615342993, - "flos": 11105259518040.0, - "grad_norm": 3.7158970507844815, - "learning_rate": 2.5684912572753293e-06, - "loss": 0.6773, - "num_input_tokens_seen": 75343005, - "step": 3543 - }, - { - "epoch": 0.4261408044249384, - "flos": 22507598355720.0, - "grad_norm": 3.5714191082420315, - "learning_rate": 2.5677443739456245e-06, - "loss": 0.8337, - "num_input_tokens_seen": 75364385, - "step": 3544 - }, - { - "epoch": 0.42626104731557746, - "flos": 17025511589160.0, - "grad_norm": 3.298280822993867, - "learning_rate": 2.5669974044895495e-06, - "loss": 0.775, - "num_input_tokens_seen": 75380500, - "step": 3545 - }, - { - "epoch": 0.42638129020621657, - "flos": 18969552994680.0, - "grad_norm": 4.137422294693751, - "learning_rate": 2.5662503490204187e-06, - "loss": 0.7724, - "num_input_tokens_seen": 75400385, - "step": 3546 - }, - { - "epoch": 0.4265015330968556, - "flos": 19426234884480.0, - "grad_norm": 2.488261511707918, - "learning_rate": 2.5655032076515603e-06, - "loss": 0.7471, - "num_input_tokens_seen": 75419430, - "step": 3547 - }, - { - "epoch": 0.42662177598749473, - "flos": 17863063238640.0, - "grad_norm": 2.6989883788269577, - "learning_rate": 2.5647559804963155e-06, - "loss": 0.8008, - "num_input_tokens_seen": 75439080, - "step": 3548 - }, - { - "epoch": 0.42674201887813384, - "flos": 16944665525400.0, - "grad_norm": 5.775164396249767, - "learning_rate": 2.5640086676680364e-06, - "loss": 0.7766, - "num_input_tokens_seen": 75460295, - "step": 3549 - }, - { - "epoch": 0.4268622617687729, - "flos": 15864955604160.0, - "grad_norm": 3.827262687875871, - "learning_rate": 2.5632612692800923e-06, - "loss": 0.7833, - "num_input_tokens_seen": 75479080, - "step": 3550 - }, - { - "epoch": 0.426982504659412, - "flos": 17160360561960.0, - "grad_norm": 3.8553134071415203, - "learning_rate": 2.5625137854458603e-06, - "loss": 0.7287, - "num_input_tokens_seen": 75497815, - "step": 3551 - }, - { - "epoch": 0.4271027475500511, - "flos": 13812686760840.0, - "grad_norm": 2.490387939505289, - "learning_rate": 2.561766216278735e-06, - "loss": 0.7857, - "num_input_tokens_seen": 75515130, - "step": 3552 - }, - { - "epoch": 0.4272229904406902, - "flos": 19699193805960.0, - "grad_norm": 2.4920979546510953, - "learning_rate": 2.561018561892121e-06, - "loss": 0.7946, - "num_input_tokens_seen": 75533990, - "step": 3553 - }, - { - "epoch": 0.4273432333313293, - "flos": 17538760844760.0, - "grad_norm": 2.163547495903154, - "learning_rate": 2.5602708223994363e-06, - "loss": 0.7485, - "num_input_tokens_seen": 
75555575, - "step": 3554 - }, - { - "epoch": 0.4274634762219684, - "flos": 21697206460560.0, - "grad_norm": 3.2674265003766756, - "learning_rate": 2.559522997914115e-06, - "loss": 0.6602, - "num_input_tokens_seen": 75574875, - "step": 3555 - }, - { - "epoch": 0.42758371911260745, - "flos": 15675882102600.0, - "grad_norm": 3.7514893435296734, - "learning_rate": 2.558775088549599e-06, - "loss": 0.8455, - "num_input_tokens_seen": 75594175, - "step": 3556 - }, - { - "epoch": 0.42770396200324656, - "flos": 10730626770480.0, - "grad_norm": 3.6118334039688076, - "learning_rate": 2.5580270944193467e-06, - "loss": 0.6364, - "num_input_tokens_seen": 75610715, - "step": 3557 - }, - { - "epoch": 0.4278242048938857, - "flos": 52107750612720.0, - "grad_norm": 0.7331166138620573, - "learning_rate": 2.557279015636827e-06, - "loss": 0.5673, - "num_input_tokens_seen": 75670845, - "step": 3558 - }, - { - "epoch": 0.42794444778452473, - "flos": 51152505197400.0, - "grad_norm": 0.8123009144355335, - "learning_rate": 2.5565308523155245e-06, - "loss": 0.6504, - "num_input_tokens_seen": 75730165, - "step": 3559 - }, - { - "epoch": 0.42806469067516384, - "flos": 13292598953880.0, - "grad_norm": 4.553916276968825, - "learning_rate": 2.5557826045689336e-06, - "loss": 0.8035, - "num_input_tokens_seen": 75746125, - "step": 3560 - }, - { - "epoch": 0.4281849335658029, - "flos": 40179787966440.0, - "grad_norm": 0.8368162011137279, - "learning_rate": 2.5550342725105643e-06, - "loss": 0.6011, - "num_input_tokens_seen": 75804010, - "step": 3561 - }, - { - "epoch": 0.428305176456442, - "flos": 12597716287320.0, - "grad_norm": 2.6721869392559494, - "learning_rate": 2.554285856253937e-06, - "loss": 0.8, - "num_input_tokens_seen": 75822565, - "step": 3562 - }, - { - "epoch": 0.4284254193470811, - "flos": 19320951822960.0, - "grad_norm": 2.4575036690918397, - "learning_rate": 2.5535373559125855e-06, - "loss": 0.749, - "num_input_tokens_seen": 75842650, - "step": 3563 - }, - { - "epoch": 0.42854566223772017, - "flos": 21803407660920.0, - "grad_norm": 2.1520849719661395, - "learning_rate": 2.552788771600057e-06, - "loss": 0.8101, - "num_input_tokens_seen": 75862680, - "step": 3564 - }, - { - "epoch": 0.4286659051283593, - "flos": 16108126994640.0, - "grad_norm": 3.0247731967108753, - "learning_rate": 2.5520401034299118e-06, - "loss": 0.799, - "num_input_tokens_seen": 75880160, - "step": 3565 - }, - { - "epoch": 0.4287861480189984, - "flos": 9681559199160.0, - "grad_norm": 3.806102325130368, - "learning_rate": 2.551291351515722e-06, - "loss": 0.8516, - "num_input_tokens_seen": 75896895, - "step": 3566 - }, - { - "epoch": 0.42890639090963745, - "flos": 19537090098960.0, - "grad_norm": 4.908412966813531, - "learning_rate": 2.5505425159710726e-06, - "loss": 0.8469, - "num_input_tokens_seen": 75916425, - "step": 3567 - }, - { - "epoch": 0.42902663380027656, - "flos": 17615301153960.0, - "grad_norm": 5.637275525548858, - "learning_rate": 2.549793596909561e-06, - "loss": 0.8203, - "num_input_tokens_seen": 75934765, - "step": 3568 - }, - { - "epoch": 0.42914687669091567, - "flos": 11382270914400.0, - "grad_norm": 7.05911795058238, - "learning_rate": 2.5490445944447976e-06, - "loss": 0.6442, - "num_input_tokens_seen": 75952980, - "step": 3569 - }, - { - "epoch": 0.4292671195815547, - "flos": 23101472055360.0, - "grad_norm": 9.818080602952607, - "learning_rate": 2.548295508690406e-06, - "loss": 0.6404, - "num_input_tokens_seen": 75973995, - "step": 3570 - }, - { - "epoch": 0.42938736247219383, - "flos": 22205105182920.0, - "grad_norm": 
2.7538513610997164, - "learning_rate": 2.5475463397600217e-06, - "loss": 0.7512, - "num_input_tokens_seen": 75993795, - "step": 3571 - }, - { - "epoch": 0.42950760536283294, - "flos": 21533646395400.0, - "grad_norm": 4.687933939875197, - "learning_rate": 2.546797087767293e-06, - "loss": 0.7657, - "num_input_tokens_seen": 76013640, - "step": 3572 - }, - { - "epoch": 0.429627848253472, - "flos": 19698655586640.0, - "grad_norm": 3.724871685711094, - "learning_rate": 2.546047752825881e-06, - "loss": 0.8516, - "num_input_tokens_seen": 76033965, - "step": 3573 - }, - { - "epoch": 0.4297480911441111, - "flos": 10086201097440.0, - "grad_norm": 3.4377668797323597, - "learning_rate": 2.5452983350494595e-06, - "loss": 0.9173, - "num_input_tokens_seen": 76049240, - "step": 3574 - }, - { - "epoch": 0.4298683340347502, - "flos": 15163044426480.0, - "grad_norm": 3.609304980635095, - "learning_rate": 2.544548834551713e-06, - "loss": 0.6442, - "num_input_tokens_seen": 76067965, - "step": 3575 - }, - { - "epoch": 0.4299885769253893, - "flos": 15266681170080.0, - "grad_norm": 3.813119056345633, - "learning_rate": 2.5437992514463424e-06, - "loss": 0.9086, - "num_input_tokens_seen": 76081010, - "step": 3576 - }, - { - "epoch": 0.4301088198160284, - "flos": 18673581773640.0, - "grad_norm": 2.599362471252905, - "learning_rate": 2.5430495858470565e-06, - "loss": 0.8643, - "num_input_tokens_seen": 76100200, - "step": 3577 - }, - { - "epoch": 0.43022906270666744, - "flos": 13326312319920.0, - "grad_norm": 3.4116722136953856, - "learning_rate": 2.54229983786758e-06, - "loss": 0.756, - "num_input_tokens_seen": 76117865, - "step": 3578 - }, - { - "epoch": 0.43034930559730655, - "flos": 17129718212040.0, - "grad_norm": 2.5733063952200523, - "learning_rate": 2.541550007621651e-06, - "loss": 0.8287, - "num_input_tokens_seen": 76136075, - "step": 3579 - }, - { - "epoch": 0.43046954848794566, - "flos": 20671562768280.0, - "grad_norm": 3.1825496154897115, - "learning_rate": 2.5408000952230156e-06, - "loss": 0.7889, - "num_input_tokens_seen": 76154585, - "step": 3580 - }, - { - "epoch": 0.4305897913785847, - "flos": 20963956413840.0, - "grad_norm": 3.2554428300953977, - "learning_rate": 2.5400501007854357e-06, - "loss": 0.8811, - "num_input_tokens_seen": 76173750, - "step": 3581 - }, - { - "epoch": 0.43071003426922383, - "flos": 14946557890920.0, - "grad_norm": 2.6890519562536155, - "learning_rate": 2.539300024422685e-06, - "loss": 0.7412, - "num_input_tokens_seen": 76191415, - "step": 3582 - }, - { - "epoch": 0.43083027715986294, - "flos": 38298736407240.0, - "grad_norm": 0.7995857060092261, - "learning_rate": 2.538549866248549e-06, - "loss": 0.6317, - "num_input_tokens_seen": 76246115, - "step": 3583 - }, - { - "epoch": 0.430950520050502, - "flos": 12165534715200.0, - "grad_norm": 4.0460390383712825, - "learning_rate": 2.5377996263768274e-06, - "loss": 0.7914, - "num_input_tokens_seen": 76263915, - "step": 3584 - }, - { - "epoch": 0.4310707629411411, - "flos": 18025071965760.0, - "grad_norm": 7.5867772369283015, - "learning_rate": 2.5370493049213293e-06, - "loss": 0.6563, - "num_input_tokens_seen": 76283280, - "step": 3585 - }, - { - "epoch": 0.4311910058317802, - "flos": 19375397971440.0, - "grad_norm": 16.3268343912785, - "learning_rate": 2.536298901995878e-06, - "loss": 0.7826, - "num_input_tokens_seen": 76302210, - "step": 3586 - }, - { - "epoch": 0.43131124872241927, - "flos": 18430252083360.0, - "grad_norm": 2.897175393275218, - "learning_rate": 2.535548417714311e-06, - "loss": 0.7879, - "num_input_tokens_seen": 
76321230, - "step": 3587 - }, - { - "epoch": 0.4314314916130584, - "flos": 15809338037160.0, - "grad_norm": 2.754932228680997, - "learning_rate": 2.534797852190474e-06, - "loss": 0.8542, - "num_input_tokens_seen": 76341130, - "step": 3588 - }, - { - "epoch": 0.4315517345036975, - "flos": 14077065833160.0, - "grad_norm": 2.445014521081783, - "learning_rate": 2.5340472055382283e-06, - "loss": 0.787, - "num_input_tokens_seen": 76356880, - "step": 3589 - }, - { - "epoch": 0.43167197739433655, - "flos": 17776581702000.0, - "grad_norm": 4.322594590042902, - "learning_rate": 2.5332964778714468e-06, - "loss": 0.796, - "num_input_tokens_seen": 76373785, - "step": 3590 - }, - { - "epoch": 0.43179222028497566, - "flos": 12295698013920.0, - "grad_norm": 2.3463742632469224, - "learning_rate": 2.5325456693040123e-06, - "loss": 0.6493, - "num_input_tokens_seen": 76390700, - "step": 3591 - }, - { - "epoch": 0.43191246317561477, - "flos": 12867319253040.0, - "grad_norm": 3.631074091578512, - "learning_rate": 2.531794779949824e-06, - "loss": 0.7377, - "num_input_tokens_seen": 76408320, - "step": 3592 - }, - { - "epoch": 0.4320327060662538, - "flos": 17485106195280.0, - "grad_norm": 2.9009541487490855, - "learning_rate": 2.5310438099227903e-06, - "loss": 0.8638, - "num_input_tokens_seen": 76425305, - "step": 3593 - }, - { - "epoch": 0.43215294895689293, - "flos": 48956495423880.0, - "grad_norm": 1.3162433738577939, - "learning_rate": 2.530292759336833e-06, - "loss": 0.5535, - "num_input_tokens_seen": 76485760, - "step": 3594 - }, - { - "epoch": 0.432273191847532, - "flos": 15054215449440.0, - "grad_norm": 3.9628236030788266, - "learning_rate": 2.5295416283058855e-06, - "loss": 0.6897, - "num_input_tokens_seen": 76504345, - "step": 3595 - }, - { - "epoch": 0.4323934347381711, - "flos": 14083334505240.0, - "grad_norm": 2.965711142572175, - "learning_rate": 2.5287904169438943e-06, - "loss": 0.6459, - "num_input_tokens_seen": 76523270, - "step": 3596 - }, - { - "epoch": 0.4325136776288102, - "flos": 15892147018440.0, - "grad_norm": 4.649458880369739, - "learning_rate": 2.528039125364817e-06, - "loss": 0.6197, - "num_input_tokens_seen": 76541795, - "step": 3597 - }, - { - "epoch": 0.43263392051944927, - "flos": 16346042831760.0, - "grad_norm": 2.8071408393461574, - "learning_rate": 2.5272877536826246e-06, - "loss": 0.73, - "num_input_tokens_seen": 76560310, - "step": 3598 - }, - { - "epoch": 0.4327541634100884, - "flos": 21399652241520.0, - "grad_norm": 3.544160047486528, - "learning_rate": 2.5265363020112986e-06, - "loss": 0.6726, - "num_input_tokens_seen": 76580350, - "step": 3599 - }, - { - "epoch": 0.4328744063007275, - "flos": 19104781887000.0, - "grad_norm": 2.4665809470395126, - "learning_rate": 2.5257847704648344e-06, - "loss": 0.8301, - "num_input_tokens_seen": 76601300, - "step": 3600 - }, - { - "epoch": 0.43299464919136654, - "flos": 12084467031720.0, - "grad_norm": 5.683763555238331, - "learning_rate": 2.525033159157239e-06, - "loss": 0.7541, - "num_input_tokens_seen": 76617335, - "step": 3601 - }, - { - "epoch": 0.43311489208200565, - "flos": 11733638082720.0, - "grad_norm": 2.9217653304953113, - "learning_rate": 2.52428146820253e-06, - "loss": 0.7523, - "num_input_tokens_seen": 76635310, - "step": 3602 - }, - { - "epoch": 0.43323513497264476, - "flos": 16783100037720.0, - "grad_norm": 3.147679239976386, - "learning_rate": 2.52352969771474e-06, - "loss": 0.8043, - "num_input_tokens_seen": 76654255, - "step": 3603 - }, - { - "epoch": 0.4333553778632838, - "flos": 18535788424560.0, - "grad_norm": 
2.4356575874182713, - "learning_rate": 2.5227778478079106e-06, - "loss": 0.8731, - "num_input_tokens_seen": 76673385, - "step": 3604 - }, - { - "epoch": 0.43347562075392293, - "flos": 14160444693720.0, - "grad_norm": 2.3453043551878583, - "learning_rate": 2.522025918596098e-06, - "loss": 0.7566, - "num_input_tokens_seen": 76691405, - "step": 3605 - }, - { - "epoch": 0.43359586364456204, - "flos": 19296546485160.0, - "grad_norm": 2.218954264949538, - "learning_rate": 2.521273910193368e-06, - "loss": 0.6394, - "num_input_tokens_seen": 76714305, - "step": 3606 - }, - { - "epoch": 0.4337161065352011, - "flos": 11645098648680.0, - "grad_norm": 3.201055388457936, - "learning_rate": 2.5205218227138006e-06, - "loss": 0.8522, - "num_input_tokens_seen": 76726980, - "step": 3607 - }, - { - "epoch": 0.4338363494258402, - "flos": 14780053449480.0, - "grad_norm": 2.678127357366141, - "learning_rate": 2.519769656271486e-06, - "loss": 0.7743, - "num_input_tokens_seen": 76744120, - "step": 3608 - }, - { - "epoch": 0.43395659231647926, - "flos": 14676575005680.0, - "grad_norm": 4.046731482358029, - "learning_rate": 2.5190174109805285e-06, - "loss": 0.6571, - "num_input_tokens_seen": 76763665, - "step": 3609 - }, - { - "epoch": 0.43407683520711837, - "flos": 14541852672720.0, - "grad_norm": 2.9480835201280153, - "learning_rate": 2.518265086955042e-06, - "loss": 0.6194, - "num_input_tokens_seen": 76781105, - "step": 3610 - }, - { - "epoch": 0.4341970780977575, - "flos": 16914656374680.0, - "grad_norm": 7.831938707370968, - "learning_rate": 2.5175126843091534e-06, - "loss": 0.8116, - "num_input_tokens_seen": 76800195, - "step": 3611 - }, - { - "epoch": 0.43431732098839654, - "flos": 27496883709480.0, - "grad_norm": 3.507088703340094, - "learning_rate": 2.5167602031570034e-06, - "loss": 0.7302, - "num_input_tokens_seen": 76820100, - "step": 3612 - }, - { - "epoch": 0.43443756387903565, - "flos": 23397664896120.0, - "grad_norm": 2.6079964108354643, - "learning_rate": 2.51600764361274e-06, - "loss": 0.7224, - "num_input_tokens_seen": 76841345, - "step": 3613 - }, - { - "epoch": 0.43455780676967476, - "flos": 17187805255920.0, - "grad_norm": 2.763883052885794, - "learning_rate": 2.5152550057905283e-06, - "loss": 0.7755, - "num_input_tokens_seen": 76860955, - "step": 3614 - }, - { - "epoch": 0.4346780496603138, - "flos": 17728024306080.0, - "grad_norm": 3.212594822771819, - "learning_rate": 2.5145022898045415e-06, - "loss": 0.7332, - "num_input_tokens_seen": 76879860, - "step": 3615 - }, - { - "epoch": 0.4347982925509529, - "flos": 12462677354760.0, - "grad_norm": 2.828502650890352, - "learning_rate": 2.5137494957689664e-06, - "loss": 0.8805, - "num_input_tokens_seen": 76895190, - "step": 3616 - }, - { - "epoch": 0.43491853544159204, - "flos": 44921754474960.0, - "grad_norm": 0.7528284699997451, - "learning_rate": 2.5129966237980016e-06, - "loss": 0.5917, - "num_input_tokens_seen": 76957905, - "step": 3617 - }, - { - "epoch": 0.4350387783322311, - "flos": 16053649186200.0, - "grad_norm": 3.3235498062334137, - "learning_rate": 2.512243674005857e-06, - "loss": 0.7612, - "num_input_tokens_seen": 76976990, - "step": 3618 - }, - { - "epoch": 0.4351590212228702, - "flos": 18376059214560.0, - "grad_norm": 3.0068833538503132, - "learning_rate": 2.5114906465067537e-06, - "loss": 0.8487, - "num_input_tokens_seen": 76997695, - "step": 3619 - }, - { - "epoch": 0.4352792641135093, - "flos": 15729378452280.0, - "grad_norm": 8.061936469677324, - "learning_rate": 2.5107375414149264e-06, - "loss": 0.7255, - "num_input_tokens_seen": 
77016660, - "step": 3620 - }, - { - "epoch": 0.43539950700414837, - "flos": 11841865520520.0, - "grad_norm": 3.40885587633448, - "learning_rate": 2.5099843588446197e-06, - "loss": 0.6854, - "num_input_tokens_seen": 77034700, - "step": 3621 - }, - { - "epoch": 0.4355197498947875, - "flos": 12166357874160.0, - "grad_norm": 2.0508297287796795, - "learning_rate": 2.509231098910091e-06, - "loss": 0.6014, - "num_input_tokens_seen": 77054290, - "step": 3622 - }, - { - "epoch": 0.4356399927854266, - "flos": 11922711584280.0, - "grad_norm": 3.0101938308203486, - "learning_rate": 2.508477761725611e-06, - "loss": 0.7179, - "num_input_tokens_seen": 77072285, - "step": 3623 - }, - { - "epoch": 0.43576023567606564, - "flos": 12543871678080.0, - "grad_norm": 2.185697848617054, - "learning_rate": 2.507724347405458e-06, - "loss": 0.7875, - "num_input_tokens_seen": 77089955, - "step": 3624 - }, - { - "epoch": 0.43588047856670475, - "flos": 11592298818120.0, - "grad_norm": 3.056729231942553, - "learning_rate": 2.5069708560639243e-06, - "loss": 0.7994, - "num_input_tokens_seen": 77107585, - "step": 3625 - }, - { - "epoch": 0.4360007214573438, - "flos": 17322812528520.0, - "grad_norm": 3.7589803613797677, - "learning_rate": 2.5062172878153158e-06, - "loss": 0.5968, - "num_input_tokens_seen": 77126580, - "step": 3626 - }, - { - "epoch": 0.4361209643479829, - "flos": 16078561083360.0, - "grad_norm": 2.858647154845838, - "learning_rate": 2.505463642773947e-06, - "loss": 0.8572, - "num_input_tokens_seen": 77146265, - "step": 3627 - }, - { - "epoch": 0.43624120723862203, - "flos": 12705183886080.0, - "grad_norm": 2.728623624942927, - "learning_rate": 2.504709921054146e-06, - "loss": 0.7321, - "num_input_tokens_seen": 77162800, - "step": 3628 - }, - { - "epoch": 0.4363614501292611, - "flos": 13056930973920.0, - "grad_norm": 3.4989528451921754, - "learning_rate": 2.50395612277025e-06, - "loss": 0.8305, - "num_input_tokens_seen": 77178375, - "step": 3629 - }, - { - "epoch": 0.4364816930199002, - "flos": 14838647052720.0, - "grad_norm": 2.7822299045423504, - "learning_rate": 2.503202248036612e-06, - "loss": 0.7221, - "num_input_tokens_seen": 77196950, - "step": 3630 - }, - { - "epoch": 0.4366019359105393, - "flos": 17620208447760.0, - "grad_norm": 2.3712134040417627, - "learning_rate": 2.5024482969675927e-06, - "loss": 0.7078, - "num_input_tokens_seen": 77216625, - "step": 3631 - }, - { - "epoch": 0.43672217880117836, - "flos": 15911486762640.0, - "grad_norm": 2.5918719374963066, - "learning_rate": 2.501694269677566e-06, - "loss": 0.8293, - "num_input_tokens_seen": 77234115, - "step": 3632 - }, - { - "epoch": 0.4368424216918175, - "flos": 13159617918720.0, - "grad_norm": 2.796458463712633, - "learning_rate": 2.500940166280918e-06, - "loss": 0.78, - "num_input_tokens_seen": 77252265, - "step": 3633 - }, - { - "epoch": 0.4369626645824566, - "flos": 18645693840240.0, - "grad_norm": 3.3132736454224068, - "learning_rate": 2.500185986892045e-06, - "loss": 0.7655, - "num_input_tokens_seen": 77271470, - "step": 3634 - }, - { - "epoch": 0.43708290747309564, - "flos": 18539017740480.0, - "grad_norm": 2.9509432367691013, - "learning_rate": 2.499431731625355e-06, - "loss": 0.7633, - "num_input_tokens_seen": 77290215, - "step": 3635 - }, - { - "epoch": 0.43720315036373475, - "flos": 23180576821320.0, - "grad_norm": 2.256113436045207, - "learning_rate": 2.4986774005952686e-06, - "loss": 0.7849, - "num_input_tokens_seen": 77312310, - "step": 3636 - }, - { - "epoch": 0.43732339325437386, - "flos": 16917537431040.0, - "grad_norm": 
2.7140882140562783, - "learning_rate": 2.4979229939162166e-06, - "loss": 0.8257, - "num_input_tokens_seen": 77330810, - "step": 3637 - }, - { - "epoch": 0.4374436361450129, - "flos": 20347292034360.0, - "grad_norm": 1.799800382247801, - "learning_rate": 2.4971685117026433e-06, - "loss": 0.7851, - "num_input_tokens_seen": 77350295, - "step": 3638 - }, - { - "epoch": 0.437563879035652, - "flos": 17702605849560.0, - "grad_norm": 1.9112352939237032, - "learning_rate": 2.4964139540690018e-06, - "loss": 0.7533, - "num_input_tokens_seen": 77373350, - "step": 3639 - }, - { - "epoch": 0.4376841219262911, - "flos": 17187868575840.0, - "grad_norm": 2.733607505988315, - "learning_rate": 2.495659321129758e-06, - "loss": 0.7109, - "num_input_tokens_seen": 77390815, - "step": 3640 - }, - { - "epoch": 0.4378043648169302, - "flos": 18646295379480.0, - "grad_norm": 2.128295250803108, - "learning_rate": 2.494904612999389e-06, - "loss": 0.7399, - "num_input_tokens_seen": 77409245, - "step": 3641 - }, - { - "epoch": 0.4379246077075693, - "flos": 39719433521280.0, - "grad_norm": 0.7992909403515303, - "learning_rate": 2.4941498297923843e-06, - "loss": 0.6, - "num_input_tokens_seen": 77469535, - "step": 3642 - }, - { - "epoch": 0.43804485059820836, - "flos": 15048959896080.0, - "grad_norm": 3.482821952358612, - "learning_rate": 2.4933949716232424e-06, - "loss": 0.6933, - "num_input_tokens_seen": 77486780, - "step": 3643 - }, - { - "epoch": 0.43816509348884747, - "flos": 17482826678160.0, - "grad_norm": 2.5997572125271455, - "learning_rate": 2.492640038606476e-06, - "loss": 0.7109, - "num_input_tokens_seen": 77504865, - "step": 3644 - }, - { - "epoch": 0.4382853363794866, - "flos": 10680676336320.0, - "grad_norm": 6.188453061464174, - "learning_rate": 2.491885030856608e-06, - "loss": 0.7756, - "num_input_tokens_seen": 77522680, - "step": 3645 - }, - { - "epoch": 0.43840557927012563, - "flos": 12516648603840.0, - "grad_norm": 2.887112354498446, - "learning_rate": 2.4911299484881713e-06, - "loss": 0.8082, - "num_input_tokens_seen": 77539930, - "step": 3646 - }, - { - "epoch": 0.43852582216076474, - "flos": 14163547369800.0, - "grad_norm": 3.6740670661349557, - "learning_rate": 2.490374791615712e-06, - "loss": 0.802, - "num_input_tokens_seen": 77559675, - "step": 3647 - }, - { - "epoch": 0.43864606505140386, - "flos": 13189658729400.0, - "grad_norm": 3.3843457841536746, - "learning_rate": 2.4896195603537867e-06, - "loss": 0.7586, - "num_input_tokens_seen": 77574005, - "step": 3648 - }, - { - "epoch": 0.4387663079420429, - "flos": 14351512772760.0, - "grad_norm": 3.1543659592376665, - "learning_rate": 2.488864254816964e-06, - "loss": 0.7224, - "num_input_tokens_seen": 77592415, - "step": 3649 - }, - { - "epoch": 0.438886550832682, - "flos": 14406813740160.0, - "grad_norm": 3.1304533106720904, - "learning_rate": 2.4881088751198218e-06, - "loss": 0.654, - "num_input_tokens_seen": 77610295, - "step": 3650 - }, - { - "epoch": 0.43900679372332113, - "flos": 10571055860280.0, - "grad_norm": 5.642447982771419, - "learning_rate": 2.4873534213769517e-06, - "loss": 0.613, - "num_input_tokens_seen": 77625245, - "step": 3651 - }, - { - "epoch": 0.4391270366139602, - "flos": 17615712733440.0, - "grad_norm": 2.0619661044749056, - "learning_rate": 2.4865978937029547e-06, - "loss": 0.701, - "num_input_tokens_seen": 77643945, - "step": 3652 - }, - { - "epoch": 0.4392472795045993, - "flos": 23153480386920.0, - "grad_norm": 2.3307615366700025, - "learning_rate": 2.485842292212445e-06, - "loss": 0.6523, - "num_input_tokens_seen": 
77664880, - "step": 3653 - }, - { - "epoch": 0.4393675223952384, - "flos": 10815177049560.0, - "grad_norm": 4.847443384604235, - "learning_rate": 2.485086617020045e-06, - "loss": 0.7778, - "num_input_tokens_seen": 77683095, - "step": 3654 - }, - { - "epoch": 0.43948776528587746, - "flos": 10784724659400.0, - "grad_norm": 3.7142181992944647, - "learning_rate": 2.4843308682403903e-06, - "loss": 0.7973, - "num_input_tokens_seen": 77699730, - "step": 3655 - }, - { - "epoch": 0.4396080081765166, - "flos": 10110353155560.0, - "grad_norm": 1.7504611770042477, - "learning_rate": 2.4835750459881294e-06, - "loss": 0.8126, - "num_input_tokens_seen": 77716075, - "step": 3656 - }, - { - "epoch": 0.43972825106715563, - "flos": 13299342525360.0, - "grad_norm": 2.1542313566646247, - "learning_rate": 2.4828191503779177e-06, - "loss": 0.795, - "num_input_tokens_seen": 77733895, - "step": 3657 - }, - { - "epoch": 0.43984849395779474, - "flos": 12300478667880.0, - "grad_norm": 22.751809519811413, - "learning_rate": 2.482063181524425e-06, - "loss": 0.8783, - "num_input_tokens_seen": 77749515, - "step": 3658 - }, - { - "epoch": 0.43996873684843385, - "flos": 13645264180560.0, - "grad_norm": 2.1708837209212155, - "learning_rate": 2.4813071395423307e-06, - "loss": 0.7874, - "num_input_tokens_seen": 77766800, - "step": 3659 - }, - { - "epoch": 0.4400889797390729, - "flos": 17317430335320.0, - "grad_norm": 2.8232127148182746, - "learning_rate": 2.4805510245463263e-06, - "loss": 0.6382, - "num_input_tokens_seen": 77786675, - "step": 3660 - }, - { - "epoch": 0.440209222629712, - "flos": 16945172084760.0, - "grad_norm": 2.888079703132008, - "learning_rate": 2.4797948366511137e-06, - "loss": 0.563, - "num_input_tokens_seen": 77806105, - "step": 3661 - }, - { - "epoch": 0.4403294655203511, - "flos": 18183598097280.0, - "grad_norm": 2.498079229627014, - "learning_rate": 2.4790385759714055e-06, - "loss": 0.7347, - "num_input_tokens_seen": 77824890, - "step": 3662 - }, - { - "epoch": 0.4404497084109902, - "flos": 16512832212840.0, - "grad_norm": 2.641228601212842, - "learning_rate": 2.478282242621926e-06, - "loss": 0.6981, - "num_input_tokens_seen": 77845070, - "step": 3663 - }, - { - "epoch": 0.4405699513016293, - "flos": 47899101283080.0, - "grad_norm": 0.9147960295312884, - "learning_rate": 2.477525836717411e-06, - "loss": 0.6271, - "num_input_tokens_seen": 77912555, - "step": 3664 - }, - { - "epoch": 0.4406901941922684, - "flos": 26205182967000.0, - "grad_norm": 3.224335647878214, - "learning_rate": 2.476769358372606e-06, - "loss": 0.7754, - "num_input_tokens_seen": 77933925, - "step": 3665 - }, - { - "epoch": 0.44081043708290746, - "flos": 13164525212520.0, - "grad_norm": 2.820228216121795, - "learning_rate": 2.4760128077022683e-06, - "loss": 0.7278, - "num_input_tokens_seen": 77951780, - "step": 3666 - }, - { - "epoch": 0.44093067997354657, - "flos": 22128691513560.0, - "grad_norm": 2.266232534047745, - "learning_rate": 2.4752561848211672e-06, - "loss": 0.6719, - "num_input_tokens_seen": 77973900, - "step": 3667 - }, - { - "epoch": 0.4410509228641857, - "flos": 17023010452320.0, - "grad_norm": 4.6826739072099235, - "learning_rate": 2.4744994898440797e-06, - "loss": 0.6947, - "num_input_tokens_seen": 77992410, - "step": 3668 - }, - { - "epoch": 0.44117116575482473, - "flos": 14244931652880.0, - "grad_norm": 2.465992316865979, - "learning_rate": 2.473742722885797e-06, - "loss": 0.8223, - "num_input_tokens_seen": 78011150, - "step": 3669 - }, - { - "epoch": 0.44129140864546385, - "flos": 19834327718400.0, - "grad_norm": 
4.762536220462864, - "learning_rate": 2.4729858840611197e-06, - "loss": 0.6384, - "num_input_tokens_seen": 78029780, - "step": 3670 - }, - { - "epoch": 0.4414116515361029, - "flos": 19130168683560.0, - "grad_norm": 4.165101685673707, - "learning_rate": 2.4722289734848605e-06, - "loss": 0.7122, - "num_input_tokens_seen": 78049965, - "step": 3671 - }, - { - "epoch": 0.441531894426742, - "flos": 16024811454000.0, - "grad_norm": 4.707752270697847, - "learning_rate": 2.471471991271841e-06, - "loss": 0.7665, - "num_input_tokens_seen": 78066810, - "step": 3672 - }, - { - "epoch": 0.4416521373173811, - "flos": 17157669465360.0, - "grad_norm": 2.0162534695032, - "learning_rate": 2.470714937536896e-06, - "loss": 0.7805, - "num_input_tokens_seen": 78085255, - "step": 3673 - }, - { - "epoch": 0.4417723802080202, - "flos": 14861216112840.0, - "grad_norm": 2.9206640008517897, - "learning_rate": 2.469957812394868e-06, - "loss": 0.7001, - "num_input_tokens_seen": 78103785, - "step": 3674 - }, - { - "epoch": 0.4418926230986593, - "flos": 13786223525640.0, - "grad_norm": 2.5482916139423133, - "learning_rate": 2.4692006159606148e-06, - "loss": 0.7505, - "num_input_tokens_seen": 78121035, - "step": 3675 - }, - { - "epoch": 0.4420128659892984, - "flos": 14218215138000.0, - "grad_norm": 3.0565421871629286, - "learning_rate": 2.468443348349e-06, - "loss": 0.7662, - "num_input_tokens_seen": 78138630, - "step": 3676 - }, - { - "epoch": 0.44213310887993745, - "flos": 13056107814960.0, - "grad_norm": 3.293873075614304, - "learning_rate": 2.467686009674902e-06, - "loss": 0.8071, - "num_input_tokens_seen": 78152800, - "step": 3677 - }, - { - "epoch": 0.44225335177057656, - "flos": 14025374101200.0, - "grad_norm": 2.436242292616089, - "learning_rate": 2.466928600053209e-06, - "loss": 0.8348, - "num_input_tokens_seen": 78167825, - "step": 3678 - }, - { - "epoch": 0.4423735946612157, - "flos": 17182676342400.0, - "grad_norm": 2.9854271117668993, - "learning_rate": 2.466171119598818e-06, - "loss": 0.694, - "num_input_tokens_seen": 78187515, - "step": 3679 - }, - { - "epoch": 0.44249383755185473, - "flos": 19561780376400.0, - "grad_norm": 2.7469869927152866, - "learning_rate": 2.465413568426639e-06, - "loss": 0.7474, - "num_input_tokens_seen": 78208185, - "step": 3680 - }, - { - "epoch": 0.44261408044249384, - "flos": 16943810706480.0, - "grad_norm": 6.649864034768545, - "learning_rate": 2.464655946651591e-06, - "loss": 0.8084, - "num_input_tokens_seen": 78226910, - "step": 3681 - }, - { - "epoch": 0.44273432333313295, - "flos": 17917129467600.0, - "grad_norm": 2.5222225512235017, - "learning_rate": 2.4638982543886065e-06, - "loss": 0.7857, - "num_input_tokens_seen": 78246670, - "step": 3682 - }, - { - "epoch": 0.442854566223772, - "flos": 12785586710400.0, - "grad_norm": 4.172050329517415, - "learning_rate": 2.4631404917526254e-06, - "loss": 0.8463, - "num_input_tokens_seen": 78263345, - "step": 3683 - }, - { - "epoch": 0.4429748091144111, - "flos": 18237537686400.0, - "grad_norm": 2.3325288839074845, - "learning_rate": 2.4623826588586e-06, - "loss": 0.7718, - "num_input_tokens_seen": 78283335, - "step": 3684 - }, - { - "epoch": 0.4430950520050502, - "flos": 15809243057280.0, - "grad_norm": 2.365335678085805, - "learning_rate": 2.461624755821492e-06, - "loss": 0.8166, - "num_input_tokens_seen": 78302535, - "step": 3685 - }, - { - "epoch": 0.4432152948956893, - "flos": 17998355450880.0, - "grad_norm": 2.275324297417128, - "learning_rate": 2.4608667827562763e-06, - "loss": 0.761, - "num_input_tokens_seen": 78321585, - 
"step": 3686 - }, - { - "epoch": 0.4433355377863284, - "flos": 15918673573560.0, - "grad_norm": 2.4236226329025583, - "learning_rate": 2.460108739777936e-06, - "loss": 0.8771, - "num_input_tokens_seen": 78340440, - "step": 3687 - }, - { - "epoch": 0.44345578067696745, - "flos": 14677081565040.0, - "grad_norm": 2.8060079457448532, - "learning_rate": 2.4593506270014656e-06, - "loss": 0.7501, - "num_input_tokens_seen": 78359130, - "step": 3688 - }, - { - "epoch": 0.44357602356760656, - "flos": 17700073052760.0, - "grad_norm": 3.788112371814186, - "learning_rate": 2.45859244454187e-06, - "loss": 0.8027, - "num_input_tokens_seen": 78378640, - "step": 3689 - }, - { - "epoch": 0.44369626645824567, - "flos": 16617925314600.0, - "grad_norm": 2.7442054387919668, - "learning_rate": 2.4578341925141655e-06, - "loss": 0.6504, - "num_input_tokens_seen": 78397575, - "step": 3690 - }, - { - "epoch": 0.4438165093488847, - "flos": 27957396454440.0, - "grad_norm": 2.3089730456246436, - "learning_rate": 2.457075871033378e-06, - "loss": 0.701, - "num_input_tokens_seen": 78419170, - "step": 3691 - }, - { - "epoch": 0.44393675223952384, - "flos": 11301361530720.0, - "grad_norm": 2.719251915213733, - "learning_rate": 2.4563174802145445e-06, - "loss": 0.8654, - "num_input_tokens_seen": 78436140, - "step": 3692 - }, - { - "epoch": 0.44405699513016295, - "flos": 47610158573160.0, - "grad_norm": 0.6309555500050327, - "learning_rate": 2.455559020172712e-06, - "loss": 0.4911, - "num_input_tokens_seen": 78503215, - "step": 3693 - }, - { - "epoch": 0.444177238020802, - "flos": 17565350719800.0, - "grad_norm": 4.398667541234011, - "learning_rate": 2.4548004910229385e-06, - "loss": 0.8741, - "num_input_tokens_seen": 78520510, - "step": 3694 - }, - { - "epoch": 0.4442974809114411, - "flos": 16511059255080.0, - "grad_norm": 2.150658364739726, - "learning_rate": 2.4540418928802913e-06, - "loss": 0.8526, - "num_input_tokens_seen": 78538965, - "step": 3695 - }, - { - "epoch": 0.4444177238020802, - "flos": 12894320707560.0, - "grad_norm": 2.6909748944231664, - "learning_rate": 2.4532832258598506e-06, - "loss": 0.635, - "num_input_tokens_seen": 78556515, - "step": 3696 - }, - { - "epoch": 0.4445379666927193, - "flos": 20995042003200.0, - "grad_norm": 8.860373896508968, - "learning_rate": 2.4525244900767047e-06, - "loss": 0.7986, - "num_input_tokens_seen": 78577050, - "step": 3697 - }, - { - "epoch": 0.4446582095833584, - "flos": 51986040523320.0, - "grad_norm": 0.8245402789204387, - "learning_rate": 2.4517656856459536e-06, - "loss": 0.6253, - "num_input_tokens_seen": 78642615, - "step": 3698 - }, - { - "epoch": 0.4447784524739975, - "flos": 19428736021320.0, - "grad_norm": 3.8715384003741056, - "learning_rate": 2.4510068126827073e-06, - "loss": 0.6649, - "num_input_tokens_seen": 78663335, - "step": 3699 - }, - { - "epoch": 0.44489869536463655, - "flos": 8439587271120.0, - "grad_norm": 3.7192290181899383, - "learning_rate": 2.450247871302086e-06, - "loss": 0.8044, - "num_input_tokens_seen": 78680830, - "step": 3700 - }, - { - "epoch": 0.44501893825527566, - "flos": 14946336271200.0, - "grad_norm": 3.778068149539645, - "learning_rate": 2.44948886161922e-06, - "loss": 0.8167, - "num_input_tokens_seen": 78699565, - "step": 3701 - }, - { - "epoch": 0.4451391811459148, - "flos": 13327768678080.0, - "grad_norm": 2.3686642714313937, - "learning_rate": 2.4487297837492524e-06, - "loss": 0.8344, - "num_input_tokens_seen": 78718450, - "step": 3702 - }, - { - "epoch": 0.44525942403655383, - "flos": 12327860041920.0, - "grad_norm": 
2.9293603332946723, - "learning_rate": 2.4479706378073323e-06, - "loss": 0.5923, - "num_input_tokens_seen": 78736710, - "step": 3703 - }, - { - "epoch": 0.44537966692719294, - "flos": 17026334748120.0, - "grad_norm": 2.114658548555189, - "learning_rate": 2.447211423908623e-06, - "loss": 0.825, - "num_input_tokens_seen": 78756475, - "step": 3704 - }, - { - "epoch": 0.445499909817832, - "flos": 15890247420840.0, - "grad_norm": 2.5016611302581593, - "learning_rate": 2.4464521421682966e-06, - "loss": 0.7316, - "num_input_tokens_seen": 78773785, - "step": 3705 - }, - { - "epoch": 0.4456201527084711, - "flos": 17565857279160.0, - "grad_norm": 1.8413391515380697, - "learning_rate": 2.4456927927015345e-06, - "loss": 0.8634, - "num_input_tokens_seen": 78794545, - "step": 3706 - }, - { - "epoch": 0.4457403955991102, - "flos": 13731650737320.0, - "grad_norm": 3.655984461344413, - "learning_rate": 2.4449333756235307e-06, - "loss": 0.7347, - "num_input_tokens_seen": 78810980, - "step": 3707 - }, - { - "epoch": 0.4458606384897493, - "flos": 14028096857760.0, - "grad_norm": 4.150445771692453, - "learning_rate": 2.4441738910494876e-06, - "loss": 0.7851, - "num_input_tokens_seen": 78825435, - "step": 3708 - }, - { - "epoch": 0.4459808813803884, - "flos": 15621974173440.0, - "grad_norm": 2.811333986811313, - "learning_rate": 2.4434143390946176e-06, - "loss": 0.8105, - "num_input_tokens_seen": 78843965, - "step": 3709 - }, - { - "epoch": 0.4461011242710275, - "flos": 17047732389720.0, - "grad_norm": 2.472354141268482, - "learning_rate": 2.4426547198741457e-06, - "loss": 0.837, - "num_input_tokens_seen": 78861890, - "step": 3710 - }, - { - "epoch": 0.44622136716166655, - "flos": 14757262769640.0, - "grad_norm": 2.9083490482404906, - "learning_rate": 2.441895033503305e-06, - "loss": 0.7257, - "num_input_tokens_seen": 78879530, - "step": 3711 - }, - { - "epoch": 0.44634161005230566, - "flos": 15563317250280.0, - "grad_norm": 2.9636086365295764, - "learning_rate": 2.4411352800973375e-06, - "loss": 0.8157, - "num_input_tokens_seen": 78897685, - "step": 3712 - }, - { - "epoch": 0.44646185294294477, - "flos": 16782340198680.0, - "grad_norm": 3.02607946760737, - "learning_rate": 2.4403754597715005e-06, - "loss": 0.7205, - "num_input_tokens_seen": 78916850, - "step": 3713 - }, - { - "epoch": 0.4465820958335838, - "flos": 16566518522280.0, - "grad_norm": 3.039538348681903, - "learning_rate": 2.4396155726410553e-06, - "loss": 0.9113, - "num_input_tokens_seen": 78935180, - "step": 3714 - }, - { - "epoch": 0.44670233872422294, - "flos": 16592063618640.0, - "grad_norm": 3.1084890972582544, - "learning_rate": 2.438855618821278e-06, - "loss": 0.9005, - "num_input_tokens_seen": 78950700, - "step": 3715 - }, - { - "epoch": 0.44682258161486205, - "flos": 17264915444400.0, - "grad_norm": 5.0682116654174605, - "learning_rate": 2.4380955984274517e-06, - "loss": 0.6552, - "num_input_tokens_seen": 78969075, - "step": 3716 - }, - { - "epoch": 0.4469428245055011, - "flos": 19425348405600.0, - "grad_norm": 2.1490180323622154, - "learning_rate": 2.4373355115748716e-06, - "loss": 0.7484, - "num_input_tokens_seen": 78989625, - "step": 3717 - }, - { - "epoch": 0.4470630673961402, - "flos": 15727953754080.0, - "grad_norm": 2.138315333079778, - "learning_rate": 2.436575358378842e-06, - "loss": 0.7125, - "num_input_tokens_seen": 79008835, - "step": 3718 - }, - { - "epoch": 0.44718331028677927, - "flos": 11782132158720.0, - "grad_norm": 6.840997011569636, - "learning_rate": 2.4358151389546782e-06, - "loss": 0.8093, - "num_input_tokens_seen": 
79025240, - "step": 3719 - }, - { - "epoch": 0.4473035531774184, - "flos": 14378387587440.0, - "grad_norm": 3.597334028263275, - "learning_rate": 2.4350548534177035e-06, - "loss": 0.7333, - "num_input_tokens_seen": 79041790, - "step": 3720 - }, - { - "epoch": 0.4474237960680575, - "flos": 30472267600080.0, - "grad_norm": 2.3528416598536275, - "learning_rate": 2.434294501883254e-06, - "loss": 0.6597, - "num_input_tokens_seen": 79064605, - "step": 3721 - }, - { - "epoch": 0.44754403895869654, - "flos": 16753977365880.0, - "grad_norm": 1.9305209317597793, - "learning_rate": 2.433534084466674e-06, - "loss": 0.6484, - "num_input_tokens_seen": 79083545, - "step": 3722 - }, - { - "epoch": 0.44766428184933565, - "flos": 18781334312040.0, - "grad_norm": 1.9147122895566255, - "learning_rate": 2.4327736012833178e-06, - "loss": 0.7046, - "num_input_tokens_seen": 79104985, - "step": 3723 - }, - { - "epoch": 0.44778452473997477, - "flos": 14946779510640.0, - "grad_norm": 2.9844596810834445, - "learning_rate": 2.4320130524485506e-06, - "loss": 0.7453, - "num_input_tokens_seen": 79123500, - "step": 3724 - }, - { - "epoch": 0.4479047676306138, - "flos": 16075648367040.0, - "grad_norm": 2.4055249992154697, - "learning_rate": 2.431252438077746e-06, - "loss": 0.7881, - "num_input_tokens_seen": 79142720, - "step": 3725 - }, - { - "epoch": 0.44802501052125293, - "flos": 15699875860920.0, - "grad_norm": 4.866620418032713, - "learning_rate": 2.4304917582862906e-06, - "loss": 0.7526, - "num_input_tokens_seen": 79161620, - "step": 3726 - }, - { - "epoch": 0.44814525341189204, - "flos": 16188561478920.0, - "grad_norm": 2.9607242718820217, - "learning_rate": 2.4297310131895774e-06, - "loss": 0.8701, - "num_input_tokens_seen": 79179885, - "step": 3727 - }, - { - "epoch": 0.4482654963025311, - "flos": 12138121681200.0, - "grad_norm": 4.8801695972426575, - "learning_rate": 2.4289702029030113e-06, - "loss": 0.7327, - "num_input_tokens_seen": 79197075, - "step": 3728 - }, - { - "epoch": 0.4483857391931702, - "flos": 13756625954400.0, - "grad_norm": 2.8228509104672583, - "learning_rate": 2.4282093275420057e-06, - "loss": 0.8137, - "num_input_tokens_seen": 79215825, - "step": 3729 - }, - { - "epoch": 0.4485059820838093, - "flos": 14889768905400.0, - "grad_norm": 2.9915573213477256, - "learning_rate": 2.4274483872219863e-06, - "loss": 0.6854, - "num_input_tokens_seen": 79232905, - "step": 3730 - }, - { - "epoch": 0.4486262249744484, - "flos": 14649858490800.0, - "grad_norm": 2.341925457628952, - "learning_rate": 2.426687382058386e-06, - "loss": 0.9177, - "num_input_tokens_seen": 79250905, - "step": 3731 - }, - { - "epoch": 0.4487464678650875, - "flos": 47623324625160.0, - "grad_norm": 0.9565346811431111, - "learning_rate": 2.425926312166649e-06, - "loss": 0.6106, - "num_input_tokens_seen": 79303500, - "step": 3732 - }, - { - "epoch": 0.4488667107557266, - "flos": 15183144009720.0, - "grad_norm": 2.9240988362068494, - "learning_rate": 2.42516517766223e-06, - "loss": 0.7049, - "num_input_tokens_seen": 79321300, - "step": 3733 - }, - { - "epoch": 0.44898695364636565, - "flos": 17539077444360.0, - "grad_norm": 9.004314236282664, - "learning_rate": 2.4244039786605907e-06, - "loss": 0.6641, - "num_input_tokens_seen": 79342025, - "step": 3734 - }, - { - "epoch": 0.44910719653700476, - "flos": 13596390185040.0, - "grad_norm": 3.081786326067025, - "learning_rate": 2.4236427152772055e-06, - "loss": 0.8075, - "num_input_tokens_seen": 79360150, - "step": 3735 - }, - { - "epoch": 0.4492274394276438, - "flos": 42025380370440.0, - 
"grad_norm": 0.9304578458593894, - "learning_rate": 2.422881387627557e-06, - "loss": 0.6016, - "num_input_tokens_seen": 79412320, - "step": 3736 - }, - { - "epoch": 0.4493476823182829, - "flos": 17022852152520.0, - "grad_norm": 1.8859562251086643, - "learning_rate": 2.422119995827139e-06, - "loss": 0.757, - "num_input_tokens_seen": 79432165, - "step": 3737 - }, - { - "epoch": 0.44946792520892204, - "flos": 11517404826840.0, - "grad_norm": 4.643913773412217, - "learning_rate": 2.4213585399914528e-06, - "loss": 0.7223, - "num_input_tokens_seen": 79449090, - "step": 3738 - }, - { - "epoch": 0.4495881680995611, - "flos": 14325967676400.0, - "grad_norm": 3.3670456008927485, - "learning_rate": 2.4205970202360113e-06, - "loss": 0.8394, - "num_input_tokens_seen": 79468375, - "step": 3739 - }, - { - "epoch": 0.4497084109902002, - "flos": 19078223671920.0, - "grad_norm": 2.553637180758486, - "learning_rate": 2.4198354366763354e-06, - "loss": 0.7677, - "num_input_tokens_seen": 79486735, - "step": 3740 - }, - { - "epoch": 0.4498286538808393, - "flos": 10761490740120.0, - "grad_norm": 3.582293219019243, - "learning_rate": 2.4190737894279587e-06, - "loss": 0.7617, - "num_input_tokens_seen": 79503825, - "step": 3741 - }, - { - "epoch": 0.44994889677147837, - "flos": 11247295301760.0, - "grad_norm": 3.3406094928092487, - "learning_rate": 2.4183120786064203e-06, - "loss": 0.7894, - "num_input_tokens_seen": 79520420, - "step": 3742 - }, - { - "epoch": 0.4500691396621175, - "flos": 15945295108560.0, - "grad_norm": 3.9686178350594497, - "learning_rate": 2.417550304327273e-06, - "loss": 0.8433, - "num_input_tokens_seen": 79538180, - "step": 3743 - }, - { - "epoch": 0.4501893825527566, - "flos": 23778344696040.0, - "grad_norm": 2.1457032716710422, - "learning_rate": 2.4167884667060763e-06, - "loss": 0.7413, - "num_input_tokens_seen": 79560610, - "step": 3744 - }, - { - "epoch": 0.45030962544339564, - "flos": 12057528897120.0, - "grad_norm": 3.1037122833381545, - "learning_rate": 2.4160265658584e-06, - "loss": 0.8587, - "num_input_tokens_seen": 79575220, - "step": 3745 - }, - { - "epoch": 0.45042986833403476, - "flos": 14298491322480.0, - "grad_norm": 4.029294744834022, - "learning_rate": 2.4152646018998253e-06, - "loss": 0.6609, - "num_input_tokens_seen": 79593890, - "step": 3746 - }, - { - "epoch": 0.45055011122467387, - "flos": 16917949010520.0, - "grad_norm": 3.6004218490122413, - "learning_rate": 2.4145025749459403e-06, - "loss": 0.6977, - "num_input_tokens_seen": 79614635, - "step": 3747 - }, - { - "epoch": 0.4506703541153129, - "flos": 14565624811320.0, - "grad_norm": 2.6362460694661327, - "learning_rate": 2.413740485112344e-06, - "loss": 0.6864, - "num_input_tokens_seen": 79632695, - "step": 3748 - }, - { - "epoch": 0.45079059700595203, - "flos": 14244013514040.0, - "grad_norm": 3.950895261109539, - "learning_rate": 2.412978332514646e-06, - "loss": 0.8067, - "num_input_tokens_seen": 79651195, - "step": 3749 - }, - { - "epoch": 0.4509108398965911, - "flos": 20265844431360.0, - "grad_norm": 3.972564666202508, - "learning_rate": 2.4122161172684623e-06, - "loss": 0.6938, - "num_input_tokens_seen": 79671710, - "step": 3750 - }, - { - "epoch": 0.4510310827872302, - "flos": 15351326429040.0, - "grad_norm": 2.4777542279449594, - "learning_rate": 2.4114538394894216e-06, - "loss": 0.8113, - "num_input_tokens_seen": 79689070, - "step": 3751 - }, - { - "epoch": 0.4511513256778693, - "flos": 11814610786320.0, - "grad_norm": 3.1854236796476325, - "learning_rate": 2.410691499293161e-06, - "loss": 0.8204, - 
"num_input_tokens_seen": 79706945, - "step": 3752 - }, - { - "epoch": 0.45127156856850836, - "flos": 18452251264200.0, - "grad_norm": 2.3189936013801735, - "learning_rate": 2.409929096795326e-06, - "loss": 0.7207, - "num_input_tokens_seen": 79727035, - "step": 3753 - }, - { - "epoch": 0.4513918114591475, - "flos": 14919809716080.0, - "grad_norm": 2.379082634060343, - "learning_rate": 2.409166632111573e-06, - "loss": 0.7725, - "num_input_tokens_seen": 79744890, - "step": 3754 - }, - { - "epoch": 0.4515120543497866, - "flos": 19534399002360.0, - "grad_norm": 2.7613437943007906, - "learning_rate": 2.4084041053575674e-06, - "loss": 0.7705, - "num_input_tokens_seen": 79764030, - "step": 3755 - }, - { - "epoch": 0.45163229724042564, - "flos": 15054943628520.0, - "grad_norm": 2.371156895910605, - "learning_rate": 2.4076415166489834e-06, - "loss": 0.7119, - "num_input_tokens_seen": 79783160, - "step": 3756 - }, - { - "epoch": 0.45175254013106475, - "flos": 15864417384840.0, - "grad_norm": 3.4680201131992012, - "learning_rate": 2.406878866101506e-06, - "loss": 0.7864, - "num_input_tokens_seen": 79801845, - "step": 3757 - }, - { - "epoch": 0.45187278302170386, - "flos": 13784608867680.0, - "grad_norm": 3.773487920009616, - "learning_rate": 2.4061161538308273e-06, - "loss": 0.7687, - "num_input_tokens_seen": 79818410, - "step": 3758 - }, - { - "epoch": 0.4519930259123429, - "flos": 13564639736520.0, - "grad_norm": 2.8514844073746026, - "learning_rate": 2.4053533799526523e-06, - "loss": 0.8796, - "num_input_tokens_seen": 79833850, - "step": 3759 - }, - { - "epoch": 0.452113268802982, - "flos": 18457253537880.0, - "grad_norm": 3.8210040443636615, - "learning_rate": 2.404590544582691e-06, - "loss": 0.8373, - "num_input_tokens_seen": 79851805, - "step": 3760 - }, - { - "epoch": 0.45223351169362114, - "flos": 28955342173080.0, - "grad_norm": 2.9917431693182004, - "learning_rate": 2.403827647836666e-06, - "loss": 0.7944, - "num_input_tokens_seen": 79872080, - "step": 3761 - }, - { - "epoch": 0.4523537545842602, - "flos": 15785724198360.0, - "grad_norm": 2.5364527039723264, - "learning_rate": 2.4030646898303075e-06, - "loss": 0.6749, - "num_input_tokens_seen": 79893290, - "step": 3762 - }, - { - "epoch": 0.4524739974748993, - "flos": 20860351330200.0, - "grad_norm": 2.5295624935938195, - "learning_rate": 2.4023016706793566e-06, - "loss": 0.8008, - "num_input_tokens_seen": 79912805, - "step": 3763 - }, - { - "epoch": 0.4525942403655384, - "flos": 45372579272160.0, - "grad_norm": 0.8234562895221771, - "learning_rate": 2.401538590499561e-06, - "loss": 0.5955, - "num_input_tokens_seen": 79972980, - "step": 3764 - }, - { - "epoch": 0.45271448325617747, - "flos": 20187467844480.0, - "grad_norm": 6.504911181278216, - "learning_rate": 2.400775449406682e-06, - "loss": 0.688, - "num_input_tokens_seen": 79995895, - "step": 3765 - }, - { - "epoch": 0.4528347261468166, - "flos": 16429168412640.0, - "grad_norm": 2.323261172716932, - "learning_rate": 2.400012247516485e-06, - "loss": 0.7193, - "num_input_tokens_seen": 80016180, - "step": 3766 - }, - { - "epoch": 0.45295496903745563, - "flos": 15431191034040.0, - "grad_norm": 2.210889188959812, - "learning_rate": 2.3992489849447484e-06, - "loss": 0.8744, - "num_input_tokens_seen": 80034355, - "step": 3767 - }, - { - "epoch": 0.45307521192809475, - "flos": 16998288514920.0, - "grad_norm": 2.525703758894073, - "learning_rate": 2.3984856618072584e-06, - "loss": 0.7766, - "num_input_tokens_seen": 80054110, - "step": 3768 - }, - { - "epoch": 0.45319545481873386, - "flos": 
11328711244800.0, - "grad_norm": 2.6073422309055867, - "learning_rate": 2.3977222782198098e-06, - "loss": 0.7201, - "num_input_tokens_seen": 80072465, - "step": 3769 - }, - { - "epoch": 0.4533156977093729, - "flos": 16053744166080.0, - "grad_norm": 3.7128601875642855, - "learning_rate": 2.3969588342982077e-06, - "loss": 0.7351, - "num_input_tokens_seen": 80091560, - "step": 3770 - }, - { - "epoch": 0.453435940600012, - "flos": 17753822682120.0, - "grad_norm": 2.4037268154158897, - "learning_rate": 2.396195330158267e-06, - "loss": 0.7125, - "num_input_tokens_seen": 80111170, - "step": 3771 - }, - { - "epoch": 0.45355618349065113, - "flos": 17160550521720.0, - "grad_norm": 3.172520914692699, - "learning_rate": 2.3954317659158094e-06, - "loss": 0.7758, - "num_input_tokens_seen": 80131225, - "step": 3772 - }, - { - "epoch": 0.4536764263812902, - "flos": 49332267930000.0, - "grad_norm": 0.9052791327798363, - "learning_rate": 2.394668141686667e-06, - "loss": 0.6164, - "num_input_tokens_seen": 80192910, - "step": 3773 - }, - { - "epoch": 0.4537966692719293, - "flos": 31443180204240.0, - "grad_norm": 2.27561138866684, - "learning_rate": 2.3939044575866813e-06, - "loss": 0.6629, - "num_input_tokens_seen": 80215380, - "step": 3774 - }, - { - "epoch": 0.4539169121625684, - "flos": 26121519166800.0, - "grad_norm": 2.9575765653681136, - "learning_rate": 2.3931407137317024e-06, - "loss": 0.7384, - "num_input_tokens_seen": 80235255, - "step": 3775 - }, - { - "epoch": 0.45403715505320746, - "flos": 13515227521680.0, - "grad_norm": 2.797838354162758, - "learning_rate": 2.3923769102375907e-06, - "loss": 0.8283, - "num_input_tokens_seen": 80253840, - "step": 3776 - }, - { - "epoch": 0.4541573979438466, - "flos": 18348709500480.0, - "grad_norm": 2.6017004211340984, - "learning_rate": 2.391613047220213e-06, - "loss": 0.7733, - "num_input_tokens_seen": 80273460, - "step": 3777 - }, - { - "epoch": 0.4542776408344857, - "flos": 13380315228960.0, - "grad_norm": 2.4382825720043075, - "learning_rate": 2.390849124795447e-06, - "loss": 0.7846, - "num_input_tokens_seen": 80289180, - "step": 3778 - }, - { - "epoch": 0.45439788372512474, - "flos": 15132971955840.0, - "grad_norm": 3.4649179272200095, - "learning_rate": 2.3900851430791804e-06, - "loss": 0.825, - "num_input_tokens_seen": 80306920, - "step": 3779 - }, - { - "epoch": 0.45451812661576385, - "flos": 16323600411480.0, - "grad_norm": 2.8805331184431386, - "learning_rate": 2.389321102187307e-06, - "loss": 0.8348, - "num_input_tokens_seen": 80325420, - "step": 3780 - }, - { - "epoch": 0.4546383695064029, - "flos": 15919591712400.0, - "grad_norm": 3.034479053414334, - "learning_rate": 2.3885570022357326e-06, - "loss": 0.8106, - "num_input_tokens_seen": 80344270, - "step": 3781 - }, - { - "epoch": 0.454758612397042, - "flos": 47363821186680.0, - "grad_norm": 1.404943495052004, - "learning_rate": 2.38779284334037e-06, - "loss": 0.6214, - "num_input_tokens_seen": 80408965, - "step": 3782 - }, - { - "epoch": 0.4548788552876811, - "flos": 20019507044880.0, - "grad_norm": 2.9157322249571904, - "learning_rate": 2.387028625617141e-06, - "loss": 0.77, - "num_input_tokens_seen": 80427900, - "step": 3783 - }, - { - "epoch": 0.4549990981783202, - "flos": 16729002148800.0, - "grad_norm": 2.694201962111691, - "learning_rate": 2.3862643491819766e-06, - "loss": 0.8377, - "num_input_tokens_seen": 80446185, - "step": 3784 - }, - { - "epoch": 0.4551193410689593, - "flos": 17026113128400.0, - "grad_norm": 3.463166976203417, - "learning_rate": 2.3855000141508186e-06, - "loss": 0.8204, - 
"num_input_tokens_seen": 80466060, - "step": 3785 - }, - { - "epoch": 0.4552395839595984, - "flos": 15189950901120.0, - "grad_norm": 3.8475604514961788, - "learning_rate": 2.3847356206396143e-06, - "loss": 0.8138, - "num_input_tokens_seen": 80483090, - "step": 3786 - }, - { - "epoch": 0.45535982685023746, - "flos": 17025163329600.0, - "grad_norm": 1.884736846545997, - "learning_rate": 2.3839711687643227e-06, - "loss": 0.7735, - "num_input_tokens_seen": 80504035, - "step": 3787 - }, - { - "epoch": 0.45548006974087657, - "flos": 14352589211400.0, - "grad_norm": 4.711519879075419, - "learning_rate": 2.38320665864091e-06, - "loss": 0.7099, - "num_input_tokens_seen": 80523105, - "step": 3788 - }, - { - "epoch": 0.4556003126315157, - "flos": 14649890150760.0, - "grad_norm": 4.070806843031426, - "learning_rate": 2.3824420903853516e-06, - "loss": 0.8074, - "num_input_tokens_seen": 80541290, - "step": 3789 - }, - { - "epoch": 0.45572055552215474, - "flos": 16803959460000.0, - "grad_norm": 3.336951930160832, - "learning_rate": 2.3816774641136324e-06, - "loss": 0.7874, - "num_input_tokens_seen": 80558265, - "step": 3790 - }, - { - "epoch": 0.45584079841279385, - "flos": 24316790788440.0, - "grad_norm": 1.9724033306883566, - "learning_rate": 2.380912779941745e-06, - "loss": 0.7072, - "num_input_tokens_seen": 80581105, - "step": 3791 - }, - { - "epoch": 0.45596104130343296, - "flos": 19996463085360.0, - "grad_norm": 3.14746623996024, - "learning_rate": 2.3801480379856918e-06, - "loss": 0.7929, - "num_input_tokens_seen": 80602535, - "step": 3792 - }, - { - "epoch": 0.456081284194072, - "flos": 15783634641000.0, - "grad_norm": 3.140610728065364, - "learning_rate": 2.379383238361484e-06, - "loss": 0.8217, - "num_input_tokens_seen": 80621615, - "step": 3793 - }, - { - "epoch": 0.4562015270847111, - "flos": 26395491207000.0, - "grad_norm": 2.710911115798301, - "learning_rate": 2.3786183811851407e-06, - "loss": 0.7743, - "num_input_tokens_seen": 80642040, - "step": 3794 - }, - { - "epoch": 0.45632176997535023, - "flos": 9870537720840.0, - "grad_norm": 2.533330462688359, - "learning_rate": 2.3778534665726892e-06, - "loss": 0.787, - "num_input_tokens_seen": 80658590, - "step": 3795 - }, - { - "epoch": 0.4564420128659893, - "flos": 23965581919920.0, - "grad_norm": 2.4533040169678135, - "learning_rate": 2.377088494640168e-06, - "loss": 0.7119, - "num_input_tokens_seen": 80680060, - "step": 3796 - }, - { - "epoch": 0.4565622557566284, - "flos": 14894264619720.0, - "grad_norm": 2.345126752213567, - "learning_rate": 2.3763234655036216e-06, - "loss": 0.7677, - "num_input_tokens_seen": 80698980, - "step": 3797 - }, - { - "epoch": 0.45668249864726745, - "flos": 18591880890960.0, - "grad_norm": 2.5527334410265126, - "learning_rate": 2.3755583792791046e-06, - "loss": 0.8564, - "num_input_tokens_seen": 80718570, - "step": 3798 - }, - { - "epoch": 0.45680274153790656, - "flos": 11328457965120.0, - "grad_norm": 2.5099050757707717, - "learning_rate": 2.3747932360826803e-06, - "loss": 0.7277, - "num_input_tokens_seen": 80735220, - "step": 3799 - }, - { - "epoch": 0.4569229844285457, - "flos": 14461323208560.0, - "grad_norm": 6.211704883312877, - "learning_rate": 2.3740280360304205e-06, - "loss": 0.8015, - "num_input_tokens_seen": 80752665, - "step": 3800 - }, - { - "epoch": 0.45704322731918473, - "flos": 17643790626600.0, - "grad_norm": 2.378659891736143, - "learning_rate": 2.3732627792384038e-06, - "loss": 0.6699, - "num_input_tokens_seen": 80773455, - "step": 3801 - }, - { - "epoch": 0.45716347020982384, - "flos": 
22989888661800.0, - "grad_norm": 2.9094748704201665, - "learning_rate": 2.3724974658227207e-06, - "loss": 0.7378, - "num_input_tokens_seen": 80793965, - "step": 3802 - }, - { - "epoch": 0.45728371310046295, - "flos": 19425380065560.0, - "grad_norm": 3.6103321474237906, - "learning_rate": 2.3717320958994687e-06, - "loss": 0.6904, - "num_input_tokens_seen": 80811245, - "step": 3803 - }, - { - "epoch": 0.457403955991102, - "flos": 13081842871080.0, - "grad_norm": 2.251264428786701, - "learning_rate": 2.3709666695847534e-06, - "loss": 0.6912, - "num_input_tokens_seen": 80829145, - "step": 3804 - }, - { - "epoch": 0.4575241988817411, - "flos": 31066647859080.0, - "grad_norm": 2.184467284254677, - "learning_rate": 2.370201186994689e-06, - "loss": 0.6914, - "num_input_tokens_seen": 80852550, - "step": 3805 - }, - { - "epoch": 0.45764444177238023, - "flos": 22101943338720.0, - "grad_norm": 5.110727673581877, - "learning_rate": 2.369435648245399e-06, - "loss": 0.6759, - "num_input_tokens_seen": 80872485, - "step": 3806 - }, - { - "epoch": 0.4577646846630193, - "flos": 17619290308920.0, - "grad_norm": 3.19283894722357, - "learning_rate": 2.368670053453015e-06, - "loss": 0.8414, - "num_input_tokens_seen": 80893895, - "step": 3807 - }, - { - "epoch": 0.4578849275536584, - "flos": 12703790847840.0, - "grad_norm": 3.869247103847823, - "learning_rate": 2.3679044027336757e-06, - "loss": 0.7061, - "num_input_tokens_seen": 80909505, - "step": 3808 - }, - { - "epoch": 0.4580051704442975, - "flos": 9810677719200.0, - "grad_norm": 4.217040669336251, - "learning_rate": 2.3671386962035326e-06, - "loss": 0.6671, - "num_input_tokens_seen": 80926695, - "step": 3809 - }, - { - "epoch": 0.45812541333493656, - "flos": 13161739136040.0, - "grad_norm": 9.044307575028547, - "learning_rate": 2.3663729339787405e-06, - "loss": 0.6683, - "num_input_tokens_seen": 80943350, - "step": 3810 - }, - { - "epoch": 0.45824565622557567, - "flos": 14777837252280.0, - "grad_norm": 3.4657852287161295, - "learning_rate": 2.365607116175466e-06, - "loss": 0.7016, - "num_input_tokens_seen": 80958495, - "step": 3811 - }, - { - "epoch": 0.4583658991162148, - "flos": 14514471298680.0, - "grad_norm": 3.3096046029501247, - "learning_rate": 2.3648412429098825e-06, - "loss": 0.6334, - "num_input_tokens_seen": 80976565, - "step": 3812 - }, - { - "epoch": 0.45848614200685384, - "flos": 15376333306080.0, - "grad_norm": 2.4226538605541363, - "learning_rate": 2.364075314298172e-06, - "loss": 0.7981, - "num_input_tokens_seen": 80993740, - "step": 3813 - }, - { - "epoch": 0.45860638489749295, - "flos": 15405867557400.0, - "grad_norm": 2.1634288094156053, - "learning_rate": 2.3633093304565267e-06, - "loss": 0.6789, - "num_input_tokens_seen": 81012515, - "step": 3814 - }, - { - "epoch": 0.458726627788132, - "flos": 19672034051640.0, - "grad_norm": 2.7214390402278927, - "learning_rate": 2.3625432915011443e-06, - "loss": 0.6096, - "num_input_tokens_seen": 81034145, - "step": 3815 - }, - { - "epoch": 0.4588468706787711, - "flos": 17647304882160.0, - "grad_norm": 4.914474106184348, - "learning_rate": 2.3617771975482334e-06, - "loss": 0.6383, - "num_input_tokens_seen": 81052695, - "step": 3816 - }, - { - "epoch": 0.4589671135694102, - "flos": 13051675420560.0, - "grad_norm": 2.6258480881841697, - "learning_rate": 2.3610110487140083e-06, - "loss": 0.7298, - "num_input_tokens_seen": 81070850, - "step": 3817 - }, - { - "epoch": 0.4590873564600493, - "flos": 18778421595720.0, - "grad_norm": 2.013980283161465, - "learning_rate": 2.360244845114695e-06, - "loss": 
0.8034, - "num_input_tokens_seen": 81090190, - "step": 3818 - }, - { - "epoch": 0.4592075993506884, - "flos": 13515290841600.0, - "grad_norm": 3.6586517531403335, - "learning_rate": 2.3594785868665245e-06, - "loss": 0.6726, - "num_input_tokens_seen": 81106215, - "step": 3819 - }, - { - "epoch": 0.4593278422413275, - "flos": 15081533503560.0, - "grad_norm": 2.905026085846432, - "learning_rate": 2.3587122740857386e-06, - "loss": 0.7927, - "num_input_tokens_seen": 81123035, - "step": 3820 - }, - { - "epoch": 0.45944808513196655, - "flos": 15619536356520.0, - "grad_norm": 2.235437207395732, - "learning_rate": 2.357945906888586e-06, - "loss": 0.767, - "num_input_tokens_seen": 81142195, - "step": 3821 - }, - { - "epoch": 0.45956832802260567, - "flos": 15670278289680.0, - "grad_norm": 2.8294795886225512, - "learning_rate": 2.357179485391324e-06, - "loss": 0.7698, - "num_input_tokens_seen": 81159770, - "step": 3822 - }, - { - "epoch": 0.4596885709132448, - "flos": 16377824940240.0, - "grad_norm": 2.3136461871252525, - "learning_rate": 2.3564130097102173e-06, - "loss": 0.8404, - "num_input_tokens_seen": 81179145, - "step": 3823 - }, - { - "epoch": 0.45980881380388383, - "flos": 21261099053400.0, - "grad_norm": 3.1168368723151114, - "learning_rate": 2.355646479961541e-06, - "loss": 0.727, - "num_input_tokens_seen": 81198175, - "step": 3824 - }, - { - "epoch": 0.45992905669452294, - "flos": 24528211730400.0, - "grad_norm": 2.7740531130699897, - "learning_rate": 2.354879896261576e-06, - "loss": 0.7046, - "num_input_tokens_seen": 81218105, - "step": 3825 - }, - { - "epoch": 0.46004929958516205, - "flos": 26690765908920.0, - "grad_norm": 3.7495840477726374, - "learning_rate": 2.3541132587266133e-06, - "loss": 0.556, - "num_input_tokens_seen": 81240545, - "step": 3826 - }, - { - "epoch": 0.4601695424758011, - "flos": 12570809812680.0, - "grad_norm": 2.404673369467143, - "learning_rate": 2.3533465674729515e-06, - "loss": 0.6795, - "num_input_tokens_seen": 81257495, - "step": 3827 - }, - { - "epoch": 0.4602897853664402, - "flos": 11571692675520.0, - "grad_norm": 3.0193488496513803, - "learning_rate": 2.352579822616895e-06, - "loss": 0.7155, - "num_input_tokens_seen": 81274650, - "step": 3828 - }, - { - "epoch": 0.4604100282570793, - "flos": 18620433683520.0, - "grad_norm": 2.7509259485714854, - "learning_rate": 2.351813024274761e-06, - "loss": 0.7665, - "num_input_tokens_seen": 81295725, - "step": 3829 - }, - { - "epoch": 0.4605302711477184, - "flos": 20260905477600.0, - "grad_norm": 2.5171731485517714, - "learning_rate": 2.3510461725628693e-06, - "loss": 0.7135, - "num_input_tokens_seen": 81315910, - "step": 3830 - }, - { - "epoch": 0.4606505140383575, - "flos": 17455761903720.0, - "grad_norm": 2.4782834015643243, - "learning_rate": 2.350279267597554e-06, - "loss": 0.692, - "num_input_tokens_seen": 81336270, - "step": 3831 - }, - { - "epoch": 0.46077075692899655, - "flos": 11733416463000.0, - "grad_norm": 3.7630777568097615, - "learning_rate": 2.3495123094951515e-06, - "loss": 0.8102, - "num_input_tokens_seen": 81354335, - "step": 3832 - }, - { - "epoch": 0.46089099981963566, - "flos": 35927036312520.0, - "grad_norm": 3.336626682962893, - "learning_rate": 2.34874529837201e-06, - "loss": 0.732, - "num_input_tokens_seen": 81377600, - "step": 3833 - }, - { - "epoch": 0.46101124271027477, - "flos": 13947915653160.0, - "grad_norm": 2.9277570137029167, - "learning_rate": 2.347978234344483e-06, - "loss": 0.7847, - "num_input_tokens_seen": 81393525, - "step": 3834 - }, - { - "epoch": 0.4611314856009138, - "flos": 
28933184692440.0, - "grad_norm": 2.895050949412605, - "learning_rate": 2.347211117528935e-06, - "loss": 0.6844, - "num_input_tokens_seen": 81415545, - "step": 3835 - }, - { - "epoch": 0.46125172849155294, - "flos": 15214102959240.0, - "grad_norm": 2.056086191070273, - "learning_rate": 2.3464439480417374e-06, - "loss": 0.7019, - "num_input_tokens_seen": 81434785, - "step": 3836 - }, - { - "epoch": 0.46137197138219205, - "flos": 13083077609520.0, - "grad_norm": 3.998014434101408, - "learning_rate": 2.3456767259992676e-06, - "loss": 0.7428, - "num_input_tokens_seen": 81452150, - "step": 3837 - }, - { - "epoch": 0.4614922142728311, - "flos": 12273350573520.0, - "grad_norm": 3.7891755077381126, - "learning_rate": 2.3449094515179135e-06, - "loss": 0.8539, - "num_input_tokens_seen": 81469330, - "step": 3838 - }, - { - "epoch": 0.4616124571634702, - "flos": 19509962004600.0, - "grad_norm": 2.238040078902831, - "learning_rate": 2.34414212471407e-06, - "loss": 0.8046, - "num_input_tokens_seen": 81488845, - "step": 3839 - }, - { - "epoch": 0.4617327000541093, - "flos": 14866313366400.0, - "grad_norm": 2.5426823092864455, - "learning_rate": 2.3433747457041394e-06, - "loss": 0.7219, - "num_input_tokens_seen": 81507270, - "step": 3840 - }, - { - "epoch": 0.4618529429447484, - "flos": 21697554720120.0, - "grad_norm": 4.641093940032358, - "learning_rate": 2.342607314604533e-06, - "loss": 0.8258, - "num_input_tokens_seen": 81526740, - "step": 3841 - }, - { - "epoch": 0.4619731858353875, - "flos": 14456162635080.0, - "grad_norm": 13.77987255729575, - "learning_rate": 2.3418398315316694e-06, - "loss": 0.8276, - "num_input_tokens_seen": 81544280, - "step": 3842 - }, - { - "epoch": 0.4620934287260266, - "flos": 13838010237480.0, - "grad_norm": 3.7374621284122624, - "learning_rate": 2.3410722966019755e-06, - "loss": 0.7584, - "num_input_tokens_seen": 81559115, - "step": 3843 - }, - { - "epoch": 0.46221367161666566, - "flos": 27448484613360.0, - "grad_norm": 2.5377414494538493, - "learning_rate": 2.3403047099318848e-06, - "loss": 0.642, - "num_input_tokens_seen": 81582905, - "step": 3844 - }, - { - "epoch": 0.46233391450730477, - "flos": 10491222915240.0, - "grad_norm": 2.900586917284445, - "learning_rate": 2.3395370716378405e-06, - "loss": 0.7258, - "num_input_tokens_seen": 81600070, - "step": 3845 - }, - { - "epoch": 0.4624541573979438, - "flos": 16459652462760.0, - "grad_norm": 3.469753085979049, - "learning_rate": 2.338769381836292e-06, - "loss": 0.7126, - "num_input_tokens_seen": 81619400, - "step": 3846 - }, - { - "epoch": 0.46257440028858293, - "flos": 10518256029720.0, - "grad_norm": 2.924785449644578, - "learning_rate": 2.3380016406436984e-06, - "loss": 0.6997, - "num_input_tokens_seen": 81636600, - "step": 3847 - }, - { - "epoch": 0.46269464317922204, - "flos": 17080685916720.0, - "grad_norm": 3.068393253015553, - "learning_rate": 2.337233848176524e-06, - "loss": 0.8029, - "num_input_tokens_seen": 81654090, - "step": 3848 - }, - { - "epoch": 0.4628148860698611, - "flos": 13542957155280.0, - "grad_norm": 3.27700109134082, - "learning_rate": 2.3364660045512435e-06, - "loss": 0.8049, - "num_input_tokens_seen": 81672570, - "step": 3849 - }, - { - "epoch": 0.4629351289605002, - "flos": 52119084878400.0, - "grad_norm": 0.7749918337360953, - "learning_rate": 2.335698109884337e-06, - "loss": 0.6199, - "num_input_tokens_seen": 81737495, - "step": 3850 - }, - { - "epoch": 0.4630553718511393, - "flos": 43993135086000.0, - "grad_norm": 0.8623640962313085, - "learning_rate": 2.334930164292294e-06, - "loss": 0.6477, 
- "num_input_tokens_seen": 81799765, - "step": 3851 - }, - { - "epoch": 0.4631756147417784, - "flos": 11624999065440.0, - "grad_norm": 2.8167156414612204, - "learning_rate": 2.334162167891612e-06, - "loss": 0.7727, - "num_input_tokens_seen": 81816750, - "step": 3852 - }, - { - "epoch": 0.4632958576324175, - "flos": 12004349147040.0, - "grad_norm": 5.101242078734316, - "learning_rate": 2.333394120798795e-06, - "loss": 0.7167, - "num_input_tokens_seen": 81835205, - "step": 3853 - }, - { - "epoch": 0.4634161005230566, - "flos": 16351108425360.0, - "grad_norm": 2.8688863352041833, - "learning_rate": 2.3326260231303545e-06, - "loss": 0.7194, - "num_input_tokens_seen": 81853525, - "step": 3854 - }, - { - "epoch": 0.46353634341369565, - "flos": 11463465237720.0, - "grad_norm": 1.801135245591679, - "learning_rate": 2.331857875002811e-06, - "loss": 0.8627, - "num_input_tokens_seen": 81871845, - "step": 3855 - }, - { - "epoch": 0.46365658630433476, - "flos": 20779125346920.0, - "grad_norm": 17.042846539356695, - "learning_rate": 2.3310896765326916e-06, - "loss": 0.7434, - "num_input_tokens_seen": 81892565, - "step": 3856 - }, - { - "epoch": 0.46377682919497387, - "flos": 18024755366160.0, - "grad_norm": 2.1715598117813375, - "learning_rate": 2.330321427836531e-06, - "loss": 0.8248, - "num_input_tokens_seen": 81914155, - "step": 3857 - }, - { - "epoch": 0.4638970720856129, - "flos": 13919331200640.0, - "grad_norm": 2.9695947348761744, - "learning_rate": 2.3295531290308733e-06, - "loss": 0.8159, - "num_input_tokens_seen": 81932025, - "step": 3858 - }, - { - "epoch": 0.46401731497625204, - "flos": 13481862415200.0, - "grad_norm": 4.681599272671458, - "learning_rate": 2.3287847802322678e-06, - "loss": 0.7237, - "num_input_tokens_seen": 81947315, - "step": 3859 - }, - { - "epoch": 0.4641375578668911, - "flos": 19104433627440.0, - "grad_norm": 3.5536631690764513, - "learning_rate": 2.3280163815572723e-06, - "loss": 0.8174, - "num_input_tokens_seen": 81967630, - "step": 3860 - }, - { - "epoch": 0.4642578007575302, - "flos": 14296148485440.0, - "grad_norm": 3.2996916829200185, - "learning_rate": 2.3272479331224522e-06, - "loss": 0.7482, - "num_input_tokens_seen": 81984780, - "step": 3861 - }, - { - "epoch": 0.4643780436481693, - "flos": 20673209086200.0, - "grad_norm": 2.289238099533332, - "learning_rate": 2.3264794350443817e-06, - "loss": 0.7624, - "num_input_tokens_seen": 82006595, - "step": 3862 - }, - { - "epoch": 0.46449828653880837, - "flos": 18592672389960.0, - "grad_norm": 2.392106339007859, - "learning_rate": 2.3257108874396396e-06, - "loss": 0.7751, - "num_input_tokens_seen": 82027410, - "step": 3863 - }, - { - "epoch": 0.4646185294294475, - "flos": 11679666833640.0, - "grad_norm": 3.573074574590961, - "learning_rate": 2.3249422904248152e-06, - "loss": 0.7195, - "num_input_tokens_seen": 82045565, - "step": 3864 - }, - { - "epoch": 0.4647387723200866, - "flos": 19324117818960.0, - "grad_norm": 1.9558561227145543, - "learning_rate": 2.324173644116504e-06, - "loss": 0.8616, - "num_input_tokens_seen": 82068135, - "step": 3865 - }, - { - "epoch": 0.46485901521072565, - "flos": 19913242524600.0, - "grad_norm": 10.930567697089847, - "learning_rate": 2.3234049486313087e-06, - "loss": 0.8018, - "num_input_tokens_seen": 82089305, - "step": 3866 - }, - { - "epoch": 0.46497925810136476, - "flos": 17781868915320.0, - "grad_norm": 2.480514694148814, - "learning_rate": 2.322636204085839e-06, - "loss": 0.7488, - "num_input_tokens_seen": 82109095, - "step": 3867 - }, - { - "epoch": 0.46509950099200387, - "flos": 
11841580580880.0, - "grad_norm": 2.8800204022002767, - "learning_rate": 2.3218674105967143e-06, - "loss": 0.7498, - "num_input_tokens_seen": 82127080, - "step": 3868 - }, - { - "epoch": 0.4652197438826429, - "flos": 17162228499600.0, - "grad_norm": 2.1575813053587107, - "learning_rate": 2.3210985682805593e-06, - "loss": 0.8157, - "num_input_tokens_seen": 82148580, - "step": 3869 - }, - { - "epoch": 0.46533998677328203, - "flos": 11814357506640.0, - "grad_norm": 3.0379641394140457, - "learning_rate": 2.320329677254007e-06, - "loss": 0.6544, - "num_input_tokens_seen": 82165630, - "step": 3870 - }, - { - "epoch": 0.46546022966392114, - "flos": 15459522206880.0, - "grad_norm": 3.230194190834051, - "learning_rate": 2.319560737633697e-06, - "loss": 0.705, - "num_input_tokens_seen": 82184070, - "step": 3871 - }, - { - "epoch": 0.4655804725545602, - "flos": 30283035798720.0, - "grad_norm": 2.243939396507346, - "learning_rate": 2.3187917495362775e-06, - "loss": 0.6721, - "num_input_tokens_seen": 82208200, - "step": 3872 - }, - { - "epoch": 0.4657007154451993, - "flos": 14296655044800.0, - "grad_norm": 3.101309278322162, - "learning_rate": 2.318022713078403e-06, - "loss": 0.7412, - "num_input_tokens_seen": 82222500, - "step": 3873 - }, - { - "epoch": 0.4658209583358384, - "flos": 11296612536720.0, - "grad_norm": 2.826062287985265, - "learning_rate": 2.3172536283767354e-06, - "loss": 0.8358, - "num_input_tokens_seen": 82235980, - "step": 3874 - }, - { - "epoch": 0.4659412012264775, - "flos": 10842526763640.0, - "grad_norm": 3.120984205631967, - "learning_rate": 2.3164844955479447e-06, - "loss": 0.7887, - "num_input_tokens_seen": 82251510, - "step": 3875 - }, - { - "epoch": 0.4660614441171166, - "flos": 17889716433600.0, - "grad_norm": 2.7632596520638666, - "learning_rate": 2.3157153147087082e-06, - "loss": 0.6814, - "num_input_tokens_seen": 82273120, - "step": 3876 - }, - { - "epoch": 0.46618168700775564, - "flos": 16162098243720.0, - "grad_norm": 2.3767745766701696, - "learning_rate": 2.314946085975709e-06, - "loss": 0.8251, - "num_input_tokens_seen": 82291820, - "step": 3877 - }, - { - "epoch": 0.46630192989839475, - "flos": 19185849570480.0, - "grad_norm": 3.389592448579661, - "learning_rate": 2.3141768094656393e-06, - "loss": 0.8097, - "num_input_tokens_seen": 82310115, - "step": 3878 - }, - { - "epoch": 0.46642217278903386, - "flos": 8331011573760.0, - "grad_norm": 2.940731949031929, - "learning_rate": 2.3134074852951966e-06, - "loss": 0.8174, - "num_input_tokens_seen": 82326425, - "step": 3879 - }, - { - "epoch": 0.4665424156796729, - "flos": 23722378869480.0, - "grad_norm": 3.397383883908127, - "learning_rate": 2.312638113581088e-06, - "loss": 0.7749, - "num_input_tokens_seen": 82346630, - "step": 3880 - }, - { - "epoch": 0.46666265857031203, - "flos": 13456317318840.0, - "grad_norm": 4.452421433935271, - "learning_rate": 2.311868694440027e-06, - "loss": 0.7685, - "num_input_tokens_seen": 82360770, - "step": 3881 - }, - { - "epoch": 0.46678290146095114, - "flos": 50469368376000.0, - "grad_norm": 0.7411387211093564, - "learning_rate": 2.3110992279887323e-06, - "loss": 0.6337, - "num_input_tokens_seen": 82432415, - "step": 3882 - }, - { - "epoch": 0.4669031443515902, - "flos": 12921607101720.0, - "grad_norm": 2.765457062013587, - "learning_rate": 2.310329714343932e-06, - "loss": 0.8332, - "num_input_tokens_seen": 82448285, - "step": 3883 - }, - { - "epoch": 0.4670233872422293, - "flos": 17535563188800.0, - "grad_norm": 2.255842147695313, - "learning_rate": 2.309560153622361e-06, - "loss": 
[diff body elided: deleted trainer-state log history, flattened by extraction into run-on lines. It contains one JSON record per optimizer step for steps 3884–4230 (epoch ≈ 0.467 to 0.509), each record carrying the fields "epoch", "flos", "grad_norm", "learning_rate", "loss", "num_input_tokens_seen", and "step". Over this window the learning rate decays smoothly from ≈ 2.309e-06 to ≈ 2.041e-06, per-step loss fluctuates roughly between 0.55 and 0.90, and num_input_tokens_seen grows from ≈ 82.5M to ≈ 90.1M.]
0.5087476702939938, - "flos": 17214901690320.0, - "grad_norm": 3.428400376003887, - "learning_rate": 2.040503756456714e-06, - "loss": 0.7471, - "num_input_tokens_seen": 90074990, - "step": 4231 - }, - { - "epoch": 0.508867913184633, - "flos": 11004535490760.0, - "grad_norm": 7.01984671009645, - "learning_rate": 2.0397249415164456e-06, - "loss": 0.7848, - "num_input_tokens_seen": 90092065, - "step": 4232 - }, - { - "epoch": 0.508988156075272, - "flos": 18970027894080.0, - "grad_norm": 4.04853933862167, - "learning_rate": 2.0389461205499354e-06, - "loss": 0.7873, - "num_input_tokens_seen": 90110920, - "step": 4233 - }, - { - "epoch": 0.5091083989659111, - "flos": 10059262962840.0, - "grad_norm": 4.048556534472179, - "learning_rate": 2.03816729367533e-06, - "loss": 0.7151, - "num_input_tokens_seen": 90128795, - "step": 4234 - }, - { - "epoch": 0.5092286418565503, - "flos": 15432077512920.0, - "grad_norm": 5.35797065152129, - "learning_rate": 2.0373884610107765e-06, - "loss": 0.6841, - "num_input_tokens_seen": 90148830, - "step": 4235 - }, - { - "epoch": 0.5093488847471893, - "flos": 13593699088440.0, - "grad_norm": 6.211552888145328, - "learning_rate": 2.0366096226744225e-06, - "loss": 0.6712, - "num_input_tokens_seen": 90163745, - "step": 4236 - }, - { - "epoch": 0.5094691276378284, - "flos": 17429298668520.0, - "grad_norm": 3.627841230625853, - "learning_rate": 2.035830778784418e-06, - "loss": 0.746, - "num_input_tokens_seen": 90184140, - "step": 4237 - }, - { - "epoch": 0.5095893705284675, - "flos": 12705405505800.0, - "grad_norm": 12.868196041417443, - "learning_rate": 2.0350519294589134e-06, - "loss": 0.7927, - "num_input_tokens_seen": 90201900, - "step": 4238 - }, - { - "epoch": 0.5097096134191066, - "flos": 18943501338960.0, - "grad_norm": 3.7686059320093905, - "learning_rate": 2.0342730748160588e-06, - "loss": 0.8148, - "num_input_tokens_seen": 90222085, - "step": 4239 - }, - { - "epoch": 0.5098298563097456, - "flos": 20346278915640.0, - "grad_norm": 4.799788295527707, - "learning_rate": 2.033494214974006e-06, - "loss": 0.6684, - "num_input_tokens_seen": 90242950, - "step": 4240 - }, - { - "epoch": 0.5099500992003848, - "flos": 15619884616080.0, - "grad_norm": 3.784515232099375, - "learning_rate": 2.0327153500509067e-06, - "loss": 0.8176, - "num_input_tokens_seen": 90261695, - "step": 4241 - }, - { - "epoch": 0.5100703420910239, - "flos": 14515452757440.0, - "grad_norm": 3.19410365585835, - "learning_rate": 2.031936480164916e-06, - "loss": 0.8385, - "num_input_tokens_seen": 90279155, - "step": 4242 - }, - { - "epoch": 0.5101905849816629, - "flos": 18055492695960.0, - "grad_norm": 4.928376665867671, - "learning_rate": 2.0311576054341857e-06, - "loss": 0.7903, - "num_input_tokens_seen": 90299490, - "step": 4243 - }, - { - "epoch": 0.5103108278723021, - "flos": 16782973397880.0, - "grad_norm": 3.4565863888822674, - "learning_rate": 2.0303787259768715e-06, - "loss": 0.6172, - "num_input_tokens_seen": 90320110, - "step": 4244 - }, - { - "epoch": 0.5104310707629411, - "flos": 15729441772200.0, - "grad_norm": 5.484468203891074, - "learning_rate": 2.0295998419111294e-06, - "loss": 0.664, - "num_input_tokens_seen": 90337120, - "step": 4245 - }, - { - "epoch": 0.5105513136535802, - "flos": 10842526763640.0, - "grad_norm": 3.810033907401523, - "learning_rate": 2.028820953355115e-06, - "loss": 0.7117, - "num_input_tokens_seen": 90354940, - "step": 4246 - }, - { - "epoch": 0.5106715565442194, - "flos": 16540403546640.0, - "grad_norm": 12.59423948335359, - "learning_rate": 2.0280420604269834e-06, - 
"loss": 0.765, - "num_input_tokens_seen": 90374200, - "step": 4247 - }, - { - "epoch": 0.5107917994348584, - "flos": 52384888648920.0, - "grad_norm": 0.8298043192145382, - "learning_rate": 2.027263163244895e-06, - "loss": 0.6371, - "num_input_tokens_seen": 90443045, - "step": 4248 - }, - { - "epoch": 0.5109120423254975, - "flos": 18187144012800.0, - "grad_norm": 2.7912016713818275, - "learning_rate": 2.026484261927005e-06, - "loss": 0.7276, - "num_input_tokens_seen": 90462260, - "step": 4249 - }, - { - "epoch": 0.5110322852161366, - "flos": 15537297254520.0, - "grad_norm": 8.199370885766985, - "learning_rate": 2.025705356591475e-06, - "loss": 0.7187, - "num_input_tokens_seen": 90479670, - "step": 4250 - }, - { - "epoch": 0.5111525281067757, - "flos": 49001000344920.0, - "grad_norm": 0.8472812719733763, - "learning_rate": 2.024926447356462e-06, - "loss": 0.6141, - "num_input_tokens_seen": 90541675, - "step": 4251 - }, - { - "epoch": 0.5112727709974147, - "flos": 10815556969080.0, - "grad_norm": 6.69594774826974, - "learning_rate": 2.024147534340127e-06, - "loss": 0.7665, - "num_input_tokens_seen": 90559255, - "step": 4252 - }, - { - "epoch": 0.5113930138880539, - "flos": 15485858802240.0, - "grad_norm": 8.983784372569948, - "learning_rate": 2.02336861766063e-06, - "loss": 0.7746, - "num_input_tokens_seen": 90578035, - "step": 4253 - }, - { - "epoch": 0.511513256778693, - "flos": 14916802019880.0, - "grad_norm": 5.010612288348639, - "learning_rate": 2.0225896974361327e-06, - "loss": 0.7707, - "num_input_tokens_seen": 90597860, - "step": 4254 - }, - { - "epoch": 0.511633499669332, - "flos": 51533184997320.0, - "grad_norm": 0.9468216947148994, - "learning_rate": 2.0218107737847962e-06, - "loss": 0.6469, - "num_input_tokens_seen": 90659625, - "step": 4255 - }, - { - "epoch": 0.5117537425599712, - "flos": 17647241562240.0, - "grad_norm": 4.036250598443141, - "learning_rate": 2.0210318468247826e-06, - "loss": 0.748, - "num_input_tokens_seen": 90678315, - "step": 4256 - }, - { - "epoch": 0.5118739854506102, - "flos": 15324609914160.0, - "grad_norm": 3.95096610604059, - "learning_rate": 2.020252916674255e-06, - "loss": 0.8031, - "num_input_tokens_seen": 90697150, - "step": 4257 - }, - { - "epoch": 0.5119942283412493, - "flos": 12732786879840.0, - "grad_norm": 5.110127338233742, - "learning_rate": 2.019473983451375e-06, - "loss": 0.787, - "num_input_tokens_seen": 90715290, - "step": 4258 - }, - { - "epoch": 0.5121144712318885, - "flos": 15403619700240.0, - "grad_norm": 5.155619819741173, - "learning_rate": 2.0186950472743076e-06, - "loss": 0.6951, - "num_input_tokens_seen": 90734915, - "step": 4259 - }, - { - "epoch": 0.5122347141225275, - "flos": 14510862063240.0, - "grad_norm": 3.623731720145663, - "learning_rate": 2.0179161082612162e-06, - "loss": 0.7316, - "num_input_tokens_seen": 90754025, - "step": 4260 - }, - { - "epoch": 0.5123549570131666, - "flos": 16485735778440.0, - "grad_norm": 4.1229293712531385, - "learning_rate": 2.017137166530266e-06, - "loss": 0.6998, - "num_input_tokens_seen": 90773280, - "step": 4261 - }, - { - "epoch": 0.5124751999038056, - "flos": 14860361293920.0, - "grad_norm": 5.094845706598763, - "learning_rate": 2.0163582221996213e-06, - "loss": 0.7917, - "num_input_tokens_seen": 90791375, - "step": 4262 - }, - { - "epoch": 0.5125954427944448, - "flos": 29257392106440.0, - "grad_norm": 5.53403536391462, - "learning_rate": 2.015579275387446e-06, - "loss": 0.6665, - "num_input_tokens_seen": 90816415, - "step": 4263 - }, - { - "epoch": 0.5127156856850839, - "flos": 
21427096935480.0, - "grad_norm": 14.537138108975029, - "learning_rate": 2.0148003262119085e-06, - "loss": 0.6708, - "num_input_tokens_seen": 90837105, - "step": 4264 - }, - { - "epoch": 0.5128359285757229, - "flos": 9843884525880.0, - "grad_norm": 5.313310449152877, - "learning_rate": 2.0140213747911728e-06, - "loss": 0.7372, - "num_input_tokens_seen": 90855360, - "step": 4265 - }, - { - "epoch": 0.5129561714663621, - "flos": 18457000258200.0, - "grad_norm": 6.547195171821283, - "learning_rate": 2.013242421243406e-06, - "loss": 0.7864, - "num_input_tokens_seen": 90873985, - "step": 4266 - }, - { - "epoch": 0.5130764143570011, - "flos": 13245529576080.0, - "grad_norm": 3.6265465955866247, - "learning_rate": 2.012463465686774e-06, - "loss": 0.7698, - "num_input_tokens_seen": 90893455, - "step": 4267 - }, - { - "epoch": 0.5131966572476402, - "flos": 44072588111520.0, - "grad_norm": 0.8424683479453302, - "learning_rate": 2.0116845082394446e-06, - "loss": 0.5921, - "num_input_tokens_seen": 90958010, - "step": 4268 - }, - { - "epoch": 0.5133169001382794, - "flos": 13515670761120.0, - "grad_norm": 4.474722758820992, - "learning_rate": 2.0109055490195836e-06, - "loss": 0.7678, - "num_input_tokens_seen": 90976185, - "step": 4269 - }, - { - "epoch": 0.5134371430289184, - "flos": 11301646470360.0, - "grad_norm": 4.283239640177228, - "learning_rate": 2.0101265881453605e-06, - "loss": 0.6276, - "num_input_tokens_seen": 90994380, - "step": 4270 - }, - { - "epoch": 0.5135573859195575, - "flos": 15675565503000.0, - "grad_norm": 5.929177323861462, - "learning_rate": 2.009347625734941e-06, - "loss": 0.7744, - "num_input_tokens_seen": 91014010, - "step": 4271 - }, - { - "epoch": 0.5136776288101966, - "flos": 12921733741560.0, - "grad_norm": 6.645237655810156, - "learning_rate": 2.0085686619064954e-06, - "loss": 0.7393, - "num_input_tokens_seen": 91030170, - "step": 4272 - }, - { - "epoch": 0.5137978717008357, - "flos": 12084751971360.0, - "grad_norm": 5.998422415282869, - "learning_rate": 2.00778969677819e-06, - "loss": 0.8174, - "num_input_tokens_seen": 91046925, - "step": 4273 - }, - { - "epoch": 0.5139181145914747, - "flos": 15109104837360.0, - "grad_norm": 4.456739384341585, - "learning_rate": 2.0070107304681934e-06, - "loss": 0.6307, - "num_input_tokens_seen": 91065600, - "step": 4274 - }, - { - "epoch": 0.5140383574821139, - "flos": 24181340276400.0, - "grad_norm": 4.476347327262163, - "learning_rate": 2.006231763094675e-06, - "loss": 0.7787, - "num_input_tokens_seen": 91086340, - "step": 4275 - }, - { - "epoch": 0.514158600372753, - "flos": 14271964767360.0, - "grad_norm": 6.696384410894195, - "learning_rate": 2.0054527947758027e-06, - "loss": 0.8694, - "num_input_tokens_seen": 91104860, - "step": 4276 - }, - { - "epoch": 0.514278843263392, - "flos": 46091143588800.0, - "grad_norm": 0.7764950437049852, - "learning_rate": 2.004673825629746e-06, - "loss": 0.584, - "num_input_tokens_seen": 91165360, - "step": 4277 - }, - { - "epoch": 0.5143990861540312, - "flos": 18967400117400.0, - "grad_norm": 7.120528842702336, - "learning_rate": 2.0038948557746744e-06, - "loss": 0.7154, - "num_input_tokens_seen": 91186935, - "step": 4278 - }, - { - "epoch": 0.5145193290446702, - "flos": 17079324538440.0, - "grad_norm": 6.176060129199069, - "learning_rate": 2.0031158853287558e-06, - "loss": 0.7258, - "num_input_tokens_seen": 91207090, - "step": 4279 - }, - { - "epoch": 0.5146395719353093, - "flos": 16728527249400.0, - "grad_norm": 10.759515853256799, - "learning_rate": 2.0023369144101593e-06, - "loss": 0.6982, - 
"num_input_tokens_seen": 91224980, - "step": 4280 - }, - { - "epoch": 0.5147598148259485, - "flos": 19344059102400.0, - "grad_norm": 4.53228053508991, - "learning_rate": 2.0015579431370555e-06, - "loss": 0.7466, - "num_input_tokens_seen": 91246380, - "step": 4281 - }, - { - "epoch": 0.5148800577165875, - "flos": 21988935246960.0, - "grad_norm": 5.764068972469428, - "learning_rate": 2.000778971627612e-06, - "loss": 0.6902, - "num_input_tokens_seen": 91265565, - "step": 4282 - }, - { - "epoch": 0.5150003006072266, - "flos": 13083584168880.0, - "grad_norm": 7.071305448662999, - "learning_rate": 2e-06, - "loss": 0.8856, - "num_input_tokens_seen": 91282880, - "step": 4283 - }, - { - "epoch": 0.5151205434978657, - "flos": 13488669306600.0, - "grad_norm": 9.085194341652024, - "learning_rate": 1.9992210283723878e-06, - "loss": 0.8412, - "num_input_tokens_seen": 91299840, - "step": 4284 - }, - { - "epoch": 0.5152407863885048, - "flos": 18567855472680.0, - "grad_norm": 2.476467768545066, - "learning_rate": 1.9984420568629448e-06, - "loss": 0.7689, - "num_input_tokens_seen": 91322325, - "step": 4285 - }, - { - "epoch": 0.5153610292791438, - "flos": 13377972391920.0, - "grad_norm": 7.040565389557764, - "learning_rate": 1.9976630855898405e-06, - "loss": 0.7663, - "num_input_tokens_seen": 91339800, - "step": 4286 - }, - { - "epoch": 0.515481272169783, - "flos": 22318619834040.0, - "grad_norm": 6.082184286770846, - "learning_rate": 1.9968841146712445e-06, - "loss": 0.7351, - "num_input_tokens_seen": 91359135, - "step": 4287 - }, - { - "epoch": 0.5156015150604221, - "flos": 16862014843920.0, - "grad_norm": 12.155093885817866, - "learning_rate": 1.996105144225326e-06, - "loss": 0.6969, - "num_input_tokens_seen": 91379090, - "step": 4288 - }, - { - "epoch": 0.5157217579510611, - "flos": 13029707899680.0, - "grad_norm": 10.380052921696814, - "learning_rate": 1.995326174370254e-06, - "loss": 0.7735, - "num_input_tokens_seen": 91397910, - "step": 4289 - }, - { - "epoch": 0.5158420008417003, - "flos": 14001285363000.0, - "grad_norm": 5.850436541579678, - "learning_rate": 1.994547205224197e-06, - "loss": 0.7101, - "num_input_tokens_seen": 91416535, - "step": 4290 - }, - { - "epoch": 0.5159622437323393, - "flos": 16405174654320.0, - "grad_norm": 5.103904291792254, - "learning_rate": 1.993768236905325e-06, - "loss": 0.663, - "num_input_tokens_seen": 91436925, - "step": 4291 - }, - { - "epoch": 0.5160824866229784, - "flos": 18020291311800.0, - "grad_norm": 7.136178332884518, - "learning_rate": 1.992989269531807e-06, - "loss": 0.6384, - "num_input_tokens_seen": 91455455, - "step": 4292 - }, - { - "epoch": 0.5162027295136175, - "flos": 13136162379720.0, - "grad_norm": 6.784713774612601, - "learning_rate": 1.99221030322181e-06, - "loss": 0.641, - "num_input_tokens_seen": 91471980, - "step": 4293 - }, - { - "epoch": 0.5163229724042566, - "flos": 20049991095000.0, - "grad_norm": 5.247365381296048, - "learning_rate": 1.991431338093505e-06, - "loss": 0.7943, - "num_input_tokens_seen": 91494055, - "step": 4294 - }, - { - "epoch": 0.5164432152948957, - "flos": 15919116813000.0, - "grad_norm": 6.736626834943528, - "learning_rate": 1.9906523742650587e-06, - "loss": 0.768, - "num_input_tokens_seen": 91512635, - "step": 4295 - }, - { - "epoch": 0.5165634581855347, - "flos": 18721189370760.0, - "grad_norm": 3.947829246391869, - "learning_rate": 1.9898734118546397e-06, - "loss": 0.7392, - "num_input_tokens_seen": 91532305, - "step": 4296 - }, - { - "epoch": 0.5166837010761739, - "flos": 14541314453400.0, - "grad_norm": 
3.8052129676359696, - "learning_rate": 1.989094450980416e-06, - "loss": 0.7928, - "num_input_tokens_seen": 91552125, - "step": 4297 - }, - { - "epoch": 0.516803943966813, - "flos": 19532277785040.0, - "grad_norm": 6.1674461215804435, - "learning_rate": 1.9883154917605556e-06, - "loss": 0.7358, - "num_input_tokens_seen": 91571055, - "step": 4298 - }, - { - "epoch": 0.516924186857452, - "flos": 14379812285640.0, - "grad_norm": 6.214005850767998, - "learning_rate": 1.9875365343132262e-06, - "loss": 0.8038, - "num_input_tokens_seen": 91587895, - "step": 4299 - }, - { - "epoch": 0.5170444297480912, - "flos": 11621453149920.0, - "grad_norm": 20.43298850004494, - "learning_rate": 1.9867575787565946e-06, - "loss": 0.8286, - "num_input_tokens_seen": 91602275, - "step": 4300 - }, - { - "epoch": 0.5171646726387302, - "flos": 10303099212480.0, - "grad_norm": 11.411842459280171, - "learning_rate": 1.9859786252088275e-06, - "loss": 0.8442, - "num_input_tokens_seen": 91619175, - "step": 4301 - }, - { - "epoch": 0.5172849155293693, - "flos": 17261717788440.0, - "grad_norm": 7.264492151687125, - "learning_rate": 1.9851996737880914e-06, - "loss": 0.6518, - "num_input_tokens_seen": 91634080, - "step": 4302 - }, - { - "epoch": 0.5174051584200084, - "flos": 10384040256120.0, - "grad_norm": 7.159857094899643, - "learning_rate": 1.9844207246125537e-06, - "loss": 0.7338, - "num_input_tokens_seen": 91650380, - "step": 4303 - }, - { - "epoch": 0.5175254013106475, - "flos": 27853886350680.0, - "grad_norm": 6.490385721320686, - "learning_rate": 1.983641777800379e-06, - "loss": 0.6676, - "num_input_tokens_seen": 91672745, - "step": 4304 - }, - { - "epoch": 0.5176456442012866, - "flos": 50551512498120.0, - "grad_norm": 0.8719485754327245, - "learning_rate": 1.9828628334697343e-06, - "loss": 0.6199, - "num_input_tokens_seen": 91739675, - "step": 4305 - }, - { - "epoch": 0.5177658870919257, - "flos": 47244639402720.0, - "grad_norm": 0.8201226877680823, - "learning_rate": 1.982083891738784e-06, - "loss": 0.5857, - "num_input_tokens_seen": 91800265, - "step": 4306 - }, - { - "epoch": 0.5178861299825648, - "flos": 19536836819280.0, - "grad_norm": 6.502102731832429, - "learning_rate": 1.9813049527256923e-06, - "loss": 0.814, - "num_input_tokens_seen": 91820380, - "step": 4307 - }, - { - "epoch": 0.5180063728732038, - "flos": 13084027408320.0, - "grad_norm": 6.775617237760462, - "learning_rate": 1.9805260165486252e-06, - "loss": 0.7974, - "num_input_tokens_seen": 91839470, - "step": 4308 - }, - { - "epoch": 0.518126615763843, - "flos": 14245216592520.0, - "grad_norm": 4.893767593371652, - "learning_rate": 1.9797470833257457e-06, - "loss": 0.8475, - "num_input_tokens_seen": 91858890, - "step": 4309 - }, - { - "epoch": 0.5182468586544821, - "flos": 15135536412600.0, - "grad_norm": 7.596603297055338, - "learning_rate": 1.9789681531752177e-06, - "loss": 0.7579, - "num_input_tokens_seen": 91878830, - "step": 4310 - }, - { - "epoch": 0.5183671015451211, - "flos": 16917632410920.0, - "grad_norm": 4.94552443111989, - "learning_rate": 1.978189226215204e-06, - "loss": 0.7126, - "num_input_tokens_seen": 91899095, - "step": 4311 - }, - { - "epoch": 0.5184873444357603, - "flos": 12835347184800.0, - "grad_norm": 5.332798136242742, - "learning_rate": 1.9774103025638675e-06, - "loss": 0.7486, - "num_input_tokens_seen": 91916940, - "step": 4312 - }, - { - "epoch": 0.5186075873263993, - "flos": 18268085056440.0, - "grad_norm": 4.090017233068447, - "learning_rate": 1.9766313823393696e-06, - "loss": 0.7499, - "num_input_tokens_seen": 91937525, - 
"step": 4313 - }, - { - "epoch": 0.5187278302170384, - "flos": 11054169325320.0, - "grad_norm": 7.203871682082772, - "learning_rate": 1.975852465659873e-06, - "loss": 0.6655, - "num_input_tokens_seen": 91953225, - "step": 4314 - }, - { - "epoch": 0.5188480731076776, - "flos": 18321771365880.0, - "grad_norm": 7.665478185778784, - "learning_rate": 1.9750735526435377e-06, - "loss": 0.6834, - "num_input_tokens_seen": 91969890, - "step": 4315 - }, - { - "epoch": 0.5189683159983166, - "flos": 18159446039160.0, - "grad_norm": 7.080496485484407, - "learning_rate": 1.974294643408525e-06, - "loss": 0.7675, - "num_input_tokens_seen": 91987405, - "step": 4316 - }, - { - "epoch": 0.5190885588889557, - "flos": 18132982803960.0, - "grad_norm": 6.015524806389635, - "learning_rate": 1.9735157380729947e-06, - "loss": 0.6537, - "num_input_tokens_seen": 92007535, - "step": 4317 - }, - { - "epoch": 0.5192088017795948, - "flos": 18101453975160.0, - "grad_norm": 7.433615559692782, - "learning_rate": 1.9727368367551053e-06, - "loss": 0.8178, - "num_input_tokens_seen": 92025805, - "step": 4318 - }, - { - "epoch": 0.5193290446702339, - "flos": 19964079437640.0, - "grad_norm": 7.049127121585305, - "learning_rate": 1.9719579395730164e-06, - "loss": 0.6807, - "num_input_tokens_seen": 92044900, - "step": 4319 - }, - { - "epoch": 0.5194492875608729, - "flos": 8304200079000.0, - "grad_norm": 8.629042410650483, - "learning_rate": 1.9711790466448854e-06, - "loss": 0.9144, - "num_input_tokens_seen": 92058640, - "step": 4320 - }, - { - "epoch": 0.5195695304515121, - "flos": 14865236927760.0, - "grad_norm": 18.068688622723062, - "learning_rate": 1.9704001580888704e-06, - "loss": 0.6818, - "num_input_tokens_seen": 92077100, - "step": 4321 - }, - { - "epoch": 0.5196897733421512, - "flos": 14650365050160.0, - "grad_norm": 10.848008256914715, - "learning_rate": 1.9696212740231283e-06, - "loss": 0.8574, - "num_input_tokens_seen": 92095470, - "step": 4322 - }, - { - "epoch": 0.5198100162327902, - "flos": 17430818346600.0, - "grad_norm": 14.740156089339283, - "learning_rate": 1.9688423945658146e-06, - "loss": 0.798, - "num_input_tokens_seen": 92116055, - "step": 4323 - }, - { - "epoch": 0.5199302591234293, - "flos": 17592415494240.0, - "grad_norm": 9.577533548087738, - "learning_rate": 1.9680635198350845e-06, - "loss": 0.7037, - "num_input_tokens_seen": 92135485, - "step": 4324 - }, - { - "epoch": 0.5200505020140684, - "flos": 19320825183120.0, - "grad_norm": 4.802853316377432, - "learning_rate": 1.967284649949093e-06, - "loss": 0.707, - "num_input_tokens_seen": 92154415, - "step": 4325 - }, - { - "epoch": 0.5201707449047075, - "flos": 29123018033040.0, - "grad_norm": 6.531646862475342, - "learning_rate": 1.966505785025994e-06, - "loss": 0.717, - "num_input_tokens_seen": 92176040, - "step": 4326 - }, - { - "epoch": 0.5202909877953465, - "flos": 39541368703080.0, - "grad_norm": 6.310141547741941, - "learning_rate": 1.965726925183941e-06, - "loss": 0.7534, - "num_input_tokens_seen": 92198865, - "step": 4327 - }, - { - "epoch": 0.5204112306859857, - "flos": 14271901447440.0, - "grad_norm": 5.776098977421756, - "learning_rate": 1.964948070541087e-06, - "loss": 0.8239, - "num_input_tokens_seen": 92217245, - "step": 4328 - }, - { - "epoch": 0.5205314735766248, - "flos": 11139701063160.0, - "grad_norm": 6.311021990587089, - "learning_rate": 1.9641692212155816e-06, - "loss": 0.67, - "num_input_tokens_seen": 92234730, - "step": 4329 - }, - { - "epoch": 0.5206517164672638, - "flos": 43671419826120.0, - "grad_norm": 4.332839766539363, - 
"learning_rate": 1.9633903773255777e-06, - "loss": 0.7127, - "num_input_tokens_seen": 92256765, - "step": 4330 - }, - { - "epoch": 0.520771959357903, - "flos": 19699700365320.0, - "grad_norm": 4.208840493842966, - "learning_rate": 1.9626115389892237e-06, - "loss": 0.7312, - "num_input_tokens_seen": 92277275, - "step": 4331 - }, - { - "epoch": 0.520892202248542, - "flos": 19726226920440.0, - "grad_norm": 6.128200395539512, - "learning_rate": 1.96183270632467e-06, - "loss": 0.8276, - "num_input_tokens_seen": 92296845, - "step": 4332 - }, - { - "epoch": 0.5210124451391811, - "flos": 18943564658880.0, - "grad_norm": 4.487390441407212, - "learning_rate": 1.9610538794500644e-06, - "loss": 0.77, - "num_input_tokens_seen": 92316115, - "step": 4333 - }, - { - "epoch": 0.5211326880298203, - "flos": 52034281319640.0, - "grad_norm": 0.8013551469865703, - "learning_rate": 1.9602750584835542e-06, - "loss": 0.6256, - "num_input_tokens_seen": 92381770, - "step": 4334 - }, - { - "epoch": 0.5212529309204593, - "flos": 11379041598480.0, - "grad_norm": 11.247385017806119, - "learning_rate": 1.959496243543286e-06, - "loss": 0.8057, - "num_input_tokens_seen": 92399370, - "step": 4335 - }, - { - "epoch": 0.5213731738110984, - "flos": 19233425507640.0, - "grad_norm": 5.330367078039327, - "learning_rate": 1.9587174347474057e-06, - "loss": 0.777, - "num_input_tokens_seen": 92415600, - "step": 4336 - }, - { - "epoch": 0.5214934167017375, - "flos": 14182823794080.0, - "grad_norm": 4.235951710743638, - "learning_rate": 1.9579386322140574e-06, - "loss": 0.7895, - "num_input_tokens_seen": 92431000, - "step": 4337 - }, - { - "epoch": 0.5216136595923766, - "flos": 22722375253440.0, - "grad_norm": 7.654929035438386, - "learning_rate": 1.9571598360613854e-06, - "loss": 0.7905, - "num_input_tokens_seen": 92453595, - "step": 4338 - }, - { - "epoch": 0.5217339024830157, - "flos": 16053997445760.0, - "grad_norm": 4.253163226402195, - "learning_rate": 1.956381046407532e-06, - "loss": 0.6784, - "num_input_tokens_seen": 92473610, - "step": 4339 - }, - { - "epoch": 0.5218541453736548, - "flos": 15297735099480.0, - "grad_norm": 5.631188612252974, - "learning_rate": 1.9556022633706394e-06, - "loss": 0.8397, - "num_input_tokens_seen": 92492120, - "step": 4340 - }, - { - "epoch": 0.5219743882642939, - "flos": 17539014124440.0, - "grad_norm": 5.60159195342893, - "learning_rate": 1.954823487068848e-06, - "loss": 0.7805, - "num_input_tokens_seen": 92512050, - "step": 4341 - }, - { - "epoch": 0.5220946311549329, - "flos": 21125870161080.0, - "grad_norm": 7.885404948767989, - "learning_rate": 1.9540447176202976e-06, - "loss": 0.7896, - "num_input_tokens_seen": 92533015, - "step": 4342 - }, - { - "epoch": 0.5222148740455721, - "flos": 49543277292480.0, - "grad_norm": 0.8438393934013312, - "learning_rate": 1.9532659551431272e-06, - "loss": 0.6443, - "num_input_tokens_seen": 92599765, - "step": 4343 - }, - { - "epoch": 0.5223351169362112, - "flos": 45590296054800.0, - "grad_norm": 4.033562994689547, - "learning_rate": 1.9524871997554744e-06, - "loss": 0.6543, - "num_input_tokens_seen": 92627245, - "step": 4344 - }, - { - "epoch": 0.5224553598268502, - "flos": 10653168322440.0, - "grad_norm": 8.696332434707255, - "learning_rate": 1.951708451575475e-06, - "loss": 0.766, - "num_input_tokens_seen": 92644030, - "step": 4345 - }, - { - "epoch": 0.5225756027174894, - "flos": 10784851299240.0, - "grad_norm": 3.4145148564645447, - "learning_rate": 1.9509297107212657e-06, - "loss": 0.7916, - "num_input_tokens_seen": 92660520, - "step": 4346 - }, - { - 
"epoch": 0.5226958456081284, - "flos": 17213382012240.0, - "grad_norm": 6.41509815548308, - "learning_rate": 1.95015097731098e-06, - "loss": 0.781, - "num_input_tokens_seen": 92679730, - "step": 4347 - }, - { - "epoch": 0.5228160884987675, - "flos": 13921072498440.0, - "grad_norm": 6.709466242247544, - "learning_rate": 1.949372251462751e-06, - "loss": 0.8077, - "num_input_tokens_seen": 92696865, - "step": 4348 - }, - { - "epoch": 0.5229363313894067, - "flos": 15400517024160.0, - "grad_norm": 6.381462860293452, - "learning_rate": 1.9485935332947124e-06, - "loss": 0.8123, - "num_input_tokens_seen": 92714495, - "step": 4349 - }, - { - "epoch": 0.5230565742800457, - "flos": 10788682154400.0, - "grad_norm": 12.993072424575853, - "learning_rate": 1.947814822924993e-06, - "loss": 0.8118, - "num_input_tokens_seen": 92731725, - "step": 4350 - }, - { - "epoch": 0.5231768171706848, - "flos": 18916626524280.0, - "grad_norm": 5.84462335761534, - "learning_rate": 1.9470361204717236e-06, - "loss": 0.8086, - "num_input_tokens_seen": 92750585, - "step": 4351 - }, - { - "epoch": 0.5232970600613239, - "flos": 16644578509560.0, - "grad_norm": 5.247648529960266, - "learning_rate": 1.9462574260530326e-06, - "loss": 0.7948, - "num_input_tokens_seen": 92770585, - "step": 4352 - }, - { - "epoch": 0.523417302951963, - "flos": 12624179522520.0, - "grad_norm": 7.937849622821125, - "learning_rate": 1.9454787397870472e-06, - "loss": 0.7929, - "num_input_tokens_seen": 92787625, - "step": 4353 - }, - { - "epoch": 0.523537545842602, - "flos": 13542703875600.0, - "grad_norm": 13.458768823780023, - "learning_rate": 1.944700061791894e-06, - "loss": 0.7002, - "num_input_tokens_seen": 92805740, - "step": 4354 - }, - { - "epoch": 0.5236577887332411, - "flos": 14406750420240.0, - "grad_norm": 22.78917766511867, - "learning_rate": 1.943921392185698e-06, - "loss": 0.6465, - "num_input_tokens_seen": 92824085, - "step": 4355 - }, - { - "epoch": 0.5237780316238803, - "flos": 17241903144840.0, - "grad_norm": 12.635319184474712, - "learning_rate": 1.9431427310865814e-06, - "loss": 0.7581, - "num_input_tokens_seen": 92843410, - "step": 4356 - }, - { - "epoch": 0.5238982745145193, - "flos": 16458639344040.0, - "grad_norm": 5.602711920077187, - "learning_rate": 1.942364078612667e-06, - "loss": 0.7794, - "num_input_tokens_seen": 92861860, - "step": 4357 - }, - { - "epoch": 0.5240185174051584, - "flos": 19860727633680.0, - "grad_norm": 5.234642819057976, - "learning_rate": 1.9415854348820765e-06, - "loss": 0.7309, - "num_input_tokens_seen": 92881430, - "step": 4358 - }, - { - "epoch": 0.5241387602957975, - "flos": 16593963216240.0, - "grad_norm": 7.229825329708575, - "learning_rate": 1.940806800012929e-06, - "loss": 0.6528, - "num_input_tokens_seen": 92901220, - "step": 4359 - }, - { - "epoch": 0.5242590031864366, - "flos": 29825562409920.0, - "grad_norm": 5.694452752001318, - "learning_rate": 1.9400281741233432e-06, - "loss": 0.6255, - "num_input_tokens_seen": 92925830, - "step": 4360 - }, - { - "epoch": 0.5243792460770756, - "flos": 49163262351720.0, - "grad_norm": 0.687071276416869, - "learning_rate": 1.939249557331435e-06, - "loss": 0.5505, - "num_input_tokens_seen": 92991365, - "step": 4361 - }, - { - "epoch": 0.5244994889677148, - "flos": 20670929569080.0, - "grad_norm": 7.1663949012978385, - "learning_rate": 1.938470949755321e-06, - "loss": 0.709, - "num_input_tokens_seen": 93010965, - "step": 4362 - }, - { - "epoch": 0.5246197318583539, - "flos": 48626557557120.0, - "grad_norm": 0.8861096008099235, - "learning_rate": 
1.937692351513115e-06, - "loss": 0.6149, - "num_input_tokens_seen": 93069680, - "step": 4363 - }, - { - "epoch": 0.5247399747489929, - "flos": 15379467642120.0, - "grad_norm": 4.8230518736813, - "learning_rate": 1.9369137627229297e-06, - "loss": 0.7866, - "num_input_tokens_seen": 93087800, - "step": 4364 - }, - { - "epoch": 0.5248602176396321, - "flos": 13893627804480.0, - "grad_norm": 5.309433714130547, - "learning_rate": 1.936135183502877e-06, - "loss": 0.8651, - "num_input_tokens_seen": 93104820, - "step": 4365 - }, - { - "epoch": 0.5249804605302711, - "flos": 16242691027800.0, - "grad_norm": 6.149578371559242, - "learning_rate": 1.935356613971066e-06, - "loss": 0.7864, - "num_input_tokens_seen": 93125200, - "step": 4366 - }, - { - "epoch": 0.5251007034209102, - "flos": 17430976646400.0, - "grad_norm": 19.170309411677557, - "learning_rate": 1.9345780542456047e-06, - "loss": 0.7589, - "num_input_tokens_seen": 93144295, - "step": 4367 - }, - { - "epoch": 0.5252209463115494, - "flos": 17052703003440.0, - "grad_norm": 4.553373431861849, - "learning_rate": 1.9337995044446007e-06, - "loss": 0.7043, - "num_input_tokens_seen": 93162855, - "step": 4368 - }, - { - "epoch": 0.5253411892021884, - "flos": 14487723123840.0, - "grad_norm": 11.082762012718959, - "learning_rate": 1.9330209646861596e-06, - "loss": 0.7859, - "num_input_tokens_seen": 93181725, - "step": 4369 - }, - { - "epoch": 0.5254614320928275, - "flos": 17673704797440.0, - "grad_norm": 4.7704255486446, - "learning_rate": 1.9322424350883843e-06, - "loss": 0.7672, - "num_input_tokens_seen": 93203280, - "step": 4370 - }, - { - "epoch": 0.5255816749834666, - "flos": 18052105080240.0, - "grad_norm": 8.611279174914742, - "learning_rate": 1.931463915769379e-06, - "loss": 0.77, - "num_input_tokens_seen": 93223115, - "step": 4371 - }, - { - "epoch": 0.5257019178741057, - "flos": 10275147959160.0, - "grad_norm": 7.381912095804552, - "learning_rate": 1.930685406847242e-06, - "loss": 0.7276, - "num_input_tokens_seen": 93237410, - "step": 4372 - }, - { - "epoch": 0.5258221607647448, - "flos": 17240795046240.0, - "grad_norm": 5.166344950514346, - "learning_rate": 1.9299069084400734e-06, - "loss": 0.8106, - "num_input_tokens_seen": 93257990, - "step": 4373 - }, - { - "epoch": 0.5259424036553839, - "flos": 18288754518960.0, - "grad_norm": 7.112416448923665, - "learning_rate": 1.9291284206659717e-06, - "loss": 0.673, - "num_input_tokens_seen": 93275895, - "step": 4374 - }, - { - "epoch": 0.526062646546023, - "flos": 21099850165320.0, - "grad_norm": 6.128791725348098, - "learning_rate": 1.928349943643032e-06, - "loss": 0.6968, - "num_input_tokens_seen": 93294715, - "step": 4375 - }, - { - "epoch": 0.526182889436662, - "flos": 16702633893480.0, - "grad_norm": 4.13107292038167, - "learning_rate": 1.9275714774893493e-06, - "loss": 0.8142, - "num_input_tokens_seen": 93313890, - "step": 4376 - }, - { - "epoch": 0.5263031323273012, - "flos": 16782435178560.0, - "grad_norm": 12.366710622320348, - "learning_rate": 1.9267930223230154e-06, - "loss": 0.7065, - "num_input_tokens_seen": 93332085, - "step": 4377 - }, - { - "epoch": 0.5264233752179402, - "flos": 12948291956640.0, - "grad_norm": 8.973127300434598, - "learning_rate": 1.9260145782621224e-06, - "loss": 0.7617, - "num_input_tokens_seen": 93349585, - "step": 4378 - }, - { - "epoch": 0.5265436181085793, - "flos": 17886202178040.0, - "grad_norm": 10.047591069689835, - "learning_rate": 1.925236145424758e-06, - "loss": 0.8641, - "num_input_tokens_seen": 93368125, - "step": 4379 - }, - { - "epoch": 
0.5266638609992185, - "flos": 51038266858560.0, - "grad_norm": 0.702688738797331, - "learning_rate": 1.924457723929012e-06, - "loss": 0.5945, - "num_input_tokens_seen": 93438655, - "step": 4380 - }, - { - "epoch": 0.5267841038898575, - "flos": 15160891549200.0, - "grad_norm": 7.686883669081521, - "learning_rate": 1.9236793138929685e-06, - "loss": 0.8078, - "num_input_tokens_seen": 93457645, - "step": 4381 - }, - { - "epoch": 0.5269043467804966, - "flos": 12567232237200.0, - "grad_norm": 6.989774865275442, - "learning_rate": 1.9229009154347133e-06, - "loss": 0.7916, - "num_input_tokens_seen": 93474955, - "step": 4382 - }, - { - "epoch": 0.5270245896711357, - "flos": 13299500825160.0, - "grad_norm": 8.522780913213063, - "learning_rate": 1.922122528672327e-06, - "loss": 0.7814, - "num_input_tokens_seen": 93493340, - "step": 4383 - }, - { - "epoch": 0.5271448325617748, - "flos": 15567306405240.0, - "grad_norm": 7.944563478705894, - "learning_rate": 1.9213441537238914e-06, - "loss": 0.7612, - "num_input_tokens_seen": 93509935, - "step": 4384 - }, - { - "epoch": 0.5272650754524139, - "flos": 48287501621880.0, - "grad_norm": 0.9653200834747627, - "learning_rate": 1.920565790707485e-06, - "loss": 0.6458, - "num_input_tokens_seen": 93575045, - "step": 4385 - }, - { - "epoch": 0.527385318343053, - "flos": 14374145152800.0, - "grad_norm": 5.486650739224114, - "learning_rate": 1.9197874397411853e-06, - "loss": 0.6425, - "num_input_tokens_seen": 93591395, - "step": 4386 - }, - { - "epoch": 0.5275055612336921, - "flos": 9220634874720.0, - "grad_norm": 8.136098002005765, - "learning_rate": 1.919009100943067e-06, - "loss": 0.6101, - "num_input_tokens_seen": 93606805, - "step": 4387 - }, - { - "epoch": 0.5276258041243311, - "flos": 12948640216200.0, - "grad_norm": 5.911776747229384, - "learning_rate": 1.9182307744312043e-06, - "loss": 0.6387, - "num_input_tokens_seen": 93623630, - "step": 4388 - }, - { - "epoch": 0.5277460470149702, - "flos": 16620331471560.0, - "grad_norm": 5.812112118484298, - "learning_rate": 1.9174524603236676e-06, - "loss": 0.742, - "num_input_tokens_seen": 93642300, - "step": 4389 - }, - { - "epoch": 0.5278662899056094, - "flos": 14542644171720.0, - "grad_norm": 9.788629545119278, - "learning_rate": 1.916674158738527e-06, - "loss": 0.7517, - "num_input_tokens_seen": 93660925, - "step": 4390 - }, - { - "epoch": 0.5279865327962484, - "flos": 13138346916960.0, - "grad_norm": 6.3523576339628125, - "learning_rate": 1.9158958697938506e-06, - "loss": 0.5785, - "num_input_tokens_seen": 93679025, - "step": 4391 - }, - { - "epoch": 0.5281067756868875, - "flos": 11598282550560.0, - "grad_norm": 10.27879717012883, - "learning_rate": 1.9151175936077032e-06, - "loss": 0.8238, - "num_input_tokens_seen": 93693715, - "step": 4392 - }, - { - "epoch": 0.5282270185775266, - "flos": 14189947285080.0, - "grad_norm": 7.700948473490614, - "learning_rate": 1.9143393302981507e-06, - "loss": 0.7805, - "num_input_tokens_seen": 93711120, - "step": 4393 - }, - { - "epoch": 0.5283472614681657, - "flos": 11950061298360.0, - "grad_norm": 14.742816274846104, - "learning_rate": 1.913561079983252e-06, - "loss": 0.81, - "num_input_tokens_seen": 93729665, - "step": 4394 - }, - { - "epoch": 0.5284675043588047, - "flos": 19617682883040.0, - "grad_norm": 6.017699520541986, - "learning_rate": 1.9127828427810693e-06, - "loss": 0.7225, - "num_input_tokens_seen": 93749950, - "step": 4395 - }, - { - "epoch": 0.5285877472494439, - "flos": 14540174694840.0, - "grad_norm": 8.911368629157172, - "learning_rate": 
1.9120046188096607e-06, - "loss": 0.7868, - "num_input_tokens_seen": 93767715, - "step": 4396 - }, - { - "epoch": 0.528707990140083, - "flos": 14622857036280.0, - "grad_norm": 13.293962780568208, - "learning_rate": 1.9112264081870804e-06, - "loss": 0.7358, - "num_input_tokens_seen": 93785825, - "step": 4397 - }, - { - "epoch": 0.528828233030722, - "flos": 15108851557680.0, - "grad_norm": 5.152579185769492, - "learning_rate": 1.9104482110313843e-06, - "loss": 0.7383, - "num_input_tokens_seen": 93805135, - "step": 4398 - }, - { - "epoch": 0.5289484759213612, - "flos": 18457316857800.0, - "grad_norm": 19.271227737377277, - "learning_rate": 1.909670027460623e-06, - "loss": 0.731, - "num_input_tokens_seen": 93822155, - "step": 4399 - }, - { - "epoch": 0.5290687188120002, - "flos": 22858300664880.0, - "grad_norm": 7.826834993041618, - "learning_rate": 1.908891857592847e-06, - "loss": 0.6975, - "num_input_tokens_seen": 93842945, - "step": 4400 - }, - { - "epoch": 0.5291889617026393, - "flos": 14703101560800.0, - "grad_norm": 7.68374566925309, - "learning_rate": 1.9081137015461034e-06, - "loss": 0.8764, - "num_input_tokens_seen": 93858740, - "step": 4401 - }, - { - "epoch": 0.5293092045932785, - "flos": 14350499654040.0, - "grad_norm": 4.83770690002372, - "learning_rate": 1.9073355594384383e-06, - "loss": 0.8842, - "num_input_tokens_seen": 93876700, - "step": 4402 - }, - { - "epoch": 0.5294294474839175, - "flos": 17810105108280.0, - "grad_norm": 6.532859786381872, - "learning_rate": 1.906557431387895e-06, - "loss": 0.7895, - "num_input_tokens_seen": 93895410, - "step": 4403 - }, - { - "epoch": 0.5295496903745566, - "flos": 13782044410920.0, - "grad_norm": 5.463571418072693, - "learning_rate": 1.905779317512516e-06, - "loss": 0.7737, - "num_input_tokens_seen": 93912675, - "step": 4404 - }, - { - "epoch": 0.5296699332651957, - "flos": 15297640119600.0, - "grad_norm": 4.973413929597284, - "learning_rate": 1.9050012179303385e-06, - "loss": 0.7811, - "num_input_tokens_seen": 93930905, - "step": 4405 - }, - { - "epoch": 0.5297901761558348, - "flos": 16128163257960.0, - "grad_norm": 9.053106662045632, - "learning_rate": 1.904223132759401e-06, - "loss": 0.6567, - "num_input_tokens_seen": 93949225, - "step": 4406 - }, - { - "epoch": 0.5299104190464738, - "flos": 15945453408360.0, - "grad_norm": 9.568779373373024, - "learning_rate": 1.9034450621177383e-06, - "loss": 0.6564, - "num_input_tokens_seen": 93967265, - "step": 4407 - }, - { - "epoch": 0.530030661937113, - "flos": 10707551151000.0, - "grad_norm": 6.454686010688826, - "learning_rate": 1.9026670061233824e-06, - "loss": 0.6879, - "num_input_tokens_seen": 93984420, - "step": 4408 - }, - { - "epoch": 0.5301509048277521, - "flos": 15540684870240.0, - "grad_norm": 4.195441842527235, - "learning_rate": 1.901888964894365e-06, - "loss": 0.7904, - "num_input_tokens_seen": 94003180, - "step": 4409 - }, - { - "epoch": 0.5302711477183911, - "flos": 19022954364480.0, - "grad_norm": 6.180372969350896, - "learning_rate": 1.9011109385487134e-06, - "loss": 0.6575, - "num_input_tokens_seen": 94024150, - "step": 4410 - }, - { - "epoch": 0.5303913906090303, - "flos": 16296757256760.0, - "grad_norm": 10.522209656584918, - "learning_rate": 1.900332927204454e-06, - "loss": 0.6346, - "num_input_tokens_seen": 94042320, - "step": 4411 - }, - { - "epoch": 0.5305116334996693, - "flos": 18267388537320.0, - "grad_norm": 9.959937143395566, - "learning_rate": 1.8995549309796097e-06, - "loss": 0.7485, - "num_input_tokens_seen": 94061345, - "step": 4412 - }, - { - "epoch": 
0.5306318763903084, - "flos": 14754033453720.0, - "grad_norm": 6.4881392617854825, - "learning_rate": 1.8987769499922028e-06, - "loss": 0.7435, - "num_input_tokens_seen": 94080035, - "step": 4413 - }, - { - "epoch": 0.5307521192809476, - "flos": 14811613938240.0, - "grad_norm": 4.718486652225631, - "learning_rate": 1.897998984360252e-06, - "loss": 0.6827, - "num_input_tokens_seen": 94098725, - "step": 4414 - }, - { - "epoch": 0.5308723621715866, - "flos": 21160375026120.0, - "grad_norm": 2.7208960821404276, - "learning_rate": 1.897221034201775e-06, - "loss": 0.764, - "num_input_tokens_seen": 94122185, - "step": 4415 - }, - { - "epoch": 0.5309926050622257, - "flos": 20134256434440.0, - "grad_norm": 26.528695171087264, - "learning_rate": 1.8964430996347842e-06, - "loss": 0.656, - "num_input_tokens_seen": 94143455, - "step": 4416 - }, - { - "epoch": 0.5311128479528648, - "flos": 14994260467920.0, - "grad_norm": 5.184938096915686, - "learning_rate": 1.8956651807772931e-06, - "loss": 0.8169, - "num_input_tokens_seen": 94161210, - "step": 4417 - }, - { - "epoch": 0.5312330908435039, - "flos": 15648880648080.0, - "grad_norm": 4.384755185121202, - "learning_rate": 1.8948872777473115e-06, - "loss": 0.8143, - "num_input_tokens_seen": 94178885, - "step": 4418 - }, - { - "epoch": 0.531353333734143, - "flos": 18105601429920.0, - "grad_norm": 7.734486958627805, - "learning_rate": 1.8941093906628458e-06, - "loss": 0.6252, - "num_input_tokens_seen": 94196390, - "step": 4419 - }, - { - "epoch": 0.531473576624782, - "flos": 22370659825560.0, - "grad_norm": 6.2484214315090965, - "learning_rate": 1.893331519641902e-06, - "loss": 0.697, - "num_input_tokens_seen": 94218255, - "step": 4420 - }, - { - "epoch": 0.5315938195154212, - "flos": 16836912987000.0, - "grad_norm": 10.532278672868884, - "learning_rate": 1.8925536648024815e-06, - "loss": 0.7339, - "num_input_tokens_seen": 94235395, - "step": 4421 - }, - { - "epoch": 0.5317140624060602, - "flos": 16648789284240.0, - "grad_norm": 4.167247935122556, - "learning_rate": 1.8917758262625849e-06, - "loss": 0.7513, - "num_input_tokens_seen": 94255355, - "step": 4422 - }, - { - "epoch": 0.5318343052966993, - "flos": 16702380613800.0, - "grad_norm": 6.052624396487935, - "learning_rate": 1.8909980041402089e-06, - "loss": 0.7939, - "num_input_tokens_seen": 94273670, - "step": 4423 - }, - { - "epoch": 0.5319545481873384, - "flos": 9897380875560.0, - "grad_norm": 9.757444620642351, - "learning_rate": 1.8902201985533494e-06, - "loss": 0.6461, - "num_input_tokens_seen": 94290655, - "step": 4424 - }, - { - "epoch": 0.5320747910779775, - "flos": 16214739774480.0, - "grad_norm": 15.375667898666325, - "learning_rate": 1.8894424096199983e-06, - "loss": 0.7342, - "num_input_tokens_seen": 94309580, - "step": 4425 - }, - { - "epoch": 0.5321950339686166, - "flos": 13569578690280.0, - "grad_norm": 7.867996977269766, - "learning_rate": 1.8886646374581463e-06, - "loss": 0.8421, - "num_input_tokens_seen": 94328525, - "step": 4426 - }, - { - "epoch": 0.5323152768592557, - "flos": 16728717209160.0, - "grad_norm": 3.760751063025102, - "learning_rate": 1.8878868821857795e-06, - "loss": 0.6905, - "num_input_tokens_seen": 94347895, - "step": 4427 - }, - { - "epoch": 0.5324355197498948, - "flos": 24937475982840.0, - "grad_norm": 8.362069695639146, - "learning_rate": 1.8871091439208838e-06, - "loss": 0.7227, - "num_input_tokens_seen": 94369225, - "step": 4428 - }, - { - "epoch": 0.5325557626405338, - "flos": 17024466810480.0, - "grad_norm": 5.2288847868835235, - "learning_rate": 
1.8863314227814414e-06, - "loss": 0.7429, - "num_input_tokens_seen": 94387255, - "step": 4429 - }, - { - "epoch": 0.532676005531173, - "flos": 19585964094480.0, - "grad_norm": 6.870140994206264, - "learning_rate": 1.8855537188854313e-06, - "loss": 0.4591, - "num_input_tokens_seen": 94405950, - "step": 4430 - }, - { - "epoch": 0.5327962484218121, - "flos": 13056329434680.0, - "grad_norm": 5.562793509636986, - "learning_rate": 1.8847760323508315e-06, - "loss": 0.7624, - "num_input_tokens_seen": 94424575, - "step": 4431 - }, - { - "epoch": 0.5329164913124511, - "flos": 13078296955560.0, - "grad_norm": 11.483392416511837, - "learning_rate": 1.883998363295616e-06, - "loss": 0.7364, - "num_input_tokens_seen": 94441775, - "step": 4432 - }, - { - "epoch": 0.5330367342030903, - "flos": 42189003755520.0, - "grad_norm": 0.963943648470018, - "learning_rate": 1.8832207118377565e-06, - "loss": 0.679, - "num_input_tokens_seen": 94496865, - "step": 4433 - }, - { - "epoch": 0.5331569770937293, - "flos": 12705500485680.0, - "grad_norm": 3.720634944421239, - "learning_rate": 1.882443078095222e-06, - "loss": 0.6693, - "num_input_tokens_seen": 94515465, - "step": 4434 - }, - { - "epoch": 0.5332772199843684, - "flos": 41817505344000.0, - "grad_norm": 0.8629615847766527, - "learning_rate": 1.8816654621859794e-06, - "loss": 0.6813, - "num_input_tokens_seen": 94574850, - "step": 4435 - }, - { - "epoch": 0.5333974628750076, - "flos": 13650773013600.0, - "grad_norm": 4.882489833088733, - "learning_rate": 1.8808878642279915e-06, - "loss": 0.6943, - "num_input_tokens_seen": 94589975, - "step": 4436 - }, - { - "epoch": 0.5335177057656466, - "flos": 17430818346600.0, - "grad_norm": 7.6964468647578546, - "learning_rate": 1.8801102843392209e-06, - "loss": 0.6413, - "num_input_tokens_seen": 94609100, - "step": 4437 - }, - { - "epoch": 0.5336379486562857, - "flos": 18372513299040.0, - "grad_norm": 3.848569748000497, - "learning_rate": 1.8793327226376238e-06, - "loss": 0.8448, - "num_input_tokens_seen": 94628140, - "step": 4438 - }, - { - "epoch": 0.5337581915469248, - "flos": 15648025829160.0, - "grad_norm": 15.644679989149367, - "learning_rate": 1.8785551792411569e-06, - "loss": 0.7824, - "num_input_tokens_seen": 94646870, - "step": 4439 - }, - { - "epoch": 0.5338784344375639, - "flos": 10814828790000.0, - "grad_norm": 11.260048003018168, - "learning_rate": 1.8777776542677733e-06, - "loss": 0.809, - "num_input_tokens_seen": 94664640, - "step": 4440 - }, - { - "epoch": 0.5339986773282029, - "flos": 15216509116200.0, - "grad_norm": 5.5760699421506414, - "learning_rate": 1.8770001478354216e-06, - "loss": 0.6961, - "num_input_tokens_seen": 94684035, - "step": 4441 - }, - { - "epoch": 0.5341189202188421, - "flos": 13111662062040.0, - "grad_norm": 5.458746780384076, - "learning_rate": 1.8762226600620504e-06, - "loss": 0.8099, - "num_input_tokens_seen": 94702370, - "step": 4442 - }, - { - "epoch": 0.5342391631094812, - "flos": 7976953308840.0, - "grad_norm": 9.173791324508368, - "learning_rate": 1.8754451910656031e-06, - "loss": 0.582, - "num_input_tokens_seen": 94715990, - "step": 4443 - }, - { - "epoch": 0.5343594060001202, - "flos": 11164897899960.0, - "grad_norm": 7.7446590601213385, - "learning_rate": 1.8746677409640212e-06, - "loss": 0.7981, - "num_input_tokens_seen": 94732810, - "step": 4444 - }, - { - "epoch": 0.5344796488907594, - "flos": 19720781407320.0, - "grad_norm": 5.655415678928017, - "learning_rate": 1.8738903098752432e-06, - "loss": 0.8263, - "num_input_tokens_seen": 94751660, - "step": 4445 - }, - { - "epoch": 
0.5345998917813984, - "flos": 18619198945080.0, - "grad_norm": 3.667891788995587, - "learning_rate": 1.8731128979172052e-06, - "loss": 0.712, - "num_input_tokens_seen": 94770580, - "step": 4446 - }, - { - "epoch": 0.5347201346720375, - "flos": 24126767488080.0, - "grad_norm": 10.026349257998117, - "learning_rate": 1.8723355052078394e-06, - "loss": 0.6432, - "num_input_tokens_seen": 94790335, - "step": 4447 - }, - { - "epoch": 0.5348403775626767, - "flos": 13110648943320.0, - "grad_norm": 3.9324003831937913, - "learning_rate": 1.8715581318650765e-06, - "loss": 0.7525, - "num_input_tokens_seen": 94809110, - "step": 4448 - }, - { - "epoch": 0.5349606204533157, - "flos": 12840856017840.0, - "grad_norm": 10.248517185046211, - "learning_rate": 1.8707807780068422e-06, - "loss": 0.792, - "num_input_tokens_seen": 94826645, - "step": 4449 - }, - { - "epoch": 0.5350808633439548, - "flos": 21399145682160.0, - "grad_norm": 42.75393462339168, - "learning_rate": 1.8700034437510611e-06, - "loss": 0.6511, - "num_input_tokens_seen": 94846460, - "step": 4450 - }, - { - "epoch": 0.5352011062345938, - "flos": 14244425093520.0, - "grad_norm": 9.032836352351312, - "learning_rate": 1.8692261292156549e-06, - "loss": 0.7916, - "num_input_tokens_seen": 94865415, - "step": 4451 - }, - { - "epoch": 0.535321349125233, - "flos": 17188248495360.0, - "grad_norm": 5.119119759829664, - "learning_rate": 1.8684488345185401e-06, - "loss": 0.7981, - "num_input_tokens_seen": 94885310, - "step": 4452 - }, - { - "epoch": 0.535441592015872, - "flos": 14967259013400.0, - "grad_norm": 13.164134945089977, - "learning_rate": 1.8676715597776332e-06, - "loss": 0.7647, - "num_input_tokens_seen": 94903375, - "step": 4453 - }, - { - "epoch": 0.5355618349065111, - "flos": 14298586302360.0, - "grad_norm": 6.335891441243656, - "learning_rate": 1.8668943051108455e-06, - "loss": 0.7446, - "num_input_tokens_seen": 94920400, - "step": 4454 - }, - { - "epoch": 0.5356820777971503, - "flos": 17593017033480.0, - "grad_norm": 12.898839712081422, - "learning_rate": 1.8661170706360856e-06, - "loss": 0.7413, - "num_input_tokens_seen": 94939285, - "step": 4455 - }, - { - "epoch": 0.5358023206877893, - "flos": 15268929027240.0, - "grad_norm": 4.51164666981112, - "learning_rate": 1.8653398564712594e-06, - "loss": 0.8099, - "num_input_tokens_seen": 94957950, - "step": 4456 - }, - { - "epoch": 0.5359225635784284, - "flos": 16404699754920.0, - "grad_norm": 3.4562362532661326, - "learning_rate": 1.8645626627342704e-06, - "loss": 0.8059, - "num_input_tokens_seen": 94978435, - "step": 4457 - }, - { - "epoch": 0.5360428064690675, - "flos": 17647178242320.0, - "grad_norm": 7.884368410232457, - "learning_rate": 1.8637854895430172e-06, - "loss": 0.7804, - "num_input_tokens_seen": 94997420, - "step": 4458 - }, - { - "epoch": 0.5361630493597066, - "flos": 15676198702200.0, - "grad_norm": 4.986900144370925, - "learning_rate": 1.8630083370153978e-06, - "loss": 0.6684, - "num_input_tokens_seen": 95016780, - "step": 4459 - }, - { - "epoch": 0.5362832922503457, - "flos": 50798700212160.0, - "grad_norm": 0.7938632523849707, - "learning_rate": 1.8622312052693041e-06, - "loss": 0.5823, - "num_input_tokens_seen": 95077680, - "step": 4460 - }, - { - "epoch": 0.5364035351409848, - "flos": 7060961752560.0, - "grad_norm": 7.436267398025801, - "learning_rate": 1.8614540944226267e-06, - "loss": 0.715, - "num_input_tokens_seen": 95094070, - "step": 4461 - }, - { - "epoch": 0.5365237780316239, - "flos": 17048713848480.0, - "grad_norm": 5.692812272705435, - "learning_rate": 
1.8606770045932537e-06, - "loss": 0.6721, - "num_input_tokens_seen": 95112905, - "step": 4462 - }, - { - "epoch": 0.5366440209222629, - "flos": 19482707270400.0, - "grad_norm": 5.276860506534157, - "learning_rate": 1.859899935899068e-06, - "loss": 0.7972, - "num_input_tokens_seen": 95132480, - "step": 4463 - }, - { - "epoch": 0.5367642638129021, - "flos": 14324194718640.0, - "grad_norm": 4.089240465966874, - "learning_rate": 1.8591228884579506e-06, - "loss": 0.7828, - "num_input_tokens_seen": 95150695, - "step": 4464 - }, - { - "epoch": 0.5368845067035412, - "flos": 17512044329880.0, - "grad_norm": 17.324675427323577, - "learning_rate": 1.8583458623877795e-06, - "loss": 0.8103, - "num_input_tokens_seen": 95169515, - "step": 4465 - }, - { - "epoch": 0.5370047495941802, - "flos": 12300383688000.0, - "grad_norm": 7.8184245114230215, - "learning_rate": 1.8575688578064281e-06, - "loss": 0.7155, - "num_input_tokens_seen": 95187360, - "step": 4466 - }, - { - "epoch": 0.5371249924848194, - "flos": 15162854466720.0, - "grad_norm": 3.6266462599890787, - "learning_rate": 1.8567918748317674e-06, - "loss": 0.7384, - "num_input_tokens_seen": 95206430, - "step": 4467 - }, - { - "epoch": 0.5372452353754584, - "flos": 13110807243120.0, - "grad_norm": 4.420236236680289, - "learning_rate": 1.8560149135816659e-06, - "loss": 0.8062, - "num_input_tokens_seen": 95222985, - "step": 4468 - }, - { - "epoch": 0.5373654782660975, - "flos": 11193134092920.0, - "grad_norm": 6.932845195068133, - "learning_rate": 1.8552379741739873e-06, - "loss": 0.8143, - "num_input_tokens_seen": 95240050, - "step": 4469 - }, - { - "epoch": 0.5374857211567367, - "flos": 50881414213560.0, - "grad_norm": 0.9155028968090185, - "learning_rate": 1.8544610567265935e-06, - "loss": 0.5741, - "num_input_tokens_seen": 95293710, - "step": 4470 - }, - { - "epoch": 0.5376059640473757, - "flos": 10977312416520.0, - "grad_norm": 4.055957867134556, - "learning_rate": 1.853684161357341e-06, - "loss": 0.8171, - "num_input_tokens_seen": 95311090, - "step": 4471 - }, - { - "epoch": 0.5377262069380148, - "flos": 14461069928880.0, - "grad_norm": 5.756757994197756, - "learning_rate": 1.852907288184085e-06, - "loss": 0.766, - "num_input_tokens_seen": 95329695, - "step": 4472 - }, - { - "epoch": 0.5378464498286539, - "flos": 22016728200480.0, - "grad_norm": 5.276098503969006, - "learning_rate": 1.8521304373246762e-06, - "loss": 0.6837, - "num_input_tokens_seen": 95350460, - "step": 4473 - }, - { - "epoch": 0.537966692719293, - "flos": 15540431590560.0, - "grad_norm": 6.869145623043391, - "learning_rate": 1.8513536088969626e-06, - "loss": 0.8679, - "num_input_tokens_seen": 95367845, - "step": 4474 - }, - { - "epoch": 0.538086935609932, - "flos": 15756506546640.0, - "grad_norm": 4.485698012998463, - "learning_rate": 1.8505768030187884e-06, - "loss": 0.7818, - "num_input_tokens_seen": 95387695, - "step": 4475 - }, - { - "epoch": 0.5382071785005712, - "flos": 16647586205760.0, - "grad_norm": 8.007347071077863, - "learning_rate": 1.849800019807995e-06, - "loss": 0.79, - "num_input_tokens_seen": 95408640, - "step": 4476 - }, - { - "epoch": 0.5383274213912103, - "flos": 18266122138920.0, - "grad_norm": 4.975594083500447, - "learning_rate": 1.8490232593824186e-06, - "loss": 0.7018, - "num_input_tokens_seen": 95424815, - "step": 4477 - }, - { - "epoch": 0.5384476642818493, - "flos": 16324265270640.0, - "grad_norm": 3.2926097597264987, - "learning_rate": 1.8482465218598935e-06, - "loss": 0.8279, - "num_input_tokens_seen": 95444480, - "step": 4478 - }, - { - "epoch": 
0.5385679071724885, - "flos": 16620838030920.0, - "grad_norm": 4.546535833234546, - "learning_rate": 1.8474698073582508e-06, - "loss": 0.8211, - "num_input_tokens_seen": 95465570, - "step": 4479 - }, - { - "epoch": 0.5386881500631275, - "flos": 11618952013080.0, - "grad_norm": 4.622310371862552, - "learning_rate": 1.8466931159953166e-06, - "loss": 0.8453, - "num_input_tokens_seen": 95481925, - "step": 4480 - }, - { - "epoch": 0.5388083929537666, - "flos": 17619068689200.0, - "grad_norm": 6.764527123862588, - "learning_rate": 1.8459164478889158e-06, - "loss": 0.8275, - "num_input_tokens_seen": 95503040, - "step": 4481 - }, - { - "epoch": 0.5389286358444056, - "flos": 16269977421960.0, - "grad_norm": 3.2426061888448707, - "learning_rate": 1.8451398031568663e-06, - "loss": 0.7369, - "num_input_tokens_seen": 95522385, - "step": 4482 - }, - { - "epoch": 0.5390488787350448, - "flos": 17783515233240.0, - "grad_norm": 5.212320353021715, - "learning_rate": 1.844363181916986e-06, - "loss": 0.7321, - "num_input_tokens_seen": 95542830, - "step": 4483 - }, - { - "epoch": 0.5391691216256839, - "flos": 12084498691680.0, - "grad_norm": 7.086976703789058, - "learning_rate": 1.8435865842870868e-06, - "loss": 0.8151, - "num_input_tokens_seen": 95560490, - "step": 4484 - }, - { - "epoch": 0.5392893645163229, - "flos": 17078786319120.0, - "grad_norm": 9.668661591335868, - "learning_rate": 1.8428100103849787e-06, - "loss": 0.7001, - "num_input_tokens_seen": 95580005, - "step": 4485 - }, - { - "epoch": 0.5394096074069621, - "flos": 11409937228080.0, - "grad_norm": 5.927106574559918, - "learning_rate": 1.842033460328467e-06, - "loss": 0.6932, - "num_input_tokens_seen": 95598445, - "step": 4486 - }, - { - "epoch": 0.5395298502976011, - "flos": 16755845303520.0, - "grad_norm": 3.4371172605194946, - "learning_rate": 1.8412569342353541e-06, - "loss": 0.7366, - "num_input_tokens_seen": 95618320, - "step": 4487 - }, - { - "epoch": 0.5396500931882402, - "flos": 17457946440960.0, - "grad_norm": 10.75211153805872, - "learning_rate": 1.840480432223438e-06, - "loss": 0.8287, - "num_input_tokens_seen": 95637045, - "step": 4488 - }, - { - "epoch": 0.5397703360788794, - "flos": 19293918708480.0, - "grad_norm": 8.908046608126755, - "learning_rate": 1.8397039544105131e-06, - "loss": 0.7473, - "num_input_tokens_seen": 95655850, - "step": 4489 - }, - { - "epoch": 0.5398905789695184, - "flos": 15513303496200.0, - "grad_norm": 4.398425574820091, - "learning_rate": 1.8389275009143711e-06, - "loss": 0.685, - "num_input_tokens_seen": 95675310, - "step": 4490 - }, - { - "epoch": 0.5400108218601575, - "flos": 18373969657200.0, - "grad_norm": 4.821287183353644, - "learning_rate": 1.8381510718527988e-06, - "loss": 0.7105, - "num_input_tokens_seen": 95694640, - "step": 4491 - }, - { - "epoch": 0.5401310647507966, - "flos": 19319178865200.0, - "grad_norm": 4.829295587450791, - "learning_rate": 1.8373746673435812e-06, - "loss": 0.624, - "num_input_tokens_seen": 95715385, - "step": 4492 - }, - { - "epoch": 0.5402513076414357, - "flos": 20427948138360.0, - "grad_norm": 10.226715436451924, - "learning_rate": 1.8365982875044964e-06, - "loss": 0.7738, - "num_input_tokens_seen": 95735415, - "step": 4493 - }, - { - "epoch": 0.5403715505320748, - "flos": 16756130243160.0, - "grad_norm": 12.36377985763917, - "learning_rate": 1.8358219324533217e-06, - "loss": 0.734, - "num_input_tokens_seen": 95755400, - "step": 4494 - }, - { - "epoch": 0.5404917934227139, - "flos": 22180636525200.0, - "grad_norm": 6.721416281455366, - "learning_rate": 
1.8350456023078292e-06, - "loss": 0.6825, - "num_input_tokens_seen": 95777495, - "step": 4495 - }, - { - "epoch": 0.540612036313353, - "flos": 14569044087000.0, - "grad_norm": 6.095939673197855, - "learning_rate": 1.8342692971857874e-06, - "loss": 0.7443, - "num_input_tokens_seen": 95796415, - "step": 4496 - }, - { - "epoch": 0.540732279203992, - "flos": 17781647295600.0, - "grad_norm": 11.835007311839274, - "learning_rate": 1.833493017204962e-06, - "loss": 0.6869, - "num_input_tokens_seen": 95816240, - "step": 4497 - }, - { - "epoch": 0.5408525220946312, - "flos": 14757452729400.0, - "grad_norm": 8.690638572136336, - "learning_rate": 1.8327167624831134e-06, - "loss": 0.7509, - "num_input_tokens_seen": 95833690, - "step": 4498 - }, - { - "epoch": 0.5409727649852702, - "flos": 17674337996640.0, - "grad_norm": 9.682614774410315, - "learning_rate": 1.831940533137999e-06, - "loss": 0.6908, - "num_input_tokens_seen": 95852315, - "step": 4499 - }, - { - "epoch": 0.5410930078759093, - "flos": 17509828132680.0, - "grad_norm": 3.9803911225870166, - "learning_rate": 1.8311643292873718e-06, - "loss": 0.7, - "num_input_tokens_seen": 95870855, - "step": 4500 - }, - { - "epoch": 0.5412132507665485, - "flos": 15432299132640.0, - "grad_norm": 4.792060946400168, - "learning_rate": 1.8303881510489818e-06, - "loss": 0.8698, - "num_input_tokens_seen": 95888965, - "step": 4501 - }, - { - "epoch": 0.5413334936571875, - "flos": 22183454261640.0, - "grad_norm": 4.408756922734544, - "learning_rate": 1.829611998540574e-06, - "loss": 0.667, - "num_input_tokens_seen": 95909890, - "step": 4502 - }, - { - "epoch": 0.5414537365478266, - "flos": 17781678955560.0, - "grad_norm": 16.5231844691028, - "learning_rate": 1.8288358718798914e-06, - "loss": 0.7872, - "num_input_tokens_seen": 95928800, - "step": 4503 - }, - { - "epoch": 0.5415739794384657, - "flos": 12138659900520.0, - "grad_norm": 4.307121793202451, - "learning_rate": 1.8280597711846703e-06, - "loss": 0.7086, - "num_input_tokens_seen": 95946760, - "step": 4504 - }, - { - "epoch": 0.5416942223291048, - "flos": 16972205199240.0, - "grad_norm": 9.140617487977682, - "learning_rate": 1.8272836965726455e-06, - "loss": 0.8147, - "num_input_tokens_seen": 95965415, - "step": 4505 - }, - { - "epoch": 0.5418144652197439, - "flos": 14838963652320.0, - "grad_norm": 7.899329251171153, - "learning_rate": 1.8265076481615461e-06, - "loss": 0.7605, - "num_input_tokens_seen": 95985050, - "step": 4506 - }, - { - "epoch": 0.541934708110383, - "flos": 9033397650840.0, - "grad_norm": 38.636786480390036, - "learning_rate": 1.8257316260690987e-06, - "loss": 0.8517, - "num_input_tokens_seen": 96002555, - "step": 4507 - }, - { - "epoch": 0.5420549510010221, - "flos": 15542711107680.0, - "grad_norm": 7.062192927073642, - "learning_rate": 1.8249556304130254e-06, - "loss": 0.7521, - "num_input_tokens_seen": 96023555, - "step": 4508 - }, - { - "epoch": 0.5421751938916611, - "flos": 21637156499160.0, - "grad_norm": 9.480999869238888, - "learning_rate": 1.824179661311044e-06, - "loss": 0.6673, - "num_input_tokens_seen": 96042025, - "step": 4509 - }, - { - "epoch": 0.5422954367823003, - "flos": 13677774468120.0, - "grad_norm": 13.610418794286213, - "learning_rate": 1.823403718880868e-06, - "loss": 0.772, - "num_input_tokens_seen": 96060505, - "step": 4510 - }, - { - "epoch": 0.5424156796729394, - "flos": 29392937598360.0, - "grad_norm": 3.890947075806675, - "learning_rate": 1.822627803240207e-06, - "loss": 0.6594, - "num_input_tokens_seen": 96082555, - "step": 4511 - }, - { - "epoch": 
0.5425359225635784, - "flos": 8358392947800.0, - "grad_norm": 7.382732669459268, - "learning_rate": 1.8218519145067675e-06, - "loss": 0.8375, - "num_input_tokens_seen": 96097895, - "step": 4512 - }, - { - "epoch": 0.5426561654542175, - "flos": 14784454183920.0, - "grad_norm": 8.112337545170156, - "learning_rate": 1.8210760527982508e-06, - "loss": 0.8719, - "num_input_tokens_seen": 96117900, - "step": 4513 - }, - { - "epoch": 0.5427764083448566, - "flos": 15999646277160.0, - "grad_norm": 7.3140976945518865, - "learning_rate": 1.8203002182323552e-06, - "loss": 0.7294, - "num_input_tokens_seen": 96135175, - "step": 4514 - }, - { - "epoch": 0.5428966512354957, - "flos": 14348030177160.0, - "grad_norm": 5.078974162775646, - "learning_rate": 1.819524410926773e-06, - "loss": 0.7444, - "num_input_tokens_seen": 96152575, - "step": 4515 - }, - { - "epoch": 0.5430168941261347, - "flos": 16377413360760.0, - "grad_norm": 2.9220737654861457, - "learning_rate": 1.8187486309991944e-06, - "loss": 0.7551, - "num_input_tokens_seen": 96173175, - "step": 4516 - }, - { - "epoch": 0.5431371370167739, - "flos": 13699013809920.0, - "grad_norm": 4.782852602382106, - "learning_rate": 1.817972878567304e-06, - "loss": 0.7488, - "num_input_tokens_seen": 96191550, - "step": 4517 - }, - { - "epoch": 0.543257379907413, - "flos": 13730732598480.0, - "grad_norm": 22.193370289671037, - "learning_rate": 1.8171971537487834e-06, - "loss": 0.7476, - "num_input_tokens_seen": 96209920, - "step": 4518 - }, - { - "epoch": 0.543377622798052, - "flos": 12759408414840.0, - "grad_norm": 8.893976260408115, - "learning_rate": 1.8164214566613093e-06, - "loss": 0.7891, - "num_input_tokens_seen": 96228265, - "step": 4519 - }, - { - "epoch": 0.5434978656886912, - "flos": 13866499710120.0, - "grad_norm": 8.002079693363802, - "learning_rate": 1.8156457874225547e-06, - "loss": 0.6255, - "num_input_tokens_seen": 96246445, - "step": 4520 - }, - { - "epoch": 0.5436181085793302, - "flos": 12597811267200.0, - "grad_norm": 10.131485733781997, - "learning_rate": 1.814870146150187e-06, - "loss": 0.7859, - "num_input_tokens_seen": 96264275, - "step": 4521 - }, - { - "epoch": 0.5437383514699693, - "flos": 13947345773880.0, - "grad_norm": 5.098283119681823, - "learning_rate": 1.814094532961871e-06, - "loss": 0.7708, - "num_input_tokens_seen": 96282570, - "step": 4522 - }, - { - "epoch": 0.5438585943606085, - "flos": 16540846786080.0, - "grad_norm": 5.677210628470599, - "learning_rate": 1.8133189479752666e-06, - "loss": 0.8094, - "num_input_tokens_seen": 96301220, - "step": 4523 - }, - { - "epoch": 0.5439788372512475, - "flos": 15945453408360.0, - "grad_norm": 7.631043337461326, - "learning_rate": 1.8125433913080292e-06, - "loss": 0.8045, - "num_input_tokens_seen": 96318640, - "step": 4524 - }, - { - "epoch": 0.5440990801418866, - "flos": 11949839678640.0, - "grad_norm": 4.869547215274446, - "learning_rate": 1.811767863077811e-06, - "loss": 0.7963, - "num_input_tokens_seen": 96337310, - "step": 4525 - }, - { - "epoch": 0.5442193230325257, - "flos": 15810066216240.0, - "grad_norm": 4.095731997990742, - "learning_rate": 1.8109923634022577e-06, - "loss": 0.7682, - "num_input_tokens_seen": 96357055, - "step": 4526 - }, - { - "epoch": 0.5443395659231648, - "flos": 11269484442360.0, - "grad_norm": 7.651825291905949, - "learning_rate": 1.8102168923990128e-06, - "loss": 0.8441, - "num_input_tokens_seen": 96370320, - "step": 4527 - }, - { - "epoch": 0.5444598088138038, - "flos": 13704522642960.0, - "grad_norm": 4.936016004613714, - "learning_rate": 
1.809441450185714e-06, - "loss": 0.7717, - "num_input_tokens_seen": 96388525, - "step": 4528 - }, - { - "epoch": 0.544580051704443, - "flos": 15459933786360.0, - "grad_norm": 6.350931546824542, - "learning_rate": 1.8086660368799958e-06, - "loss": 0.7149, - "num_input_tokens_seen": 96406295, - "step": 4529 - }, - { - "epoch": 0.5447002945950821, - "flos": 23859380719560.0, - "grad_norm": 8.131833064222052, - "learning_rate": 1.807890652599488e-06, - "loss": 0.7492, - "num_input_tokens_seen": 96430400, - "step": 4530 - }, - { - "epoch": 0.5448205374857211, - "flos": 8544205473480.0, - "grad_norm": 10.988148633886903, - "learning_rate": 1.8071152974618156e-06, - "loss": 0.8045, - "num_input_tokens_seen": 96447920, - "step": 4531 - }, - { - "epoch": 0.5449407803763603, - "flos": 17673736457400.0, - "grad_norm": 29.034367684451773, - "learning_rate": 1.806339971584599e-06, - "loss": 0.7684, - "num_input_tokens_seen": 96464300, - "step": 4532 - }, - { - "epoch": 0.5450610232669993, - "flos": 17026081468440.0, - "grad_norm": 5.460515040045534, - "learning_rate": 1.8055646750854546e-06, - "loss": 0.838, - "num_input_tokens_seen": 96483530, - "step": 4533 - }, - { - "epoch": 0.5451812661576384, - "flos": 12975261751200.0, - "grad_norm": 7.6162935172508925, - "learning_rate": 1.8047894080819945e-06, - "loss": 0.799, - "num_input_tokens_seen": 96500500, - "step": 4534 - }, - { - "epoch": 0.5453015090482776, - "flos": 52410528725160.0, - "grad_norm": 0.7511026700694204, - "learning_rate": 1.8040141706918258e-06, - "loss": 0.6572, - "num_input_tokens_seen": 96561460, - "step": 4535 - }, - { - "epoch": 0.5454217519389166, - "flos": 18722835688680.0, - "grad_norm": 6.37396890118101, - "learning_rate": 1.8032389630325525e-06, - "loss": 0.7533, - "num_input_tokens_seen": 96579930, - "step": 4536 - }, - { - "epoch": 0.5455419948295557, - "flos": 17321736089880.0, - "grad_norm": 3.9542187772049853, - "learning_rate": 1.8024637852217707e-06, - "loss": 0.7472, - "num_input_tokens_seen": 96599375, - "step": 4537 - }, - { - "epoch": 0.5456622377201948, - "flos": 17133644047080.0, - "grad_norm": 5.039582549472646, - "learning_rate": 1.8016886373770766e-06, - "loss": 0.8293, - "num_input_tokens_seen": 96617610, - "step": 4538 - }, - { - "epoch": 0.5457824806108339, - "flos": 17565920599080.0, - "grad_norm": 5.008469465609175, - "learning_rate": 1.8009135196160579e-06, - "loss": 0.7542, - "num_input_tokens_seen": 96636205, - "step": 4539 - }, - { - "epoch": 0.545902723501473, - "flos": 16513528731960.0, - "grad_norm": 4.083106028140358, - "learning_rate": 1.8001384320563e-06, - "loss": 0.8249, - "num_input_tokens_seen": 96656180, - "step": 4540 - }, - { - "epoch": 0.5460229663921121, - "flos": 40665809656440.0, - "grad_norm": 0.8112205187902694, - "learning_rate": 1.7993633748153833e-06, - "loss": 0.6013, - "num_input_tokens_seen": 96710505, - "step": 4541 - }, - { - "epoch": 0.5461432092827512, - "flos": 11220452147040.0, - "grad_norm": 10.571389334469595, - "learning_rate": 1.7985883480108834e-06, - "loss": 0.7106, - "num_input_tokens_seen": 96727860, - "step": 4542 - }, - { - "epoch": 0.5462634521733902, - "flos": 17593270313160.0, - "grad_norm": 5.396251274428616, - "learning_rate": 1.797813351760371e-06, - "loss": 0.7112, - "num_input_tokens_seen": 96749285, - "step": 4543 - }, - { - "epoch": 0.5463836950640293, - "flos": 16702253973960.0, - "grad_norm": 6.801597340286658, - "learning_rate": 1.7970383861814116e-06, - "loss": 0.7585, - "num_input_tokens_seen": 96768775, - "step": 4544 - }, - { - "epoch": 
0.5465039379546685, - "flos": 14946177971400.0, - "grad_norm": 7.171425799096255, - "learning_rate": 1.7962634513915684e-06, - "loss": 0.728, - "num_input_tokens_seen": 96785845, - "step": 4545 - }, - { - "epoch": 0.5466241808453075, - "flos": 12647761701360.0, - "grad_norm": 4.324160746915075, - "learning_rate": 1.7954885475083969e-06, - "loss": 0.7843, - "num_input_tokens_seen": 96803235, - "step": 4546 - }, - { - "epoch": 0.5467444237359466, - "flos": 15811300954680.0, - "grad_norm": 9.00832617586212, - "learning_rate": 1.7947136746494513e-06, - "loss": 0.7201, - "num_input_tokens_seen": 96823870, - "step": 4547 - }, - { - "epoch": 0.5468646666265857, - "flos": 17700927871680.0, - "grad_norm": 11.576746396501317, - "learning_rate": 1.793938832932277e-06, - "loss": 0.8559, - "num_input_tokens_seen": 96841700, - "step": 4548 - }, - { - "epoch": 0.5469849095172248, - "flos": 20184966707640.0, - "grad_norm": 7.699278948822878, - "learning_rate": 1.7931640224744185e-06, - "loss": 0.6813, - "num_input_tokens_seen": 96861970, - "step": 4549 - }, - { - "epoch": 0.5471051524078638, - "flos": 20508857522040.0, - "grad_norm": 4.862627222902361, - "learning_rate": 1.7923892433934127e-06, - "loss": 0.7214, - "num_input_tokens_seen": 96882765, - "step": 4550 - }, - { - "epoch": 0.547225395298503, - "flos": 13245782855760.0, - "grad_norm": 4.207404811641427, - "learning_rate": 1.7916144958067939e-06, - "loss": 0.7645, - "num_input_tokens_seen": 96900345, - "step": 4551 - }, - { - "epoch": 0.5473456381891421, - "flos": 15622164133200.0, - "grad_norm": 4.066571586831492, - "learning_rate": 1.7908397798320905e-06, - "loss": 0.7713, - "num_input_tokens_seen": 96919800, - "step": 4552 - }, - { - "epoch": 0.5474658810797811, - "flos": 14563440274080.0, - "grad_norm": 4.624812766744337, - "learning_rate": 1.7900650955868265e-06, - "loss": 0.7366, - "num_input_tokens_seen": 96939165, - "step": 4553 - }, - { - "epoch": 0.5475861239704203, - "flos": 37168881600720.0, - "grad_norm": 3.304765560589953, - "learning_rate": 1.7892904431885202e-06, - "loss": 0.7577, - "num_input_tokens_seen": 96966060, - "step": 4554 - }, - { - "epoch": 0.5477063668610593, - "flos": 15136454551440.0, - "grad_norm": 3.8290321987867513, - "learning_rate": 1.788515822754686e-06, - "loss": 0.7381, - "num_input_tokens_seen": 96986200, - "step": 4555 - }, - { - "epoch": 0.5478266097516984, - "flos": 14325682736760.0, - "grad_norm": 23.726071158890147, - "learning_rate": 1.7877412344028335e-06, - "loss": 0.7572, - "num_input_tokens_seen": 97005725, - "step": 4556 - }, - { - "epoch": 0.5479468526423376, - "flos": 9357541744920.0, - "grad_norm": 16.03216657193389, - "learning_rate": 1.7869666782504668e-06, - "loss": 0.7517, - "num_input_tokens_seen": 97022025, - "step": 4557 - }, - { - "epoch": 0.5480670955329766, - "flos": 13570211889480.0, - "grad_norm": 5.949810236621555, - "learning_rate": 1.7861921544150867e-06, - "loss": 0.6764, - "num_input_tokens_seen": 97040595, - "step": 4558 - }, - { - "epoch": 0.5481873384236157, - "flos": 11619711852120.0, - "grad_norm": 4.457370380735661, - "learning_rate": 1.7854176630141856e-06, - "loss": 0.743, - "num_input_tokens_seen": 97057450, - "step": 4559 - }, - { - "epoch": 0.5483075813142548, - "flos": 16674682640160.0, - "grad_norm": 8.636237301427196, - "learning_rate": 1.784643204165255e-06, - "loss": 0.8139, - "num_input_tokens_seen": 97076490, - "step": 4560 - }, - { - "epoch": 0.5484278242048939, - "flos": 14109101221320.0, - "grad_norm": 7.230285411387779, - "learning_rate": 
1.7838687779857783e-06, - "loss": 0.7555, - "num_input_tokens_seen": 97094085, - "step": 4561 - }, - { - "epoch": 0.5485480670955329, - "flos": 16697916559440.0, - "grad_norm": 5.142886884542781, - "learning_rate": 1.7830943845932366e-06, - "loss": 0.6231, - "num_input_tokens_seen": 97113130, - "step": 4562 - }, - { - "epoch": 0.5486683099861721, - "flos": 16590987180000.0, - "grad_norm": 10.91531930621549, - "learning_rate": 1.7823200241051044e-06, - "loss": 0.7404, - "num_input_tokens_seen": 97131765, - "step": 4563 - }, - { - "epoch": 0.5487885528768112, - "flos": 16945330384560.0, - "grad_norm": 4.12141331884476, - "learning_rate": 1.7815456966388513e-06, - "loss": 0.7847, - "num_input_tokens_seen": 97150580, - "step": 4564 - }, - { - "epoch": 0.5489087957674502, - "flos": 16134495249960.0, - "grad_norm": 7.483849493065937, - "learning_rate": 1.780771402311943e-06, - "loss": 0.8022, - "num_input_tokens_seen": 97169135, - "step": 4565 - }, - { - "epoch": 0.5490290386580894, - "flos": 17808553770240.0, - "grad_norm": 9.897285135417581, - "learning_rate": 1.7799971412418374e-06, - "loss": 0.7682, - "num_input_tokens_seen": 97190250, - "step": 4566 - }, - { - "epoch": 0.5491492815487284, - "flos": 13352458955520.0, - "grad_norm": 8.645127404742992, - "learning_rate": 1.7792229135459918e-06, - "loss": 0.7195, - "num_input_tokens_seen": 97206620, - "step": 4567 - }, - { - "epoch": 0.5492695244393675, - "flos": 47588439840600.0, - "grad_norm": 0.7675120783920946, - "learning_rate": 1.7784487193418538e-06, - "loss": 0.6265, - "num_input_tokens_seen": 97264190, - "step": 4568 - }, - { - "epoch": 0.5493897673300067, - "flos": 12674573196120.0, - "grad_norm": 30.828865456135425, - "learning_rate": 1.7776745587468698e-06, - "loss": 0.5881, - "num_input_tokens_seen": 97281335, - "step": 4569 - }, - { - "epoch": 0.5495100102206457, - "flos": 14541821012760.0, - "grad_norm": 9.845816840309883, - "learning_rate": 1.7769004318784776e-06, - "loss": 0.798, - "num_input_tokens_seen": 97298700, - "step": 4570 - }, - { - "epoch": 0.5496302531112848, - "flos": 12192377869920.0, - "grad_norm": 13.09889400559917, - "learning_rate": 1.776126338854113e-06, - "loss": 0.7918, - "num_input_tokens_seen": 97316210, - "step": 4571 - }, - { - "epoch": 0.5497504960019239, - "flos": 17998418770800.0, - "grad_norm": 10.536707338542053, - "learning_rate": 1.7753522797912044e-06, - "loss": 0.8285, - "num_input_tokens_seen": 97336015, - "step": 4572 - }, - { - "epoch": 0.549870738892563, - "flos": 11247580241400.0, - "grad_norm": 9.60710839312465, - "learning_rate": 1.7745782548071765e-06, - "loss": 0.6819, - "num_input_tokens_seen": 97352630, - "step": 4573 - }, - { - "epoch": 0.549990981783202, - "flos": 15405709257600.0, - "grad_norm": 5.136424287543808, - "learning_rate": 1.7738042640194482e-06, - "loss": 0.7228, - "num_input_tokens_seen": 97372015, - "step": 4574 - }, - { - "epoch": 0.5501112246738411, - "flos": 15647329310040.0, - "grad_norm": 5.838467255208059, - "learning_rate": 1.7730303075454335e-06, - "loss": 0.6844, - "num_input_tokens_seen": 97390625, - "step": 4575 - }, - { - "epoch": 0.5502314675644803, - "flos": 12732438620280.0, - "grad_norm": 4.363256988867756, - "learning_rate": 1.7722563855025402e-06, - "loss": 0.835, - "num_input_tokens_seen": 97408375, - "step": 4576 - }, - { - "epoch": 0.5503517104551193, - "flos": 17803646476440.0, - "grad_norm": 7.728570659789518, - "learning_rate": 1.7714824980081721e-06, - "loss": 0.6933, - "num_input_tokens_seen": 97427390, - "step": 4577 - }, - { - "epoch": 
0.5504719533457584, - "flos": 16405079674440.0, - "grad_norm": 5.117500773151677, - "learning_rate": 1.7707086451797276e-06, - "loss": 0.7219, - "num_input_tokens_seen": 97447985, - "step": 4578 - }, - { - "epoch": 0.5505921962363975, - "flos": 49620165861240.0, - "grad_norm": 0.7093944362044707, - "learning_rate": 1.7699348271345993e-06, - "loss": 0.533, - "num_input_tokens_seen": 97510330, - "step": 4579 - }, - { - "epoch": 0.5507124391270366, - "flos": 33626693276280.0, - "grad_norm": 0.7378575961312448, - "learning_rate": 1.7691610439901753e-06, - "loss": 0.5565, - "num_input_tokens_seen": 97572985, - "step": 4580 - }, - { - "epoch": 0.5508326820176757, - "flos": 16297390455960.0, - "grad_norm": 10.81896285580973, - "learning_rate": 1.7683872958638367e-06, - "loss": 0.7397, - "num_input_tokens_seen": 97591585, - "step": 4581 - }, - { - "epoch": 0.5509529249083148, - "flos": 14623901814960.0, - "grad_norm": 6.387191185699375, - "learning_rate": 1.7676135828729614e-06, - "loss": 0.8193, - "num_input_tokens_seen": 97611015, - "step": 4582 - }, - { - "epoch": 0.5510731677989539, - "flos": 15972296563080.0, - "grad_norm": 5.320386709628374, - "learning_rate": 1.7668399051349205e-06, - "loss": 0.8143, - "num_input_tokens_seen": 97630415, - "step": 4583 - }, - { - "epoch": 0.5511934106895929, - "flos": 15700477400160.0, - "grad_norm": 8.334035411505903, - "learning_rate": 1.766066262767081e-06, - "loss": 0.8092, - "num_input_tokens_seen": 97647975, - "step": 4584 - }, - { - "epoch": 0.5513136535802321, - "flos": 15405550957800.0, - "grad_norm": 8.89206012211636, - "learning_rate": 1.765292655886803e-06, - "loss": 0.7542, - "num_input_tokens_seen": 97666340, - "step": 4585 - }, - { - "epoch": 0.5514338964708712, - "flos": 20397559068120.0, - "grad_norm": 5.5371586334890095, - "learning_rate": 1.764519084611443e-06, - "loss": 0.6986, - "num_input_tokens_seen": 97686515, - "step": 4586 - }, - { - "epoch": 0.5515541393615102, - "flos": 16026964331280.0, - "grad_norm": 4.667065853121012, - "learning_rate": 1.7637455490583505e-06, - "loss": 0.7528, - "num_input_tokens_seen": 97705560, - "step": 4587 - }, - { - "epoch": 0.5516743822521494, - "flos": 14973749305200.0, - "grad_norm": 9.385741730260238, - "learning_rate": 1.7629720493448701e-06, - "loss": 0.7609, - "num_input_tokens_seen": 97722575, - "step": 4588 - }, - { - "epoch": 0.5517946251427884, - "flos": 10870161417360.0, - "grad_norm": 10.87083618429158, - "learning_rate": 1.7621985855883418e-06, - "loss": 0.8353, - "num_input_tokens_seen": 97738995, - "step": 4589 - }, - { - "epoch": 0.5519148680334275, - "flos": 13433874898560.0, - "grad_norm": 6.467820547658906, - "learning_rate": 1.7614251579060983e-06, - "loss": 0.7012, - "num_input_tokens_seen": 97757310, - "step": 4590 - }, - { - "epoch": 0.5520351109240667, - "flos": 18397900095600.0, - "grad_norm": 5.7756541620630175, - "learning_rate": 1.76065176641547e-06, - "loss": 0.8378, - "num_input_tokens_seen": 97779740, - "step": 4591 - }, - { - "epoch": 0.5521553538147057, - "flos": 15406089177120.0, - "grad_norm": 5.6764425586867615, - "learning_rate": 1.759878411233777e-06, - "loss": 0.7602, - "num_input_tokens_seen": 97797920, - "step": 4592 - }, - { - "epoch": 0.5522755967053448, - "flos": 13785621986400.0, - "grad_norm": 4.529742812377879, - "learning_rate": 1.7591050924783388e-06, - "loss": 0.7481, - "num_input_tokens_seen": 97814830, - "step": 4593 - }, - { - "epoch": 0.5523958395959839, - "flos": 47683469566440.0, - "grad_norm": 0.8643676687993018, - "learning_rate": 
1.7583318102664661e-06, - "loss": 0.6254, - "num_input_tokens_seen": 97882115, - "step": 4594 - }, - { - "epoch": 0.552516082486623, - "flos": 7872303446520.0, - "grad_norm": 4.659031389283412, - "learning_rate": 1.757558564715466e-06, - "loss": 0.7724, - "num_input_tokens_seen": 97899910, - "step": 4595 - }, - { - "epoch": 0.552636325377262, - "flos": 16242437748120.0, - "grad_norm": 8.9359424007407, - "learning_rate": 1.7567853559426386e-06, - "loss": 0.7089, - "num_input_tokens_seen": 97916680, - "step": 4596 - }, - { - "epoch": 0.5527565682679012, - "flos": 17566047238920.0, - "grad_norm": 3.988114102230179, - "learning_rate": 1.7560121840652797e-06, - "loss": 0.7373, - "num_input_tokens_seen": 97935785, - "step": 4597 - }, - { - "epoch": 0.5528768111585403, - "flos": 14406750420240.0, - "grad_norm": 4.5458664447383885, - "learning_rate": 1.7552390492006782e-06, - "loss": 0.6691, - "num_input_tokens_seen": 97953825, - "step": 4598 - }, - { - "epoch": 0.5529970540491793, - "flos": 19215067222200.0, - "grad_norm": 6.3226475498635, - "learning_rate": 1.7544659514661184e-06, - "loss": 0.6344, - "num_input_tokens_seen": 97976635, - "step": 4599 - }, - { - "epoch": 0.5531172969398185, - "flos": 17890033033200.0, - "grad_norm": 4.121020622870152, - "learning_rate": 1.7536928909788786e-06, - "loss": 0.7853, - "num_input_tokens_seen": 97995660, - "step": 4600 - }, - { - "epoch": 0.5532375398304575, - "flos": 47851584174480.0, - "grad_norm": 0.8869969943919217, - "learning_rate": 1.752919867856231e-06, - "loss": 0.6328, - "num_input_tokens_seen": 98047025, - "step": 4601 - }, - { - "epoch": 0.5533577827210966, - "flos": 14380033905360.0, - "grad_norm": 3.624915417966956, - "learning_rate": 1.7521468822154436e-06, - "loss": 0.7693, - "num_input_tokens_seen": 98065660, - "step": 4602 - }, - { - "epoch": 0.5534780256117358, - "flos": 23722093929840.0, - "grad_norm": 4.44190502469527, - "learning_rate": 1.751373934173777e-06, - "loss": 0.7192, - "num_input_tokens_seen": 98088125, - "step": 4603 - }, - { - "epoch": 0.5535982685023748, - "flos": 16999396613520.0, - "grad_norm": 7.458792333135952, - "learning_rate": 1.750601023848487e-06, - "loss": 0.7155, - "num_input_tokens_seen": 98108570, - "step": 4604 - }, - { - "epoch": 0.5537185113930139, - "flos": 17835618544680.0, - "grad_norm": 3.458758837402877, - "learning_rate": 1.749828151356823e-06, - "loss": 0.7198, - "num_input_tokens_seen": 98128485, - "step": 4605 - }, - { - "epoch": 0.553838754283653, - "flos": 17240890026120.0, - "grad_norm": 5.167922246814584, - "learning_rate": 1.7490553168160297e-06, - "loss": 0.7464, - "num_input_tokens_seen": 98149275, - "step": 4606 - }, - { - "epoch": 0.5539589971742921, - "flos": 12597906247080.0, - "grad_norm": 5.668997715223992, - "learning_rate": 1.748282520343345e-06, - "loss": 0.7412, - "num_input_tokens_seen": 98168025, - "step": 4607 - }, - { - "epoch": 0.5540792400649311, - "flos": 20212569701400.0, - "grad_norm": 5.52932326385658, - "learning_rate": 1.7475097620560023e-06, - "loss": 0.7662, - "num_input_tokens_seen": 98187810, - "step": 4608 - }, - { - "epoch": 0.5541994829555702, - "flos": 17484979555440.0, - "grad_norm": 5.799651681372265, - "learning_rate": 1.746737042071228e-06, - "loss": 0.6926, - "num_input_tokens_seen": 98206035, - "step": 4609 - }, - { - "epoch": 0.5543197258462094, - "flos": 14699460665400.0, - "grad_norm": 4.245062484371204, - "learning_rate": 1.7459643605062424e-06, - "loss": 0.7807, - "num_input_tokens_seen": 98223015, - "step": 4610 - }, - { - "epoch": 
0.5544399687368484, - "flos": 15291878006880.0, - "grad_norm": 3.5234790469678385, - "learning_rate": 1.745191717478262e-06, - "loss": 0.7889, - "num_input_tokens_seen": 98241315, - "step": 4611 - }, - { - "epoch": 0.5545602116274875, - "flos": 18698430350880.0, - "grad_norm": 3.8586514061039674, - "learning_rate": 1.7444191131044948e-06, - "loss": 0.7876, - "num_input_tokens_seen": 98261310, - "step": 4612 - }, - { - "epoch": 0.5546804545181266, - "flos": 15351199789200.0, - "grad_norm": 3.8496715554180367, - "learning_rate": 1.7436465475021456e-06, - "loss": 0.7137, - "num_input_tokens_seen": 98281080, - "step": 4613 - }, - { - "epoch": 0.5548006974087657, - "flos": 19671812431920.0, - "grad_norm": 4.852159251493656, - "learning_rate": 1.7428740207884111e-06, - "loss": 0.6852, - "num_input_tokens_seen": 98301680, - "step": 4614 - }, - { - "epoch": 0.5549209402994048, - "flos": 24721116087120.0, - "grad_norm": 3.6158050465904172, - "learning_rate": 1.7421015330804833e-06, - "loss": 0.6079, - "num_input_tokens_seen": 98321320, - "step": 4615 - }, - { - "epoch": 0.5550411831900439, - "flos": 17404260131520.0, - "grad_norm": 4.9130652648454465, - "learning_rate": 1.7413290844955475e-06, - "loss": 0.7094, - "num_input_tokens_seen": 98341070, - "step": 4616 - }, - { - "epoch": 0.555161426080683, - "flos": 15836244511800.0, - "grad_norm": 3.5151431961519184, - "learning_rate": 1.7405566751507843e-06, - "loss": 0.7659, - "num_input_tokens_seen": 98358835, - "step": 4617 - }, - { - "epoch": 0.555281668971322, - "flos": 36492990418800.0, - "grad_norm": 3.4681761670802307, - "learning_rate": 1.7397843051633668e-06, - "loss": 0.6521, - "num_input_tokens_seen": 98381250, - "step": 4618 - }, - { - "epoch": 0.5554019118619612, - "flos": 15163456005960.0, - "grad_norm": 5.763460078495904, - "learning_rate": 1.739011974650464e-06, - "loss": 0.698, - "num_input_tokens_seen": 98400300, - "step": 4619 - }, - { - "epoch": 0.5555221547526003, - "flos": 18672347035200.0, - "grad_norm": 16.333519966979523, - "learning_rate": 1.7382396837292365e-06, - "loss": 0.7376, - "num_input_tokens_seen": 98420480, - "step": 4620 - }, - { - "epoch": 0.5556423976432393, - "flos": 15918895193280.0, - "grad_norm": 3.8590307661353895, - "learning_rate": 1.737467432516841e-06, - "loss": 0.7209, - "num_input_tokens_seen": 98440300, - "step": 4621 - }, - { - "epoch": 0.5557626405338785, - "flos": 18240956962080.0, - "grad_norm": 5.785894394481444, - "learning_rate": 1.7366952211304274e-06, - "loss": 0.7156, - "num_input_tokens_seen": 98457865, - "step": 4622 - }, - { - "epoch": 0.5558828834245175, - "flos": 13650551393880.0, - "grad_norm": 38.09423547424449, - "learning_rate": 1.735923049687139e-06, - "loss": 0.8181, - "num_input_tokens_seen": 98474160, - "step": 4623 - }, - { - "epoch": 0.5560031263151566, - "flos": 19996463085360.0, - "grad_norm": 3.2605013282842203, - "learning_rate": 1.7351509183041144e-06, - "loss": 0.7316, - "num_input_tokens_seen": 98494210, - "step": 4624 - }, - { - "epoch": 0.5561233692057957, - "flos": 17133422427360.0, - "grad_norm": 6.088011245095574, - "learning_rate": 1.7343788270984852e-06, - "loss": 0.7084, - "num_input_tokens_seen": 98513070, - "step": 4625 - }, - { - "epoch": 0.5562436120964348, - "flos": 27692225883120.0, - "grad_norm": 3.6261443600920042, - "learning_rate": 1.7336067761873764e-06, - "loss": 0.7329, - "num_input_tokens_seen": 98535215, - "step": 4626 - }, - { - "epoch": 0.5563638549870739, - "flos": 18429682204080.0, - "grad_norm": 4.351452920242829, - "learning_rate": 
1.7328347656879076e-06, - "loss": 0.7403, - "num_input_tokens_seen": 98554795, - "step": 4627 - }, - { - "epoch": 0.556484097877713, - "flos": 9863509209720.0, - "grad_norm": 6.947340850379908, - "learning_rate": 1.7320627957171927e-06, - "loss": 0.6754, - "num_input_tokens_seen": 98569175, - "step": 4628 - }, - { - "epoch": 0.5566043407683521, - "flos": 18079043214840.0, - "grad_norm": 5.244167539502628, - "learning_rate": 1.7312908663923382e-06, - "loss": 0.7881, - "num_input_tokens_seen": 98585070, - "step": 4629 - }, - { - "epoch": 0.5567245836589911, - "flos": 15048833256240.0, - "grad_norm": 5.5883051622823885, - "learning_rate": 1.7305189778304463e-06, - "loss": 0.6583, - "num_input_tokens_seen": 98602965, - "step": 4630 - }, - { - "epoch": 0.5568448265496303, - "flos": 15135631392480.0, - "grad_norm": 5.048326785432925, - "learning_rate": 1.729747130148611e-06, - "loss": 0.786, - "num_input_tokens_seen": 98621880, - "step": 4631 - }, - { - "epoch": 0.5569650694402694, - "flos": 18539207700240.0, - "grad_norm": 8.02127108349555, - "learning_rate": 1.7289753234639208e-06, - "loss": 0.7498, - "num_input_tokens_seen": 98640575, - "step": 4632 - }, - { - "epoch": 0.5570853123309084, - "flos": 14400893327640.0, - "grad_norm": 3.6001596097531903, - "learning_rate": 1.7282035578934592e-06, - "loss": 0.7462, - "num_input_tokens_seen": 98658460, - "step": 4633 - }, - { - "epoch": 0.5572055552215476, - "flos": 11734366261800.0, - "grad_norm": 6.153752008965083, - "learning_rate": 1.727431833554301e-06, - "loss": 0.7651, - "num_input_tokens_seen": 98676655, - "step": 4634 - }, - { - "epoch": 0.5573257981121866, - "flos": 12489647149320.0, - "grad_norm": 5.284148194122793, - "learning_rate": 1.7266601505635175e-06, - "loss": 0.7481, - "num_input_tokens_seen": 98693715, - "step": 4635 - }, - { - "epoch": 0.5574460410028257, - "flos": 13731619077360.0, - "grad_norm": 4.610066197182772, - "learning_rate": 1.7258885090381717e-06, - "loss": 0.7372, - "num_input_tokens_seen": 98711475, - "step": 4636 - }, - { - "epoch": 0.5575662838934649, - "flos": 21750639490320.0, - "grad_norm": 8.20343330893835, - "learning_rate": 1.7251169090953213e-06, - "loss": 0.7722, - "num_input_tokens_seen": 98731670, - "step": 4637 - }, - { - "epoch": 0.5576865267841039, - "flos": 16134970149360.0, - "grad_norm": 13.172611745409963, - "learning_rate": 1.7243453508520168e-06, - "loss": 0.7407, - "num_input_tokens_seen": 98748375, - "step": 4638 - }, - { - "epoch": 0.557806769674743, - "flos": 12538584464760.0, - "grad_norm": 5.840261795237784, - "learning_rate": 1.7235738344253038e-06, - "loss": 0.8315, - "num_input_tokens_seen": 98761725, - "step": 4639 - }, - { - "epoch": 0.557927012565382, - "flos": 18101517295080.0, - "grad_norm": 4.98908606528748, - "learning_rate": 1.72280235993222e-06, - "loss": 0.8, - "num_input_tokens_seen": 98779750, - "step": 4640 - }, - { - "epoch": 0.5580472554560212, - "flos": 12381831291000.0, - "grad_norm": 11.642884638065969, - "learning_rate": 1.722030927489798e-06, - "loss": 0.6696, - "num_input_tokens_seen": 98796750, - "step": 4641 - }, - { - "epoch": 0.5581674983466602, - "flos": 17210817555480.0, - "grad_norm": 3.0576611652458543, - "learning_rate": 1.7212595372150634e-06, - "loss": 0.728, - "num_input_tokens_seen": 98816450, - "step": 4642 - }, - { - "epoch": 0.5582877412372993, - "flos": 9789501697320.0, - "grad_norm": 8.312166823973778, - "learning_rate": 1.720488189225035e-06, - "loss": 0.7134, - "num_input_tokens_seen": 98833870, - "step": 4643 - }, - { - "epoch": 
0.5584079841279385, - "flos": 16022626916760.0, - "grad_norm": 4.67989678782579, - "learning_rate": 1.7197168836367265e-06, - "loss": 0.7752, - "num_input_tokens_seen": 98850400, - "step": 4644 - }, - { - "epoch": 0.5585282270185775, - "flos": 13758652191840.0, - "grad_norm": 4.039887865984223, - "learning_rate": 1.7189456205671433e-06, - "loss": 0.8039, - "num_input_tokens_seen": 98868965, - "step": 4645 - }, - { - "epoch": 0.5586484699092166, - "flos": 15996796880760.0, - "grad_norm": 2.918968182726564, - "learning_rate": 1.7181744001332866e-06, - "loss": 0.8078, - "num_input_tokens_seen": 98887295, - "step": 4646 - }, - { - "epoch": 0.5587687127998557, - "flos": 16755655343760.0, - "grad_norm": 4.736304767983645, - "learning_rate": 1.7174032224521493e-06, - "loss": 0.6231, - "num_input_tokens_seen": 98905725, - "step": 4647 - }, - { - "epoch": 0.5588889556904948, - "flos": 14838805352520.0, - "grad_norm": 3.6412266573827248, - "learning_rate": 1.7166320876407184e-06, - "loss": 0.6848, - "num_input_tokens_seen": 98924865, - "step": 4648 - }, - { - "epoch": 0.5590091985811338, - "flos": 12003684287880.0, - "grad_norm": 9.791153351876424, - "learning_rate": 1.7158609958159742e-06, - "loss": 0.6675, - "num_input_tokens_seen": 98941990, - "step": 4649 - }, - { - "epoch": 0.559129441471773, - "flos": 10567098365280.0, - "grad_norm": 4.135350506282926, - "learning_rate": 1.7150899470948911e-06, - "loss": 0.7669, - "num_input_tokens_seen": 98956975, - "step": 4650 - }, - { - "epoch": 0.5592496843624121, - "flos": 44609451205920.0, - "grad_norm": 0.8070906522146902, - "learning_rate": 1.7143189415944365e-06, - "loss": 0.593, - "num_input_tokens_seen": 99021155, - "step": 4651 - }, - { - "epoch": 0.5593699272530511, - "flos": 15270322065480.0, - "grad_norm": 4.136615236768514, - "learning_rate": 1.7135479794315714e-06, - "loss": 0.7441, - "num_input_tokens_seen": 99037830, - "step": 4652 - }, - { - "epoch": 0.5594901701436903, - "flos": 9357668384760.0, - "grad_norm": 4.500349878908613, - "learning_rate": 1.7127770607232502e-06, - "loss": 0.7683, - "num_input_tokens_seen": 99056095, - "step": 4653 - }, - { - "epoch": 0.5596104130343293, - "flos": 16917632410920.0, - "grad_norm": 5.192868683334243, - "learning_rate": 1.7120061855864204e-06, - "loss": 0.7848, - "num_input_tokens_seen": 99075825, - "step": 4654 - }, - { - "epoch": 0.5597306559249684, - "flos": 19023619223640.0, - "grad_norm": 4.229400302590861, - "learning_rate": 1.7112353541380233e-06, - "loss": 0.6979, - "num_input_tokens_seen": 99095405, - "step": 4655 - }, - { - "epoch": 0.5598508988156076, - "flos": 16458987603600.0, - "grad_norm": 3.934046972051567, - "learning_rate": 1.7104645664949931e-06, - "loss": 0.7154, - "num_input_tokens_seen": 99117595, - "step": 4656 - }, - { - "epoch": 0.5599711417062466, - "flos": 16918012330440.0, - "grad_norm": 3.550654342951811, - "learning_rate": 1.7096938227742584e-06, - "loss": 0.7091, - "num_input_tokens_seen": 99138445, - "step": 4657 - }, - { - "epoch": 0.5600913845968857, - "flos": 16344808093320.0, - "grad_norm": 4.481539264956808, - "learning_rate": 1.70892312309274e-06, - "loss": 0.8218, - "num_input_tokens_seen": 99156055, - "step": 4658 - }, - { - "epoch": 0.5602116274875248, - "flos": 12862538599080.0, - "grad_norm": 5.1408348156626, - "learning_rate": 1.7081524675673523e-06, - "loss": 0.655, - "num_input_tokens_seen": 99171265, - "step": 4659 - }, - { - "epoch": 0.5603318703781639, - "flos": 51710232205440.0, - "grad_norm": 0.9818990541033409, - "learning_rate": 
1.7073818563150026e-06, - "loss": 0.6356, - "num_input_tokens_seen": 99233065, - "step": 4660 - }, - { - "epoch": 0.560452113268803, - "flos": 13537511642160.0, - "grad_norm": 4.458145684306972, - "learning_rate": 1.7066112894525935e-06, - "loss": 0.8399, - "num_input_tokens_seen": 99250865, - "step": 4661 - }, - { - "epoch": 0.5605723561594421, - "flos": 18510654907680.0, - "grad_norm": 2.860896805161828, - "learning_rate": 1.7058407670970177e-06, - "loss": 0.7145, - "num_input_tokens_seen": 99272060, - "step": 4662 - }, - { - "epoch": 0.5606925990500812, - "flos": 15054816988680.0, - "grad_norm": 3.7971831773562705, - "learning_rate": 1.7050702893651643e-06, - "loss": 0.5929, - "num_input_tokens_seen": 99291360, - "step": 4663 - }, - { - "epoch": 0.5608128419407202, - "flos": 25719884964720.0, - "grad_norm": 7.278215684731169, - "learning_rate": 1.7042998563739134e-06, - "loss": 0.7405, - "num_input_tokens_seen": 99309430, - "step": 4664 - }, - { - "epoch": 0.5609330848313594, - "flos": 18047577705960.0, - "grad_norm": 4.3221158063308485, - "learning_rate": 1.703529468240139e-06, - "loss": 0.6945, - "num_input_tokens_seen": 99328020, - "step": 4665 - }, - { - "epoch": 0.5610533277219985, - "flos": 13697874051360.0, - "grad_norm": 7.978602403521344, - "learning_rate": 1.7027591250807088e-06, - "loss": 0.711, - "num_input_tokens_seen": 99344915, - "step": 4666 - }, - { - "epoch": 0.5611735706126375, - "flos": 10923309507480.0, - "grad_norm": 8.698432903997798, - "learning_rate": 1.7019888270124825e-06, - "loss": 0.8057, - "num_input_tokens_seen": 99361800, - "step": 4667 - }, - { - "epoch": 0.5612938135032767, - "flos": 12000170032320.0, - "grad_norm": 4.345399173007967, - "learning_rate": 1.7012185741523147e-06, - "loss": 0.8085, - "num_input_tokens_seen": 99377845, - "step": 4668 - }, - { - "epoch": 0.5614140563939157, - "flos": 18808050826920.0, - "grad_norm": 5.130947848494916, - "learning_rate": 1.7004483666170514e-06, - "loss": 0.592, - "num_input_tokens_seen": 99398060, - "step": 4669 - }, - { - "epoch": 0.5615342992845548, - "flos": 18105791389680.0, - "grad_norm": 3.675049144390328, - "learning_rate": 1.699678204523533e-06, - "loss": 0.7747, - "num_input_tokens_seen": 99417645, - "step": 4670 - }, - { - "epoch": 0.5616545421751938, - "flos": 16106164077120.0, - "grad_norm": 6.838819983794862, - "learning_rate": 1.6989080879885918e-06, - "loss": 0.6553, - "num_input_tokens_seen": 99435225, - "step": 4671 - }, - { - "epoch": 0.561774785065833, - "flos": 39604589151840.0, - "grad_norm": 0.8924506699529157, - "learning_rate": 1.6981380171290544e-06, - "loss": 0.6305, - "num_input_tokens_seen": 99495970, - "step": 4672 - }, - { - "epoch": 0.5618950279564721, - "flos": 14428686281160.0, - "grad_norm": 4.287397217700239, - "learning_rate": 1.6973679920617396e-06, - "loss": 0.7246, - "num_input_tokens_seen": 99513225, - "step": 4673 - }, - { - "epoch": 0.5620152708471111, - "flos": 12246760698480.0, - "grad_norm": 7.306325016990844, - "learning_rate": 1.6965980129034603e-06, - "loss": 0.8393, - "num_input_tokens_seen": 99530330, - "step": 4674 - }, - { - "epoch": 0.5621355137377503, - "flos": 19645380856680.0, - "grad_norm": 3.674405269118538, - "learning_rate": 1.6958280797710209e-06, - "loss": 0.7529, - "num_input_tokens_seen": 99551975, - "step": 4675 - }, - { - "epoch": 0.5622557566283893, - "flos": 45116146849800.0, - "grad_norm": 0.7189867210329183, - "learning_rate": 1.6950581927812198e-06, - "loss": 0.5506, - "num_input_tokens_seen": 99611265, - "step": 4676 - }, - { - "epoch": 
0.5623759995190284, - "flos": 19401576267000.0, - "grad_norm": 3.933166662553516, - "learning_rate": 1.6942883520508486e-06, - "loss": 0.766, - "num_input_tokens_seen": 99629720, - "step": 4677 - }, - { - "epoch": 0.5624962424096676, - "flos": 14163737329560.0, - "grad_norm": 4.555661445772293, - "learning_rate": 1.693518557696691e-06, - "loss": 0.7632, - "num_input_tokens_seen": 99648580, - "step": 4678 - }, - { - "epoch": 0.5626164853003066, - "flos": 15108313338360.0, - "grad_norm": 5.022953561873852, - "learning_rate": 1.6927488098355252e-06, - "loss": 0.8744, - "num_input_tokens_seen": 99665930, - "step": 4679 - }, - { - "epoch": 0.5627367281909457, - "flos": 46268534565120.0, - "grad_norm": 0.8901163623049478, - "learning_rate": 1.6919791085841201e-06, - "loss": 0.6664, - "num_input_tokens_seen": 99723060, - "step": 4680 - }, - { - "epoch": 0.5628569710815848, - "flos": 9276537381360.0, - "grad_norm": 7.30109762013495, - "learning_rate": 1.6912094540592396e-06, - "loss": 0.7782, - "num_input_tokens_seen": 99738300, - "step": 4681 - }, - { - "epoch": 0.5629772139722239, - "flos": 9996616884720.0, - "grad_norm": 7.015276452053081, - "learning_rate": 1.6904398463776393e-06, - "loss": 0.7851, - "num_input_tokens_seen": 99751820, - "step": 4682 - }, - { - "epoch": 0.5630974568628629, - "flos": 15700414080240.0, - "grad_norm": 3.7771600720029976, - "learning_rate": 1.6896702856560683e-06, - "loss": 0.7173, - "num_input_tokens_seen": 99770635, - "step": 4683 - }, - { - "epoch": 0.5632176997535021, - "flos": 10356025682880.0, - "grad_norm": 4.597828410513957, - "learning_rate": 1.6889007720112677e-06, - "loss": 0.6585, - "num_input_tokens_seen": 99788100, - "step": 4684 - }, - { - "epoch": 0.5633379426441412, - "flos": 15215401017600.0, - "grad_norm": 6.819438795340745, - "learning_rate": 1.6881313055599734e-06, - "loss": 0.7635, - "num_input_tokens_seen": 99807750, - "step": 4685 - }, - { - "epoch": 0.5634581855347802, - "flos": 16539232128120.0, - "grad_norm": 4.884413409432912, - "learning_rate": 1.6873618864189117e-06, - "loss": 0.8012, - "num_input_tokens_seen": 99823240, - "step": 4686 - }, - { - "epoch": 0.5635784284254194, - "flos": 15729758371800.0, - "grad_norm": 7.410074397445926, - "learning_rate": 1.686592514704803e-06, - "loss": 0.7647, - "num_input_tokens_seen": 99840355, - "step": 4687 - }, - { - "epoch": 0.5636986713160584, - "flos": 14486646685200.0, - "grad_norm": 3.919750960671987, - "learning_rate": 1.685823190534361e-06, - "loss": 0.6873, - "num_input_tokens_seen": 99858315, - "step": 4688 - }, - { - "epoch": 0.5638189142066975, - "flos": 14460943289040.0, - "grad_norm": 3.9050019979044435, - "learning_rate": 1.6850539140242907e-06, - "loss": 0.8071, - "num_input_tokens_seen": 99877295, - "step": 4689 - }, - { - "epoch": 0.5639391570973367, - "flos": 16756130243160.0, - "grad_norm": 18.396671723917294, - "learning_rate": 1.684284685291292e-06, - "loss": 0.7986, - "num_input_tokens_seen": 99898660, - "step": 4690 - }, - { - "epoch": 0.5640593999879757, - "flos": 17372003123640.0, - "grad_norm": 3.73767926861073, - "learning_rate": 1.683515504452055e-06, - "loss": 0.7887, - "num_input_tokens_seen": 99915755, - "step": 4691 - }, - { - "epoch": 0.5641796428786148, - "flos": 16620046531920.0, - "grad_norm": 6.798398884036932, - "learning_rate": 1.6827463716232648e-06, - "loss": 0.6463, - "num_input_tokens_seen": 99936135, - "step": 4692 - }, - { - "epoch": 0.5642998857692539, - "flos": 14460531709560.0, - "grad_norm": 3.7574444945519385, - "learning_rate": 
1.6819772869215972e-06, - "loss": 0.7398, - "num_input_tokens_seen": 99954935, - "step": 4693 - }, - { - "epoch": 0.564420128659893, - "flos": 16972363499040.0, - "grad_norm": 4.228165807195793, - "learning_rate": 1.6812082504637228e-06, - "loss": 0.8038, - "num_input_tokens_seen": 99975975, - "step": 4694 - }, - { - "epoch": 0.564540371550532, - "flos": 17025828188760.0, - "grad_norm": 3.501596223055132, - "learning_rate": 1.6804392623663025e-06, - "loss": 0.7351, - "num_input_tokens_seen": 99996900, - "step": 4695 - }, - { - "epoch": 0.5646606144411712, - "flos": 18322341245160.0, - "grad_norm": 3.972088708538128, - "learning_rate": 1.6796703227459935e-06, - "loss": 0.7595, - "num_input_tokens_seen": 100014575, - "step": 4696 - }, - { - "epoch": 0.5647808573318103, - "flos": 26855022493200.0, - "grad_norm": 3.4739877832051866, - "learning_rate": 1.6789014317194407e-06, - "loss": 0.7441, - "num_input_tokens_seen": 100035775, - "step": 4697 - }, - { - "epoch": 0.5649011002224493, - "flos": 16485704118480.0, - "grad_norm": 13.417905635768594, - "learning_rate": 1.6781325894032853e-06, - "loss": 0.7105, - "num_input_tokens_seen": 100054455, - "step": 4698 - }, - { - "epoch": 0.5650213431130885, - "flos": 13515607441200.0, - "grad_norm": 4.543620724524669, - "learning_rate": 1.6773637959141608e-06, - "loss": 0.9004, - "num_input_tokens_seen": 100071150, - "step": 4699 - }, - { - "epoch": 0.5651415860037275, - "flos": 12783275533320.0, - "grad_norm": 4.905116636734615, - "learning_rate": 1.6765950513686915e-06, - "loss": 0.6321, - "num_input_tokens_seen": 100088980, - "step": 4700 - }, - { - "epoch": 0.5652618288943666, - "flos": 18699633429360.0, - "grad_norm": 4.601545505459916, - "learning_rate": 1.675826355883496e-06, - "loss": 0.7418, - "num_input_tokens_seen": 100107915, - "step": 4701 - }, - { - "epoch": 0.5653820717850057, - "flos": 14380033905360.0, - "grad_norm": 3.782528747731733, - "learning_rate": 1.6750577095751848e-06, - "loss": 0.7806, - "num_input_tokens_seen": 100126745, - "step": 4702 - }, - { - "epoch": 0.5655023146756448, - "flos": 19779944889840.0, - "grad_norm": 3.4005412288051606, - "learning_rate": 1.6742891125603605e-06, - "loss": 0.7219, - "num_input_tokens_seen": 100147370, - "step": 4703 - }, - { - "epoch": 0.5656225575662839, - "flos": 20289806529720.0, - "grad_norm": 5.161287378687173, - "learning_rate": 1.6735205649556185e-06, - "loss": 0.6994, - "num_input_tokens_seen": 100166960, - "step": 4704 - }, - { - "epoch": 0.5657428004569229, - "flos": 17833149067800.0, - "grad_norm": 3.3385028489703297, - "learning_rate": 1.6727520668775476e-06, - "loss": 0.8256, - "num_input_tokens_seen": 100186965, - "step": 4705 - }, - { - "epoch": 0.5658630433475621, - "flos": 16053110966880.0, - "grad_norm": 4.6394503869734915, - "learning_rate": 1.6719836184427275e-06, - "loss": 0.7407, - "num_input_tokens_seen": 100206990, - "step": 4706 - }, - { - "epoch": 0.5659832862382012, - "flos": 22317796675080.0, - "grad_norm": 2.928519461167372, - "learning_rate": 1.671215219767733e-06, - "loss": 0.6339, - "num_input_tokens_seen": 100226170, - "step": 4707 - }, - { - "epoch": 0.5661035291288402, - "flos": 9573300101400.0, - "grad_norm": 15.463820533212303, - "learning_rate": 1.670446870969127e-06, - "loss": 0.7459, - "num_input_tokens_seen": 100243555, - "step": 4708 - }, - { - "epoch": 0.5662237720194794, - "flos": 11760006338040.0, - "grad_norm": 4.477204974075099, - "learning_rate": 1.6696785721634685e-06, - "loss": 0.7963, - "num_input_tokens_seen": 100257760, - "step": 4709 - }, - 
{ - "epoch": 0.5663440149101184, - "flos": 12895333826280.0, - "grad_norm": 4.576386730378175, - "learning_rate": 1.6689103234673086e-06, - "loss": 0.7157, - "num_input_tokens_seen": 100275800, - "step": 4710 - }, - { - "epoch": 0.5664642578007575, - "flos": 17107370771640.0, - "grad_norm": 13.390817576284434, - "learning_rate": 1.668142124997189e-06, - "loss": 0.7404, - "num_input_tokens_seen": 100295180, - "step": 4711 - }, - { - "epoch": 0.5665845006913967, - "flos": 48305991038520.0, - "grad_norm": 0.7600865801633933, - "learning_rate": 1.6673739768696453e-06, - "loss": 0.6208, - "num_input_tokens_seen": 100361470, - "step": 4712 - }, - { - "epoch": 0.5667047435820357, - "flos": 19159196375520.0, - "grad_norm": 6.137596723178947, - "learning_rate": 1.6666058792012052e-06, - "loss": 0.7504, - "num_input_tokens_seen": 100382075, - "step": 4713 - }, - { - "epoch": 0.5668249864726748, - "flos": 53006048742720.0, - "grad_norm": 0.8603239542139376, - "learning_rate": 1.6658378321083878e-06, - "loss": 0.6915, - "num_input_tokens_seen": 100446125, - "step": 4714 - }, - { - "epoch": 0.5669452293633139, - "flos": 16238796852720.0, - "grad_norm": 4.546476261444023, - "learning_rate": 1.6650698357077055e-06, - "loss": 0.8088, - "num_input_tokens_seen": 100462890, - "step": 4715 - }, - { - "epoch": 0.567065472253953, - "flos": 13299532485120.0, - "grad_norm": 5.457375168117539, - "learning_rate": 1.6643018901156632e-06, - "loss": 0.7955, - "num_input_tokens_seen": 100481705, - "step": 4716 - }, - { - "epoch": 0.567185715144592, - "flos": 14888850766560.0, - "grad_norm": 7.438805986391487, - "learning_rate": 1.6635339954487566e-06, - "loss": 0.7678, - "num_input_tokens_seen": 100497300, - "step": 4717 - }, - { - "epoch": 0.5673059580352312, - "flos": 16998446814720.0, - "grad_norm": 3.4700284784121314, - "learning_rate": 1.6627661518234765e-06, - "loss": 0.8088, - "num_input_tokens_seen": 100516275, - "step": 4718 - }, - { - "epoch": 0.5674262009258703, - "flos": 15886226605920.0, - "grad_norm": 5.12351279661476, - "learning_rate": 1.661998359356302e-06, - "loss": 0.8307, - "num_input_tokens_seen": 100535025, - "step": 4719 - }, - { - "epoch": 0.5675464438165093, - "flos": 49749695960760.0, - "grad_norm": 0.8007205102726476, - "learning_rate": 1.6612306181637077e-06, - "loss": 0.5778, - "num_input_tokens_seen": 100594070, - "step": 4720 - }, - { - "epoch": 0.5676666867071485, - "flos": 13784893807320.0, - "grad_norm": 4.884365378069537, - "learning_rate": 1.6604629283621598e-06, - "loss": 0.6325, - "num_input_tokens_seen": 100611720, - "step": 4721 - }, - { - "epoch": 0.5677869295977875, - "flos": 24639953423760.0, - "grad_norm": 5.164158714887058, - "learning_rate": 1.6596952900681152e-06, - "loss": 0.728, - "num_input_tokens_seen": 100632200, - "step": 4722 - }, - { - "epoch": 0.5679071724884266, - "flos": 20562512171520.0, - "grad_norm": 7.330626884185057, - "learning_rate": 1.658927703398025e-06, - "loss": 0.8042, - "num_input_tokens_seen": 100651985, - "step": 4723 - }, - { - "epoch": 0.5680274153790658, - "flos": 17241808164960.0, - "grad_norm": 4.0159311140771194, - "learning_rate": 1.6581601684683309e-06, - "loss": 0.7398, - "num_input_tokens_seen": 100672130, - "step": 4724 - }, - { - "epoch": 0.5681476582697048, - "flos": 16431416269800.0, - "grad_norm": 7.644269265279557, - "learning_rate": 1.6573926853954674e-06, - "loss": 0.6736, - "num_input_tokens_seen": 100689435, - "step": 4725 - }, - { - "epoch": 0.5682679011603439, - "flos": 14271806467560.0, - "grad_norm": 7.902650518869518, - 
"learning_rate": 1.6566252542958608e-06, - "loss": 0.8112, - "num_input_tokens_seen": 100708655, - "step": 4726 - }, - { - "epoch": 0.568388144050983, - "flos": 21101243203560.0, - "grad_norm": 3.369053157832012, - "learning_rate": 1.6558578752859305e-06, - "loss": 0.7567, - "num_input_tokens_seen": 100727335, - "step": 4727 - }, - { - "epoch": 0.5685083869416221, - "flos": 15508776121920.0, - "grad_norm": 4.831822235841713, - "learning_rate": 1.6550905484820865e-06, - "loss": 0.7683, - "num_input_tokens_seen": 100745515, - "step": 4728 - }, - { - "epoch": 0.5686286298322611, - "flos": 18186795753240.0, - "grad_norm": 6.723675860542112, - "learning_rate": 1.6543232740007328e-06, - "loss": 0.7819, - "num_input_tokens_seen": 100762350, - "step": 4729 - }, - { - "epoch": 0.5687488727229003, - "flos": 19510151964360.0, - "grad_norm": 9.341547167306063, - "learning_rate": 1.653556051958263e-06, - "loss": 0.6297, - "num_input_tokens_seen": 100781750, - "step": 4730 - }, - { - "epoch": 0.5688691156135394, - "flos": 15212298341520.0, - "grad_norm": 10.23323809882799, - "learning_rate": 1.6527888824710642e-06, - "loss": 0.7254, - "num_input_tokens_seen": 100801070, - "step": 4731 - }, - { - "epoch": 0.5689893585041784, - "flos": 18966197038920.0, - "grad_norm": 18.181052202215568, - "learning_rate": 1.6520217656555166e-06, - "loss": 0.7304, - "num_input_tokens_seen": 100820080, - "step": 4732 - }, - { - "epoch": 0.5691096013948175, - "flos": 17188090195560.0, - "grad_norm": 3.492985917887244, - "learning_rate": 1.65125470162799e-06, - "loss": 0.6879, - "num_input_tokens_seen": 100840155, - "step": 4733 - }, - { - "epoch": 0.5692298442854566, - "flos": 13190070308880.0, - "grad_norm": 4.191535485542559, - "learning_rate": 1.6504876905048485e-06, - "loss": 0.6876, - "num_input_tokens_seen": 100856835, - "step": 4734 - }, - { - "epoch": 0.5693500871760957, - "flos": 16863787801680.0, - "grad_norm": 5.585205883851969, - "learning_rate": 1.6497207324024464e-06, - "loss": 0.7151, - "num_input_tokens_seen": 100875455, - "step": 4735 - }, - { - "epoch": 0.5694703300667348, - "flos": 13866816309720.0, - "grad_norm": 7.662220647054, - "learning_rate": 1.6489538274371305e-06, - "loss": 0.8077, - "num_input_tokens_seen": 100893780, - "step": 4736 - }, - { - "epoch": 0.5695905729573739, - "flos": 16027122631080.0, - "grad_norm": 4.262311270107012, - "learning_rate": 1.6481869757252396e-06, - "loss": 0.8133, - "num_input_tokens_seen": 100911835, - "step": 4737 - }, - { - "epoch": 0.569710815848013, - "flos": 20887922664000.0, - "grad_norm": 3.6635869415033815, - "learning_rate": 1.647420177383105e-06, - "loss": 0.7037, - "num_input_tokens_seen": 100934425, - "step": 4738 - }, - { - "epoch": 0.569831058738652, - "flos": 20806000161600.0, - "grad_norm": 7.800313493346432, - "learning_rate": 1.646653432527049e-06, - "loss": 0.7157, - "num_input_tokens_seen": 100954785, - "step": 4739 - }, - { - "epoch": 0.5699513016292912, - "flos": 18944039558280.0, - "grad_norm": 5.585957304379228, - "learning_rate": 1.645886741273387e-06, - "loss": 0.7343, - "num_input_tokens_seen": 100976320, - "step": 4740 - }, - { - "epoch": 0.5700715445199303, - "flos": 13161960755760.0, - "grad_norm": 3.7953749468186246, - "learning_rate": 1.645120103738424e-06, - "loss": 0.7099, - "num_input_tokens_seen": 100993550, - "step": 4741 - }, - { - "epoch": 0.5701917874105693, - "flos": 8304200079000.0, - "grad_norm": 6.137861489024273, - "learning_rate": 1.6443535200384591e-06, - "loss": 0.8156, - "num_input_tokens_seen": 101011445, - "step": 
4742 - }, - { - "epoch": 0.5703120303012085, - "flos": 15918230334120.0, - "grad_norm": 4.119425619512068, - "learning_rate": 1.6435869902897827e-06, - "loss": 0.6901, - "num_input_tokens_seen": 101029745, - "step": 4743 - }, - { - "epoch": 0.5704322731918475, - "flos": 41813516189040.0, - "grad_norm": 0.7983255460899553, - "learning_rate": 1.6428205146086764e-06, - "loss": 0.6309, - "num_input_tokens_seen": 101091445, - "step": 4744 - }, - { - "epoch": 0.5705525160824866, - "flos": 15163044426480.0, - "grad_norm": 4.699556029432919, - "learning_rate": 1.6420540931114142e-06, - "loss": 0.6931, - "num_input_tokens_seen": 101111755, - "step": 4745 - }, - { - "epoch": 0.5706727589731257, - "flos": 13705250822040.0, - "grad_norm": 8.716152009182032, - "learning_rate": 1.6412877259142616e-06, - "loss": 0.7692, - "num_input_tokens_seen": 101131395, - "step": 4746 - }, - { - "epoch": 0.5707930018637648, - "flos": 20264451393120.0, - "grad_norm": 14.90523040931228, - "learning_rate": 1.6405214131334757e-06, - "loss": 0.7208, - "num_input_tokens_seen": 101149640, - "step": 4747 - }, - { - "epoch": 0.5709132447544039, - "flos": 20236943379240.0, - "grad_norm": 5.118427435058446, - "learning_rate": 1.6397551548853052e-06, - "loss": 0.7902, - "num_input_tokens_seen": 101167525, - "step": 4748 - }, - { - "epoch": 0.571033487645043, - "flos": 15861852928080.0, - "grad_norm": 6.622076001071329, - "learning_rate": 1.6389889512859917e-06, - "loss": 0.6906, - "num_input_tokens_seen": 101186905, - "step": 4749 - }, - { - "epoch": 0.5711537305356821, - "flos": 51757649842800.0, - "grad_norm": 0.8867106500331275, - "learning_rate": 1.638222802451767e-06, - "loss": 0.6499, - "num_input_tokens_seen": 101248105, - "step": 4750 - }, - { - "epoch": 0.5712739734263211, - "flos": 18105728069760.0, - "grad_norm": 5.823946930486326, - "learning_rate": 1.6374567084988561e-06, - "loss": 0.7317, - "num_input_tokens_seen": 101269010, - "step": 4751 - }, - { - "epoch": 0.5713942163169603, - "flos": 19482738930360.0, - "grad_norm": 4.002245749428483, - "learning_rate": 1.6366906695434738e-06, - "loss": 0.7474, - "num_input_tokens_seen": 101291250, - "step": 4752 - }, - { - "epoch": 0.5715144592075994, - "flos": 15460028766240.0, - "grad_norm": 13.017560982031098, - "learning_rate": 1.6359246857018275e-06, - "loss": 0.8379, - "num_input_tokens_seen": 101308500, - "step": 4753 - }, - { - "epoch": 0.5716347020982384, - "flos": 17078976278880.0, - "grad_norm": 9.721167555022005, - "learning_rate": 1.6351587570901178e-06, - "loss": 0.7689, - "num_input_tokens_seen": 101328345, - "step": 4754 - }, - { - "epoch": 0.5717549449888776, - "flos": 12399999616680.0, - "grad_norm": 4.462350170708313, - "learning_rate": 1.634392883824534e-06, - "loss": 0.7447, - "num_input_tokens_seen": 101340065, - "step": 4755 - }, - { - "epoch": 0.5718751878795166, - "flos": 26099045086560.0, - "grad_norm": 3.9957361145179235, - "learning_rate": 1.6336270660212595e-06, - "loss": 0.6603, - "num_input_tokens_seen": 101361380, - "step": 4756 - }, - { - "epoch": 0.5719954307701557, - "flos": 28389419726760.0, - "grad_norm": 5.922382906669653, - "learning_rate": 1.6328613037964676e-06, - "loss": 0.6335, - "num_input_tokens_seen": 101384165, - "step": 4757 - }, - { - "epoch": 0.5721156736607949, - "flos": 15081533503560.0, - "grad_norm": 3.5325472989057554, - "learning_rate": 1.6320955972663241e-06, - "loss": 0.6717, - "num_input_tokens_seen": 101403480, - "step": 4758 - }, - { - "epoch": 0.5722359165514339, - "flos": 27502867441920.0, - "grad_norm": 
7.681507286933909, - "learning_rate": 1.6313299465469857e-06, - "loss": 0.6349, - "num_input_tokens_seen": 101425930, - "step": 4759 - }, - { - "epoch": 0.572356159442073, - "flos": 16073368849920.0, - "grad_norm": 7.6820252315203215, - "learning_rate": 1.6305643517546014e-06, - "loss": 0.764, - "num_input_tokens_seen": 101441030, - "step": 4760 - }, - { - "epoch": 0.5724764023327121, - "flos": 13974758807880.0, - "grad_norm": 3.478247622018962, - "learning_rate": 1.629798813005311e-06, - "loss": 0.8328, - "num_input_tokens_seen": 101460470, - "step": 4761 - }, - { - "epoch": 0.5725966452233512, - "flos": 16701209195280.0, - "grad_norm": 4.80778887138226, - "learning_rate": 1.6290333304152473e-06, - "loss": 0.6971, - "num_input_tokens_seen": 101480065, - "step": 4762 - }, - { - "epoch": 0.5727168881139902, - "flos": 30522787913520.0, - "grad_norm": 5.780113974638775, - "learning_rate": 1.6282679041005314e-06, - "loss": 0.5583, - "num_input_tokens_seen": 101505375, - "step": 4763 - }, - { - "epoch": 0.5728371310046293, - "flos": 10653421602120.0, - "grad_norm": 4.654201594636883, - "learning_rate": 1.6275025341772789e-06, - "loss": 0.8541, - "num_input_tokens_seen": 101521400, - "step": 4764 - }, - { - "epoch": 0.5729573738952685, - "flos": 15729441772200.0, - "grad_norm": 9.0325187663061, - "learning_rate": 1.626737220761596e-06, - "loss": 0.8154, - "num_input_tokens_seen": 101538585, - "step": 4765 - }, - { - "epoch": 0.5730776167859075, - "flos": 17294576335560.0, - "grad_norm": 10.883710858358, - "learning_rate": 1.62597196396958e-06, - "loss": 0.7771, - "num_input_tokens_seen": 101556475, - "step": 4766 - }, - { - "epoch": 0.5731978596765466, - "flos": 18861895436160.0, - "grad_norm": 12.964350016020495, - "learning_rate": 1.6252067639173197e-06, - "loss": 0.8352, - "num_input_tokens_seen": 101578105, - "step": 4767 - }, - { - "epoch": 0.5733181025671857, - "flos": 19320951822960.0, - "grad_norm": 13.144309526361766, - "learning_rate": 1.6244416207208956e-06, - "loss": 0.6758, - "num_input_tokens_seen": 101598760, - "step": 4768 - }, - { - "epoch": 0.5734383454578248, - "flos": 21588725743080.0, - "grad_norm": 6.639932178932275, - "learning_rate": 1.6236765344963787e-06, - "loss": 0.7251, - "num_input_tokens_seen": 101619740, - "step": 4769 - }, - { - "epoch": 0.5735585883484638, - "flos": 25693231769760.0, - "grad_norm": 10.02399388922454, - "learning_rate": 1.6229115053598322e-06, - "loss": 0.6742, - "num_input_tokens_seen": 101641215, - "step": 4770 - }, - { - "epoch": 0.573678831239103, - "flos": 13704807582600.0, - "grad_norm": 4.675966442395642, - "learning_rate": 1.6221465334273108e-06, - "loss": 0.6904, - "num_input_tokens_seen": 101660145, - "step": 4771 - }, - { - "epoch": 0.5737990741297421, - "flos": 18835020621480.0, - "grad_norm": 9.28777579595028, - "learning_rate": 1.6213816188148593e-06, - "loss": 0.5976, - "num_input_tokens_seen": 101678570, - "step": 4772 - }, - { - "epoch": 0.5739193170203811, - "flos": 19993866968640.0, - "grad_norm": 2.6954557076708534, - "learning_rate": 1.6206167616385162e-06, - "loss": 0.7601, - "num_input_tokens_seen": 101699355, - "step": 4773 - }, - { - "epoch": 0.5740395599110203, - "flos": 8871388923720.0, - "grad_norm": 3.895816632947299, - "learning_rate": 1.6198519620143078e-06, - "loss": 0.73, - "num_input_tokens_seen": 101716230, - "step": 4774 - }, - { - "epoch": 0.5741598028016593, - "flos": 18996681089040.0, - "grad_norm": 3.443819652482999, - "learning_rate": 1.6190872200582546e-06, - "loss": 0.761, - "num_input_tokens_seen": 
101737690, - "step": 4775 - }, - { - "epoch": 0.5742800456922984, - "flos": 14055003332400.0, - "grad_norm": 21.938094693461803, - "learning_rate": 1.6183225358863676e-06, - "loss": 0.761, - "num_input_tokens_seen": 101754305, - "step": 4776 - }, - { - "epoch": 0.5744002885829376, - "flos": 22696418577600.0, - "grad_norm": 8.04188445064825, - "learning_rate": 1.617557909614648e-06, - "loss": 0.7044, - "num_input_tokens_seen": 101773460, - "step": 4777 - }, - { - "epoch": 0.5745205314735766, - "flos": 17456078503320.0, - "grad_norm": 3.37435581982569, - "learning_rate": 1.6167933413590899e-06, - "loss": 0.8392, - "num_input_tokens_seen": 101792085, - "step": 4778 - }, - { - "epoch": 0.5746407743642157, - "flos": 8924695313640.0, - "grad_norm": 4.759498735849474, - "learning_rate": 1.6160288312356773e-06, - "loss": 0.8903, - "num_input_tokens_seen": 101808935, - "step": 4779 - }, - { - "epoch": 0.5747610172548548, - "flos": 17673641477520.0, - "grad_norm": 4.524358520562115, - "learning_rate": 1.6152643793603857e-06, - "loss": 0.8134, - "num_input_tokens_seen": 101829005, - "step": 4780 - }, - { - "epoch": 0.5748812601454939, - "flos": 18616761128160.0, - "grad_norm": 3.670242790901583, - "learning_rate": 1.6144999858491815e-06, - "loss": 0.869, - "num_input_tokens_seen": 101847355, - "step": 4781 - }, - { - "epoch": 0.575001503036133, - "flos": 22477177625520.0, - "grad_norm": 3.6752350712545576, - "learning_rate": 1.6137356508180232e-06, - "loss": 0.8419, - "num_input_tokens_seen": 101868785, - "step": 4782 - }, - { - "epoch": 0.5751217459267721, - "flos": 15892368638160.0, - "grad_norm": 15.293076508241043, - "learning_rate": 1.6129713743828593e-06, - "loss": 0.8035, - "num_input_tokens_seen": 101887515, - "step": 4783 - }, - { - "epoch": 0.5752419888174112, - "flos": 15836624431320.0, - "grad_norm": 4.43445450413775, - "learning_rate": 1.6122071566596306e-06, - "loss": 0.7445, - "num_input_tokens_seen": 101907510, - "step": 4784 - }, - { - "epoch": 0.5753622317080502, - "flos": 12967251781320.0, - "grad_norm": 6.9643327201402725, - "learning_rate": 1.6114429977642674e-06, - "loss": 0.8167, - "num_input_tokens_seen": 101921735, - "step": 4785 - }, - { - "epoch": 0.5754824745986894, - "flos": 14458442152200.0, - "grad_norm": 4.048493168579534, - "learning_rate": 1.6106788978126926e-06, - "loss": 0.7108, - "num_input_tokens_seen": 101940430, - "step": 4786 - }, - { - "epoch": 0.5756027174893285, - "flos": 22743551275320.0, - "grad_norm": 5.209462198627204, - "learning_rate": 1.6099148569208196e-06, - "loss": 0.7607, - "num_input_tokens_seen": 101957370, - "step": 4787 - }, - { - "epoch": 0.5757229603799675, - "flos": 20939487756120.0, - "grad_norm": 3.06357073069127, - "learning_rate": 1.6091508752045523e-06, - "loss": 0.6138, - "num_input_tokens_seen": 101977970, - "step": 4788 - }, - { - "epoch": 0.5758432032706067, - "flos": 16833430391400.0, - "grad_norm": 3.617995104017261, - "learning_rate": 1.608386952779787e-06, - "loss": 0.8535, - "num_input_tokens_seen": 101997060, - "step": 4789 - }, - { - "epoch": 0.5759634461612457, - "flos": 18862338675600.0, - "grad_norm": 3.105356658256115, - "learning_rate": 1.6076230897624098e-06, - "loss": 0.7295, - "num_input_tokens_seen": 102018985, - "step": 4790 - }, - { - "epoch": 0.5760836890518848, - "flos": 22453088887320.0, - "grad_norm": 5.214537056465975, - "learning_rate": 1.6068592862682974e-06, - "loss": 0.766, - "num_input_tokens_seen": 102036860, - "step": 4791 - }, - { - "epoch": 0.576203931942524, - "flos": 26854420953960.0, - 
"grad_norm": 5.087037513950036, - "learning_rate": 1.6060955424133187e-06, - "loss": 0.7166, - "num_input_tokens_seen": 102057505, - "step": 4792 - }, - { - "epoch": 0.576324174833163, - "flos": 18696910672800.0, - "grad_norm": 9.140955522870339, - "learning_rate": 1.6053318583133332e-06, - "loss": 0.8748, - "num_input_tokens_seen": 102078095, - "step": 4793 - }, - { - "epoch": 0.5764444177238021, - "flos": 18403123989000.0, - "grad_norm": 4.849415474501637, - "learning_rate": 1.6045682340841907e-06, - "loss": 0.7416, - "num_input_tokens_seen": 102096740, - "step": 4794 - }, - { - "epoch": 0.5765646606144411, - "flos": 45858130554120.0, - "grad_norm": 0.7901426042340631, - "learning_rate": 1.6038046698417336e-06, - "loss": 0.6017, - "num_input_tokens_seen": 102157355, - "step": 4795 - }, - { - "epoch": 0.5766849035050803, - "flos": 18402680749560.0, - "grad_norm": 4.6041244343795675, - "learning_rate": 1.6030411657017919e-06, - "loss": 0.6632, - "num_input_tokens_seen": 102176730, - "step": 4796 - }, - { - "epoch": 0.5768051463957193, - "flos": 11646713306640.0, - "grad_norm": 4.2595077079759625, - "learning_rate": 1.6022777217801903e-06, - "loss": 0.8338, - "num_input_tokens_seen": 102193405, - "step": 4797 - }, - { - "epoch": 0.5769253892863584, - "flos": 16215721233240.0, - "grad_norm": 5.260253702289575, - "learning_rate": 1.601514338192742e-06, - "loss": 0.7104, - "num_input_tokens_seen": 102213055, - "step": 4798 - }, - { - "epoch": 0.5770456321769976, - "flos": 16728337289640.0, - "grad_norm": 3.9488536088615325, - "learning_rate": 1.6007510150552514e-06, - "loss": 0.699, - "num_input_tokens_seen": 102230835, - "step": 4799 - }, - { - "epoch": 0.5771658750676366, - "flos": 34117970519640.0, - "grad_norm": 3.686835794921024, - "learning_rate": 1.599987752483515e-06, - "loss": 0.6059, - "num_input_tokens_seen": 102255000, - "step": 4800 - }, - { - "epoch": 0.5772861179582757, - "flos": 16212491917320.0, - "grad_norm": 4.074615956563446, - "learning_rate": 1.5992245505933184e-06, - "loss": 0.6725, - "num_input_tokens_seen": 102274420, - "step": 4801 - }, - { - "epoch": 0.5774063608489148, - "flos": 22939494988200.0, - "grad_norm": 3.992586537703017, - "learning_rate": 1.5984614095004388e-06, - "loss": 0.6991, - "num_input_tokens_seen": 102295275, - "step": 4802 - }, - { - "epoch": 0.5775266037395539, - "flos": 16484532699960.0, - "grad_norm": 7.600145699961582, - "learning_rate": 1.5976983293206438e-06, - "loss": 0.7863, - "num_input_tokens_seen": 102310800, - "step": 4803 - }, - { - "epoch": 0.577646846630193, - "flos": 15405424317960.0, - "grad_norm": 4.2896543556523055, - "learning_rate": 1.5969353101696928e-06, - "loss": 0.7031, - "num_input_tokens_seen": 102328960, - "step": 4804 - }, - { - "epoch": 0.5777670895208321, - "flos": 21803439320880.0, - "grad_norm": 4.943529913593301, - "learning_rate": 1.5961723521633341e-06, - "loss": 0.7855, - "num_input_tokens_seen": 102349920, - "step": 4805 - }, - { - "epoch": 0.5778873324114712, - "flos": 14244805013040.0, - "grad_norm": 3.6474686506360885, - "learning_rate": 1.5954094554173097e-06, - "loss": 0.8934, - "num_input_tokens_seen": 102367630, - "step": 4806 - }, - { - "epoch": 0.5780075753021102, - "flos": 10275369578880.0, - "grad_norm": 4.7804283483262715, - "learning_rate": 1.5946466200473482e-06, - "loss": 0.779, - "num_input_tokens_seen": 102385260, - "step": 4807 - }, - { - "epoch": 0.5781278181927494, - "flos": 11107380735360.0, - "grad_norm": 4.079772382632614, - "learning_rate": 1.5938838461691723e-06, - "loss": 0.8283, - 
"num_input_tokens_seen": 102401890, - "step": 4808 - }, - { - "epoch": 0.5782480610833884, - "flos": 12192061270320.0, - "grad_norm": 5.611712210575857, - "learning_rate": 1.593121133898494e-06, - "loss": 0.8037, - "num_input_tokens_seen": 102418815, - "step": 4809 - }, - { - "epoch": 0.5783683039740275, - "flos": 18671397236400.0, - "grad_norm": 3.1739272821049074, - "learning_rate": 1.592358483351016e-06, - "loss": 0.7645, - "num_input_tokens_seen": 102438710, - "step": 4810 - }, - { - "epoch": 0.5784885468646667, - "flos": 13434729717480.0, - "grad_norm": 3.3735397853424782, - "learning_rate": 1.5915958946424326e-06, - "loss": 0.7109, - "num_input_tokens_seen": 102457115, - "step": 4811 - }, - { - "epoch": 0.5786087897553057, - "flos": 33931113215280.0, - "grad_norm": 2.6536914845221995, - "learning_rate": 1.5908333678884271e-06, - "loss": 0.7346, - "num_input_tokens_seen": 102483255, - "step": 4812 - }, - { - "epoch": 0.5787290326459448, - "flos": 8979648021480.0, - "grad_norm": 5.519448141348389, - "learning_rate": 1.5900709032046743e-06, - "loss": 0.718, - "num_input_tokens_seen": 102501050, - "step": 4813 - }, - { - "epoch": 0.5788492755365839, - "flos": 17048935468200.0, - "grad_norm": 3.4593397460905506, - "learning_rate": 1.5893085007068391e-06, - "loss": 0.7748, - "num_input_tokens_seen": 102518330, - "step": 4814 - }, - { - "epoch": 0.578969518427223, - "flos": 17619955168080.0, - "grad_norm": 9.970121761638346, - "learning_rate": 1.5885461605105786e-06, - "loss": 0.7, - "num_input_tokens_seen": 102539650, - "step": 4815 - }, - { - "epoch": 0.579089761317862, - "flos": 15999772917000.0, - "grad_norm": 4.472052787482858, - "learning_rate": 1.5877838827315375e-06, - "loss": 0.7529, - "num_input_tokens_seen": 102557915, - "step": 4816 - }, - { - "epoch": 0.5792100042085012, - "flos": 16782625138320.0, - "grad_norm": 4.889089910150572, - "learning_rate": 1.587021667485355e-06, - "loss": 0.6791, - "num_input_tokens_seen": 102577005, - "step": 4817 - }, - { - "epoch": 0.5793302470991403, - "flos": 15702376997760.0, - "grad_norm": 8.581658752441708, - "learning_rate": 1.5862595148876559e-06, - "loss": 0.7648, - "num_input_tokens_seen": 102596830, - "step": 4818 - }, - { - "epoch": 0.5794504899897793, - "flos": 9219748395840.0, - "grad_norm": 3.982808601793527, - "learning_rate": 1.58549742505406e-06, - "loss": 0.7608, - "num_input_tokens_seen": 102611295, - "step": 4819 - }, - { - "epoch": 0.5795707328804185, - "flos": 10815841908720.0, - "grad_norm": 5.343613293778589, - "learning_rate": 1.5847353981001747e-06, - "loss": 0.737, - "num_input_tokens_seen": 102628195, - "step": 4820 - }, - { - "epoch": 0.5796909757710575, - "flos": 26773891489800.0, - "grad_norm": 3.608869508958509, - "learning_rate": 1.5839734341415993e-06, - "loss": 0.6821, - "num_input_tokens_seen": 102650115, - "step": 4821 - }, - { - "epoch": 0.5798112186616966, - "flos": 16864294361040.0, - "grad_norm": 3.460135545666494, - "learning_rate": 1.5832115332939238e-06, - "loss": 0.7599, - "num_input_tokens_seen": 102668275, - "step": 4822 - }, - { - "epoch": 0.5799314615523358, - "flos": 12136950262680.0, - "grad_norm": 3.8303709331598297, - "learning_rate": 1.5824496956727272e-06, - "loss": 0.729, - "num_input_tokens_seen": 102685200, - "step": 4823 - }, - { - "epoch": 0.5800517044429748, - "flos": 14973970924920.0, - "grad_norm": 3.400934954727035, - "learning_rate": 1.5816879213935797e-06, - "loss": 0.7057, - "num_input_tokens_seen": 102703730, - "step": 4824 - }, - { - "epoch": 0.5801719473336139, - "flos": 
23153258767200.0, - "grad_norm": 4.121668770976338, - "learning_rate": 1.5809262105720416e-06, - "loss": 0.7801, - "num_input_tokens_seen": 102724490, - "step": 4825 - }, - { - "epoch": 0.580292190224253, - "flos": 14892744941640.0, - "grad_norm": 3.3044828888156745, - "learning_rate": 1.5801645633236644e-06, - "loss": 0.7759, - "num_input_tokens_seen": 102745195, - "step": 4826 - }, - { - "epoch": 0.5804124331148921, - "flos": 19510373584080.0, - "grad_norm": 4.836455728745305, - "learning_rate": 1.579402979763989e-06, - "loss": 0.7646, - "num_input_tokens_seen": 102765250, - "step": 4827 - }, - { - "epoch": 0.5805326760055312, - "flos": 9787918699320.0, - "grad_norm": 7.085079321999397, - "learning_rate": 1.578641460008548e-06, - "loss": 0.7754, - "num_input_tokens_seen": 102782705, - "step": 4828 - }, - { - "epoch": 0.5806529188961702, - "flos": 8761261888320.0, - "grad_norm": 3.869588210797446, - "learning_rate": 1.5778800041728613e-06, - "loss": 0.6438, - "num_input_tokens_seen": 102798715, - "step": 4829 - }, - { - "epoch": 0.5807731617868094, - "flos": 19213420904280.0, - "grad_norm": 10.023163099688103, - "learning_rate": 1.577118612372443e-06, - "loss": 0.6488, - "num_input_tokens_seen": 102820275, - "step": 4830 - }, - { - "epoch": 0.5808934046774484, - "flos": 27907699299960.0, - "grad_norm": 3.8678744852240574, - "learning_rate": 1.5763572847227943e-06, - "loss": 0.6841, - "num_input_tokens_seen": 102840880, - "step": 4831 - }, - { - "epoch": 0.5810136475680875, - "flos": 14974129224720.0, - "grad_norm": 4.168709800789477, - "learning_rate": 1.5755960213394091e-06, - "loss": 0.7854, - "num_input_tokens_seen": 102857700, - "step": 4832 - }, - { - "epoch": 0.5811338904587267, - "flos": 12786663149040.0, - "grad_norm": 15.004358294251228, - "learning_rate": 1.5748348223377703e-06, - "loss": 0.768, - "num_input_tokens_seen": 102874975, - "step": 4833 - }, - { - "epoch": 0.5812541333493657, - "flos": 14212579665120.0, - "grad_norm": 4.612034537961062, - "learning_rate": 1.5740736878333507e-06, - "loss": 0.7622, - "num_input_tokens_seen": 102892535, - "step": 4834 - }, - { - "epoch": 0.5813743762400048, - "flos": 15054816988680.0, - "grad_norm": 20.143509781766557, - "learning_rate": 1.5733126179416143e-06, - "loss": 0.762, - "num_input_tokens_seen": 102906740, - "step": 4835 - }, - { - "epoch": 0.5814946191306439, - "flos": 24366677902680.0, - "grad_norm": 5.10228066406127, - "learning_rate": 1.5725516127780137e-06, - "loss": 0.7011, - "num_input_tokens_seen": 102928595, - "step": 4836 - }, - { - "epoch": 0.581614862021283, - "flos": 11760227957760.0, - "grad_norm": 8.274598873131584, - "learning_rate": 1.5717906724579943e-06, - "loss": 0.8619, - "num_input_tokens_seen": 102945375, - "step": 4837 - }, - { - "epoch": 0.581735104911922, - "flos": 24829375184880.0, - "grad_norm": 4.84507073885176, - "learning_rate": 1.571029797096989e-06, - "loss": 0.6602, - "num_input_tokens_seen": 102966200, - "step": 4838 - }, - { - "epoch": 0.5818553478025612, - "flos": 17079767777880.0, - "grad_norm": 3.4849625848981, - "learning_rate": 1.570268986810423e-06, - "loss": 0.7678, - "num_input_tokens_seen": 102985815, - "step": 4839 - }, - { - "epoch": 0.5819755906932003, - "flos": 15351706348560.0, - "grad_norm": 5.720801147162944, - "learning_rate": 1.5695082417137096e-06, - "loss": 0.7403, - "num_input_tokens_seen": 103003410, - "step": 4840 - }, - { - "epoch": 0.5820958335838393, - "flos": 15673697565360.0, - "grad_norm": 3.916401847381826, - "learning_rate": 1.5687475619222539e-06, - "loss": 
0.7415, - "num_input_tokens_seen": 103023085, - "step": 4841 - }, - { - "epoch": 0.5822160764744785, - "flos": 13110237363840.0, - "grad_norm": 4.284279746701661, - "learning_rate": 1.5679869475514496e-06, - "loss": 0.7218, - "num_input_tokens_seen": 103039740, - "step": 4842 - }, - { - "epoch": 0.5823363193651175, - "flos": 16858975487760.0, - "grad_norm": 10.653832303975433, - "learning_rate": 1.567226398716682e-06, - "loss": 0.7853, - "num_input_tokens_seen": 103059375, - "step": 4843 - }, - { - "epoch": 0.5824565622557566, - "flos": 24155510240400.0, - "grad_norm": 3.50829463100757, - "learning_rate": 1.566465915533326e-06, - "loss": 0.6177, - "num_input_tokens_seen": 103081125, - "step": 4844 - }, - { - "epoch": 0.5825768051463958, - "flos": 16263613770000.0, - "grad_norm": 5.294964020834664, - "learning_rate": 1.5657054981167458e-06, - "loss": 0.8601, - "num_input_tokens_seen": 103099740, - "step": 4845 - }, - { - "epoch": 0.5826970480370348, - "flos": 20536207236120.0, - "grad_norm": 4.258700496922195, - "learning_rate": 1.5649451465822965e-06, - "loss": 0.661, - "num_input_tokens_seen": 103120850, - "step": 4846 - }, - { - "epoch": 0.5828172909276739, - "flos": 13029264660240.0, - "grad_norm": 3.1281217622692568, - "learning_rate": 1.5641848610453218e-06, - "loss": 0.8161, - "num_input_tokens_seen": 103139230, - "step": 4847 - }, - { - "epoch": 0.582937533818313, - "flos": 14514787898280.0, - "grad_norm": 4.340980580034911, - "learning_rate": 1.563424641621158e-06, - "loss": 0.8375, - "num_input_tokens_seen": 103158130, - "step": 4848 - }, - { - "epoch": 0.5830577767089521, - "flos": 19699352105760.0, - "grad_norm": 4.395988690612518, - "learning_rate": 1.5626644884251282e-06, - "loss": 0.6823, - "num_input_tokens_seen": 103177370, - "step": 4849 - }, - { - "epoch": 0.5831780195995911, - "flos": 18535060245480.0, - "grad_norm": 4.8187090959804335, - "learning_rate": 1.5619044015725488e-06, - "loss": 0.8678, - "num_input_tokens_seen": 103196780, - "step": 4850 - }, - { - "epoch": 0.5832982624902303, - "flos": 10734299325840.0, - "grad_norm": 6.571112802868139, - "learning_rate": 1.5611443811787224e-06, - "loss": 0.8474, - "num_input_tokens_seen": 103210625, - "step": 4851 - }, - { - "epoch": 0.5834185053808694, - "flos": 14942885335560.0, - "grad_norm": 4.0611894107430695, - "learning_rate": 1.560384427358945e-06, - "loss": 0.69, - "num_input_tokens_seen": 103229890, - "step": 4852 - }, - { - "epoch": 0.5835387482715084, - "flos": 19943504955000.0, - "grad_norm": 2.6948308099637415, - "learning_rate": 1.5596245402284998e-06, - "loss": 0.7218, - "num_input_tokens_seen": 103253135, - "step": 4853 - }, - { - "epoch": 0.5836589911621476, - "flos": 12138723220440.0, - "grad_norm": 3.1569604357372074, - "learning_rate": 1.5588647199026619e-06, - "loss": 0.7976, - "num_input_tokens_seen": 103270590, - "step": 4854 - }, - { - "epoch": 0.5837792340527866, - "flos": 14944879913040.0, - "grad_norm": 8.052676344761197, - "learning_rate": 1.5581049664966956e-06, - "loss": 0.8611, - "num_input_tokens_seen": 103288070, - "step": 4855 - }, - { - "epoch": 0.5838994769434257, - "flos": 48655992337200.0, - "grad_norm": 1.0787696017079045, - "learning_rate": 1.5573452801258545e-06, - "loss": 0.6834, - "num_input_tokens_seen": 103334960, - "step": 4856 - }, - { - "epoch": 0.5840197198340649, - "flos": 15702788577240.0, - "grad_norm": 4.952248673279349, - "learning_rate": 1.5565856609053824e-06, - "loss": 0.6133, - "num_input_tokens_seen": 103353475, - "step": 4857 - }, - { - "epoch": 0.5841399627247039, 
- "flos": 13974410548320.0, - "grad_norm": 3.9766409216082335, - "learning_rate": 1.5558261089505127e-06, - "loss": 0.7826, - "num_input_tokens_seen": 103371925, - "step": 4858 - }, - { - "epoch": 0.584260205615343, - "flos": 19368970999560.0, - "grad_norm": 5.312246698062798, - "learning_rate": 1.5550666243764697e-06, - "loss": 0.7836, - "num_input_tokens_seen": 103389805, - "step": 4859 - }, - { - "epoch": 0.584380448505982, - "flos": 10086675996840.0, - "grad_norm": 13.17715062462232, - "learning_rate": 1.554307207298465e-06, - "loss": 0.7494, - "num_input_tokens_seen": 103407785, - "step": 4860 - }, - { - "epoch": 0.5845006913966212, - "flos": 15756981446040.0, - "grad_norm": 3.044037265263757, - "learning_rate": 1.553547857831704e-06, - "loss": 0.777, - "num_input_tokens_seen": 103424015, - "step": 4861 - }, - { - "epoch": 0.5846209342872603, - "flos": 43017437336520.0, - "grad_norm": 1.01126418340707, - "learning_rate": 1.5527885760913771e-06, - "loss": 0.7125, - "num_input_tokens_seen": 103473625, - "step": 4862 - }, - { - "epoch": 0.5847411771778993, - "flos": 13569547030320.0, - "grad_norm": 4.007131418280307, - "learning_rate": 1.552029362192668e-06, - "loss": 0.748, - "num_input_tokens_seen": 103492605, - "step": 4863 - }, - { - "epoch": 0.5848614200685385, - "flos": 17752587943680.0, - "grad_norm": 6.661722861479176, - "learning_rate": 1.5512702162507478e-06, - "loss": 0.6959, - "num_input_tokens_seen": 103512640, - "step": 4864 - }, - { - "epoch": 0.5849816629591775, - "flos": 52849639337160.0, - "grad_norm": 0.9861840473358543, - "learning_rate": 1.5505111383807792e-06, - "loss": 0.5626, - "num_input_tokens_seen": 103575030, - "step": 4865 - }, - { - "epoch": 0.5851019058498166, - "flos": 17427810650400.0, - "grad_norm": 3.1427691527843518, - "learning_rate": 1.5497521286979138e-06, - "loss": 0.7875, - "num_input_tokens_seen": 103594990, - "step": 4866 - }, - { - "epoch": 0.5852221487404557, - "flos": 17862176759760.0, - "grad_norm": 4.04983518288645, - "learning_rate": 1.5489931873172927e-06, - "loss": 0.7354, - "num_input_tokens_seen": 103616030, - "step": 4867 - }, - { - "epoch": 0.5853423916310948, - "flos": 19995798226200.0, - "grad_norm": 3.8851867416845267, - "learning_rate": 1.5482343143540467e-06, - "loss": 0.7779, - "num_input_tokens_seen": 103637015, - "step": 4868 - }, - { - "epoch": 0.5854626345217339, - "flos": 8681903842680.0, - "grad_norm": 4.03715756502443, - "learning_rate": 1.547475509923295e-06, - "loss": 0.8151, - "num_input_tokens_seen": 103653775, - "step": 4869 - }, - { - "epoch": 0.585582877412373, - "flos": 47436119061240.0, - "grad_norm": 0.7615819563667641, - "learning_rate": 1.5467167741401495e-06, - "loss": 0.5858, - "num_input_tokens_seen": 103714975, - "step": 4870 - }, - { - "epoch": 0.5857031203030121, - "flos": 12402089174040.0, - "grad_norm": 5.717880433056337, - "learning_rate": 1.5459581071197083e-06, - "loss": 0.7008, - "num_input_tokens_seen": 103730355, - "step": 4871 - }, - { - "epoch": 0.5858233631936511, - "flos": 15269815506120.0, - "grad_norm": 7.694626149198396, - "learning_rate": 1.5451995089770624e-06, - "loss": 0.805, - "num_input_tokens_seen": 103749860, - "step": 4872 - }, - { - "epoch": 0.5859436060842903, - "flos": 16977650712360.0, - "grad_norm": 2.838888687246589, - "learning_rate": 1.5444409798272885e-06, - "loss": 0.7045, - "num_input_tokens_seen": 103773670, - "step": 4873 - }, - { - "epoch": 0.5860638489749294, - "flos": 16459304203200.0, - "grad_norm": 5.400495520568069, - "learning_rate": 1.543682519785456e-06, - 
"loss": 0.7844, - "num_input_tokens_seen": 103791870, - "step": 4874 - }, - { - "epoch": 0.5861840918655684, - "flos": 12813316344000.0, - "grad_norm": 17.289668814210355, - "learning_rate": 1.5429241289666219e-06, - "loss": 0.7711, - "num_input_tokens_seen": 103809090, - "step": 4875 - }, - { - "epoch": 0.5863043347562076, - "flos": 18726318284280.0, - "grad_norm": 7.058772360859333, - "learning_rate": 1.5421658074858342e-06, - "loss": 0.682, - "num_input_tokens_seen": 103826915, - "step": 4876 - }, - { - "epoch": 0.5864245776468466, - "flos": 15108471638160.0, - "grad_norm": 17.472188594093705, - "learning_rate": 1.5414075554581298e-06, - "loss": 0.6371, - "num_input_tokens_seen": 103844680, - "step": 4877 - }, - { - "epoch": 0.5865448205374857, - "flos": 21211433558880.0, - "grad_norm": 8.941851216948042, - "learning_rate": 1.5406493729985348e-06, - "loss": 0.7564, - "num_input_tokens_seen": 103863595, - "step": 4878 - }, - { - "epoch": 0.5866650634281249, - "flos": 18937517606520.0, - "grad_norm": 4.430475181772685, - "learning_rate": 1.5398912602220644e-06, - "loss": 0.7017, - "num_input_tokens_seen": 103882590, - "step": 4879 - }, - { - "epoch": 0.5867853063187639, - "flos": 12432541564200.0, - "grad_norm": 4.698681731706154, - "learning_rate": 1.539133217243724e-06, - "loss": 0.759, - "num_input_tokens_seen": 103899330, - "step": 4880 - }, - { - "epoch": 0.586905549209403, - "flos": 18052168400160.0, - "grad_norm": 4.354964455460594, - "learning_rate": 1.5383752441785081e-06, - "loss": 0.7302, - "num_input_tokens_seen": 103918275, - "step": 4881 - }, - { - "epoch": 0.5870257921000421, - "flos": 10707329531280.0, - "grad_norm": 4.055096026881663, - "learning_rate": 1.5376173411414003e-06, - "loss": 0.8369, - "num_input_tokens_seen": 103936035, - "step": 4882 - }, - { - "epoch": 0.5871460349906812, - "flos": 17511854370120.0, - "grad_norm": 3.299286019508674, - "learning_rate": 1.5368595082473753e-06, - "loss": 0.769, - "num_input_tokens_seen": 103954055, - "step": 4883 - }, - { - "epoch": 0.5872662778813202, - "flos": 16216196132640.0, - "grad_norm": 4.025861583924604, - "learning_rate": 1.5361017456113935e-06, - "loss": 0.7711, - "num_input_tokens_seen": 103974125, - "step": 4884 - }, - { - "epoch": 0.5873865207719594, - "flos": 13461382912440.0, - "grad_norm": 5.264120820951788, - "learning_rate": 1.5353440533484085e-06, - "loss": 0.839, - "num_input_tokens_seen": 103992700, - "step": 4885 - }, - { - "epoch": 0.5875067636625985, - "flos": 39788054349120.0, - "grad_norm": 3.7567625740172375, - "learning_rate": 1.534586431573361e-06, - "loss": 0.6485, - "num_input_tokens_seen": 104017360, - "step": 4886 - }, - { - "epoch": 0.5876270065532375, - "flos": 20531046662640.0, - "grad_norm": 9.265028833055176, - "learning_rate": 1.5338288804011817e-06, - "loss": 0.7611, - "num_input_tokens_seen": 104036580, - "step": 4887 - }, - { - "epoch": 0.5877472494438767, - "flos": 15622132473240.0, - "grad_norm": 5.401200775579004, - "learning_rate": 1.533071399946791e-06, - "loss": 0.6813, - "num_input_tokens_seen": 104055045, - "step": 4888 - }, - { - "epoch": 0.5878674923345157, - "flos": 16378141539840.0, - "grad_norm": 5.896291582431183, - "learning_rate": 1.5323139903250977e-06, - "loss": 0.5557, - "num_input_tokens_seen": 104075370, - "step": 4889 - }, - { - "epoch": 0.5879877352251548, - "flos": 15996986840520.0, - "grad_norm": 4.078520189335153, - "learning_rate": 1.5315566516510002e-06, - "loss": 0.7591, - "num_input_tokens_seen": 104093260, - "step": 4890 - }, - { - "epoch": 
0.5881079781157939, - "flos": 12759788334360.0, - "grad_norm": 4.217028005042473, - "learning_rate": 1.5307993840393857e-06, - "loss": 0.6664, - "num_input_tokens_seen": 104111060, - "step": 4891 - }, - { - "epoch": 0.588228221006433, - "flos": 16540086947040.0, - "grad_norm": 4.752802373506743, - "learning_rate": 1.530042187605132e-06, - "loss": 0.7936, - "num_input_tokens_seen": 104130035, - "step": 4892 - }, - { - "epoch": 0.5883484638970721, - "flos": 19185817910520.0, - "grad_norm": 3.8979008674131483, - "learning_rate": 1.5292850624631044e-06, - "loss": 0.8262, - "num_input_tokens_seen": 104151950, - "step": 4893 - }, - { - "epoch": 0.5884687067877111, - "flos": 22343246791560.0, - "grad_norm": 3.8756079658290212, - "learning_rate": 1.5285280087281593e-06, - "loss": 0.7673, - "num_input_tokens_seen": 104172400, - "step": 4894 - }, - { - "epoch": 0.5885889496783503, - "flos": 51998668356000.0, - "grad_norm": 0.670668405690634, - "learning_rate": 1.5277710265151398e-06, - "loss": 0.5623, - "num_input_tokens_seen": 104241600, - "step": 4895 - }, - { - "epoch": 0.5887091925689893, - "flos": 13947504073680.0, - "grad_norm": 10.789530617560043, - "learning_rate": 1.5270141159388803e-06, - "loss": 0.7376, - "num_input_tokens_seen": 104258340, - "step": 4896 - }, - { - "epoch": 0.5888294354596284, - "flos": 17052418063800.0, - "grad_norm": 3.210419030573373, - "learning_rate": 1.526257277114203e-06, - "loss": 0.7789, - "num_input_tokens_seen": 104279135, - "step": 4897 - }, - { - "epoch": 0.5889496783502676, - "flos": 16080998900280.0, - "grad_norm": 3.4760527847900247, - "learning_rate": 1.5255005101559201e-06, - "loss": 0.7843, - "num_input_tokens_seen": 104296465, - "step": 4898 - }, - { - "epoch": 0.5890699212409066, - "flos": 15861093089040.0, - "grad_norm": 3.3690177740283223, - "learning_rate": 1.524743815178833e-06, - "loss": 0.7511, - "num_input_tokens_seen": 104314145, - "step": 4899 - }, - { - "epoch": 0.5891901641315457, - "flos": 14218341777840.0, - "grad_norm": 4.426364089284039, - "learning_rate": 1.5239871922977315e-06, - "loss": 0.7944, - "num_input_tokens_seen": 104333780, - "step": 4900 - }, - { - "epoch": 0.5893104070221848, - "flos": 14325809376600.0, - "grad_norm": 4.430083618752656, - "learning_rate": 1.523230641627394e-06, - "loss": 0.8754, - "num_input_tokens_seen": 104352485, - "step": 4901 - }, - { - "epoch": 0.5894306499128239, - "flos": 21319629336720.0, - "grad_norm": 15.934830026812286, - "learning_rate": 1.5224741632825888e-06, - "loss": 0.7005, - "num_input_tokens_seen": 104372395, - "step": 4902 - }, - { - "epoch": 0.589550892803463, - "flos": 31116471653400.0, - "grad_norm": 6.226772022431521, - "learning_rate": 1.521717757378074e-06, - "loss": 0.6763, - "num_input_tokens_seen": 104392660, - "step": 4903 - }, - { - "epoch": 0.5896711356941021, - "flos": 10275559538640.0, - "grad_norm": 4.538550448137388, - "learning_rate": 1.5209614240285943e-06, - "loss": 0.6688, - "num_input_tokens_seen": 104410035, - "step": 4904 - }, - { - "epoch": 0.5897913785847412, - "flos": 12543270138840.0, - "grad_norm": 4.841110002269846, - "learning_rate": 1.520205163348887e-06, - "loss": 0.8287, - "num_input_tokens_seen": 104427690, - "step": 4905 - }, - { - "epoch": 0.5899116214753802, - "flos": 35928687121800.0, - "grad_norm": 0.7475496636672696, - "learning_rate": 1.519448975453674e-06, - "loss": 0.5718, - "num_input_tokens_seen": 104482510, - "step": 4906 - }, - { - "epoch": 0.5900318643660194, - "flos": 15431001074280.0, - "grad_norm": 13.439825380190026, - 
"learning_rate": 1.5186928604576696e-06, - "loss": 0.7448, - "num_input_tokens_seen": 104499425, - "step": 4907 - }, - { - "epoch": 0.5901521072566585, - "flos": 15486555321360.0, - "grad_norm": 4.378572585693149, - "learning_rate": 1.5179368184755752e-06, - "loss": 0.7604, - "num_input_tokens_seen": 104517230, - "step": 4908 - }, - { - "epoch": 0.5902723501472975, - "flos": 14781161548080.0, - "grad_norm": 4.5474871980341165, - "learning_rate": 1.5171808496220821e-06, - "loss": 0.8116, - "num_input_tokens_seen": 104535705, - "step": 4909 - }, - { - "epoch": 0.5903925930379367, - "flos": 16808075254800.0, - "grad_norm": 3.1871572572029003, - "learning_rate": 1.5164249540118708e-06, - "loss": 0.798, - "num_input_tokens_seen": 104554550, - "step": 4910 - }, - { - "epoch": 0.5905128359285757, - "flos": 17106832552320.0, - "grad_norm": 4.722371511655113, - "learning_rate": 1.5156691317596093e-06, - "loss": 0.82, - "num_input_tokens_seen": 104575695, - "step": 4911 - }, - { - "epoch": 0.5906330788192148, - "flos": 20558174757000.0, - "grad_norm": 8.949711919655527, - "learning_rate": 1.5149133829799556e-06, - "loss": 0.663, - "num_input_tokens_seen": 104593410, - "step": 4912 - }, - { - "epoch": 0.590753321709854, - "flos": 13487972787480.0, - "grad_norm": 4.345699174857756, - "learning_rate": 1.5141577077875556e-06, - "loss": 0.7858, - "num_input_tokens_seen": 104610455, - "step": 4913 - }, - { - "epoch": 0.590873564600493, - "flos": 12300890247360.0, - "grad_norm": 6.05430788880539, - "learning_rate": 1.5134021062970451e-06, - "loss": 0.6998, - "num_input_tokens_seen": 104628555, - "step": 4914 - }, - { - "epoch": 0.5909938074911321, - "flos": 9815964932520.0, - "grad_norm": 4.012603691876575, - "learning_rate": 1.5126465786230483e-06, - "loss": 0.7888, - "num_input_tokens_seen": 104645050, - "step": 4915 - }, - { - "epoch": 0.5911140503817712, - "flos": 19071701720160.0, - "grad_norm": 3.645627493724235, - "learning_rate": 1.5118911248801787e-06, - "loss": 0.7941, - "num_input_tokens_seen": 104662780, - "step": 4916 - }, - { - "epoch": 0.5912342932724103, - "flos": 17026239768240.0, - "grad_norm": 7.44857834942281, - "learning_rate": 1.5111357451830364e-06, - "loss": 0.7645, - "num_input_tokens_seen": 104681195, - "step": 4917 - }, - { - "epoch": 0.5913545361630493, - "flos": 14298491322480.0, - "grad_norm": 4.719508634303742, - "learning_rate": 1.5103804396462131e-06, - "loss": 0.6999, - "num_input_tokens_seen": 104700850, - "step": 4918 - }, - { - "epoch": 0.5914747790536885, - "flos": 19212914344920.0, - "grad_norm": 4.587876282105613, - "learning_rate": 1.5096252083842877e-06, - "loss": 0.7842, - "num_input_tokens_seen": 104719780, - "step": 4919 - }, - { - "epoch": 0.5915950219443276, - "flos": 20103962344080.0, - "grad_norm": 4.028640730627963, - "learning_rate": 1.5088700515118285e-06, - "loss": 0.8414, - "num_input_tokens_seen": 104738820, - "step": 4920 - }, - { - "epoch": 0.5917152648349666, - "flos": 16026394452000.0, - "grad_norm": 4.270756914649139, - "learning_rate": 1.508114969143392e-06, - "loss": 0.6491, - "num_input_tokens_seen": 104758525, - "step": 4921 - }, - { - "epoch": 0.5918355077256057, - "flos": 20615596941720.0, - "grad_norm": 4.081346430668902, - "learning_rate": 1.5073599613935238e-06, - "loss": 0.7641, - "num_input_tokens_seen": 104780365, - "step": 4922 - }, - { - "epoch": 0.5919557506162448, - "flos": 20671056208920.0, - "grad_norm": 4.805642332397274, - "learning_rate": 1.5066050283767574e-06, - "loss": 0.5617, - "num_input_tokens_seen": 104800765, - "step": 
4923 - }, - { - "epoch": 0.5920759935068839, - "flos": 8763889665000.0, - "grad_norm": 5.82564641288241, - "learning_rate": 1.505850170207616e-06, - "loss": 0.8112, - "num_input_tokens_seen": 104817350, - "step": 4924 - }, - { - "epoch": 0.592196236397523, - "flos": 21589517242080.0, - "grad_norm": 4.012849672476715, - "learning_rate": 1.505095387000611e-06, - "loss": 0.7507, - "num_input_tokens_seen": 104839370, - "step": 4925 - }, - { - "epoch": 0.5923164792881621, - "flos": 17858820804000.0, - "grad_norm": 3.9171337633741863, - "learning_rate": 1.504340678870242e-06, - "loss": 0.737, - "num_input_tokens_seen": 104857305, - "step": 4926 - }, - { - "epoch": 0.5924367221788012, - "flos": 17592668773920.0, - "grad_norm": 4.4404403186383075, - "learning_rate": 1.5035860459309989e-06, - "loss": 0.8786, - "num_input_tokens_seen": 104874740, - "step": 4927 - }, - { - "epoch": 0.5925569650694402, - "flos": 19699225465920.0, - "grad_norm": 6.703718821580926, - "learning_rate": 1.5028314882973568e-06, - "loss": 0.625, - "num_input_tokens_seen": 104894865, - "step": 4928 - }, - { - "epoch": 0.5926772079600794, - "flos": 16317743318880.0, - "grad_norm": 3.67153110922884, - "learning_rate": 1.502077006083783e-06, - "loss": 0.8258, - "num_input_tokens_seen": 104913245, - "step": 4929 - }, - { - "epoch": 0.5927974508507184, - "flos": 14514787898280.0, - "grad_norm": 5.7195140557512, - "learning_rate": 1.5013225994047315e-06, - "loss": 0.7628, - "num_input_tokens_seen": 104930595, - "step": 4930 - }, - { - "epoch": 0.5929176937413575, - "flos": 11488725394440.0, - "grad_norm": 3.519289675264088, - "learning_rate": 1.5005682683746452e-06, - "loss": 0.7938, - "num_input_tokens_seen": 104948830, - "step": 4931 - }, - { - "epoch": 0.5930379366319967, - "flos": 12839209699920.0, - "grad_norm": 6.025480208511523, - "learning_rate": 1.4998140131079553e-06, - "loss": 0.7094, - "num_input_tokens_seen": 104964640, - "step": 4932 - }, - { - "epoch": 0.5931581795226357, - "flos": 12914356970880.0, - "grad_norm": 4.144609024511776, - "learning_rate": 1.4990598337190821e-06, - "loss": 0.7354, - "num_input_tokens_seen": 104980715, - "step": 4933 - }, - { - "epoch": 0.5932784224132748, - "flos": 17782090535040.0, - "grad_norm": 5.437860761929979, - "learning_rate": 1.4983057303224338e-06, - "loss": 0.6599, - "num_input_tokens_seen": 105000250, - "step": 4934 - }, - { - "epoch": 0.5933986653039139, - "flos": 16779997361640.0, - "grad_norm": 4.824944593674755, - "learning_rate": 1.4975517030324072e-06, - "loss": 0.8501, - "num_input_tokens_seen": 105017980, - "step": 4935 - }, - { - "epoch": 0.593518908194553, - "flos": 52452342549600.0, - "grad_norm": 0.8571915371445485, - "learning_rate": 1.4967977519633882e-06, - "loss": 0.6422, - "num_input_tokens_seen": 105075160, - "step": 4936 - }, - { - "epoch": 0.593639151085192, - "flos": 14946494571000.0, - "grad_norm": 6.3663274078786305, - "learning_rate": 1.4960438772297494e-06, - "loss": 0.7669, - "num_input_tokens_seen": 105091925, - "step": 4937 - }, - { - "epoch": 0.5937593939758312, - "flos": 22668910563720.0, - "grad_norm": 5.118390170574379, - "learning_rate": 1.495290078945855e-06, - "loss": 0.7237, - "num_input_tokens_seen": 105111410, - "step": 4938 - }, - { - "epoch": 0.5938796368664703, - "flos": 27119369905560.0, - "grad_norm": 2.9933244820897924, - "learning_rate": 1.4945363572260529e-06, - "loss": 0.731, - "num_input_tokens_seen": 105132125, - "step": 4939 - }, - { - "epoch": 0.5939998797571093, - "flos": 17458642960080.0, - "grad_norm": 3.052163879234391, - 
"learning_rate": 1.4937827121846845e-06, - "loss": 0.6484, - "num_input_tokens_seen": 105152100, - "step": 4940 - }, - { - "epoch": 0.5941201226477485, - "flos": 18456398718960.0, - "grad_norm": 4.396908211328462, - "learning_rate": 1.4930291439360755e-06, - "loss": 0.7229, - "num_input_tokens_seen": 105174385, - "step": 4941 - }, - { - "epoch": 0.5942403655383875, - "flos": 16404699754920.0, - "grad_norm": 3.209104288453007, - "learning_rate": 1.4922756525945427e-06, - "loss": 0.7773, - "num_input_tokens_seen": 105193415, - "step": 4942 - }, - { - "epoch": 0.5943606084290266, - "flos": 49869162684360.0, - "grad_norm": 0.7815330374135685, - "learning_rate": 1.4915222382743894e-06, - "loss": 0.6092, - "num_input_tokens_seen": 105251970, - "step": 4943 - }, - { - "epoch": 0.5944808513196658, - "flos": 13299500825160.0, - "grad_norm": 5.112168414641501, - "learning_rate": 1.4907689010899085e-06, - "loss": 0.7044, - "num_input_tokens_seen": 105269270, - "step": 4944 - }, - { - "epoch": 0.5946010942103048, - "flos": 18160047578400.0, - "grad_norm": 6.47242551310597, - "learning_rate": 1.4900156411553804e-06, - "loss": 0.6054, - "num_input_tokens_seen": 105288820, - "step": 4945 - }, - { - "epoch": 0.5947213371009439, - "flos": 11085603174240.0, - "grad_norm": 8.795447767352305, - "learning_rate": 1.4892624585850739e-06, - "loss": 0.8353, - "num_input_tokens_seen": 105306895, - "step": 4946 - }, - { - "epoch": 0.594841579991583, - "flos": 18942361580400.0, - "grad_norm": 5.9495278050880325, - "learning_rate": 1.4885093534932465e-06, - "loss": 0.7729, - "num_input_tokens_seen": 105324580, - "step": 4947 - }, - { - "epoch": 0.5949618228822221, - "flos": 17559810226800.0, - "grad_norm": 4.9522448217979305, - "learning_rate": 1.4877563259941433e-06, - "loss": 0.703, - "num_input_tokens_seen": 105342155, - "step": 4948 - }, - { - "epoch": 0.5950820657728612, - "flos": 29820306856560.0, - "grad_norm": 3.822039192521757, - "learning_rate": 1.4870033762019988e-06, - "loss": 0.6601, - "num_input_tokens_seen": 105362040, - "step": 4949 - }, - { - "epoch": 0.5952023086635003, - "flos": 16971097100640.0, - "grad_norm": 3.0397989121235405, - "learning_rate": 1.4862505042310334e-06, - "loss": 0.7171, - "num_input_tokens_seen": 105381045, - "step": 4950 - }, - { - "epoch": 0.5953225515541394, - "flos": 24718361670600.0, - "grad_norm": 2.699072296983403, - "learning_rate": 1.4854977101954587e-06, - "loss": 0.686, - "num_input_tokens_seen": 105402985, - "step": 4951 - }, - { - "epoch": 0.5954427944447784, - "flos": 17914533350880.0, - "grad_norm": 10.131706371536012, - "learning_rate": 1.4847449942094716e-06, - "loss": 0.8328, - "num_input_tokens_seen": 105421585, - "step": 4952 - }, - { - "epoch": 0.5955630373354175, - "flos": 13542798855480.0, - "grad_norm": 4.424870962211956, - "learning_rate": 1.4839923563872598e-06, - "loss": 0.8428, - "num_input_tokens_seen": 105439845, - "step": 4953 - }, - { - "epoch": 0.5956832802260567, - "flos": 14460278429880.0, - "grad_norm": 4.63960064715047, - "learning_rate": 1.483239796842997e-06, - "loss": 0.7478, - "num_input_tokens_seen": 105457595, - "step": 4954 - }, - { - "epoch": 0.5958035231166957, - "flos": 14429446120200.0, - "grad_norm": 3.305042004339984, - "learning_rate": 1.4824873156908462e-06, - "loss": 0.8191, - "num_input_tokens_seen": 105475240, - "step": 4955 - }, - { - "epoch": 0.5959237660073348, - "flos": 15837669210000.0, - "grad_norm": 2.9548154160736346, - "learning_rate": 1.4817349130449584e-06, - "loss": 0.7458, - "num_input_tokens_seen": 105494680, - 
"step": 4956 - }, - { - "epoch": 0.5960440088979739, - "flos": 15480223329360.0, - "grad_norm": 5.489710980643788, - "learning_rate": 1.4809825890194717e-06, - "loss": 0.8111, - "num_input_tokens_seen": 105513070, - "step": 4957 - }, - { - "epoch": 0.596164251788613, - "flos": 10734774225240.0, - "grad_norm": 3.0360717178337664, - "learning_rate": 1.4802303437285139e-06, - "loss": 0.7534, - "num_input_tokens_seen": 105530060, - "step": 4958 - }, - { - "epoch": 0.596284494679252, - "flos": 14974192544640.0, - "grad_norm": 4.724632184279893, - "learning_rate": 1.4794781772861994e-06, - "loss": 0.7715, - "num_input_tokens_seen": 105546275, - "step": 4959 - }, - { - "epoch": 0.5964047375698912, - "flos": 22912335233880.0, - "grad_norm": 4.492831560487017, - "learning_rate": 1.4787260898066324e-06, - "loss": 0.6485, - "num_input_tokens_seen": 105565995, - "step": 4960 - }, - { - "epoch": 0.5965249804605303, - "flos": 20151886540800.0, - "grad_norm": 5.914129259344966, - "learning_rate": 1.4779740814039023e-06, - "loss": 0.8379, - "num_input_tokens_seen": 105585800, - "step": 4961 - }, - { - "epoch": 0.5966452233511693, - "flos": 22587874540200.0, - "grad_norm": 5.137453904142353, - "learning_rate": 1.4772221521920894e-06, - "loss": 0.6642, - "num_input_tokens_seen": 105605545, - "step": 4962 - }, - { - "epoch": 0.5967654662418085, - "flos": 18670542417480.0, - "grad_norm": 3.3593839423481997, - "learning_rate": 1.4764703022852598e-06, - "loss": 0.7236, - "num_input_tokens_seen": 105625785, - "step": 4963 - }, - { - "epoch": 0.5968857091324475, - "flos": 13947820673280.0, - "grad_norm": 4.394563150925879, - "learning_rate": 1.4757185317974696e-06, - "loss": 0.7453, - "num_input_tokens_seen": 105643890, - "step": 4964 - }, - { - "epoch": 0.5970059520230866, - "flos": 17346869606760.0, - "grad_norm": 4.643979898874182, - "learning_rate": 1.474966840842761e-06, - "loss": 0.7031, - "num_input_tokens_seen": 105663190, - "step": 4965 - }, - { - "epoch": 0.5971261949137258, - "flos": 16972046899440.0, - "grad_norm": 5.930078305118237, - "learning_rate": 1.4742152295351655e-06, - "loss": 0.8544, - "num_input_tokens_seen": 105682065, - "step": 4966 - }, - { - "epoch": 0.5972464378043648, - "flos": 15027150675000.0, - "grad_norm": 6.47629391029314, - "learning_rate": 1.4734636979887016e-06, - "loss": 0.6213, - "num_input_tokens_seen": 105699245, - "step": 4967 - }, - { - "epoch": 0.5973666806950039, - "flos": 21561407688960.0, - "grad_norm": 4.999188357280348, - "learning_rate": 1.4727122463173755e-06, - "loss": 0.89, - "num_input_tokens_seen": 105717495, - "step": 4968 - }, - { - "epoch": 0.597486923585643, - "flos": 16297105516320.0, - "grad_norm": 3.6636190898495786, - "learning_rate": 1.471960874635183e-06, - "loss": 0.6263, - "num_input_tokens_seen": 105736775, - "step": 4969 - }, - { - "epoch": 0.5976071664762821, - "flos": 10005766613160.0, - "grad_norm": 4.8438088690349055, - "learning_rate": 1.4712095830561055e-06, - "loss": 0.6797, - "num_input_tokens_seen": 105754985, - "step": 4970 - }, - { - "epoch": 0.5977274093669211, - "flos": 13947377433840.0, - "grad_norm": 4.834087159576442, - "learning_rate": 1.4704583716941147e-06, - "loss": 0.7814, - "num_input_tokens_seen": 105773570, - "step": 4971 - }, - { - "epoch": 0.5978476522575603, - "flos": 14892776601600.0, - "grad_norm": 3.049231923431863, - "learning_rate": 1.4697072406631672e-06, - "loss": 0.6975, - "num_input_tokens_seen": 105793195, - "step": 4972 - }, - { - "epoch": 0.5979678951481994, - "flos": 21292247962680.0, - "grad_norm": 
3.56952790502073, - "learning_rate": 1.4689561900772097e-06, - "loss": 0.7208, - "num_input_tokens_seen": 105812975, - "step": 4973 - }, - { - "epoch": 0.5980881380388384, - "flos": 13110427323600.0, - "grad_norm": 7.467407872104559, - "learning_rate": 1.4682052200501758e-06, - "loss": 0.7117, - "num_input_tokens_seen": 105829900, - "step": 4974 - }, - { - "epoch": 0.5982083809294776, - "flos": 16806682216560.0, - "grad_norm": 4.912347394208467, - "learning_rate": 1.4674543306959876e-06, - "loss": 0.7679, - "num_input_tokens_seen": 105849090, - "step": 4975 - }, - { - "epoch": 0.5983286238201166, - "flos": 15348002133240.0, - "grad_norm": 6.33142184808501, - "learning_rate": 1.4667035221285535e-06, - "loss": 0.8282, - "num_input_tokens_seen": 105866450, - "step": 4976 - }, - { - "epoch": 0.5984488667107557, - "flos": 20670739609320.0, - "grad_norm": 2.724727329996003, - "learning_rate": 1.4659527944617715e-06, - "loss": 0.7304, - "num_input_tokens_seen": 105886115, - "step": 4977 - }, - { - "epoch": 0.5985691096013949, - "flos": 12003367688280.0, - "grad_norm": 8.521907623642633, - "learning_rate": 1.465202147809526e-06, - "loss": 0.741, - "num_input_tokens_seen": 105904330, - "step": 4978 - }, - { - "epoch": 0.5986893524920339, - "flos": 19586280694080.0, - "grad_norm": 5.523514678286382, - "learning_rate": 1.4644515822856888e-06, - "loss": 0.7537, - "num_input_tokens_seen": 105922485, - "step": 4979 - }, - { - "epoch": 0.598809595382673, - "flos": 45412877909880.0, - "grad_norm": 0.7676365953845266, - "learning_rate": 1.4637010980041215e-06, - "loss": 0.5805, - "num_input_tokens_seen": 105984315, - "step": 4980 - }, - { - "epoch": 0.5989298382733121, - "flos": 8547213169680.0, - "grad_norm": 9.279249393098366, - "learning_rate": 1.4629506950786707e-06, - "loss": 0.8792, - "num_input_tokens_seen": 106000215, - "step": 4981 - }, - { - "epoch": 0.5990500811639512, - "flos": 44240006200440.0, - "grad_norm": 0.826194961261367, - "learning_rate": 1.4622003736231733e-06, - "loss": 0.5787, - "num_input_tokens_seen": 106058925, - "step": 4982 - }, - { - "epoch": 0.5991703240545903, - "flos": 13299120905640.0, - "grad_norm": 3.597261786192164, - "learning_rate": 1.461450133751451e-06, - "loss": 0.7763, - "num_input_tokens_seen": 106076715, - "step": 4983 - }, - { - "epoch": 0.5992905669452293, - "flos": 20320385559720.0, - "grad_norm": 4.018138529294452, - "learning_rate": 1.4606999755773153e-06, - "loss": 0.7466, - "num_input_tokens_seen": 106097640, - "step": 4984 - }, - { - "epoch": 0.5994108098358685, - "flos": 14946811170600.0, - "grad_norm": 3.0143877564851795, - "learning_rate": 1.4599498992145643e-06, - "loss": 0.7922, - "num_input_tokens_seen": 106117385, - "step": 4985 - }, - { - "epoch": 0.5995310527265075, - "flos": 16293971180280.0, - "grad_norm": 4.168782548906033, - "learning_rate": 1.4591999047769846e-06, - "loss": 0.7035, - "num_input_tokens_seen": 106135960, - "step": 4986 - }, - { - "epoch": 0.5996512956171466, - "flos": 13813035020400.0, - "grad_norm": 3.7521330806661557, - "learning_rate": 1.4584499923783486e-06, - "loss": 0.7418, - "num_input_tokens_seen": 106154260, - "step": 4987 - }, - { - "epoch": 0.5997715385077858, - "flos": 11187751899720.0, - "grad_norm": 4.813548778844303, - "learning_rate": 1.457700162132419e-06, - "loss": 0.7522, - "num_input_tokens_seen": 106170970, - "step": 4988 - }, - { - "epoch": 0.5998917813984248, - "flos": 18510813207480.0, - "grad_norm": 5.0514696305596845, - "learning_rate": 1.4569504141529433e-06, - "loss": 0.6998, - 
"num_input_tokens_seen": 106188525, - "step": 4989 - }, - { - "epoch": 0.6000120242890639, - "flos": 16135001809320.0, - "grad_norm": 11.959635558428596, - "learning_rate": 1.456200748553658e-06, - "loss": 0.7031, - "num_input_tokens_seen": 106206240, - "step": 4990 - }, - { - "epoch": 0.600132267179703, - "flos": 21914262875400.0, - "grad_norm": 3.663130977295752, - "learning_rate": 1.455451165448287e-06, - "loss": 0.7694, - "num_input_tokens_seen": 106228615, - "step": 4991 - }, - { - "epoch": 0.6002525100703421, - "flos": 18348297921000.0, - "grad_norm": 4.862101780292534, - "learning_rate": 1.4547016649505407e-06, - "loss": 0.6983, - "num_input_tokens_seen": 106246345, - "step": 4992 - }, - { - "epoch": 0.6003727529609811, - "flos": 15243162311160.0, - "grad_norm": 5.2081014759590385, - "learning_rate": 1.4539522471741193e-06, - "loss": 0.8283, - "num_input_tokens_seen": 106263490, - "step": 4993 - }, - { - "epoch": 0.6004929958516203, - "flos": 11353971401520.0, - "grad_norm": 3.9136028210218843, - "learning_rate": 1.4532029122327067e-06, - "loss": 0.6901, - "num_input_tokens_seen": 106279995, - "step": 4994 - }, - { - "epoch": 0.6006132387422594, - "flos": 15919560052440.0, - "grad_norm": 5.219937674782456, - "learning_rate": 1.4524536602399783e-06, - "loss": 0.7388, - "num_input_tokens_seen": 106298805, - "step": 4995 - }, - { - "epoch": 0.6007334816328984, - "flos": 16730236887240.0, - "grad_norm": 6.661803315400864, - "learning_rate": 1.4517044913095938e-06, - "loss": 0.7606, - "num_input_tokens_seen": 106318945, - "step": 4996 - }, - { - "epoch": 0.6008537245235376, - "flos": 20774502992760.0, - "grad_norm": 3.2327801739204394, - "learning_rate": 1.4509554055552022e-06, - "loss": 0.8009, - "num_input_tokens_seen": 106338895, - "step": 4997 - }, - { - "epoch": 0.6009739674141766, - "flos": 15270195425640.0, - "grad_norm": 6.350853161305562, - "learning_rate": 1.450206403090439e-06, - "loss": 0.8176, - "num_input_tokens_seen": 106356810, - "step": 4998 - }, - { - "epoch": 0.6010942103048157, - "flos": 14969855130120.0, - "grad_norm": 4.323791317374275, - "learning_rate": 1.4494574840289274e-06, - "loss": 0.8443, - "num_input_tokens_seen": 106373645, - "step": 4999 - }, - { - "epoch": 0.6012144531954549, - "flos": 17431229926080.0, - "grad_norm": 5.194924797563845, - "learning_rate": 1.4487086484842782e-06, - "loss": 0.7303, - "num_input_tokens_seen": 106392010, - "step": 5000 - }, - { - "epoch": 0.6013346960860939, - "flos": 13865676551160.0, - "grad_norm": 6.8711004121627735, - "learning_rate": 1.4479598965700878e-06, - "loss": 0.5765, - "num_input_tokens_seen": 106408995, - "step": 5001 - }, - { - "epoch": 0.601454938976733, - "flos": 17593618572720.0, - "grad_norm": 4.570099237145023, - "learning_rate": 1.4472112283999427e-06, - "loss": 0.665, - "num_input_tokens_seen": 106427370, - "step": 5002 - }, - { - "epoch": 0.6015751818673721, - "flos": 19370237397960.0, - "grad_norm": 6.219391909493801, - "learning_rate": 1.4464626440874143e-06, - "loss": 0.6869, - "num_input_tokens_seen": 106446205, - "step": 5003 - }, - { - "epoch": 0.6016954247580112, - "flos": 9519328852320.0, - "grad_norm": 5.918052224555674, - "learning_rate": 1.4457141437460636e-06, - "loss": 0.7203, - "num_input_tokens_seen": 106463150, - "step": 5004 - }, - { - "epoch": 0.6018156676486502, - "flos": 17403310332720.0, - "grad_norm": 8.397972370840847, - "learning_rate": 1.444965727489436e-06, - "loss": 0.7175, - "num_input_tokens_seen": 106482315, - "step": 5005 - }, - { - "epoch": 0.6019359105392894, - "flos": 
19402304446080.0, - "grad_norm": 12.222761115321962, - "learning_rate": 1.444217395431066e-06, - "loss": 0.6223, - "num_input_tokens_seen": 106504575, - "step": 5006 - }, - { - "epoch": 0.6020561534299285, - "flos": 51023766596880.0, - "grad_norm": 0.8091368394796098, - "learning_rate": 1.4434691476844755e-06, - "loss": 0.5774, - "num_input_tokens_seen": 106565270, - "step": 5007 - }, - { - "epoch": 0.6021763963205675, - "flos": 15972739802520.0, - "grad_norm": 4.781201131733004, - "learning_rate": 1.4427209843631729e-06, - "loss": 0.6589, - "num_input_tokens_seen": 106582040, - "step": 5008 - }, - { - "epoch": 0.6022966392112067, - "flos": 19483150509840.0, - "grad_norm": 3.3326687309206346, - "learning_rate": 1.4419729055806534e-06, - "loss": 0.7991, - "num_input_tokens_seen": 106601195, - "step": 5009 - }, - { - "epoch": 0.6024168821018457, - "flos": 15135219813000.0, - "grad_norm": 4.370201749356444, - "learning_rate": 1.441224911450401e-06, - "loss": 0.8202, - "num_input_tokens_seen": 106616870, - "step": 5010 - }, - { - "epoch": 0.6025371249924848, - "flos": 18078694955280.0, - "grad_norm": 3.9970555196861617, - "learning_rate": 1.4404770020858851e-06, - "loss": 0.8206, - "num_input_tokens_seen": 106636075, - "step": 5011 - }, - { - "epoch": 0.602657367883124, - "flos": 19021846265880.0, - "grad_norm": 2.6467843536744655, - "learning_rate": 1.439729177600563e-06, - "loss": 0.8518, - "num_input_tokens_seen": 106656290, - "step": 5012 - }, - { - "epoch": 0.602777610773763, - "flos": 12165313095480.0, - "grad_norm": 4.34825570223021, - "learning_rate": 1.4389814381078793e-06, - "loss": 0.7136, - "num_input_tokens_seen": 106675250, - "step": 5013 - }, - { - "epoch": 0.6028978536644021, - "flos": 9681400899360.0, - "grad_norm": 5.3225618108526165, - "learning_rate": 1.438233783721265e-06, - "loss": 0.7776, - "num_input_tokens_seen": 106691135, - "step": 5014 - }, - { - "epoch": 0.6030180965550412, - "flos": 14351449452840.0, - "grad_norm": 4.78517857137651, - "learning_rate": 1.43748621455414e-06, - "loss": 0.7633, - "num_input_tokens_seen": 106707290, - "step": 5015 - }, - { - "epoch": 0.6031383394456803, - "flos": 10512367277160.0, - "grad_norm": 15.367247573122286, - "learning_rate": 1.4367387307199082e-06, - "loss": 0.7919, - "num_input_tokens_seen": 106723860, - "step": 5016 - }, - { - "epoch": 0.6032585823363193, - "flos": 10113234211920.0, - "grad_norm": 4.221202448339014, - "learning_rate": 1.4359913323319632e-06, - "loss": 0.8142, - "num_input_tokens_seen": 106740750, - "step": 5017 - }, - { - "epoch": 0.6033788252269584, - "flos": 17836251743880.0, - "grad_norm": 3.126897007225986, - "learning_rate": 1.4352440195036847e-06, - "loss": 0.766, - "num_input_tokens_seen": 106760645, - "step": 5018 - }, - { - "epoch": 0.6034990681175976, - "flos": 18700709868000.0, - "grad_norm": 3.0922618330800726, - "learning_rate": 1.4344967923484395e-06, - "loss": 0.7766, - "num_input_tokens_seen": 106782335, - "step": 5019 - }, - { - "epoch": 0.6036193110082366, - "flos": 19024125783000.0, - "grad_norm": 14.83002449306284, - "learning_rate": 1.433749650979581e-06, - "loss": 0.6994, - "num_input_tokens_seen": 106802040, - "step": 5020 - }, - { - "epoch": 0.6037395538988757, - "flos": 18754016257920.0, - "grad_norm": 3.6434765564274363, - "learning_rate": 1.433002595510451e-06, - "loss": 0.6683, - "num_input_tokens_seen": 106820540, - "step": 5021 - }, - { - "epoch": 0.6038597967895148, - "flos": 12997577531640.0, - "grad_norm": 10.482899864789099, - "learning_rate": 1.4322556260543757e-06, - 
"loss": 0.7071, - "num_input_tokens_seen": 106836835, - "step": 5022 - }, - { - "epoch": 0.6039800396801539, - "flos": 48046324808880.0, - "grad_norm": 1.0332608702591064, - "learning_rate": 1.4315087427246703e-06, - "loss": 0.6487, - "num_input_tokens_seen": 106890380, - "step": 5023 - }, - { - "epoch": 0.604100282570793, - "flos": 49686005103960.0, - "grad_norm": 0.896210073754889, - "learning_rate": 1.4307619456346372e-06, - "loss": 0.6106, - "num_input_tokens_seen": 106934405, - "step": 5024 - }, - { - "epoch": 0.6042205254614321, - "flos": 25935516681360.0, - "grad_norm": 2.734810581935682, - "learning_rate": 1.430015234897564e-06, - "loss": 0.7151, - "num_input_tokens_seen": 106957405, - "step": 5025 - }, - { - "epoch": 0.6043407683520712, - "flos": 33603011626200.0, - "grad_norm": 6.632011589877651, - "learning_rate": 1.4292686106267274e-06, - "loss": 0.6471, - "num_input_tokens_seen": 106979975, - "step": 5026 - }, - { - "epoch": 0.6044610112427102, - "flos": 11787451032000.0, - "grad_norm": 4.373843012821295, - "learning_rate": 1.4285220729353876e-06, - "loss": 0.749, - "num_input_tokens_seen": 106998235, - "step": 5027 - }, - { - "epoch": 0.6045812541333494, - "flos": 10028462313120.0, - "grad_norm": 27.384417303188254, - "learning_rate": 1.4277756219367957e-06, - "loss": 0.7658, - "num_input_tokens_seen": 107014980, - "step": 5028 - }, - { - "epoch": 0.6047014970239885, - "flos": 14596108861440.0, - "grad_norm": 4.579764739744951, - "learning_rate": 1.4270292577441864e-06, - "loss": 0.7823, - "num_input_tokens_seen": 107034205, - "step": 5029 - }, - { - "epoch": 0.6048217399146275, - "flos": 18564182917320.0, - "grad_norm": 2.73497351499353, - "learning_rate": 1.4262829804707836e-06, - "loss": 0.7042, - "num_input_tokens_seen": 107055915, - "step": 5030 - }, - { - "epoch": 0.6049419828052667, - "flos": 19077653792640.0, - "grad_norm": 4.740210634855056, - "learning_rate": 1.4255367902297958e-06, - "loss": 0.6778, - "num_input_tokens_seen": 107076965, - "step": 5031 - }, - { - "epoch": 0.6050622256959057, - "flos": 10653801521640.0, - "grad_norm": 4.829098501484116, - "learning_rate": 1.4247906871344215e-06, - "loss": 0.7822, - "num_input_tokens_seen": 107092080, - "step": 5032 - }, - { - "epoch": 0.6051824685865448, - "flos": 17080021057560.0, - "grad_norm": 10.100265620298604, - "learning_rate": 1.4240446712978415e-06, - "loss": 0.7372, - "num_input_tokens_seen": 107110785, - "step": 5033 - }, - { - "epoch": 0.605302711477184, - "flos": 20211809862360.0, - "grad_norm": 6.75941601730649, - "learning_rate": 1.423298742833227e-06, - "loss": 0.7207, - "num_input_tokens_seen": 107129165, - "step": 5034 - }, - { - "epoch": 0.605422954367823, - "flos": 11028117669600.0, - "grad_norm": 3.717958895098504, - "learning_rate": 1.4225529018537352e-06, - "loss": 0.717, - "num_input_tokens_seen": 107144390, - "step": 5035 - }, - { - "epoch": 0.6055431972584621, - "flos": 20293827344640.0, - "grad_norm": 3.3019669248517527, - "learning_rate": 1.4218071484725082e-06, - "loss": 0.7665, - "num_input_tokens_seen": 107166230, - "step": 5036 - }, - { - "epoch": 0.6056634401491012, - "flos": 14160729633360.0, - "grad_norm": 7.617652229413026, - "learning_rate": 1.4210614828026786e-06, - "loss": 0.7437, - "num_input_tokens_seen": 107183800, - "step": 5037 - }, - { - "epoch": 0.6057836830397403, - "flos": 18158844499920.0, - "grad_norm": 3.2035690372006718, - "learning_rate": 1.4203159049573605e-06, - "loss": 0.7326, - "num_input_tokens_seen": 107204755, - "step": 5038 - }, - { - "epoch": 
0.6059039259303793, - "flos": 15027625574400.0, - "grad_norm": 5.558627034005235, - "learning_rate": 1.4195704150496593e-06, - "loss": 0.8555, - "num_input_tokens_seen": 107222190, - "step": 5039 - }, - { - "epoch": 0.6060241688210185, - "flos": 15406247476920.0, - "grad_norm": 3.978173669605744, - "learning_rate": 1.4188250131926639e-06, - "loss": 0.7151, - "num_input_tokens_seen": 107240710, - "step": 5040 - }, - { - "epoch": 0.6061444117116576, - "flos": 11921255226120.0, - "grad_norm": 4.668788541050027, - "learning_rate": 1.4180796994994525e-06, - "loss": 0.7919, - "num_input_tokens_seen": 107257845, - "step": 5041 - }, - { - "epoch": 0.6062646546022966, - "flos": 15730043311440.0, - "grad_norm": 3.843074956404628, - "learning_rate": 1.4173344740830877e-06, - "loss": 0.7074, - "num_input_tokens_seen": 107276695, - "step": 5042 - }, - { - "epoch": 0.6063848974929358, - "flos": 28663233467160.0, - "grad_norm": 2.9279656398403886, - "learning_rate": 1.4165893370566206e-06, - "loss": 0.6909, - "num_input_tokens_seen": 107300170, - "step": 5043 - }, - { - "epoch": 0.6065051403835748, - "flos": 14353000790880.0, - "grad_norm": 3.082574561981556, - "learning_rate": 1.4158442885330865e-06, - "loss": 0.7506, - "num_input_tokens_seen": 107318460, - "step": 5044 - }, - { - "epoch": 0.6066253832742139, - "flos": 17214331811040.0, - "grad_norm": 3.9725279360653745, - "learning_rate": 1.4150993286255094e-06, - "loss": 0.7772, - "num_input_tokens_seen": 107337430, - "step": 5045 - }, - { - "epoch": 0.6067456261648531, - "flos": 13973144149920.0, - "grad_norm": 5.393540984134095, - "learning_rate": 1.4143544574468993e-06, - "loss": 0.7681, - "num_input_tokens_seen": 107355510, - "step": 5046 - }, - { - "epoch": 0.6068658690554921, - "flos": 14999611001160.0, - "grad_norm": 2.7683132920830653, - "learning_rate": 1.4136096751102523e-06, - "loss": 0.8045, - "num_input_tokens_seen": 107373560, - "step": 5047 - }, - { - "epoch": 0.6069861119461312, - "flos": 20071230436800.0, - "grad_norm": 4.868329693301959, - "learning_rate": 1.4128649817285516e-06, - "loss": 0.8094, - "num_input_tokens_seen": 107391415, - "step": 5048 - }, - { - "epoch": 0.6071063548367702, - "flos": 18778010016240.0, - "grad_norm": 5.823210334082971, - "learning_rate": 1.412120377414766e-06, - "loss": 0.62, - "num_input_tokens_seen": 107411325, - "step": 5049 - }, - { - "epoch": 0.6072265977274094, - "flos": 17915324849880.0, - "grad_norm": 3.262312966316652, - "learning_rate": 1.4113758622818522e-06, - "loss": 0.6857, - "num_input_tokens_seen": 107431110, - "step": 5050 - }, - { - "epoch": 0.6073468406180484, - "flos": 13244674757160.0, - "grad_norm": 3.8598034578400875, - "learning_rate": 1.410631436442751e-06, - "loss": 0.8088, - "num_input_tokens_seen": 107449625, - "step": 5051 - }, - { - "epoch": 0.6074670835086875, - "flos": 15129837619800.0, - "grad_norm": 6.069314803487943, - "learning_rate": 1.4098871000103936e-06, - "loss": 0.8488, - "num_input_tokens_seen": 107467945, - "step": 5052 - }, - { - "epoch": 0.6075873263993267, - "flos": 17404640051040.0, - "grad_norm": 3.7906702150571325, - "learning_rate": 1.409142853097693e-06, - "loss": 0.8129, - "num_input_tokens_seen": 107487905, - "step": 5053 - }, - { - "epoch": 0.6077075692899657, - "flos": 17910005976600.0, - "grad_norm": 5.551647271739224, - "learning_rate": 1.408398695817553e-06, - "loss": 0.7834, - "num_input_tokens_seen": 107504850, - "step": 5054 - }, - { - "epoch": 0.6078278121806048, - "flos": 20077879028400.0, - "grad_norm": 17.003081098603957, - 
"learning_rate": 1.4076546282828593e-06, - "loss": 0.678, - "num_input_tokens_seen": 107527425, - "step": 5055 - }, - { - "epoch": 0.6079480550712439, - "flos": 28258718208720.0, - "grad_norm": 9.032182163587628, - "learning_rate": 1.4069106506064874e-06, - "loss": 0.6403, - "num_input_tokens_seen": 107548570, - "step": 5056 - }, - { - "epoch": 0.608068297961883, - "flos": 18563454738240.0, - "grad_norm": 5.52897105710614, - "learning_rate": 1.4061667629012989e-06, - "loss": 0.7615, - "num_input_tokens_seen": 107568960, - "step": 5057 - }, - { - "epoch": 0.608188540852522, - "flos": 17723971831200.0, - "grad_norm": 2.818140604327484, - "learning_rate": 1.40542296528014e-06, - "loss": 0.8146, - "num_input_tokens_seen": 107588340, - "step": 5058 - }, - { - "epoch": 0.6083087837431612, - "flos": 15564298709040.0, - "grad_norm": 3.011153310297846, - "learning_rate": 1.4046792578558452e-06, - "loss": 0.7412, - "num_input_tokens_seen": 107605955, - "step": 5059 - }, - { - "epoch": 0.6084290266338003, - "flos": 12003051088680.0, - "grad_norm": 5.165837477729383, - "learning_rate": 1.4039356407412325e-06, - "loss": 0.7464, - "num_input_tokens_seen": 107618915, - "step": 5060 - }, - { - "epoch": 0.6085492695244393, - "flos": 48989064540000.0, - "grad_norm": 0.8326573769563024, - "learning_rate": 1.40319211404911e-06, - "loss": 0.6109, - "num_input_tokens_seen": 107673635, - "step": 5061 - }, - { - "epoch": 0.6086695124150785, - "flos": 17292201838560.0, - "grad_norm": 4.1086244078945064, - "learning_rate": 1.4024486778922691e-06, - "loss": 0.8796, - "num_input_tokens_seen": 107691670, - "step": 5062 - }, - { - "epoch": 0.6087897553057176, - "flos": 14730736214520.0, - "grad_norm": 5.8230459054470245, - "learning_rate": 1.4017053323834884e-06, - "loss": 0.7581, - "num_input_tokens_seen": 107711220, - "step": 5063 - }, - { - "epoch": 0.6089099981963566, - "flos": 18671745495960.0, - "grad_norm": 4.032584982140314, - "learning_rate": 1.4009620776355333e-06, - "loss": 0.7423, - "num_input_tokens_seen": 107732540, - "step": 5064 - }, - { - "epoch": 0.6090302410869958, - "flos": 18560478702000.0, - "grad_norm": 4.6808065818134015, - "learning_rate": 1.4002189137611553e-06, - "loss": 0.7775, - "num_input_tokens_seen": 107751600, - "step": 5065 - }, - { - "epoch": 0.6091504839776348, - "flos": 17565319059840.0, - "grad_norm": 5.307644408997581, - "learning_rate": 1.3994758408730901e-06, - "loss": 0.6755, - "num_input_tokens_seen": 107770505, - "step": 5066 - }, - { - "epoch": 0.6092707268682739, - "flos": 21750956089920.0, - "grad_norm": 5.762908378247412, - "learning_rate": 1.3987328590840629e-06, - "loss": 0.7679, - "num_input_tokens_seen": 107791170, - "step": 5067 - }, - { - "epoch": 0.609390969758913, - "flos": 17593301973120.0, - "grad_norm": 3.8341981630966915, - "learning_rate": 1.397989968506783e-06, - "loss": 0.8473, - "num_input_tokens_seen": 107809900, - "step": 5068 - }, - { - "epoch": 0.6095112126495521, - "flos": 8028581720880.0, - "grad_norm": 6.375806271298623, - "learning_rate": 1.3972471692539458e-06, - "loss": 0.7105, - "num_input_tokens_seen": 107824335, - "step": 5069 - }, - { - "epoch": 0.6096314555401912, - "flos": 12589484697720.0, - "grad_norm": 12.383689218766442, - "learning_rate": 1.3965044614382348e-06, - "loss": 0.7382, - "num_input_tokens_seen": 107839505, - "step": 5070 - }, - { - "epoch": 0.6097516984308303, - "flos": 15831590497680.0, - "grad_norm": 15.254436910583896, - "learning_rate": 1.3957618451723162e-06, - "loss": 0.7411, - "num_input_tokens_seen": 107855255, - 
"step": 5071 - }, - { - "epoch": 0.6098719413214694, - "flos": 19942460176320.0, - "grad_norm": 4.978144180251323, - "learning_rate": 1.3950193205688457e-06, - "loss": 0.687, - "num_input_tokens_seen": 107874700, - "step": 5072 - }, - { - "epoch": 0.6099921842121084, - "flos": 14920189635600.0, - "grad_norm": 3.658482318413676, - "learning_rate": 1.3942768877404627e-06, - "loss": 0.8222, - "num_input_tokens_seen": 107893385, - "step": 5073 - }, - { - "epoch": 0.6101124271027476, - "flos": 17105439514080.0, - "grad_norm": 3.682724139092658, - "learning_rate": 1.393534546799795e-06, - "loss": 0.7397, - "num_input_tokens_seen": 107912805, - "step": 5074 - }, - { - "epoch": 0.6102326699933867, - "flos": 19563933253680.0, - "grad_norm": 3.5168703035330373, - "learning_rate": 1.3927922978594536e-06, - "loss": 0.6568, - "num_input_tokens_seen": 107933610, - "step": 5075 - }, - { - "epoch": 0.6103529128840257, - "flos": 44697764528880.0, - "grad_norm": 0.8397629100128584, - "learning_rate": 1.3920501410320387e-06, - "loss": 0.6159, - "num_input_tokens_seen": 107989445, - "step": 5076 - }, - { - "epoch": 0.6104731557746649, - "flos": 13889543669640.0, - "grad_norm": 4.89059672484588, - "learning_rate": 1.3913080764301333e-06, - "loss": 0.747, - "num_input_tokens_seen": 108006125, - "step": 5077 - }, - { - "epoch": 0.6105933986653039, - "flos": 17106104373240.0, - "grad_norm": 5.970173131838062, - "learning_rate": 1.3905661041663085e-06, - "loss": 0.7055, - "num_input_tokens_seen": 108027030, - "step": 5078 - }, - { - "epoch": 0.610713641555943, - "flos": 25446672763560.0, - "grad_norm": 5.477987106859592, - "learning_rate": 1.389824224353122e-06, - "loss": 0.6279, - "num_input_tokens_seen": 108048340, - "step": 5079 - }, - { - "epoch": 0.610833884446582, - "flos": 19532626044600.0, - "grad_norm": 17.28499337846995, - "learning_rate": 1.389082437103115e-06, - "loss": 0.7549, - "num_input_tokens_seen": 108067330, - "step": 5080 - }, - { - "epoch": 0.6109541273372212, - "flos": 15513778395600.0, - "grad_norm": 4.767867364357104, - "learning_rate": 1.3883407425288172e-06, - "loss": 0.7571, - "num_input_tokens_seen": 108087385, - "step": 5081 - }, - { - "epoch": 0.6110743702278603, - "flos": 14676733305480.0, - "grad_norm": 7.153439698769102, - "learning_rate": 1.3875991407427417e-06, - "loss": 0.761, - "num_input_tokens_seen": 108105330, - "step": 5082 - }, - { - "epoch": 0.6111946131184993, - "flos": 50364935642040.0, - "grad_norm": 0.7701390765661621, - "learning_rate": 1.38685763185739e-06, - "loss": 0.6027, - "num_input_tokens_seen": 108158710, - "step": 5083 - }, - { - "epoch": 0.6113148560091385, - "flos": 14568315907920.0, - "grad_norm": 4.527344803238122, - "learning_rate": 1.3861162159852476e-06, - "loss": 0.6517, - "num_input_tokens_seen": 108176565, - "step": 5084 - }, - { - "epoch": 0.6114350988997775, - "flos": 17376213898320.0, - "grad_norm": 3.6613236136181264, - "learning_rate": 1.3853748932387875e-06, - "loss": 0.7845, - "num_input_tokens_seen": 108196925, - "step": 5085 - }, - { - "epoch": 0.6115553417904166, - "flos": 17592193874520.0, - "grad_norm": 4.498990687455415, - "learning_rate": 1.3846336637304671e-06, - "loss": 0.7321, - "num_input_tokens_seen": 108214915, - "step": 5086 - }, - { - "epoch": 0.6116755846810558, - "flos": 17377701916440.0, - "grad_norm": 6.403331557033173, - "learning_rate": 1.3838925275727316e-06, - "loss": 0.8031, - "num_input_tokens_seen": 108235375, - "step": 5087 - }, - { - "epoch": 0.6117958275716948, - "flos": 13624088158680.0, - "grad_norm": 
3.987677906815065, - "learning_rate": 1.3831514848780089e-06, - "loss": 0.7783, - "num_input_tokens_seen": 108254670, - "step": 5088 - }, - { - "epoch": 0.6119160704623339, - "flos": 12002829468960.0, - "grad_norm": 5.1421215348720155, - "learning_rate": 1.3824105357587152e-06, - "loss": 0.8993, - "num_input_tokens_seen": 108271495, - "step": 5089 - }, - { - "epoch": 0.612036313352973, - "flos": 17512265949600.0, - "grad_norm": 2.9881309321874614, - "learning_rate": 1.381669680327253e-06, - "loss": 0.8142, - "num_input_tokens_seen": 108292895, - "step": 5090 - }, - { - "epoch": 0.6121565562436121, - "flos": 19775575815360.0, - "grad_norm": 4.979213421760356, - "learning_rate": 1.380928918696008e-06, - "loss": 0.6938, - "num_input_tokens_seen": 108311385, - "step": 5091 - }, - { - "epoch": 0.6122767991342511, - "flos": 11409114069120.0, - "grad_norm": 6.870810046907395, - "learning_rate": 1.3801882509773548e-06, - "loss": 0.6979, - "num_input_tokens_seen": 108328965, - "step": 5092 - }, - { - "epoch": 0.6123970420248903, - "flos": 20508540922440.0, - "grad_norm": 4.919040214769694, - "learning_rate": 1.3794476772836503e-06, - "loss": 0.7938, - "num_input_tokens_seen": 108349785, - "step": 5093 - }, - { - "epoch": 0.6125172849155294, - "flos": 15701205579240.0, - "grad_norm": 3.3982583185073203, - "learning_rate": 1.3787071977272402e-06, - "loss": 0.8233, - "num_input_tokens_seen": 108368765, - "step": 5094 - }, - { - "epoch": 0.6126375278061684, - "flos": 11837338146240.0, - "grad_norm": 9.029104855802169, - "learning_rate": 1.3779668124204535e-06, - "loss": 0.6907, - "num_input_tokens_seen": 108384900, - "step": 5095 - }, - { - "epoch": 0.6127577706968076, - "flos": 14946684530760.0, - "grad_norm": 4.610818100368686, - "learning_rate": 1.3772265214756074e-06, - "loss": 0.7956, - "num_input_tokens_seen": 108404380, - "step": 5096 - }, - { - "epoch": 0.6128780135874466, - "flos": 13327167138840.0, - "grad_norm": 3.855659580629075, - "learning_rate": 1.3764863250050025e-06, - "loss": 0.725, - "num_input_tokens_seen": 108422340, - "step": 5097 - }, - { - "epoch": 0.6129982564780857, - "flos": 18268338336120.0, - "grad_norm": 4.44623147217947, - "learning_rate": 1.3757462231209272e-06, - "loss": 0.79, - "num_input_tokens_seen": 108442365, - "step": 5098 - }, - { - "epoch": 0.6131184993687249, - "flos": 16458765983880.0, - "grad_norm": 7.637760428875501, - "learning_rate": 1.3750062159356525e-06, - "loss": 0.8679, - "num_input_tokens_seen": 108461435, - "step": 5099 - }, - { - "epoch": 0.6132387422593639, - "flos": 11569413158400.0, - "grad_norm": 6.692258193813656, - "learning_rate": 1.3742663035614382e-06, - "loss": 0.8118, - "num_input_tokens_seen": 108478525, - "step": 5100 - }, - { - "epoch": 0.613358985150003, - "flos": 18619483884720.0, - "grad_norm": 4.075765460617897, - "learning_rate": 1.3735264861105283e-06, - "loss": 0.7879, - "num_input_tokens_seen": 108498885, - "step": 5101 - }, - { - "epoch": 0.6134792280406421, - "flos": 15621974173440.0, - "grad_norm": 4.619699275810686, - "learning_rate": 1.372786763695152e-06, - "loss": 0.7723, - "num_input_tokens_seen": 108517365, - "step": 5102 - }, - { - "epoch": 0.6135994709312812, - "flos": 15510359119920.0, - "grad_norm": 4.4009577575750445, - "learning_rate": 1.3720471364275257e-06, - "loss": 0.7597, - "num_input_tokens_seen": 108536730, - "step": 5103 - }, - { - "epoch": 0.6137197138219203, - "flos": 10761807339720.0, - "grad_norm": 20.89628128479331, - "learning_rate": 1.3713076044198486e-06, - "loss": 0.7611, - 
"num_input_tokens_seen": 108553260, - "step": 5104 - }, - { - "epoch": 0.6138399567125594, - "flos": 14677113225000.0, - "grad_norm": 4.720110968562285, - "learning_rate": 1.3705681677843086e-06, - "loss": 0.7859, - "num_input_tokens_seen": 108571575, - "step": 5105 - }, - { - "epoch": 0.6139601996031985, - "flos": 44316107761560.0, - "grad_norm": 0.7994500982278879, - "learning_rate": 1.3698288266330768e-06, - "loss": 0.6221, - "num_input_tokens_seen": 108631920, - "step": 5106 - }, - { - "epoch": 0.6140804424938375, - "flos": 17268904599360.0, - "grad_norm": 5.402311621492497, - "learning_rate": 1.3690895810783113e-06, - "loss": 0.7111, - "num_input_tokens_seen": 108650435, - "step": 5107 - }, - { - "epoch": 0.6142006853844767, - "flos": 15649355547480.0, - "grad_norm": 5.551807542240994, - "learning_rate": 1.3683504312321543e-06, - "loss": 0.6962, - "num_input_tokens_seen": 108670490, - "step": 5108 - }, - { - "epoch": 0.6143209282751158, - "flos": 8736444971040.0, - "grad_norm": 4.463662405914521, - "learning_rate": 1.3676113772067355e-06, - "loss": 0.7763, - "num_input_tokens_seen": 108687265, - "step": 5109 - }, - { - "epoch": 0.6144411711657548, - "flos": 18375679295040.0, - "grad_norm": 5.002905382986786, - "learning_rate": 1.3668724191141671e-06, - "loss": 0.7041, - "num_input_tokens_seen": 108706255, - "step": 5110 - }, - { - "epoch": 0.6145614140563939, - "flos": 14698004307240.0, - "grad_norm": 5.911370212609539, - "learning_rate": 1.3661335570665493e-06, - "loss": 0.6535, - "num_input_tokens_seen": 108723885, - "step": 5111 - }, - { - "epoch": 0.614681656947033, - "flos": 12246570738720.0, - "grad_norm": 5.454999104332785, - "learning_rate": 1.3653947911759676e-06, - "loss": 0.6731, - "num_input_tokens_seen": 108741155, - "step": 5112 - }, - { - "epoch": 0.6148018998376721, - "flos": 28528986033600.0, - "grad_norm": 3.227209437356596, - "learning_rate": 1.3646561215544904e-06, - "loss": 0.7279, - "num_input_tokens_seen": 108765515, - "step": 5113 - }, - { - "epoch": 0.6149221427283111, - "flos": 17076126882480.0, - "grad_norm": 4.164054255092681, - "learning_rate": 1.363917548314176e-06, - "loss": 0.7765, - "num_input_tokens_seen": 108784500, - "step": 5114 - }, - { - "epoch": 0.6150423856189503, - "flos": 16375038863760.0, - "grad_norm": 4.175580953963243, - "learning_rate": 1.3631790715670626e-06, - "loss": 0.7127, - "num_input_tokens_seen": 108802625, - "step": 5115 - }, - { - "epoch": 0.6151626285095894, - "flos": 13645770739920.0, - "grad_norm": 4.374961852861035, - "learning_rate": 1.3624406914251783e-06, - "loss": 0.8427, - "num_input_tokens_seen": 108819465, - "step": 5116 - }, - { - "epoch": 0.6152828714002284, - "flos": 11544469601280.0, - "grad_norm": 3.890710178653108, - "learning_rate": 1.3617024080005335e-06, - "loss": 0.8561, - "num_input_tokens_seen": 108836085, - "step": 5117 - }, - { - "epoch": 0.6154031142908676, - "flos": 18219337700760.0, - "grad_norm": 5.484418993825062, - "learning_rate": 1.3609642214051266e-06, - "loss": 0.7271, - "num_input_tokens_seen": 108860030, - "step": 5118 - }, - { - "epoch": 0.6155233571815066, - "flos": 14055541551720.0, - "grad_norm": 6.928184785012825, - "learning_rate": 1.3602261317509385e-06, - "loss": 0.6462, - "num_input_tokens_seen": 108876410, - "step": 5119 - }, - { - "epoch": 0.6156436000721457, - "flos": 13704206043360.0, - "grad_norm": 6.462020838551288, - "learning_rate": 1.3594881391499387e-06, - "loss": 0.8022, - "num_input_tokens_seen": 108895045, - "step": 5120 - }, - { - "epoch": 0.6157638429627849, - "flos": 
13164905132040.0, - "grad_norm": 2.5801298726831776, - "learning_rate": 1.3587502437140778e-06, - "loss": 0.7792, - "num_input_tokens_seen": 108912930, - "step": 5121 - }, - { - "epoch": 0.6158840858534239, - "flos": 18726824843640.0, - "grad_norm": 22.597581636112775, - "learning_rate": 1.3580124455552952e-06, - "loss": 0.8286, - "num_input_tokens_seen": 108932015, - "step": 5122 - }, - { - "epoch": 0.616004328744063, - "flos": 18048432524880.0, - "grad_norm": 2.902298733142639, - "learning_rate": 1.3572747447855148e-06, - "loss": 0.8515, - "num_input_tokens_seen": 108952145, - "step": 5123 - }, - { - "epoch": 0.6161245716347021, - "flos": 15865018924080.0, - "grad_norm": 3.285107334421424, - "learning_rate": 1.356537141516644e-06, - "loss": 0.6708, - "num_input_tokens_seen": 108969285, - "step": 5124 - }, - { - "epoch": 0.6162448145253412, - "flos": 25855557096480.0, - "grad_norm": 17.341936666185365, - "learning_rate": 1.3557996358605775e-06, - "loss": 0.6061, - "num_input_tokens_seen": 108988925, - "step": 5125 - }, - { - "epoch": 0.6163650574159802, - "flos": 15809749616640.0, - "grad_norm": 4.533161049729339, - "learning_rate": 1.3550622279291941e-06, - "loss": 0.6862, - "num_input_tokens_seen": 109006790, - "step": 5126 - }, - { - "epoch": 0.6164853003066194, - "flos": 17998260471000.0, - "grad_norm": 2.229745510445378, - "learning_rate": 1.354324917834358e-06, - "loss": 0.8162, - "num_input_tokens_seen": 109027755, - "step": 5127 - }, - { - "epoch": 0.6166055431972585, - "flos": 15973341341760.0, - "grad_norm": 2.7927850409424835, - "learning_rate": 1.353587705687918e-06, - "loss": 0.7554, - "num_input_tokens_seen": 109045650, - "step": 5128 - }, - { - "epoch": 0.6167257860878975, - "flos": 12975420051000.0, - "grad_norm": 6.317213659163931, - "learning_rate": 1.3528505916017096e-06, - "loss": 0.6906, - "num_input_tokens_seen": 109063070, - "step": 5129 - }, - { - "epoch": 0.6168460289785367, - "flos": 16992684702000.0, - "grad_norm": 4.0864483895019985, - "learning_rate": 1.3521135756875514e-06, - "loss": 0.8602, - "num_input_tokens_seen": 109079105, - "step": 5130 - }, - { - "epoch": 0.6169662718691757, - "flos": 19212692725200.0, - "grad_norm": 3.9155940487697065, - "learning_rate": 1.3513766580572496e-06, - "loss": 0.8429, - "num_input_tokens_seen": 109101645, - "step": 5131 - }, - { - "epoch": 0.6170865147598148, - "flos": 13893786104280.0, - "grad_norm": 6.8065949491831885, - "learning_rate": 1.3506398388225924e-06, - "loss": 0.7555, - "num_input_tokens_seen": 109118685, - "step": 5132 - }, - { - "epoch": 0.617206757650454, - "flos": 13327135478880.0, - "grad_norm": 5.6525484059896085, - "learning_rate": 1.349903118095355e-06, - "loss": 0.6965, - "num_input_tokens_seen": 109137540, - "step": 5133 - }, - { - "epoch": 0.617327000541093, - "flos": 13272752650320.0, - "grad_norm": 3.8646220627700862, - "learning_rate": 1.349166495987298e-06, - "loss": 0.7139, - "num_input_tokens_seen": 109155825, - "step": 5134 - }, - { - "epoch": 0.6174472434317321, - "flos": 45580902029400.0, - "grad_norm": 0.9389280251491711, - "learning_rate": 1.348429972610166e-06, - "loss": 0.6685, - "num_input_tokens_seen": 109219850, - "step": 5135 - }, - { - "epoch": 0.6175674863223712, - "flos": 52533156953400.0, - "grad_norm": 0.860141724230831, - "learning_rate": 1.3476935480756897e-06, - "loss": 0.5994, - "num_input_tokens_seen": 109276320, - "step": 5136 - }, - { - "epoch": 0.6176877292130103, - "flos": 15972866442360.0, - "grad_norm": 5.415998896983273, - "learning_rate": 1.346957222495583e-06, - 
"loss": 0.7396, - "num_input_tokens_seen": 109293835, - "step": 5137 - }, - { - "epoch": 0.6178079721036493, - "flos": 12942213244320.0, - "grad_norm": 9.22240062781073, - "learning_rate": 1.3462209959815466e-06, - "loss": 0.7077, - "num_input_tokens_seen": 109308295, - "step": 5138 - }, - { - "epoch": 0.6179282149942885, - "flos": 16562497707360.0, - "grad_norm": 4.265470956109887, - "learning_rate": 1.345484868645265e-06, - "loss": 0.7157, - "num_input_tokens_seen": 109326825, - "step": 5139 - }, - { - "epoch": 0.6180484578849276, - "flos": 16324391910480.0, - "grad_norm": 3.5813409604010054, - "learning_rate": 1.3447488405984088e-06, - "loss": 0.7676, - "num_input_tokens_seen": 109344805, - "step": 5140 - }, - { - "epoch": 0.6181687007755666, - "flos": 25882336931280.0, - "grad_norm": 4.329977252010648, - "learning_rate": 1.3440129119526322e-06, - "loss": 0.6708, - "num_input_tokens_seen": 109366950, - "step": 5141 - }, - { - "epoch": 0.6182889436662057, - "flos": 45364537642320.0, - "grad_norm": 0.9350067044594029, - "learning_rate": 1.3432770828195762e-06, - "loss": 0.5589, - "num_input_tokens_seen": 109427655, - "step": 5142 - }, - { - "epoch": 0.6184091865568448, - "flos": 14325904356480.0, - "grad_norm": 5.027206786571562, - "learning_rate": 1.3425413533108635e-06, - "loss": 0.7036, - "num_input_tokens_seen": 109445975, - "step": 5143 - }, - { - "epoch": 0.6185294294474839, - "flos": 17269062899160.0, - "grad_norm": 4.098379764012051, - "learning_rate": 1.341805723538105e-06, - "loss": 0.6873, - "num_input_tokens_seen": 109465800, - "step": 5144 - }, - { - "epoch": 0.618649672338123, - "flos": 19619012601360.0, - "grad_norm": 19.116882596158625, - "learning_rate": 1.3410701936128948e-06, - "loss": 0.7573, - "num_input_tokens_seen": 109488300, - "step": 5145 - }, - { - "epoch": 0.6187699152287621, - "flos": 10510815939120.0, - "grad_norm": 7.420978307262812, - "learning_rate": 1.340334763646812e-06, - "loss": 0.8353, - "num_input_tokens_seen": 109502155, - "step": 5146 - }, - { - "epoch": 0.6188901581194012, - "flos": 14677334844720.0, - "grad_norm": 8.75916268305385, - "learning_rate": 1.3395994337514218e-06, - "loss": 0.7321, - "num_input_tokens_seen": 109522045, - "step": 5147 - }, - { - "epoch": 0.6190104010100402, - "flos": 18833121023880.0, - "grad_norm": 2.88740863738079, - "learning_rate": 1.3388642040382725e-06, - "loss": 0.7755, - "num_input_tokens_seen": 109542190, - "step": 5148 - }, - { - "epoch": 0.6191306439006794, - "flos": 22342043713080.0, - "grad_norm": 2.8513093182766425, - "learning_rate": 1.3381290746188975e-06, - "loss": 0.8184, - "num_input_tokens_seen": 109561280, - "step": 5149 - }, - { - "epoch": 0.6192508867913185, - "flos": 19562128635960.0, - "grad_norm": 16.87990199345345, - "learning_rate": 1.3373940456048152e-06, - "loss": 0.6599, - "num_input_tokens_seen": 109581025, - "step": 5150 - }, - { - "epoch": 0.6193711296819575, - "flos": 26989998105840.0, - "grad_norm": 2.8369698655225966, - "learning_rate": 1.3366591171075299e-06, - "loss": 0.5801, - "num_input_tokens_seen": 109604250, - "step": 5151 - }, - { - "epoch": 0.6194913725725967, - "flos": 18457411837680.0, - "grad_norm": 3.2135676172198924, - "learning_rate": 1.335924289238529e-06, - "loss": 0.8961, - "num_input_tokens_seen": 109623180, - "step": 5152 - }, - { - "epoch": 0.6196116154632357, - "flos": 15486935240880.0, - "grad_norm": 2.713088443758516, - "learning_rate": 1.3351895621092859e-06, - "loss": 0.744, - "num_input_tokens_seen": 109643245, - "step": 5153 - }, - { - "epoch": 
0.6197318583538748, - "flos": 11842118800200.0, - "grad_norm": 6.048891505056228, - "learning_rate": 1.3344549358312567e-06, - "loss": 0.7494, - "num_input_tokens_seen": 109661365, - "step": 5154 - }, - { - "epoch": 0.619852101244514, - "flos": 17889716433600.0, - "grad_norm": 7.012031297438694, - "learning_rate": 1.3337204105158852e-06, - "loss": 0.7587, - "num_input_tokens_seen": 109679955, - "step": 5155 - }, - { - "epoch": 0.619972344135153, - "flos": 12192472849800.0, - "grad_norm": 3.400064854844813, - "learning_rate": 1.332985986274597e-06, - "loss": 0.6987, - "num_input_tokens_seen": 109697305, - "step": 5156 - }, - { - "epoch": 0.6200925870257921, - "flos": 9060462425280.0, - "grad_norm": 3.1712724085081123, - "learning_rate": 1.3322516632188047e-06, - "loss": 0.7394, - "num_input_tokens_seen": 109713920, - "step": 5157 - }, - { - "epoch": 0.6202128299164312, - "flos": 19453679578440.0, - "grad_norm": 3.2190009454475144, - "learning_rate": 1.3315174414599045e-06, - "loss": 0.6607, - "num_input_tokens_seen": 109734960, - "step": 5158 - }, - { - "epoch": 0.6203330728070703, - "flos": 13704585962880.0, - "grad_norm": 2.8751268773485545, - "learning_rate": 1.3307833211092768e-06, - "loss": 0.7434, - "num_input_tokens_seen": 109753345, - "step": 5159 - }, - { - "epoch": 0.6204533156977093, - "flos": 15080362085040.0, - "grad_norm": 2.490421255242665, - "learning_rate": 1.3300493022782873e-06, - "loss": 0.7293, - "num_input_tokens_seen": 109773635, - "step": 5160 - }, - { - "epoch": 0.6205735585883485, - "flos": 12651940816080.0, - "grad_norm": 4.588159182220739, - "learning_rate": 1.3293153850782855e-06, - "loss": 0.7029, - "num_input_tokens_seen": 109791675, - "step": 5161 - }, - { - "epoch": 0.6206938014789876, - "flos": 16808613474120.0, - "grad_norm": 2.9096833324842954, - "learning_rate": 1.3285815696206069e-06, - "loss": 0.6955, - "num_input_tokens_seen": 109812940, - "step": 5162 - }, - { - "epoch": 0.6208140443696266, - "flos": 17561646504480.0, - "grad_norm": 4.345061179763041, - "learning_rate": 1.32784785601657e-06, - "loss": 0.7568, - "num_input_tokens_seen": 109832070, - "step": 5163 - }, - { - "epoch": 0.6209342872602658, - "flos": 25772368195680.0, - "grad_norm": 3.2732785716658843, - "learning_rate": 1.3271142443774798e-06, - "loss": 0.7326, - "num_input_tokens_seen": 109854025, - "step": 5164 - }, - { - "epoch": 0.6210545301509048, - "flos": 19779596630280.0, - "grad_norm": 7.0790643131909015, - "learning_rate": 1.3263807348146228e-06, - "loss": 0.8001, - "num_input_tokens_seen": 109873600, - "step": 5165 - }, - { - "epoch": 0.6211747730415439, - "flos": 24692626614480.0, - "grad_norm": 4.7482666065209065, - "learning_rate": 1.3256473274392733e-06, - "loss": 0.7235, - "num_input_tokens_seen": 109894665, - "step": 5166 - }, - { - "epoch": 0.6212950159321831, - "flos": 25099041470520.0, - "grad_norm": 3.4198581552369856, - "learning_rate": 1.3249140223626873e-06, - "loss": 0.6892, - "num_input_tokens_seen": 109916005, - "step": 5167 - }, - { - "epoch": 0.6214152588228221, - "flos": 20509807320840.0, - "grad_norm": 2.9581424132156373, - "learning_rate": 1.3241808196961077e-06, - "loss": 0.748, - "num_input_tokens_seen": 109936850, - "step": 5168 - }, - { - "epoch": 0.6215355017134612, - "flos": 14784897423360.0, - "grad_norm": 3.7452978535709134, - "learning_rate": 1.3234477195507608e-06, - "loss": 0.6903, - "num_input_tokens_seen": 109955400, - "step": 5169 - }, - { - "epoch": 0.6216557446041003, - "flos": 30472742499480.0, - "grad_norm": 4.830912257698038, - 
"learning_rate": 1.322714722037857e-06, - "loss": 0.618, - "num_input_tokens_seen": 109976565, - "step": 5170 - }, - { - "epoch": 0.6217759874947394, - "flos": 20482362626880.0, - "grad_norm": 4.503976971880787, - "learning_rate": 1.321981827268591e-06, - "loss": 0.7501, - "num_input_tokens_seen": 109996940, - "step": 5171 - }, - { - "epoch": 0.6218962303853784, - "flos": 15921681269760.0, - "grad_norm": 2.9475191385231834, - "learning_rate": 1.3212490353541426e-06, - "loss": 0.8048, - "num_input_tokens_seen": 110018920, - "step": 5172 - }, - { - "epoch": 0.6220164732760175, - "flos": 15536062516080.0, - "grad_norm": 5.276483856780872, - "learning_rate": 1.3205163464056762e-06, - "loss": 0.787, - "num_input_tokens_seen": 110035245, - "step": 5173 - }, - { - "epoch": 0.6221367161666567, - "flos": 19155207220560.0, - "grad_norm": 4.3613394956188545, - "learning_rate": 1.319783760534339e-06, - "loss": 0.7061, - "num_input_tokens_seen": 110054210, - "step": 5174 - }, - { - "epoch": 0.6222569590572957, - "flos": 11863801381440.0, - "grad_norm": 3.749776960733668, - "learning_rate": 1.319051277851266e-06, - "loss": 0.7381, - "num_input_tokens_seen": 110070215, - "step": 5175 - }, - { - "epoch": 0.6223772019479348, - "flos": 13300102364400.0, - "grad_norm": 5.263796525657635, - "learning_rate": 1.3183188984675716e-06, - "loss": 0.8299, - "num_input_tokens_seen": 110088300, - "step": 5176 - }, - { - "epoch": 0.6224974448385739, - "flos": 20157332053920.0, - "grad_norm": 5.353657422419823, - "learning_rate": 1.3175866224943586e-06, - "loss": 0.7128, - "num_input_tokens_seen": 110106740, - "step": 5177 - }, - { - "epoch": 0.622617687729213, - "flos": 14460500049600.0, - "grad_norm": 4.957681287104571, - "learning_rate": 1.316854450042712e-06, - "loss": 0.7152, - "num_input_tokens_seen": 110124400, - "step": 5178 - }, - { - "epoch": 0.622737930619852, - "flos": 16863407882160.0, - "grad_norm": 12.460973623576166, - "learning_rate": 1.3161223812237024e-06, - "loss": 0.7327, - "num_input_tokens_seen": 110143475, - "step": 5179 - }, - { - "epoch": 0.6228581735104912, - "flos": 9162421191000.0, - "grad_norm": 6.850034850094543, - "learning_rate": 1.3153904161483842e-06, - "loss": 0.8293, - "num_input_tokens_seen": 110158495, - "step": 5180 - }, - { - "epoch": 0.6229784164011303, - "flos": 17428443849600.0, - "grad_norm": 3.155235958555651, - "learning_rate": 1.3146585549277953e-06, - "loss": 0.8375, - "num_input_tokens_seen": 110176855, - "step": 5181 - }, - { - "epoch": 0.6230986592917693, - "flos": 16400488980240.0, - "grad_norm": 4.382594900458339, - "learning_rate": 1.3139267976729591e-06, - "loss": 0.763, - "num_input_tokens_seen": 110196765, - "step": 5182 - }, - { - "epoch": 0.6232189021824085, - "flos": 25365763379880.0, - "grad_norm": 3.802590546241565, - "learning_rate": 1.3131951444948815e-06, - "loss": 0.704, - "num_input_tokens_seen": 110215885, - "step": 5183 - }, - { - "epoch": 0.6233391450730476, - "flos": 16270072401840.0, - "grad_norm": 3.91065681599433, - "learning_rate": 1.3124635955045546e-06, - "loss": 0.7294, - "num_input_tokens_seen": 110235420, - "step": 5184 - }, - { - "epoch": 0.6234593879636866, - "flos": 14859759754680.0, - "grad_norm": 4.869802002377345, - "learning_rate": 1.3117321508129537e-06, - "loss": 0.8265, - "num_input_tokens_seen": 110253220, - "step": 5185 - }, - { - "epoch": 0.6235796308543258, - "flos": 15106160461080.0, - "grad_norm": 3.422145049008033, - "learning_rate": 1.3110008105310388e-06, - "loss": 0.7551, - "num_input_tokens_seen": 110272760, - "step": 
5186 - }, - { - "epoch": 0.6236998737449648, - "flos": 19513254640440.0, - "grad_norm": 3.681673392800133, - "learning_rate": 1.3102695747697526e-06, - "loss": 0.7691, - "num_input_tokens_seen": 110295350, - "step": 5187 - }, - { - "epoch": 0.6238201166356039, - "flos": 9192810261240.0, - "grad_norm": 3.6838266212098363, - "learning_rate": 1.3095384436400237e-06, - "loss": 0.8759, - "num_input_tokens_seen": 110306600, - "step": 5188 - }, - { - "epoch": 0.623940359526243, - "flos": 7548507612000.0, - "grad_norm": 4.626736289241645, - "learning_rate": 1.3088074172527633e-06, - "loss": 0.805, - "num_input_tokens_seen": 110323450, - "step": 5189 - }, - { - "epoch": 0.6240606024168821, - "flos": 21319154437320.0, - "grad_norm": 5.9582564697998786, - "learning_rate": 1.3080764957188684e-06, - "loss": 0.7021, - "num_input_tokens_seen": 110343415, - "step": 5190 - }, - { - "epoch": 0.6241808453075212, - "flos": 16108063674720.0, - "grad_norm": 3.975401703312227, - "learning_rate": 1.3073456791492192e-06, - "loss": 0.6885, - "num_input_tokens_seen": 110362845, - "step": 5191 - }, - { - "epoch": 0.6243010881981603, - "flos": 15456736130400.0, - "grad_norm": 4.589542252398147, - "learning_rate": 1.3066149676546801e-06, - "loss": 0.7732, - "num_input_tokens_seen": 110380745, - "step": 5192 - }, - { - "epoch": 0.6244213310887994, - "flos": 16349747047080.0, - "grad_norm": 5.917113632011038, - "learning_rate": 1.3058843613460985e-06, - "loss": 0.6451, - "num_input_tokens_seen": 110398405, - "step": 5193 - }, - { - "epoch": 0.6245415739794384, - "flos": 11085919773840.0, - "grad_norm": 3.9791763199028374, - "learning_rate": 1.3051538603343075e-06, - "loss": 0.7216, - "num_input_tokens_seen": 110416055, - "step": 5194 - }, - { - "epoch": 0.6246618168700776, - "flos": 13785527006520.0, - "grad_norm": 3.1208109435096185, - "learning_rate": 1.3044234647301235e-06, - "loss": 0.6613, - "num_input_tokens_seen": 110433800, - "step": 5195 - }, - { - "epoch": 0.6247820597607167, - "flos": 10410313531560.0, - "grad_norm": 6.933642165323922, - "learning_rate": 1.303693174644347e-06, - "loss": 0.6998, - "num_input_tokens_seen": 110450995, - "step": 5196 - }, - { - "epoch": 0.6249023026513557, - "flos": 16566771801960.0, - "grad_norm": 5.009141195246548, - "learning_rate": 1.3029629901877625e-06, - "loss": 0.79, - "num_input_tokens_seen": 110470090, - "step": 5197 - }, - { - "epoch": 0.6250225455419949, - "flos": 14811898877880.0, - "grad_norm": 4.0072461298764965, - "learning_rate": 1.3022329114711376e-06, - "loss": 0.7672, - "num_input_tokens_seen": 110488520, - "step": 5198 - }, - { - "epoch": 0.6251427884326339, - "flos": 17158334324520.0, - "grad_norm": 4.21252071930179, - "learning_rate": 1.3015029386052256e-06, - "loss": 0.6805, - "num_input_tokens_seen": 110508410, - "step": 5199 - }, - { - "epoch": 0.625263031323273, - "flos": 23290355597160.0, - "grad_norm": 3.428299677387978, - "learning_rate": 1.3007730717007622e-06, - "loss": 0.7067, - "num_input_tokens_seen": 110528945, - "step": 5200 - }, - { - "epoch": 0.6253832742139122, - "flos": 17674243016760.0, - "grad_norm": 3.8596655855938073, - "learning_rate": 1.3000433108684676e-06, - "loss": 0.7414, - "num_input_tokens_seen": 110549165, - "step": 5201 - }, - { - "epoch": 0.6255035171045512, - "flos": 20289331630320.0, - "grad_norm": 7.203238860779866, - "learning_rate": 1.2993136562190467e-06, - "loss": 0.7874, - "num_input_tokens_seen": 110568005, - "step": 5202 - }, - { - "epoch": 0.6256237599951903, - "flos": 14782776206040.0, - "grad_norm": 
2.9608017379516514, - "learning_rate": 1.2985841078631871e-06, - "loss": 0.6854, - "num_input_tokens_seen": 110587045, - "step": 5203 - }, - { - "epoch": 0.6257440028858293, - "flos": 17701022851560.0, - "grad_norm": 6.918776531220303, - "learning_rate": 1.2978546659115608e-06, - "loss": 0.7699, - "num_input_tokens_seen": 110604845, - "step": 5204 - }, - { - "epoch": 0.6258642457764685, - "flos": 11544691221000.0, - "grad_norm": 2.8920841967301993, - "learning_rate": 1.2971253304748228e-06, - "loss": 0.8435, - "num_input_tokens_seen": 110622280, - "step": 5205 - }, - { - "epoch": 0.6259844886671075, - "flos": 8628375833040.0, - "grad_norm": 6.798278242809012, - "learning_rate": 1.296396101663614e-06, - "loss": 0.7359, - "num_input_tokens_seen": 110638560, - "step": 5206 - }, - { - "epoch": 0.6261047315577466, - "flos": 11571281096040.0, - "grad_norm": 3.822592462552819, - "learning_rate": 1.2956669795885565e-06, - "loss": 0.8268, - "num_input_tokens_seen": 110654910, - "step": 5207 - }, - { - "epoch": 0.6262249744483858, - "flos": 22939494988200.0, - "grad_norm": 4.998797844373787, - "learning_rate": 1.294937964360259e-06, - "loss": 0.6774, - "num_input_tokens_seen": 110674900, - "step": 5208 - }, - { - "epoch": 0.6263452173390248, - "flos": 19941921957000.0, - "grad_norm": 9.525983912795358, - "learning_rate": 1.2942090560893108e-06, - "loss": 0.6767, - "num_input_tokens_seen": 110694025, - "step": 5209 - }, - { - "epoch": 0.6264654602296639, - "flos": 27448832872920.0, - "grad_norm": 4.297463116712225, - "learning_rate": 1.2934802548862882e-06, - "loss": 0.5872, - "num_input_tokens_seen": 110716530, - "step": 5210 - }, - { - "epoch": 0.626585703120303, - "flos": 10734267665880.0, - "grad_norm": 4.1510849603033, - "learning_rate": 1.292751560861749e-06, - "loss": 0.818, - "num_input_tokens_seen": 110731155, - "step": 5211 - }, - { - "epoch": 0.6267059460109421, - "flos": 16351836604440.0, - "grad_norm": 3.495645266275844, - "learning_rate": 1.2920229741262354e-06, - "loss": 0.7806, - "num_input_tokens_seen": 110748880, - "step": 5212 - }, - { - "epoch": 0.6268261889015811, - "flos": 12948450256440.0, - "grad_norm": 8.712522627638299, - "learning_rate": 1.2912944947902739e-06, - "loss": 0.7392, - "num_input_tokens_seen": 110765085, - "step": 5213 - }, - { - "epoch": 0.6269464317922203, - "flos": 24120688775760.0, - "grad_norm": 12.94557711803322, - "learning_rate": 1.2905661229643742e-06, - "loss": 0.6901, - "num_input_tokens_seen": 110784565, - "step": 5214 - }, - { - "epoch": 0.6270666746828594, - "flos": 13081652911320.0, - "grad_norm": 4.264027949801625, - "learning_rate": 1.2898378587590299e-06, - "loss": 0.8085, - "num_input_tokens_seen": 110800885, - "step": 5215 - }, - { - "epoch": 0.6271869175734984, - "flos": 12732786879840.0, - "grad_norm": 4.461879543906475, - "learning_rate": 1.2891097022847173e-06, - "loss": 0.8555, - "num_input_tokens_seen": 110817950, - "step": 5216 - }, - { - "epoch": 0.6273071604641376, - "flos": 19697484168120.0, - "grad_norm": 4.101445310825845, - "learning_rate": 1.2883816536518978e-06, - "loss": 0.662, - "num_input_tokens_seen": 110838810, - "step": 5217 - }, - { - "epoch": 0.6274274033547766, - "flos": 19100317832640.0, - "grad_norm": 4.766773523231133, - "learning_rate": 1.2876537129710155e-06, - "loss": 0.8069, - "num_input_tokens_seen": 110856260, - "step": 5218 - }, - { - "epoch": 0.6275476462454157, - "flos": 14811518958360.0, - "grad_norm": 5.669363427329613, - "learning_rate": 1.286925880352499e-06, - "loss": 0.7298, - "num_input_tokens_seen": 
110874840, - "step": 5219 - }, - { - "epoch": 0.6276678891360549, - "flos": 19291639191360.0, - "grad_norm": 2.5907441152100694, - "learning_rate": 1.2861981559067592e-06, - "loss": 0.6857, - "num_input_tokens_seen": 110895165, - "step": 5220 - }, - { - "epoch": 0.6277881320266939, - "flos": 10108580197800.0, - "grad_norm": 3.699476674160643, - "learning_rate": 1.2854705397441917e-06, - "loss": 0.7897, - "num_input_tokens_seen": 110910425, - "step": 5221 - }, - { - "epoch": 0.627908374917333, - "flos": 19830623503080.0, - "grad_norm": 6.584887293008218, - "learning_rate": 1.2847430319751747e-06, - "loss": 0.7555, - "num_input_tokens_seen": 110928240, - "step": 5222 - }, - { - "epoch": 0.6280286178079721, - "flos": 17403975191880.0, - "grad_norm": 5.426099730798746, - "learning_rate": 1.2840156327100712e-06, - "loss": 0.6725, - "num_input_tokens_seen": 110945085, - "step": 5223 - }, - { - "epoch": 0.6281488606986112, - "flos": 19320635223360.0, - "grad_norm": 3.4276264228015147, - "learning_rate": 1.2832883420592272e-06, - "loss": 0.7058, - "num_input_tokens_seen": 110963700, - "step": 5224 - }, - { - "epoch": 0.6282691035892503, - "flos": 26557025034720.0, - "grad_norm": 7.235731883330889, - "learning_rate": 1.282561160132972e-06, - "loss": 0.6213, - "num_input_tokens_seen": 110983940, - "step": 5225 - }, - { - "epoch": 0.6283893464798894, - "flos": 19451811640800.0, - "grad_norm": 2.5519233705382143, - "learning_rate": 1.2818340870416186e-06, - "loss": 0.8004, - "num_input_tokens_seen": 111004795, - "step": 5226 - }, - { - "epoch": 0.6285095893705285, - "flos": 16270262361600.0, - "grad_norm": 8.415717597052543, - "learning_rate": 1.2811071228954626e-06, - "loss": 0.7427, - "num_input_tokens_seen": 111023150, - "step": 5227 - }, - { - "epoch": 0.6286298322611675, - "flos": 19456244035200.0, - "grad_norm": 3.840296737013992, - "learning_rate": 1.2803802678047846e-06, - "loss": 0.7978, - "num_input_tokens_seen": 111043020, - "step": 5228 - }, - { - "epoch": 0.6287500751518067, - "flos": 15942572352000.0, - "grad_norm": 8.162588645371926, - "learning_rate": 1.279653521879848e-06, - "loss": 0.7158, - "num_input_tokens_seen": 111062805, - "step": 5229 - }, - { - "epoch": 0.6288703180424458, - "flos": 14621400678120.0, - "grad_norm": 5.282607767402266, - "learning_rate": 1.2789268852308997e-06, - "loss": 0.8273, - "num_input_tokens_seen": 111077735, - "step": 5230 - }, - { - "epoch": 0.6289905609330848, - "flos": 16186756861200.0, - "grad_norm": 5.424884939072243, - "learning_rate": 1.2782003579681688e-06, - "loss": 0.6864, - "num_input_tokens_seen": 111096985, - "step": 5231 - }, - { - "epoch": 0.629110803823724, - "flos": 18698272051080.0, - "grad_norm": 2.607225274950053, - "learning_rate": 1.2774739402018701e-06, - "loss": 0.732, - "num_input_tokens_seen": 111117540, - "step": 5232 - }, - { - "epoch": 0.629231046714363, - "flos": 15270638665080.0, - "grad_norm": 6.010138846267698, - "learning_rate": 1.2767476320422002e-06, - "loss": 0.7297, - "num_input_tokens_seen": 111137185, - "step": 5233 - }, - { - "epoch": 0.6293512896050021, - "flos": 47961905661000.0, - "grad_norm": 0.7148862732293392, - "learning_rate": 1.2760214335993392e-06, - "loss": 0.5898, - "num_input_tokens_seen": 111203550, - "step": 5234 - }, - { - "epoch": 0.6294715324956413, - "flos": 25477030173840.0, - "grad_norm": 3.601900097297687, - "learning_rate": 1.2752953449834514e-06, - "loss": 0.5641, - "num_input_tokens_seen": 111225720, - "step": 5235 - }, - { - "epoch": 0.6295917753862803, - "flos": 16675030899720.0, - 
"grad_norm": 8.82770843873408, - "learning_rate": 1.2745693663046836e-06, - "loss": 0.7804, - "num_input_tokens_seen": 111244510, - "step": 5236 - }, - { - "epoch": 0.6297120182769194, - "flos": 15243922150200.0, - "grad_norm": 5.383337429899344, - "learning_rate": 1.2738434976731662e-06, - "loss": 0.7957, - "num_input_tokens_seen": 111262415, - "step": 5237 - }, - { - "epoch": 0.6298322611675584, - "flos": 14242367196120.0, - "grad_norm": 4.241470079544231, - "learning_rate": 1.2731177391990125e-06, - "loss": 0.7366, - "num_input_tokens_seen": 111282060, - "step": 5238 - }, - { - "epoch": 0.6299525040581976, - "flos": 9141308489040.0, - "grad_norm": 9.977921718171817, - "learning_rate": 1.2723920909923203e-06, - "loss": 0.8036, - "num_input_tokens_seen": 111297525, - "step": 5239 - }, - { - "epoch": 0.6300727469488366, - "flos": 42537553187400.0, - "grad_norm": 0.9092350173651357, - "learning_rate": 1.2716665531631688e-06, - "loss": 0.6462, - "num_input_tokens_seen": 111351530, - "step": 5240 - }, - { - "epoch": 0.6301929898394757, - "flos": 16484690999760.0, - "grad_norm": 3.374247007788237, - "learning_rate": 1.270941125821623e-06, - "loss": 0.7548, - "num_input_tokens_seen": 111371675, - "step": 5241 - }, - { - "epoch": 0.6303132327301149, - "flos": 20752313852160.0, - "grad_norm": 3.144261773925051, - "learning_rate": 1.2702158090777278e-06, - "loss": 0.7441, - "num_input_tokens_seen": 111392485, - "step": 5242 - }, - { - "epoch": 0.6304334756207539, - "flos": 18510528267840.0, - "grad_norm": 3.1783487936949784, - "learning_rate": 1.2694906030415148e-06, - "loss": 0.7319, - "num_input_tokens_seen": 111409950, - "step": 5243 - }, - { - "epoch": 0.630553718511393, - "flos": 13158161560560.0, - "grad_norm": 15.941659900579431, - "learning_rate": 1.2687655078229958e-06, - "loss": 0.7882, - "num_input_tokens_seen": 111424000, - "step": 5244 - }, - { - "epoch": 0.6306739614020321, - "flos": 20019443724960.0, - "grad_norm": 4.5174603279875525, - "learning_rate": 1.2680405235321678e-06, - "loss": 0.6932, - "num_input_tokens_seen": 111445055, - "step": 5245 - }, - { - "epoch": 0.6307942042926712, - "flos": 11166924137400.0, - "grad_norm": 3.693351499487571, - "learning_rate": 1.267315650279011e-06, - "loss": 0.7715, - "num_input_tokens_seen": 111463245, - "step": 5246 - }, - { - "epoch": 0.6309144471833102, - "flos": 14322738360480.0, - "grad_norm": 10.08637962879215, - "learning_rate": 1.2665908881734874e-06, - "loss": 0.7217, - "num_input_tokens_seen": 111481800, - "step": 5247 - }, - { - "epoch": 0.6310346900739494, - "flos": 12759376754880.0, - "grad_norm": 3.473349056928291, - "learning_rate": 1.2658662373255432e-06, - "loss": 0.8339, - "num_input_tokens_seen": 111499910, - "step": 5248 - }, - { - "epoch": 0.6311549329645885, - "flos": 40573412198640.0, - "grad_norm": 0.8342714195393883, - "learning_rate": 1.2651416978451063e-06, - "loss": 0.5641, - "num_input_tokens_seen": 111565015, - "step": 5249 - }, - { - "epoch": 0.6312751758552275, - "flos": 30823856388120.0, - "grad_norm": 4.2291265526483635, - "learning_rate": 1.2644172698420903e-06, - "loss": 0.6373, - "num_input_tokens_seen": 111586840, - "step": 5250 - }, - { - "epoch": 0.6313954187458667, - "flos": 14353190750640.0, - "grad_norm": 4.670386648243569, - "learning_rate": 1.2636929534263892e-06, - "loss": 0.828, - "num_input_tokens_seen": 111605545, - "step": 5251 - }, - { - "epoch": 0.6315156616365057, - "flos": 16756573482600.0, - "grad_norm": 2.705504059568034, - "learning_rate": 1.2629687487078821e-06, - "loss": 0.757, - 
"num_input_tokens_seen": 111624075, - "step": 5252 - }, - { - "epoch": 0.6316359045271448, - "flos": 17371274944560.0, - "grad_norm": 4.14559833620981, - "learning_rate": 1.2622446557964293e-06, - "loss": 0.7562, - "num_input_tokens_seen": 111641800, - "step": 5253 - }, - { - "epoch": 0.631756147417784, - "flos": 24315461070120.0, - "grad_norm": 7.045236468925381, - "learning_rate": 1.261520674801876e-06, - "loss": 0.6964, - "num_input_tokens_seen": 111662115, - "step": 5254 - }, - { - "epoch": 0.631876390308423, - "flos": 22939051748760.0, - "grad_norm": 4.062315716598507, - "learning_rate": 1.2607968058340488e-06, - "loss": 0.7053, - "num_input_tokens_seen": 111681530, - "step": 5255 - }, - { - "epoch": 0.6319966331990621, - "flos": 18078853255080.0, - "grad_norm": 3.1428509315825677, - "learning_rate": 1.2600730490027583e-06, - "loss": 0.7158, - "num_input_tokens_seen": 111701490, - "step": 5256 - }, - { - "epoch": 0.6321168760897012, - "flos": 12757983716640.0, - "grad_norm": 3.3689671037743123, - "learning_rate": 1.2593494044177984e-06, - "loss": 0.7958, - "num_input_tokens_seen": 111719515, - "step": 5257 - }, - { - "epoch": 0.6322371189803403, - "flos": 13352838875040.0, - "grad_norm": 4.011734915072106, - "learning_rate": 1.2586258721889448e-06, - "loss": 0.7832, - "num_input_tokens_seen": 111736585, - "step": 5258 - }, - { - "epoch": 0.6323573618709794, - "flos": 14730926174280.0, - "grad_norm": 4.848377207984473, - "learning_rate": 1.2579024524259573e-06, - "loss": 0.7989, - "num_input_tokens_seen": 111752565, - "step": 5259 - }, - { - "epoch": 0.6324776047616185, - "flos": 14645141156760.0, - "grad_norm": 3.1417671518083004, - "learning_rate": 1.2571791452385768e-06, - "loss": 0.9013, - "num_input_tokens_seen": 111769550, - "step": 5260 - }, - { - "epoch": 0.6325978476522576, - "flos": 22643048867760.0, - "grad_norm": 3.3108748281973335, - "learning_rate": 1.2564559507365301e-06, - "loss": 0.7518, - "num_input_tokens_seen": 111791675, - "step": 5261 - }, - { - "epoch": 0.6327180905428966, - "flos": 17970752457120.0, - "grad_norm": 4.517685364053587, - "learning_rate": 1.2557328690295244e-06, - "loss": 0.77, - "num_input_tokens_seen": 111809585, - "step": 5262 - }, - { - "epoch": 0.6328383334335358, - "flos": 15779645486040.0, - "grad_norm": 5.710706045989513, - "learning_rate": 1.255009900227251e-06, - "loss": 0.7482, - "num_input_tokens_seen": 111828330, - "step": 5263 - }, - { - "epoch": 0.6329585763241748, - "flos": 16782783438120.0, - "grad_norm": 4.074241455248933, - "learning_rate": 1.254287044439383e-06, - "loss": 0.7847, - "num_input_tokens_seen": 111847655, - "step": 5264 - }, - { - "epoch": 0.6330788192148139, - "flos": 52315214059680.0, - "grad_norm": 0.9693041416343804, - "learning_rate": 1.2535643017755776e-06, - "loss": 0.5609, - "num_input_tokens_seen": 111909565, - "step": 5265 - }, - { - "epoch": 0.6331990621054531, - "flos": 15534511178040.0, - "grad_norm": 15.201682670213168, - "learning_rate": 1.2528416723454737e-06, - "loss": 0.7038, - "num_input_tokens_seen": 111925955, - "step": 5266 - }, - { - "epoch": 0.6333193049960921, - "flos": 25315433026200.0, - "grad_norm": 5.516175672150091, - "learning_rate": 1.2521191562586945e-06, - "loss": 0.6958, - "num_input_tokens_seen": 111949325, - "step": 5267 - }, - { - "epoch": 0.6334395478867312, - "flos": 13378099031760.0, - "grad_norm": 3.656245435402132, - "learning_rate": 1.2513967536248445e-06, - "loss": 0.7555, - "num_input_tokens_seen": 111965365, - "step": 5268 - }, - { - "epoch": 0.6335597907773702, - "flos": 
17295874393920.0, - "grad_norm": 3.7031774074531962, - "learning_rate": 1.2506744645535117e-06, - "loss": 0.8032, - "num_input_tokens_seen": 111985515, - "step": 5269 - }, - { - "epoch": 0.6336800336680094, - "flos": 16620743051040.0, - "grad_norm": 6.869043162427003, - "learning_rate": 1.249952289154267e-06, - "loss": 0.584, - "num_input_tokens_seen": 112005275, - "step": 5270 - }, - { - "epoch": 0.6338002765586485, - "flos": 17295526134360.0, - "grad_norm": 3.7147233464156213, - "learning_rate": 1.2492302275366635e-06, - "loss": 0.7532, - "num_input_tokens_seen": 112024670, - "step": 5271 - }, - { - "epoch": 0.6339205194492875, - "flos": 19428957641040.0, - "grad_norm": 3.843702858736581, - "learning_rate": 1.2485082798102377e-06, - "loss": 0.616, - "num_input_tokens_seen": 112044805, - "step": 5272 - }, - { - "epoch": 0.6340407623399267, - "flos": 13536150263880.0, - "grad_norm": 3.5519560999004645, - "learning_rate": 1.2477864460845084e-06, - "loss": 0.6638, - "num_input_tokens_seen": 112060925, - "step": 5273 - }, - { - "epoch": 0.6341610052305657, - "flos": 12921670421640.0, - "grad_norm": 13.151937688638139, - "learning_rate": 1.2470647264689776e-06, - "loss": 0.7084, - "num_input_tokens_seen": 112079125, - "step": 5274 - }, - { - "epoch": 0.6342812481212048, - "flos": 17269189539000.0, - "grad_norm": 3.8935831000499563, - "learning_rate": 1.2463431210731282e-06, - "loss": 0.6955, - "num_input_tokens_seen": 112098430, - "step": 5275 - }, - { - "epoch": 0.634401491011844, - "flos": 13002579805320.0, - "grad_norm": 4.17426108942651, - "learning_rate": 1.2456216300064289e-06, - "loss": 0.7434, - "num_input_tokens_seen": 112115700, - "step": 5276 - }, - { - "epoch": 0.634521733902483, - "flos": 15619219756920.0, - "grad_norm": 3.107696386808172, - "learning_rate": 1.244900253378328e-06, - "loss": 0.7563, - "num_input_tokens_seen": 112135475, - "step": 5277 - }, - { - "epoch": 0.6346419767931221, - "flos": 12057623877000.0, - "grad_norm": 3.677368210661955, - "learning_rate": 1.2441789912982583e-06, - "loss": 0.68, - "num_input_tokens_seen": 112152280, - "step": 5278 - }, - { - "epoch": 0.6347622196837612, - "flos": 17834257166400.0, - "grad_norm": 3.5547984346958295, - "learning_rate": 1.2434578438756346e-06, - "loss": 0.6262, - "num_input_tokens_seen": 112172430, - "step": 5279 - }, - { - "epoch": 0.6348824625744003, - "flos": 47564568230760.0, - "grad_norm": 2.8269212384420075, - "learning_rate": 1.242736811219855e-06, - "loss": 0.7648, - "num_input_tokens_seen": 112198110, - "step": 5280 - }, - { - "epoch": 0.6350027054650393, - "flos": 20995516902600.0, - "grad_norm": 3.9955559123412376, - "learning_rate": 1.2420158934402988e-06, - "loss": 0.8107, - "num_input_tokens_seen": 112218445, - "step": 5281 - }, - { - "epoch": 0.6351229483556785, - "flos": 16836533067480.0, - "grad_norm": 4.049140678888146, - "learning_rate": 1.2412950906463286e-06, - "loss": 0.819, - "num_input_tokens_seen": 112235470, - "step": 5282 - }, - { - "epoch": 0.6352431912463176, - "flos": 16048931852160.0, - "grad_norm": 2.986842388734156, - "learning_rate": 1.2405744029472902e-06, - "loss": 0.8771, - "num_input_tokens_seen": 112254675, - "step": 5283 - }, - { - "epoch": 0.6353634341369566, - "flos": 9978131959440.0, - "grad_norm": 4.136372235619812, - "learning_rate": 1.2398538304525108e-06, - "loss": 0.7302, - "num_input_tokens_seen": 112273020, - "step": 5284 - }, - { - "epoch": 0.6354836770275958, - "flos": 14108183082480.0, - "grad_norm": 4.376969684261402, - "learning_rate": 1.2391333732713016e-06, - 
"loss": 0.7357, - "num_input_tokens_seen": 112290545, - "step": 5285 - }, - { - "epoch": 0.6356039199182348, - "flos": 15808071638760.0, - "grad_norm": 5.2090665784313295, - "learning_rate": 1.2384130315129543e-06, - "loss": 0.7608, - "num_input_tokens_seen": 112308590, - "step": 5286 - }, - { - "epoch": 0.6357241628088739, - "flos": 13216976783520.0, - "grad_norm": 3.9885488153238993, - "learning_rate": 1.2376928052867447e-06, - "loss": 0.7114, - "num_input_tokens_seen": 112327430, - "step": 5287 - }, - { - "epoch": 0.6358444056995131, - "flos": 18266723678160.0, - "grad_norm": 5.8378893181246445, - "learning_rate": 1.2369726947019299e-06, - "loss": 0.771, - "num_input_tokens_seen": 112347625, - "step": 5288 - }, - { - "epoch": 0.6359646485901521, - "flos": 17051594904840.0, - "grad_norm": 4.463216276412034, - "learning_rate": 1.2362526998677511e-06, - "loss": 0.6566, - "num_input_tokens_seen": 112363710, - "step": 5289 - }, - { - "epoch": 0.6360848914807912, - "flos": 15271366844160.0, - "grad_norm": 3.228787816291786, - "learning_rate": 1.2355328208934301e-06, - "loss": 0.842, - "num_input_tokens_seen": 112382305, - "step": 5290 - }, - { - "epoch": 0.6362051343714303, - "flos": 13489587445440.0, - "grad_norm": 5.67055594919486, - "learning_rate": 1.2348130578881728e-06, - "loss": 0.7165, - "num_input_tokens_seen": 112400245, - "step": 5291 - }, - { - "epoch": 0.6363253772620694, - "flos": 17863126558560.0, - "grad_norm": 3.9974135142710905, - "learning_rate": 1.2340934109611664e-06, - "loss": 0.7275, - "num_input_tokens_seen": 112420725, - "step": 5292 - }, - { - "epoch": 0.6364456201527084, - "flos": 19023999143160.0, - "grad_norm": 5.311057758905717, - "learning_rate": 1.2333738802215798e-06, - "loss": 0.6544, - "num_input_tokens_seen": 112440665, - "step": 5293 - }, - { - "epoch": 0.6365658630433476, - "flos": 15162664506960.0, - "grad_norm": 4.003826695093795, - "learning_rate": 1.2326544657785668e-06, - "loss": 0.7781, - "num_input_tokens_seen": 112460075, - "step": 5294 - }, - { - "epoch": 0.6366861059339867, - "flos": 15675882102600.0, - "grad_norm": 4.8942342974065065, - "learning_rate": 1.2319351677412608e-06, - "loss": 0.7296, - "num_input_tokens_seen": 112476840, - "step": 5295 - }, - { - "epoch": 0.6368063488246257, - "flos": 16291438383480.0, - "grad_norm": 3.5561222744013126, - "learning_rate": 1.2312159862187796e-06, - "loss": 0.7295, - "num_input_tokens_seen": 112494970, - "step": 5296 - }, - { - "epoch": 0.6369265917152649, - "flos": 16406219433000.0, - "grad_norm": 5.657606099305814, - "learning_rate": 1.2304969213202217e-06, - "loss": 0.743, - "num_input_tokens_seen": 112515950, - "step": 5297 - }, - { - "epoch": 0.6370468346059039, - "flos": 18106329609000.0, - "grad_norm": 5.057564672715632, - "learning_rate": 1.2297779731546692e-06, - "loss": 0.7636, - "num_input_tokens_seen": 112534765, - "step": 5298 - }, - { - "epoch": 0.637167077496543, - "flos": 18533635547280.0, - "grad_norm": 2.6708810307076747, - "learning_rate": 1.2290591418311853e-06, - "loss": 0.7613, - "num_input_tokens_seen": 112551880, - "step": 5299 - }, - { - "epoch": 0.637287320387182, - "flos": 20291357867760.0, - "grad_norm": 4.2145213091301725, - "learning_rate": 1.2283404274588172e-06, - "loss": 0.6993, - "num_input_tokens_seen": 112570545, - "step": 5300 - }, - { - "epoch": 0.6374075632778212, - "flos": 46570837778160.0, - "grad_norm": 0.7588978496712563, - "learning_rate": 1.227621830146592e-06, - "loss": 0.5438, - "num_input_tokens_seen": 112625630, - "step": 5301 - }, - { - "epoch": 
0.6375278061684603, - "flos": 18728059582080.0, - "grad_norm": 9.581863644452415, - "learning_rate": 1.2269033500035217e-06, - "loss": 0.7829, - "num_input_tokens_seen": 112645485, - "step": 5302 - }, - { - "epoch": 0.6376480490590993, - "flos": 18807987507000.0, - "grad_norm": 4.35743872734583, - "learning_rate": 1.2261849871385988e-06, - "loss": 0.7282, - "num_input_tokens_seen": 112666310, - "step": 5303 - }, - { - "epoch": 0.6377682919497385, - "flos": 23152752207840.0, - "grad_norm": 5.486027047630551, - "learning_rate": 1.2254667416607972e-06, - "loss": 0.5989, - "num_input_tokens_seen": 112687630, - "step": 5304 - }, - { - "epoch": 0.6378885348403776, - "flos": 16863692821800.0, - "grad_norm": 4.6529480151763565, - "learning_rate": 1.2247486136790756e-06, - "loss": 0.8202, - "num_input_tokens_seen": 112706830, - "step": 5305 - }, - { - "epoch": 0.6380087777310166, - "flos": 13650171474360.0, - "grad_norm": 5.0185420505107, - "learning_rate": 1.2240306033023726e-06, - "loss": 0.7862, - "num_input_tokens_seen": 112724375, - "step": 5306 - }, - { - "epoch": 0.6381290206216558, - "flos": 17080116037440.0, - "grad_norm": 3.3465909208664835, - "learning_rate": 1.223312710639611e-06, - "loss": 0.7122, - "num_input_tokens_seen": 112742815, - "step": 5307 - }, - { - "epoch": 0.6382492635122948, - "flos": 13786033565880.0, - "grad_norm": 4.30741981152746, - "learning_rate": 1.2225949357996928e-06, - "loss": 0.8399, - "num_input_tokens_seen": 112760660, - "step": 5308 - }, - { - "epoch": 0.6383695064029339, - "flos": 20401199963520.0, - "grad_norm": 2.6742443265145965, - "learning_rate": 1.221877278891505e-06, - "loss": 0.7915, - "num_input_tokens_seen": 112779635, - "step": 5309 - }, - { - "epoch": 0.638489749293573, - "flos": 19347636677880.0, - "grad_norm": 3.5200165171669195, - "learning_rate": 1.221159740023915e-06, - "loss": 0.6819, - "num_input_tokens_seen": 112799185, - "step": 5310 - }, - { - "epoch": 0.6386099921842121, - "flos": 17566173878760.0, - "grad_norm": 34.54244727957908, - "learning_rate": 1.2204423193057735e-06, - "loss": 0.7084, - "num_input_tokens_seen": 112817735, - "step": 5311 - }, - { - "epoch": 0.6387302350748512, - "flos": 52901837628480.0, - "grad_norm": 0.9626277851143207, - "learning_rate": 1.2197250168459122e-06, - "loss": 0.6816, - "num_input_tokens_seen": 112873855, - "step": 5312 - }, - { - "epoch": 0.6388504779654903, - "flos": 10570264361280.0, - "grad_norm": 4.021663889646197, - "learning_rate": 1.2190078327531454e-06, - "loss": 0.7268, - "num_input_tokens_seen": 112889820, - "step": 5313 - }, - { - "epoch": 0.6389707208561294, - "flos": 16350791825760.0, - "grad_norm": 2.5165888675344332, - "learning_rate": 1.2182907671362697e-06, - "loss": 0.7158, - "num_input_tokens_seen": 112910235, - "step": 5314 - }, - { - "epoch": 0.6390909637467684, - "flos": 14190295544640.0, - "grad_norm": 4.323025891253176, - "learning_rate": 1.2175738201040626e-06, - "loss": 0.7649, - "num_input_tokens_seen": 112926995, - "step": 5315 - }, - { - "epoch": 0.6392112066374076, - "flos": 17641067870040.0, - "grad_norm": 3.1872519602289677, - "learning_rate": 1.2168569917652855e-06, - "loss": 0.7742, - "num_input_tokens_seen": 112946570, - "step": 5316 - }, - { - "epoch": 0.6393314495280467, - "flos": 19643038019640.0, - "grad_norm": 3.7069790256628243, - "learning_rate": 1.2161402822286797e-06, - "loss": 0.6288, - "num_input_tokens_seen": 112966975, - "step": 5317 - }, - { - "epoch": 0.6394516924186857, - "flos": 14806611664560.0, - "grad_norm": 15.126724018802472, - 
"learning_rate": 1.2154236916029703e-06, - "loss": 0.7687, - "num_input_tokens_seen": 112984670, - "step": 5318 - }, - { - "epoch": 0.6395719353093249, - "flos": 13407285023520.0, - "grad_norm": 8.1424818502194, - "learning_rate": 1.2147072199968627e-06, - "loss": 0.71, - "num_input_tokens_seen": 113003025, - "step": 5319 - }, - { - "epoch": 0.6396921781999639, - "flos": 12813379663920.0, - "grad_norm": 6.4264280045911475, - "learning_rate": 1.2139908675190454e-06, - "loss": 0.7008, - "num_input_tokens_seen": 113021955, - "step": 5320 - }, - { - "epoch": 0.639812421090603, - "flos": 15540463250520.0, - "grad_norm": 5.621608813919578, - "learning_rate": 1.2132746342781883e-06, - "loss": 0.7436, - "num_input_tokens_seen": 113042835, - "step": 5321 - }, - { - "epoch": 0.6399326639812422, - "flos": 8087745203400.0, - "grad_norm": 6.173186730167619, - "learning_rate": 1.2125585203829442e-06, - "loss": 0.7713, - "num_input_tokens_seen": 113058195, - "step": 5322 - }, - { - "epoch": 0.6400529068718812, - "flos": 17508371774520.0, - "grad_norm": 9.449770194998461, - "learning_rate": 1.211842525941946e-06, - "loss": 0.7222, - "num_input_tokens_seen": 113077710, - "step": 5323 - }, - { - "epoch": 0.6401731497625203, - "flos": 32389972410240.0, - "grad_norm": 7.184625996390335, - "learning_rate": 1.2111266510638105e-06, - "loss": 0.789, - "num_input_tokens_seen": 113100355, - "step": 5324 - }, - { - "epoch": 0.6402933926531594, - "flos": 15103785964080.0, - "grad_norm": 3.117553536476224, - "learning_rate": 1.2104108958571346e-06, - "loss": 0.7962, - "num_input_tokens_seen": 113118345, - "step": 5325 - }, - { - "epoch": 0.6404136355437985, - "flos": 18213638907960.0, - "grad_norm": 2.2111153143561117, - "learning_rate": 1.2096952604304975e-06, - "loss": 0.7396, - "num_input_tokens_seen": 113138495, - "step": 5326 - }, - { - "epoch": 0.6405338784344375, - "flos": 29770736341920.0, - "grad_norm": 5.00750322026622, - "learning_rate": 1.2089797448924616e-06, - "loss": 0.6807, - "num_input_tokens_seen": 113162090, - "step": 5327 - }, - { - "epoch": 0.6406541213250767, - "flos": 15270132105720.0, - "grad_norm": 3.9084781243047715, - "learning_rate": 1.2082643493515692e-06, - "loss": 0.6402, - "num_input_tokens_seen": 113180130, - "step": 5328 - }, - { - "epoch": 0.6407743642157158, - "flos": 17053906081920.0, - "grad_norm": 2.9651830828334496, - "learning_rate": 1.207549073916346e-06, - "loss": 0.8071, - "num_input_tokens_seen": 113200785, - "step": 5329 - }, - { - "epoch": 0.6408946071063548, - "flos": 10923531127200.0, - "grad_norm": 4.317496238932161, - "learning_rate": 1.2068339186952976e-06, - "loss": 0.771, - "num_input_tokens_seen": 113218045, - "step": 5330 - }, - { - "epoch": 0.6410148499969939, - "flos": 16485830758320.0, - "grad_norm": 3.6765071140045644, - "learning_rate": 1.2061188837969136e-06, - "loss": 0.7125, - "num_input_tokens_seen": 113237375, - "step": 5331 - }, - { - "epoch": 0.641135092887633, - "flos": 9006269556480.0, - "grad_norm": 5.706828380741472, - "learning_rate": 1.2054039693296631e-06, - "loss": 0.8099, - "num_input_tokens_seen": 113255090, - "step": 5332 - }, - { - "epoch": 0.6412553357782721, - "flos": 16188878078520.0, - "grad_norm": 4.404679471672025, - "learning_rate": 1.2046891754019992e-06, - "loss": 0.8059, - "num_input_tokens_seen": 113275420, - "step": 5333 - }, - { - "epoch": 0.6413755786689112, - "flos": 11571850975320.0, - "grad_norm": 3.549373603682152, - "learning_rate": 1.2039745021223548e-06, - "loss": 0.8043, - "num_input_tokens_seen": 113292560, - "step": 
5334 - }, - { - "epoch": 0.6414958215595503, - "flos": 50173682094600.0, - "grad_norm": 0.8717240247277966, - "learning_rate": 1.2032599495991456e-06, - "loss": 0.6133, - "num_input_tokens_seen": 113357020, - "step": 5335 - }, - { - "epoch": 0.6416160644501894, - "flos": 32443627059720.0, - "grad_norm": 2.641587663145041, - "learning_rate": 1.2025455179407685e-06, - "loss": 0.6804, - "num_input_tokens_seen": 113377900, - "step": 5336 - }, - { - "epoch": 0.6417363073408284, - "flos": 15324863193840.0, - "grad_norm": 6.465465061722588, - "learning_rate": 1.2018312072556022e-06, - "loss": 0.7185, - "num_input_tokens_seen": 113396120, - "step": 5337 - }, - { - "epoch": 0.6418565502314676, - "flos": 16431384609840.0, - "grad_norm": 3.9191644428129795, - "learning_rate": 1.2011170176520077e-06, - "loss": 0.7305, - "num_input_tokens_seen": 113416755, - "step": 5338 - }, - { - "epoch": 0.6419767931221066, - "flos": 18348424560840.0, - "grad_norm": 2.5843944728721957, - "learning_rate": 1.2004029492383256e-06, - "loss": 0.7933, - "num_input_tokens_seen": 113437815, - "step": 5339 - }, - { - "epoch": 0.6420970360127457, - "flos": 14217581938800.0, - "grad_norm": 4.1963155389189755, - "learning_rate": 1.1996890021228814e-06, - "loss": 0.7251, - "num_input_tokens_seen": 113454310, - "step": 5340 - }, - { - "epoch": 0.6422172789033849, - "flos": 29716353513360.0, - "grad_norm": 3.4005809119377317, - "learning_rate": 1.1989751764139785e-06, - "loss": 0.6837, - "num_input_tokens_seen": 113477680, - "step": 5341 - }, - { - "epoch": 0.6423375217940239, - "flos": 20292909205800.0, - "grad_norm": 3.6467526975127345, - "learning_rate": 1.1982614722199044e-06, - "loss": 0.802, - "num_input_tokens_seen": 113498575, - "step": 5342 - }, - { - "epoch": 0.642457764684663, - "flos": 13407633283080.0, - "grad_norm": 3.350815198410155, - "learning_rate": 1.1975478896489276e-06, - "loss": 0.7675, - "num_input_tokens_seen": 113516130, - "step": 5343 - }, - { - "epoch": 0.6425780075753021, - "flos": 14429129520600.0, - "grad_norm": 4.218478949987691, - "learning_rate": 1.1968344288092981e-06, - "loss": 0.7491, - "num_input_tokens_seen": 113532430, - "step": 5344 - }, - { - "epoch": 0.6426982504659412, - "flos": 15027878854080.0, - "grad_norm": 3.530349751725293, - "learning_rate": 1.1961210898092468e-06, - "loss": 0.633, - "num_input_tokens_seen": 113551100, - "step": 5345 - }, - { - "epoch": 0.6428184933565803, - "flos": 13023914127000.0, - "grad_norm": 5.378414400283939, - "learning_rate": 1.1954078727569874e-06, - "loss": 0.7716, - "num_input_tokens_seen": 113568120, - "step": 5346 - }, - { - "epoch": 0.6429387362472194, - "flos": 16432999267800.0, - "grad_norm": 3.69185510560593, - "learning_rate": 1.1946947777607141e-06, - "loss": 0.7671, - "num_input_tokens_seen": 113588975, - "step": 5347 - }, - { - "epoch": 0.6430589791378585, - "flos": 18131494785840.0, - "grad_norm": 4.114833672871299, - "learning_rate": 1.1939818049286024e-06, - "loss": 0.7851, - "num_input_tokens_seen": 113606855, - "step": 5348 - }, - { - "epoch": 0.6431792220284975, - "flos": 18241906760880.0, - "grad_norm": 3.3148632616291036, - "learning_rate": 1.1932689543688101e-06, - "loss": 0.7281, - "num_input_tokens_seen": 113627680, - "step": 5349 - }, - { - "epoch": 0.6432994649191367, - "flos": 15378422863440.0, - "grad_norm": 12.141118926969957, - "learning_rate": 1.1925562261894756e-06, - "loss": 0.7111, - "num_input_tokens_seen": 113646480, - "step": 5350 - }, - { - "epoch": 0.6434197078097758, - "flos": 22669797042600.0, - "grad_norm": 
4.006377143449148, - "learning_rate": 1.1918436204987207e-06, - "loss": 0.7665, - "num_input_tokens_seen": 113668060, - "step": 5351 - }, - { - "epoch": 0.6435399507004148, - "flos": 10923404487360.0, - "grad_norm": 5.018566564340635, - "learning_rate": 1.191131137404645e-06, - "loss": 0.7924, - "num_input_tokens_seen": 113684520, - "step": 5352 - }, - { - "epoch": 0.643660193591054, - "flos": 14542010972520.0, - "grad_norm": 6.80866350410697, - "learning_rate": 1.190418777015333e-06, - "loss": 0.758, - "num_input_tokens_seen": 113703150, - "step": 5353 - }, - { - "epoch": 0.643780436481693, - "flos": 17673609817560.0, - "grad_norm": 2.898187261508648, - "learning_rate": 1.1897065394388487e-06, - "loss": 0.7232, - "num_input_tokens_seen": 113723310, - "step": 5354 - }, - { - "epoch": 0.6439006793723321, - "flos": 16944823825200.0, - "grad_norm": 3.9347851794690567, - "learning_rate": 1.1889944247832385e-06, - "loss": 0.7488, - "num_input_tokens_seen": 113743270, - "step": 5355 - }, - { - "epoch": 0.6440209222629713, - "flos": 17291252039760.0, - "grad_norm": 3.977566995014553, - "learning_rate": 1.1882824331565283e-06, - "loss": 0.6879, - "num_input_tokens_seen": 113762450, - "step": 5356 - }, - { - "epoch": 0.6441411651536103, - "flos": 12057465577200.0, - "grad_norm": 4.149965741119536, - "learning_rate": 1.1875705646667287e-06, - "loss": 0.8714, - "num_input_tokens_seen": 113780060, - "step": 5357 - }, - { - "epoch": 0.6442614080442494, - "flos": 18618977325360.0, - "grad_norm": 8.065336452970167, - "learning_rate": 1.1868588194218282e-06, - "loss": 0.7336, - "num_input_tokens_seen": 113800160, - "step": 5358 - }, - { - "epoch": 0.6443816509348885, - "flos": 20752725431640.0, - "grad_norm": 3.9510689315170295, - "learning_rate": 1.1861471975297979e-06, - "loss": 0.7198, - "num_input_tokens_seen": 113821575, - "step": 5359 - }, - { - "epoch": 0.6445018938255276, - "flos": 26966985806280.0, - "grad_norm": 2.6925452138067345, - "learning_rate": 1.185435699098591e-06, - "loss": 0.7013, - "num_input_tokens_seen": 113847490, - "step": 5360 - }, - { - "epoch": 0.6446221367161666, - "flos": 10545130844400.0, - "grad_norm": 11.306159671656689, - "learning_rate": 1.1847243242361403e-06, - "loss": 0.7511, - "num_input_tokens_seen": 113865800, - "step": 5361 - }, - { - "epoch": 0.6447423796068057, - "flos": 18025895124720.0, - "grad_norm": 8.075832491735468, - "learning_rate": 1.1840130730503624e-06, - "loss": 0.7648, - "num_input_tokens_seen": 113886800, - "step": 5362 - }, - { - "epoch": 0.6448626224974449, - "flos": 18350007558840.0, - "grad_norm": 3.989836894743352, - "learning_rate": 1.1833019456491518e-06, - "loss": 0.7329, - "num_input_tokens_seen": 113908050, - "step": 5363 - }, - { - "epoch": 0.6449828653880839, - "flos": 16487382096360.0, - "grad_norm": 3.815394444103928, - "learning_rate": 1.1825909421403871e-06, - "loss": 0.7467, - "num_input_tokens_seen": 113926865, - "step": 5364 - }, - { - "epoch": 0.645103108278723, - "flos": 18828720289440.0, - "grad_norm": 4.154744728959576, - "learning_rate": 1.181880062631926e-06, - "loss": 0.741, - "num_input_tokens_seen": 113945920, - "step": 5365 - }, - { - "epoch": 0.6452233511693621, - "flos": 20127702822720.0, - "grad_norm": 5.612284211426237, - "learning_rate": 1.1811693072316093e-06, - "loss": 0.8406, - "num_input_tokens_seen": 113963320, - "step": 5366 - }, - { - "epoch": 0.6453435940600012, - "flos": 14028856696800.0, - "grad_norm": 4.246438722186988, - "learning_rate": 1.1804586760472574e-06, - "loss": 0.8123, - 
"num_input_tokens_seen": 113979505, - "step": 5367 - }, - { - "epoch": 0.6454638369506402, - "flos": 18860534057880.0, - "grad_norm": 3.6545327873476285, - "learning_rate": 1.1797481691866736e-06, - "loss": 0.7729, - "num_input_tokens_seen": 113996450, - "step": 5368 - }, - { - "epoch": 0.6455840798412794, - "flos": 15345564316320.0, - "grad_norm": 4.148935176788894, - "learning_rate": 1.1790377867576393e-06, - "loss": 0.8151, - "num_input_tokens_seen": 114013920, - "step": 5369 - }, - { - "epoch": 0.6457043227319185, - "flos": 19104876866880.0, - "grad_norm": 3.7549761498883676, - "learning_rate": 1.1783275288679203e-06, - "loss": 0.7439, - "num_input_tokens_seen": 114030805, - "step": 5370 - }, - { - "epoch": 0.6458245656225575, - "flos": 44497709512560.0, - "grad_norm": 1.1473577185124268, - "learning_rate": 1.177617395625262e-06, - "loss": 0.6452, - "num_input_tokens_seen": 114088500, - "step": 5371 - }, - { - "epoch": 0.6459448085131967, - "flos": 16891644075120.0, - "grad_norm": 3.1632286951132524, - "learning_rate": 1.1769073871373908e-06, - "loss": 0.7433, - "num_input_tokens_seen": 114108425, - "step": 5372 - }, - { - "epoch": 0.6460650514038357, - "flos": 16536857631120.0, - "grad_norm": 3.261708955121895, - "learning_rate": 1.176197503512015e-06, - "loss": 0.8234, - "num_input_tokens_seen": 114127860, - "step": 5373 - }, - { - "epoch": 0.6461852942944748, - "flos": 14811930537840.0, - "grad_norm": 5.481689317207401, - "learning_rate": 1.1754877448568223e-06, - "loss": 0.7936, - "num_input_tokens_seen": 114147035, - "step": 5374 - }, - { - "epoch": 0.646305537185114, - "flos": 17106294333000.0, - "grad_norm": 4.858751655658806, - "learning_rate": 1.1747781112794837e-06, - "loss": 0.8852, - "num_input_tokens_seen": 114163250, - "step": 5375 - }, - { - "epoch": 0.646425780075753, - "flos": 17779779357960.0, - "grad_norm": 2.5611235480605337, - "learning_rate": 1.1740686028876487e-06, - "loss": 0.8157, - "num_input_tokens_seen": 114181835, - "step": 5376 - }, - { - "epoch": 0.6465460229663921, - "flos": 15216920695680.0, - "grad_norm": 3.4947175625914544, - "learning_rate": 1.1733592197889507e-06, - "loss": 0.7306, - "num_input_tokens_seen": 114198465, - "step": 5377 - }, - { - "epoch": 0.6466662658570312, - "flos": 16725614533080.0, - "grad_norm": 4.187494534161544, - "learning_rate": 1.1726499620910014e-06, - "loss": 0.7081, - "num_input_tokens_seen": 114218465, - "step": 5378 - }, - { - "epoch": 0.6467865087476703, - "flos": 11139574423320.0, - "grad_norm": 3.4741135104559717, - "learning_rate": 1.1719408299013955e-06, - "loss": 0.7405, - "num_input_tokens_seen": 114236910, - "step": 5379 - }, - { - "epoch": 0.6469067516383094, - "flos": 14299029541800.0, - "grad_norm": 6.202152973570416, - "learning_rate": 1.1712318233277067e-06, - "loss": 0.7306, - "num_input_tokens_seen": 114255650, - "step": 5380 - }, - { - "epoch": 0.6470269945289485, - "flos": 47993873237880.0, - "grad_norm": 0.7515613278322338, - "learning_rate": 1.1705229424774916e-06, - "loss": 0.5842, - "num_input_tokens_seen": 114309640, - "step": 5381 - }, - { - "epoch": 0.6471472374195876, - "flos": 22530325715640.0, - "grad_norm": 2.9829406486947163, - "learning_rate": 1.1698141874582867e-06, - "loss": 0.6263, - "num_input_tokens_seen": 114330405, - "step": 5382 - }, - { - "epoch": 0.6472674803102266, - "flos": 15000687439800.0, - "grad_norm": 2.839224263748963, - "learning_rate": 1.169105558377609e-06, - "loss": 0.7035, - "num_input_tokens_seen": 114350215, - "step": 5383 - }, - { - "epoch": 0.6473877232008658, - 
"flos": 18103923452040.0, - "grad_norm": 3.287586451797055, - "learning_rate": 1.1683970553429587e-06, - "loss": 0.7694, - "num_input_tokens_seen": 114371390, - "step": 5384 - }, - { - "epoch": 0.6475079660915048, - "flos": 11569159878720.0, - "grad_norm": 3.546737466573938, - "learning_rate": 1.1676886784618128e-06, - "loss": 0.8026, - "num_input_tokens_seen": 114387775, - "step": 5385 - }, - { - "epoch": 0.6476282089821439, - "flos": 12676346153880.0, - "grad_norm": 4.4585155319109315, - "learning_rate": 1.1669804278416332e-06, - "loss": 0.8056, - "num_input_tokens_seen": 114402220, - "step": 5386 - }, - { - "epoch": 0.6477484518727831, - "flos": 14758212568440.0, - "grad_norm": 2.949342596220034, - "learning_rate": 1.1662723035898602e-06, - "loss": 0.6965, - "num_input_tokens_seen": 114421700, - "step": 5387 - }, - { - "epoch": 0.6478686947634221, - "flos": 18618502425960.0, - "grad_norm": 2.909520431800816, - "learning_rate": 1.165564305813915e-06, - "loss": 0.8053, - "num_input_tokens_seen": 114440420, - "step": 5388 - }, - { - "epoch": 0.6479889376540612, - "flos": 14676290066040.0, - "grad_norm": 2.968411878103166, - "learning_rate": 1.1648564346212019e-06, - "loss": 0.7946, - "num_input_tokens_seen": 114459260, - "step": 5389 - }, - { - "epoch": 0.6481091805447003, - "flos": 19615530005760.0, - "grad_norm": 7.319225120596223, - "learning_rate": 1.164148690119104e-06, - "loss": 0.7506, - "num_input_tokens_seen": 114480260, - "step": 5390 - }, - { - "epoch": 0.6482294234353394, - "flos": 17539394043960.0, - "grad_norm": 3.57447568876681, - "learning_rate": 1.163441072414985e-06, - "loss": 0.7259, - "num_input_tokens_seen": 114500185, - "step": 5391 - }, - { - "epoch": 0.6483496663259785, - "flos": 19209653369040.0, - "grad_norm": 4.34688032686084, - "learning_rate": 1.16273358161619e-06, - "loss": 0.6812, - "num_input_tokens_seen": 114520240, - "step": 5392 - }, - { - "epoch": 0.6484699092166175, - "flos": 15296880280560.0, - "grad_norm": 4.407065674515989, - "learning_rate": 1.1620262178300446e-06, - "loss": 0.8302, - "num_input_tokens_seen": 114538575, - "step": 5393 - }, - { - "epoch": 0.6485901521072567, - "flos": 24289282774560.0, - "grad_norm": 6.916936417187458, - "learning_rate": 1.1613189811638563e-06, - "loss": 0.735, - "num_input_tokens_seen": 114560020, - "step": 5394 - }, - { - "epoch": 0.6487103949978957, - "flos": 16297580415720.0, - "grad_norm": 3.3651748625396496, - "learning_rate": 1.1606118717249117e-06, - "loss": 0.7569, - "num_input_tokens_seen": 114579840, - "step": 5395 - }, - { - "epoch": 0.6488306378885348, - "flos": 16783258337520.0, - "grad_norm": 2.606509617011884, - "learning_rate": 1.1599048896204787e-06, - "loss": 0.6504, - "num_input_tokens_seen": 114599440, - "step": 5396 - }, - { - "epoch": 0.648950880779174, - "flos": 14892871581480.0, - "grad_norm": 3.6978358873311006, - "learning_rate": 1.1591980349578061e-06, - "loss": 0.7913, - "num_input_tokens_seen": 114617830, - "step": 5397 - }, - { - "epoch": 0.649071123669813, - "flos": 44171824120680.0, - "grad_norm": 0.7894419522558944, - "learning_rate": 1.158491307844123e-06, - "loss": 0.5673, - "num_input_tokens_seen": 114677470, - "step": 5398 - }, - { - "epoch": 0.6491913665604521, - "flos": 14944658293320.0, - "grad_norm": 4.04695512257798, - "learning_rate": 1.1577847083866387e-06, - "loss": 0.8343, - "num_input_tokens_seen": 114696225, - "step": 5399 - }, - { - "epoch": 0.6493116094510912, - "flos": 12355019796240.0, - "grad_norm": 5.1333659670564495, - "learning_rate": 1.1570782366925453e-06, - 
"loss": 0.7047, - "num_input_tokens_seen": 114714460, - "step": 5400 - }, - { - "epoch": 0.6494318523417303, - "flos": 13728009841920.0, - "grad_norm": 4.667893770645206, - "learning_rate": 1.1563718928690132e-06, - "loss": 0.7476, - "num_input_tokens_seen": 114731615, - "step": 5401 - }, - { - "epoch": 0.6495520952323693, - "flos": 13860800917320.0, - "grad_norm": 3.471620076844063, - "learning_rate": 1.1556656770231942e-06, - "loss": 0.7057, - "num_input_tokens_seen": 114747530, - "step": 5402 - }, - { - "epoch": 0.6496723381230085, - "flos": 16645971547800.0, - "grad_norm": 2.709282889554866, - "learning_rate": 1.1549595892622207e-06, - "loss": 0.7475, - "num_input_tokens_seen": 114766020, - "step": 5403 - }, - { - "epoch": 0.6497925810136476, - "flos": 45809034938880.0, - "grad_norm": 0.8610231028125583, - "learning_rate": 1.1542536296932047e-06, - "loss": 0.6257, - "num_input_tokens_seen": 114829275, - "step": 5404 - }, - { - "epoch": 0.6499128239042866, - "flos": 14730451274880.0, - "grad_norm": 2.4424608073999496, - "learning_rate": 1.1535477984232414e-06, - "loss": 0.6812, - "num_input_tokens_seen": 114848870, - "step": 5405 - }, - { - "epoch": 0.6500330667949258, - "flos": 17917002827760.0, - "grad_norm": 3.224935569415284, - "learning_rate": 1.152842095559404e-06, - "loss": 0.7511, - "num_input_tokens_seen": 114869250, - "step": 5406 - }, - { - "epoch": 0.6501533096855648, - "flos": 18667218121680.0, - "grad_norm": 2.964521428255155, - "learning_rate": 1.1521365212087474e-06, - "loss": 0.7606, - "num_input_tokens_seen": 114888955, - "step": 5407 - }, - { - "epoch": 0.6502735525762039, - "flos": 32982104812080.0, - "grad_norm": 3.5919271075237815, - "learning_rate": 1.1514310754783062e-06, - "loss": 0.683, - "num_input_tokens_seen": 114911625, - "step": 5408 - }, - { - "epoch": 0.6503937954668431, - "flos": 21022455037200.0, - "grad_norm": 3.494214726147951, - "learning_rate": 1.1507257584750964e-06, - "loss": 0.7041, - "num_input_tokens_seen": 114931525, - "step": 5409 - }, - { - "epoch": 0.6505140383574821, - "flos": 15297165220200.0, - "grad_norm": 6.531454420769231, - "learning_rate": 1.150020570306113e-06, - "loss": 0.745, - "num_input_tokens_seen": 114950385, - "step": 5410 - }, - { - "epoch": 0.6506342812481212, - "flos": 15055070268360.0, - "grad_norm": 3.9716805279151433, - "learning_rate": 1.1493155110783338e-06, - "loss": 0.7262, - "num_input_tokens_seen": 114968630, - "step": 5411 - }, - { - "epoch": 0.6507545241387603, - "flos": 22182694422600.0, - "grad_norm": 3.9799175386177006, - "learning_rate": 1.1486105808987155e-06, - "loss": 0.6915, - "num_input_tokens_seen": 114989840, - "step": 5412 - }, - { - "epoch": 0.6508747670293994, - "flos": 12489140589960.0, - "grad_norm": 2.9936953099617067, - "learning_rate": 1.1479057798741947e-06, - "loss": 0.796, - "num_input_tokens_seen": 115007615, - "step": 5413 - }, - { - "epoch": 0.6509950099200384, - "flos": 50555243882040.0, - "grad_norm": 0.819626803141477, - "learning_rate": 1.14720110811169e-06, - "loss": 0.5719, - "num_input_tokens_seen": 115064565, - "step": 5414 - }, - { - "epoch": 0.6511152528106776, - "flos": 16351963244280.0, - "grad_norm": 2.9968114438442828, - "learning_rate": 1.146496565718098e-06, - "loss": 0.7462, - "num_input_tokens_seen": 115084855, - "step": 5415 - }, - { - "epoch": 0.6512354957013167, - "flos": 15000972379440.0, - "grad_norm": 4.950271891823571, - "learning_rate": 1.1457921528002996e-06, - "loss": 0.7412, - "num_input_tokens_seen": 115103010, - "step": 5416 - }, - { - "epoch": 
0.6513557385919557, - "flos": 23744124770640.0, - "grad_norm": 3.640161574833397, - "learning_rate": 1.1450878694651522e-06, - "loss": 0.7086, - "num_input_tokens_seen": 115123295, - "step": 5417 - }, - { - "epoch": 0.6514759814825949, - "flos": 8762876546280.0, - "grad_norm": 4.151219126865963, - "learning_rate": 1.1443837158194954e-06, - "loss": 0.6068, - "num_input_tokens_seen": 115138160, - "step": 5418 - }, - { - "epoch": 0.651596224373234, - "flos": 16484342740200.0, - "grad_norm": 2.761413989052186, - "learning_rate": 1.1436796919701484e-06, - "loss": 0.7291, - "num_input_tokens_seen": 115156595, - "step": 5419 - }, - { - "epoch": 0.651716467263873, - "flos": 20401041663720.0, - "grad_norm": 22.89571855659167, - "learning_rate": 1.1429757980239115e-06, - "loss": 0.6007, - "num_input_tokens_seen": 115176740, - "step": 5420 - }, - { - "epoch": 0.6518367101545122, - "flos": 17808838709880.0, - "grad_norm": 5.257373536421475, - "learning_rate": 1.1422720340875636e-06, - "loss": 0.784, - "num_input_tokens_seen": 115195210, - "step": 5421 - }, - { - "epoch": 0.6519569530451512, - "flos": 14623458575520.0, - "grad_norm": 4.431800513243412, - "learning_rate": 1.1415684002678671e-06, - "loss": 0.7798, - "num_input_tokens_seen": 115213690, - "step": 5422 - }, - { - "epoch": 0.6520771959357903, - "flos": 15780342005160.0, - "grad_norm": 13.424578748519274, - "learning_rate": 1.1408648966715617e-06, - "loss": 0.7621, - "num_input_tokens_seen": 115230930, - "step": 5423 - }, - { - "epoch": 0.6521974388264293, - "flos": 16620838030920.0, - "grad_norm": 3.888493210478451, - "learning_rate": 1.1401615234053683e-06, - "loss": 0.7106, - "num_input_tokens_seen": 115249470, - "step": 5424 - }, - { - "epoch": 0.6523176817170685, - "flos": 16836881327040.0, - "grad_norm": 3.2678069349804986, - "learning_rate": 1.1394582805759885e-06, - "loss": 0.7395, - "num_input_tokens_seen": 115268470, - "step": 5425 - }, - { - "epoch": 0.6524379246077076, - "flos": 15863752525680.0, - "grad_norm": 5.0065251687919305, - "learning_rate": 1.1387551682901022e-06, - "loss": 0.747, - "num_input_tokens_seen": 115288795, - "step": 5426 - }, - { - "epoch": 0.6525581674983466, - "flos": 14163768989520.0, - "grad_norm": 3.9881062109605785, - "learning_rate": 1.138052186654373e-06, - "loss": 0.6951, - "num_input_tokens_seen": 115305985, - "step": 5427 - }, - { - "epoch": 0.6526784103889858, - "flos": 12516775243680.0, - "grad_norm": 5.215977463375153, - "learning_rate": 1.1373493357754417e-06, - "loss": 0.8575, - "num_input_tokens_seen": 115324610, - "step": 5428 - }, - { - "epoch": 0.6527986532796248, - "flos": 13866373070280.0, - "grad_norm": 4.348748494033136, - "learning_rate": 1.1366466157599303e-06, - "loss": 0.757, - "num_input_tokens_seen": 115343605, - "step": 5429 - }, - { - "epoch": 0.6529188961702639, - "flos": 10349567051040.0, - "grad_norm": 3.8916883126309663, - "learning_rate": 1.1359440267144412e-06, - "loss": 0.7353, - "num_input_tokens_seen": 115360780, - "step": 5430 - }, - { - "epoch": 0.653039139060903, - "flos": 26960685474240.0, - "grad_norm": 3.5295868054161486, - "learning_rate": 1.1352415687455556e-06, - "loss": 0.7311, - "num_input_tokens_seen": 115381760, - "step": 5431 - }, - { - "epoch": 0.6531593819515421, - "flos": 18593558868840.0, - "grad_norm": 5.97607579462277, - "learning_rate": 1.1345392419598362e-06, - "loss": 0.6216, - "num_input_tokens_seen": 115400360, - "step": 5432 - }, - { - "epoch": 0.6532796248421812, - "flos": 15486681961200.0, - "grad_norm": 3.0668119320119005, - "learning_rate": 
1.1338370464638263e-06, - "loss": 0.698, - "num_input_tokens_seen": 115419480, - "step": 5433 - }, - { - "epoch": 0.6533998677328203, - "flos": 12894542327280.0, - "grad_norm": 5.519840175139316, - "learning_rate": 1.1331349823640474e-06, - "loss": 0.6185, - "num_input_tokens_seen": 115436630, - "step": 5434 - }, - { - "epoch": 0.6535201106234594, - "flos": 20831007038640.0, - "grad_norm": 4.159411382480225, - "learning_rate": 1.132433049767003e-06, - "loss": 0.7618, - "num_input_tokens_seen": 115454265, - "step": 5435 - }, - { - "epoch": 0.6536403535140984, - "flos": 17268872939400.0, - "grad_norm": 3.206044276369023, - "learning_rate": 1.1317312487791748e-06, - "loss": 0.7927, - "num_input_tokens_seen": 115475635, - "step": 5436 - }, - { - "epoch": 0.6537605964047376, - "flos": 15783349701360.0, - "grad_norm": 4.851654297579103, - "learning_rate": 1.1310295795070253e-06, - "loss": 0.698, - "num_input_tokens_seen": 115495295, - "step": 5437 - }, - { - "epoch": 0.6538808392953767, - "flos": 19672129031520.0, - "grad_norm": 4.827863075040332, - "learning_rate": 1.1303280420569982e-06, - "loss": 0.7825, - "num_input_tokens_seen": 115516900, - "step": 5438 - }, - { - "epoch": 0.6540010821860157, - "flos": 22561569604800.0, - "grad_norm": 3.35126200575992, - "learning_rate": 1.1296266365355158e-06, - "loss": 0.7578, - "num_input_tokens_seen": 115540005, - "step": 5439 - }, - { - "epoch": 0.6541213250766549, - "flos": 19726290240360.0, - "grad_norm": 3.4564794757398656, - "learning_rate": 1.1289253630489806e-06, - "loss": 0.701, - "num_input_tokens_seen": 115560775, - "step": 5440 - }, - { - "epoch": 0.6542415679672939, - "flos": 14002076862000.0, - "grad_norm": 4.466755028225494, - "learning_rate": 1.1282242217037753e-06, - "loss": 0.7263, - "num_input_tokens_seen": 115577995, - "step": 5441 - }, - { - "epoch": 0.654361810857933, - "flos": 35464149070560.0, - "grad_norm": 4.341557669903583, - "learning_rate": 1.127523212606262e-06, - "loss": 0.6118, - "num_input_tokens_seen": 115600540, - "step": 5442 - }, - { - "epoch": 0.6544820537485722, - "flos": 19752880115400.0, - "grad_norm": 2.5038859683448775, - "learning_rate": 1.1268223358627835e-06, - "loss": 0.7107, - "num_input_tokens_seen": 115622750, - "step": 5443 - }, - { - "epoch": 0.6546022966392112, - "flos": 15270385385400.0, - "grad_norm": 9.605157785862167, - "learning_rate": 1.126121591579663e-06, - "loss": 0.6966, - "num_input_tokens_seen": 115641675, - "step": 5444 - }, - { - "epoch": 0.6547225395298503, - "flos": 18267990076560.0, - "grad_norm": 3.8788907606847083, - "learning_rate": 1.1254209798632018e-06, - "loss": 0.6768, - "num_input_tokens_seen": 115662415, - "step": 5445 - }, - { - "epoch": 0.6548427824204894, - "flos": 16513592051880.0, - "grad_norm": 3.7163680967369657, - "learning_rate": 1.124720500819683e-06, - "loss": 0.8334, - "num_input_tokens_seen": 115680290, - "step": 5446 - }, - { - "epoch": 0.6549630253111285, - "flos": 13461889471800.0, - "grad_norm": 4.773337405290056, - "learning_rate": 1.1240201545553682e-06, - "loss": 0.8053, - "num_input_tokens_seen": 115697810, - "step": 5447 - }, - { - "epoch": 0.6550832682017675, - "flos": 18452789483520.0, - "grad_norm": 4.393675915024303, - "learning_rate": 1.1233199411764987e-06, - "loss": 0.7252, - "num_input_tokens_seen": 115716965, - "step": 5448 - }, - { - "epoch": 0.6552035110924067, - "flos": 16648409364720.0, - "grad_norm": 6.572751421496123, - "learning_rate": 1.1226198607892978e-06, - "loss": 0.6759, - "num_input_tokens_seen": 115737245, - "step": 5449 - }, - { 
- "epoch": 0.6553237539830458, - "flos": 15946054947600.0, - "grad_norm": 6.9389984986880435, - "learning_rate": 1.1219199134999664e-06, - "loss": 0.7923, - "num_input_tokens_seen": 115755465, - "step": 5450 - }, - { - "epoch": 0.6554439968736848, - "flos": 15271176884400.0, - "grad_norm": 3.8643320966767027, - "learning_rate": 1.1212200994146863e-06, - "loss": 0.7599, - "num_input_tokens_seen": 115772940, - "step": 5451 - }, - { - "epoch": 0.655564239764324, - "flos": 11756492082480.0, - "grad_norm": 3.3342722682982964, - "learning_rate": 1.120520418639618e-06, - "loss": 0.7419, - "num_input_tokens_seen": 115791195, - "step": 5452 - }, - { - "epoch": 0.655684482654963, - "flos": 21697206460560.0, - "grad_norm": 4.260254549829822, - "learning_rate": 1.119820871280903e-06, - "loss": 0.8189, - "num_input_tokens_seen": 115811990, - "step": 5453 - }, - { - "epoch": 0.6558047255456021, - "flos": 21859341827520.0, - "grad_norm": 3.7857424948327663, - "learning_rate": 1.1191214574446614e-06, - "loss": 0.7022, - "num_input_tokens_seen": 115831955, - "step": 5454 - }, - { - "epoch": 0.6559249684362413, - "flos": 21318837837720.0, - "grad_norm": 2.7668214785300433, - "learning_rate": 1.118422177236995e-06, - "loss": 0.7886, - "num_input_tokens_seen": 115853500, - "step": 5455 - }, - { - "epoch": 0.6560452113268803, - "flos": 14780718308640.0, - "grad_norm": 4.924940529091482, - "learning_rate": 1.1177230307639835e-06, - "loss": 0.8342, - "num_input_tokens_seen": 115870760, - "step": 5456 - }, - { - "epoch": 0.6561654542175194, - "flos": 18348456220800.0, - "grad_norm": 4.383834420536954, - "learning_rate": 1.1170240181316865e-06, - "loss": 0.7736, - "num_input_tokens_seen": 115891925, - "step": 5457 - }, - { - "epoch": 0.6562856971081584, - "flos": 16728622229280.0, - "grad_norm": 3.2086956430608002, - "learning_rate": 1.1163251394461442e-06, - "loss": 0.7845, - "num_input_tokens_seen": 115910125, - "step": 5458 - }, - { - "epoch": 0.6564059399987976, - "flos": 13779068374680.0, - "grad_norm": 3.347178475149904, - "learning_rate": 1.1156263948133746e-06, - "loss": 0.8139, - "num_input_tokens_seen": 115926500, - "step": 5459 - }, - { - "epoch": 0.6565261828894366, - "flos": 18673201854120.0, - "grad_norm": 6.098093216345921, - "learning_rate": 1.1149277843393787e-06, - "loss": 0.7619, - "num_input_tokens_seen": 115947380, - "step": 5460 - }, - { - "epoch": 0.6566464257800757, - "flos": 14380382164920.0, - "grad_norm": 3.8687539983429837, - "learning_rate": 1.1142293081301342e-06, - "loss": 0.6184, - "num_input_tokens_seen": 115964980, - "step": 5461 - }, - { - "epoch": 0.6567666686707149, - "flos": 17215249949880.0, - "grad_norm": 2.5091289736947604, - "learning_rate": 1.1135309662915995e-06, - "loss": 0.6526, - "num_input_tokens_seen": 115984865, - "step": 5462 - }, - { - "epoch": 0.6568869115613539, - "flos": 24073587738000.0, - "grad_norm": 4.1991939952031965, - "learning_rate": 1.112832758929712e-06, - "loss": 0.5836, - "num_input_tokens_seen": 116007195, - "step": 5463 - }, - { - "epoch": 0.657007154451993, - "flos": 13461826151880.0, - "grad_norm": 4.135986493854126, - "learning_rate": 1.11213468615039e-06, - "loss": 0.7301, - "num_input_tokens_seen": 116026345, - "step": 5464 - }, - { - "epoch": 0.6571273973426321, - "flos": 18430980262440.0, - "grad_norm": 4.1435554930546585, - "learning_rate": 1.1114367480595292e-06, - "loss": 0.7495, - "num_input_tokens_seen": 116047145, - "step": 5465 - }, - { - "epoch": 0.6572476402332712, - "flos": 12786378209400.0, - "grad_norm": 5.813680549424927, - 
"learning_rate": 1.1107389447630086e-06, - "loss": 0.8056, - "num_input_tokens_seen": 116065565, - "step": 5466 - }, - { - "epoch": 0.6573678831239103, - "flos": 12404147071440.0, - "grad_norm": 2.8153691386189896, - "learning_rate": 1.1100412763666818e-06, - "loss": 0.7663, - "num_input_tokens_seen": 116080545, - "step": 5467 - }, - { - "epoch": 0.6574881260145494, - "flos": 17507516955600.0, - "grad_norm": 5.491206856186239, - "learning_rate": 1.1093437429763865e-06, - "loss": 0.7825, - "num_input_tokens_seen": 116100530, - "step": 5468 - }, - { - "epoch": 0.6576083689051885, - "flos": 8115633136800.0, - "grad_norm": 3.964987104859134, - "learning_rate": 1.1086463446979361e-06, - "loss": 0.7189, - "num_input_tokens_seen": 116118600, - "step": 5469 - }, - { - "epoch": 0.6577286117958275, - "flos": 16431764529360.0, - "grad_norm": 3.8253884012330155, - "learning_rate": 1.1079490816371277e-06, - "loss": 0.7596, - "num_input_tokens_seen": 116138085, - "step": 5470 - }, - { - "epoch": 0.6578488546864667, - "flos": 16000089516600.0, - "grad_norm": 4.407174087911902, - "learning_rate": 1.1072519538997352e-06, - "loss": 0.7154, - "num_input_tokens_seen": 116156945, - "step": 5471 - }, - { - "epoch": 0.6579690975771058, - "flos": 17236204352040.0, - "grad_norm": 4.169567598753051, - "learning_rate": 1.1065549615915095e-06, - "loss": 0.8053, - "num_input_tokens_seen": 116176495, - "step": 5472 - }, - { - "epoch": 0.6580893404677448, - "flos": 24045826444440.0, - "grad_norm": 6.933718088673863, - "learning_rate": 1.105858104818187e-06, - "loss": 0.7429, - "num_input_tokens_seen": 116197370, - "step": 5473 - }, - { - "epoch": 0.658209583358384, - "flos": 11571439395840.0, - "grad_norm": 5.450012018032997, - "learning_rate": 1.105161383685478e-06, - "loss": 0.7296, - "num_input_tokens_seen": 116213475, - "step": 5474 - }, - { - "epoch": 0.658329826249023, - "flos": 46217001132960.0, - "grad_norm": 0.7553019279273554, - "learning_rate": 1.1044647982990771e-06, - "loss": 0.5912, - "num_input_tokens_seen": 116275080, - "step": 5475 - }, - { - "epoch": 0.6584500691396621, - "flos": 23289975677640.0, - "grad_norm": 3.820971597868949, - "learning_rate": 1.1037683487646536e-06, - "loss": 0.6206, - "num_input_tokens_seen": 116295085, - "step": 5476 - }, - { - "epoch": 0.6585703120303013, - "flos": 13434951337200.0, - "grad_norm": 2.751932158975759, - "learning_rate": 1.1030720351878583e-06, - "loss": 0.7506, - "num_input_tokens_seen": 116312925, - "step": 5477 - }, - { - "epoch": 0.6586905549209403, - "flos": 42969956379240.0, - "grad_norm": 0.8219426986603029, - "learning_rate": 1.102375857674323e-06, - "loss": 0.6068, - "num_input_tokens_seen": 116374560, - "step": 5478 - }, - { - "epoch": 0.6588107978115794, - "flos": 16673637861480.0, - "grad_norm": 3.1530890924925217, - "learning_rate": 1.1016798163296561e-06, - "loss": 0.8895, - "num_input_tokens_seen": 116393480, - "step": 5479 - }, - { - "epoch": 0.6589310407022185, - "flos": 15108471638160.0, - "grad_norm": 3.182279762624566, - "learning_rate": 1.1009839112594471e-06, - "loss": 0.6388, - "num_input_tokens_seen": 116411225, - "step": 5480 - }, - { - "epoch": 0.6590512835928576, - "flos": 18781619251680.0, - "grad_norm": 5.31892007091902, - "learning_rate": 1.1002881425692638e-06, - "loss": 0.7049, - "num_input_tokens_seen": 116431375, - "step": 5481 - }, - { - "epoch": 0.6591715264834966, - "flos": 17371148304720.0, - "grad_norm": 3.859715496904016, - "learning_rate": 1.0995925103646532e-06, - "loss": 0.7504, - "num_input_tokens_seen": 116449695, - 
"step": 5482 - }, - { - "epoch": 0.6592917693741358, - "flos": 26288340207840.0, - "grad_norm": 4.717020488358902, - "learning_rate": 1.0988970147511437e-06, - "loss": 0.6602, - "num_input_tokens_seen": 116471295, - "step": 5483 - }, - { - "epoch": 0.6594120122647749, - "flos": 15648152469000.0, - "grad_norm": 4.880448255744748, - "learning_rate": 1.0982016558342405e-06, - "loss": 0.7858, - "num_input_tokens_seen": 116489985, - "step": 5484 - }, - { - "epoch": 0.6595322551554139, - "flos": 14134456357920.0, - "grad_norm": 4.214045182907851, - "learning_rate": 1.0975064337194291e-06, - "loss": 0.7002, - "num_input_tokens_seen": 116507750, - "step": 5485 - }, - { - "epoch": 0.6596524980460531, - "flos": 12273793812960.0, - "grad_norm": 6.142895771298033, - "learning_rate": 1.0968113485121743e-06, - "loss": 0.6925, - "num_input_tokens_seen": 116527060, - "step": 5486 - }, - { - "epoch": 0.6597727409366921, - "flos": 15945516728280.0, - "grad_norm": 7.333097482992324, - "learning_rate": 1.0961164003179185e-06, - "loss": 0.7776, - "num_input_tokens_seen": 116545290, - "step": 5487 - }, - { - "epoch": 0.6598929838273312, - "flos": 17375200779600.0, - "grad_norm": 3.198594923296384, - "learning_rate": 1.0954215892420884e-06, - "loss": 0.816, - "num_input_tokens_seen": 116565710, - "step": 5488 - }, - { - "epoch": 0.6600132267179702, - "flos": 14595633962040.0, - "grad_norm": 5.191512980056462, - "learning_rate": 1.094726915390082e-06, - "loss": 0.6919, - "num_input_tokens_seen": 116583765, - "step": 5489 - }, - { - "epoch": 0.6601334696086094, - "flos": 16347720809640.0, - "grad_norm": 3.651025283203515, - "learning_rate": 1.0940323788672836e-06, - "loss": 0.6812, - "num_input_tokens_seen": 116602660, - "step": 5490 - }, - { - "epoch": 0.6602537124992485, - "flos": 18835210581240.0, - "grad_norm": 3.631566644148714, - "learning_rate": 1.0933379797790522e-06, - "loss": 0.7283, - "num_input_tokens_seen": 116621795, - "step": 5491 - }, - { - "epoch": 0.6603739553898875, - "flos": 18942804819840.0, - "grad_norm": 5.906422768947133, - "learning_rate": 1.0926437182307293e-06, - "loss": 0.6939, - "num_input_tokens_seen": 116640325, - "step": 5492 - }, - { - "epoch": 0.6604941982805267, - "flos": 17996614153080.0, - "grad_norm": 2.456288235975851, - "learning_rate": 1.0919495943276338e-06, - "loss": 0.7593, - "num_input_tokens_seen": 116661065, - "step": 5493 - }, - { - "epoch": 0.6606144411711657, - "flos": 9627841229760.0, - "grad_norm": 4.974905555633754, - "learning_rate": 1.0912556081750611e-06, - "loss": 0.7268, - "num_input_tokens_seen": 116678715, - "step": 5494 - }, - { - "epoch": 0.6607346840618048, - "flos": 18429713864040.0, - "grad_norm": 3.493960483058979, - "learning_rate": 1.0905617598782909e-06, - "loss": 0.7432, - "num_input_tokens_seen": 116698640, - "step": 5495 - }, - { - "epoch": 0.660854926952444, - "flos": 12866274474360.0, - "grad_norm": 3.6372136845593768, - "learning_rate": 1.0898680495425775e-06, - "loss": 0.7928, - "num_input_tokens_seen": 116716650, - "step": 5496 - }, - { - "epoch": 0.660975169843083, - "flos": 12272812354200.0, - "grad_norm": 3.438551308519092, - "learning_rate": 1.0891744772731594e-06, - "loss": 0.7901, - "num_input_tokens_seen": 116734185, - "step": 5497 - }, - { - "epoch": 0.6610954127337221, - "flos": 19699067166120.0, - "grad_norm": 4.837343569254394, - "learning_rate": 1.088481043175248e-06, - "loss": 0.6389, - "num_input_tokens_seen": 116754475, - "step": 5498 - }, - { - "epoch": 0.6612156556243612, - "flos": 19399075130160.0, - "grad_norm": 
4.236723864209527, - "learning_rate": 1.0877877473540368e-06, - "loss": 0.7428, - "num_input_tokens_seen": 116774780, - "step": 5499 - }, - { - "epoch": 0.6613358985150003, - "flos": 14460468389640.0, - "grad_norm": 3.465386762075281, - "learning_rate": 1.0870945899147002e-06, - "loss": 0.7097, - "num_input_tokens_seen": 116791145, - "step": 5500 - }, - { - "epoch": 0.6614561414056394, - "flos": 19670039474160.0, - "grad_norm": 4.040421336647543, - "learning_rate": 1.0864015709623879e-06, - "loss": 0.7386, - "num_input_tokens_seen": 116811735, - "step": 5501 - }, - { - "epoch": 0.6615763842962785, - "flos": 16756858422240.0, - "grad_norm": 3.9777691102836226, - "learning_rate": 1.0857086906022313e-06, - "loss": 0.7839, - "num_input_tokens_seen": 116829790, - "step": 5502 - }, - { - "epoch": 0.6616966271869176, - "flos": 18159604338960.0, - "grad_norm": 6.055725087505973, - "learning_rate": 1.0850159489393388e-06, - "loss": 0.7148, - "num_input_tokens_seen": 116848770, - "step": 5503 - }, - { - "epoch": 0.6618168700775566, - "flos": 12543934998000.0, - "grad_norm": 6.889700137673667, - "learning_rate": 1.0843233460787992e-06, - "loss": 0.8017, - "num_input_tokens_seen": 116865705, - "step": 5504 - }, - { - "epoch": 0.6619371129681958, - "flos": 18645662180280.0, - "grad_norm": 3.1763607683499826, - "learning_rate": 1.0836308821256805e-06, - "loss": 0.7659, - "num_input_tokens_seen": 116886225, - "step": 5505 - }, - { - "epoch": 0.6620573558588349, - "flos": 13164240272880.0, - "grad_norm": 9.837536791644194, - "learning_rate": 1.0829385571850282e-06, - "loss": 0.7727, - "num_input_tokens_seen": 116902925, - "step": 5506 - }, - { - "epoch": 0.6621775987494739, - "flos": 12975704990640.0, - "grad_norm": 10.194020437165106, - "learning_rate": 1.0822463713618679e-06, - "loss": 0.8085, - "num_input_tokens_seen": 116919500, - "step": 5507 - }, - { - "epoch": 0.6622978416401131, - "flos": 12758711895720.0, - "grad_norm": 3.2537295143517193, - "learning_rate": 1.0815543247612034e-06, - "loss": 0.8362, - "num_input_tokens_seen": 116936290, - "step": 5508 - }, - { - "epoch": 0.6624180845307521, - "flos": 15833743374960.0, - "grad_norm": 3.622068864375216, - "learning_rate": 1.0808624174880168e-06, - "loss": 0.8171, - "num_input_tokens_seen": 116956660, - "step": 5509 - }, - { - "epoch": 0.6625383274213912, - "flos": 17431166606160.0, - "grad_norm": 4.486521987811844, - "learning_rate": 1.080170649647272e-06, - "loss": 0.7828, - "num_input_tokens_seen": 116976185, - "step": 5510 - }, - { - "epoch": 0.6626585703120303, - "flos": 24431033618640.0, - "grad_norm": 2.9803708361619905, - "learning_rate": 1.0794790213439068e-06, - "loss": 0.6528, - "num_input_tokens_seen": 117002805, - "step": 5511 - }, - { - "epoch": 0.6627788132026694, - "flos": 16157412569640.0, - "grad_norm": 5.025297256711343, - "learning_rate": 1.078787532682843e-06, - "loss": 0.7662, - "num_input_tokens_seen": 117020000, - "step": 5512 - }, - { - "epoch": 0.6628990560933085, - "flos": 26583963169320.0, - "grad_norm": 4.257117426422324, - "learning_rate": 1.0780961837689773e-06, - "loss": 0.7497, - "num_input_tokens_seen": 117039230, - "step": 5513 - }, - { - "epoch": 0.6630192989839476, - "flos": 13514214402960.0, - "grad_norm": 6.231779471260032, - "learning_rate": 1.0774049747071883e-06, - "loss": 0.6879, - "num_input_tokens_seen": 117056830, - "step": 5514 - }, - { - "epoch": 0.6631395418745867, - "flos": 26314613483280.0, - "grad_norm": 4.050155193981697, - "learning_rate": 1.076713905602332e-06, - "loss": 0.6736, - 
"num_input_tokens_seen": 117077125, - "step": 5515 - }, - { - "epoch": 0.6632597847652257, - "flos": 14649858490800.0, - "grad_norm": 3.3471873076232908, - "learning_rate": 1.07602297655924e-06, - "loss": 0.7918, - "num_input_tokens_seen": 117095165, - "step": 5516 - }, - { - "epoch": 0.6633800276558649, - "flos": 15513208516320.0, - "grad_norm": 10.420508894254366, - "learning_rate": 1.0753321876827292e-06, - "loss": 0.7935, - "num_input_tokens_seen": 117114170, - "step": 5517 - }, - { - "epoch": 0.663500270546504, - "flos": 17565667319400.0, - "grad_norm": 4.312672708342499, - "learning_rate": 1.0746415390775893e-06, - "loss": 0.7252, - "num_input_tokens_seen": 117132020, - "step": 5518 - }, - { - "epoch": 0.663620513437143, - "flos": 13084470647760.0, - "grad_norm": 3.14675866132626, - "learning_rate": 1.0739510308485939e-06, - "loss": 0.7627, - "num_input_tokens_seen": 117148955, - "step": 5519 - }, - { - "epoch": 0.6637407563277821, - "flos": 42623053265280.0, - "grad_norm": 0.8217577237775026, - "learning_rate": 1.07326066310049e-06, - "loss": 0.6352, - "num_input_tokens_seen": 117212800, - "step": 5520 - }, - { - "epoch": 0.6638609992184212, - "flos": 20021121702840.0, - "grad_norm": 5.104432494838222, - "learning_rate": 1.0725704359380059e-06, - "loss": 0.7767, - "num_input_tokens_seen": 117232375, - "step": 5521 - }, - { - "epoch": 0.6639812421090603, - "flos": 13596833424480.0, - "grad_norm": 3.307611565124761, - "learning_rate": 1.0718803494658497e-06, - "loss": 0.7075, - "num_input_tokens_seen": 117250985, - "step": 5522 - }, - { - "epoch": 0.6641014849996993, - "flos": 11598314210520.0, - "grad_norm": 3.3780220065441813, - "learning_rate": 1.071190403788707e-06, - "loss": 0.8157, - "num_input_tokens_seen": 117266010, - "step": 5523 - }, - { - "epoch": 0.6642217278903385, - "flos": 19428989301000.0, - "grad_norm": 5.576537347097151, - "learning_rate": 1.0705005990112415e-06, - "loss": 0.7322, - "num_input_tokens_seen": 117285510, - "step": 5524 - }, - { - "epoch": 0.6643419707809776, - "flos": 11193387372600.0, - "grad_norm": 19.04911158935634, - "learning_rate": 1.0698109352380957e-06, - "loss": 0.7158, - "num_input_tokens_seen": 117302830, - "step": 5525 - }, - { - "epoch": 0.6644622136716166, - "flos": 18401667630840.0, - "grad_norm": 3.6650786139785687, - "learning_rate": 1.0691214125738909e-06, - "loss": 0.7712, - "num_input_tokens_seen": 117322755, - "step": 5526 - }, - { - "epoch": 0.6645824565622558, - "flos": 48811990163280.0, - "grad_norm": 0.7796408293369229, - "learning_rate": 1.0684320311232287e-06, - "loss": 0.6071, - "num_input_tokens_seen": 117385380, - "step": 5527 - }, - { - "epoch": 0.6647026994528948, - "flos": 18375552655200.0, - "grad_norm": 3.8320955821323333, - "learning_rate": 1.0677427909906865e-06, - "loss": 0.789, - "num_input_tokens_seen": 117405550, - "step": 5528 - }, - { - "epoch": 0.6648229423435339, - "flos": 13298646006240.0, - "grad_norm": 3.3803459496054056, - "learning_rate": 1.0670536922808216e-06, - "loss": 0.6891, - "num_input_tokens_seen": 117425395, - "step": 5529 - }, - { - "epoch": 0.6649431852341731, - "flos": 13354168593360.0, - "grad_norm": 5.798144589640124, - "learning_rate": 1.06636473509817e-06, - "loss": 0.6964, - "num_input_tokens_seen": 117441495, - "step": 5530 - }, - { - "epoch": 0.6650634281248121, - "flos": 12408832745520.0, - "grad_norm": 3.690812126667776, - "learning_rate": 1.0656759195472447e-06, - "loss": 0.7853, - "num_input_tokens_seen": 117458505, - "step": 5531 - }, - { - "epoch": 0.6651836710154512, - "flos": 
51099926986560.0, - "grad_norm": 0.8122804418432216, - "learning_rate": 1.0649872457325414e-06, - "loss": 0.6358, - "num_input_tokens_seen": 117519510, - "step": 5532 - }, - { - "epoch": 0.6653039139060903, - "flos": 49314886611960.0, - "grad_norm": 0.8967791968028129, - "learning_rate": 1.0642987137585278e-06, - "loss": 0.6059, - "num_input_tokens_seen": 117578755, - "step": 5533 - }, - { - "epoch": 0.6654241567967294, - "flos": 15702376997760.0, - "grad_norm": 3.075953768309568, - "learning_rate": 1.0636103237296561e-06, - "loss": 0.8069, - "num_input_tokens_seen": 117597400, - "step": 5534 - }, - { - "epoch": 0.6655443996873684, - "flos": 18403345608720.0, - "grad_norm": 4.489781752211917, - "learning_rate": 1.062922075750353e-06, - "loss": 0.819, - "num_input_tokens_seen": 117617135, - "step": 5535 - }, - { - "epoch": 0.6656646425780076, - "flos": 12732786879840.0, - "grad_norm": 6.161062151010431, - "learning_rate": 1.0622339699250267e-06, - "loss": 0.6934, - "num_input_tokens_seen": 117634775, - "step": 5536 - }, - { - "epoch": 0.6657848854686467, - "flos": 17155105008600.0, - "grad_norm": 5.59627488761925, - "learning_rate": 1.0615460063580624e-06, - "loss": 0.7944, - "num_input_tokens_seen": 117652970, - "step": 5537 - }, - { - "epoch": 0.6659051283592857, - "flos": 8331264853440.0, - "grad_norm": 6.6179726235084875, - "learning_rate": 1.060858185153821e-06, - "loss": 0.7138, - "num_input_tokens_seen": 117670790, - "step": 5538 - }, - { - "epoch": 0.6660253712499249, - "flos": 15054468729120.0, - "grad_norm": 4.889953372163542, - "learning_rate": 1.0601705064166474e-06, - "loss": 0.7383, - "num_input_tokens_seen": 117688905, - "step": 5539 - }, - { - "epoch": 0.666145614140564, - "flos": 15540684870240.0, - "grad_norm": 3.7651585966752683, - "learning_rate": 1.0594829702508596e-06, - "loss": 0.7216, - "num_input_tokens_seen": 117706340, - "step": 5540 - }, - { - "epoch": 0.666265857031203, - "flos": 24772301259720.0, - "grad_norm": 3.461836239648907, - "learning_rate": 1.0587955767607592e-06, - "loss": 0.5449, - "num_input_tokens_seen": 117727920, - "step": 5541 - }, - { - "epoch": 0.6663860999218422, - "flos": 12732185340600.0, - "grad_norm": 8.71350858917312, - "learning_rate": 1.0581083260506206e-06, - "loss": 0.7568, - "num_input_tokens_seen": 117744425, - "step": 5542 - }, - { - "epoch": 0.6665063428124812, - "flos": 12895238846400.0, - "grad_norm": 4.445767076040892, - "learning_rate": 1.0574212182246993e-06, - "loss": 0.7444, - "num_input_tokens_seen": 117762840, - "step": 5543 - }, - { - "epoch": 0.6666265857031203, - "flos": 20293257465360.0, - "grad_norm": 9.7124513288577, - "learning_rate": 1.0567342533872303e-06, - "loss": 0.7176, - "num_input_tokens_seen": 117782590, - "step": 5544 - }, - { - "epoch": 0.6667468285937594, - "flos": 18349595979360.0, - "grad_norm": 5.150008614542154, - "learning_rate": 1.0560474316424255e-06, - "loss": 0.7984, - "num_input_tokens_seen": 117802070, - "step": 5545 - }, - { - "epoch": 0.6668670714843985, - "flos": 16671738263880.0, - "grad_norm": 4.85301159826308, - "learning_rate": 1.0553607530944746e-06, - "loss": 0.7173, - "num_input_tokens_seen": 117819845, - "step": 5546 - }, - { - "epoch": 0.6669873143750376, - "flos": 16215942852960.0, - "grad_norm": 4.906494717660329, - "learning_rate": 1.0546742178475463e-06, - "loss": 0.8811, - "num_input_tokens_seen": 117838560, - "step": 5547 - }, - { - "epoch": 0.6671075572656767, - "flos": 14994513747600.0, - "grad_norm": 4.8567562590543165, - "learning_rate": 1.0539878260057868e-06, - "loss": 
0.8426, - "num_input_tokens_seen": 117857320, - "step": 5548 - }, - { - "epoch": 0.6672278001563158, - "flos": 13083425869080.0, - "grad_norm": 7.370416801188532, - "learning_rate": 1.0533015776733226e-06, - "loss": 0.6601, - "num_input_tokens_seen": 117873190, - "step": 5549 - }, - { - "epoch": 0.6673480430469548, - "flos": 16347277570200.0, - "grad_norm": 3.492843259136881, - "learning_rate": 1.0526154729542566e-06, - "loss": 0.7691, - "num_input_tokens_seen": 117892970, - "step": 5550 - }, - { - "epoch": 0.6674682859375939, - "flos": 15135314792880.0, - "grad_norm": 5.769675385386458, - "learning_rate": 1.0519295119526699e-06, - "loss": 0.7694, - "num_input_tokens_seen": 117908995, - "step": 5551 - }, - { - "epoch": 0.667588528828233, - "flos": 19206867292560.0, - "grad_norm": 2.4222880349662836, - "learning_rate": 1.0512436947726227e-06, - "loss": 0.8144, - "num_input_tokens_seen": 117930130, - "step": 5552 - }, - { - "epoch": 0.6677087717188721, - "flos": 16886578481520.0, - "grad_norm": 10.298679037478164, - "learning_rate": 1.0505580215181517e-06, - "loss": 0.6436, - "num_input_tokens_seen": 117948090, - "step": 5553 - }, - { - "epoch": 0.6678290146095112, - "flos": 52319108234760.0, - "grad_norm": 0.8154581944399556, - "learning_rate": 1.0498724922932753e-06, - "loss": 0.5848, - "num_input_tokens_seen": 118005925, - "step": 5554 - }, - { - "epoch": 0.6679492575001503, - "flos": 13623613259280.0, - "grad_norm": 13.284236796505285, - "learning_rate": 1.0491871072019851e-06, - "loss": 0.8372, - "num_input_tokens_seen": 118023535, - "step": 5555 - }, - { - "epoch": 0.6680695003907894, - "flos": 21801413083440.0, - "grad_norm": 5.558798426320027, - "learning_rate": 1.0485018663482555e-06, - "loss": 0.6201, - "num_input_tokens_seen": 118043275, - "step": 5556 - }, - { - "epoch": 0.6681897432814284, - "flos": 20696886244920.0, - "grad_norm": 5.046554508012744, - "learning_rate": 1.0478167698360354e-06, - "loss": 0.6823, - "num_input_tokens_seen": 118062295, - "step": 5557 - }, - { - "epoch": 0.6683099861720676, - "flos": 18349342699680.0, - "grad_norm": 5.092006455158898, - "learning_rate": 1.0471318177692556e-06, - "loss": 0.6787, - "num_input_tokens_seen": 118082315, - "step": 5558 - }, - { - "epoch": 0.6684302290627067, - "flos": 16831625773680.0, - "grad_norm": 6.605885696110562, - "learning_rate": 1.046447010251821e-06, - "loss": 0.7153, - "num_input_tokens_seen": 118099365, - "step": 5559 - }, - { - "epoch": 0.6685504719533457, - "flos": 19478908075200.0, - "grad_norm": 3.487861155196297, - "learning_rate": 1.0457623473876157e-06, - "loss": 0.7413, - "num_input_tokens_seen": 118118590, - "step": 5560 - }, - { - "epoch": 0.6686707148439849, - "flos": 21264739948800.0, - "grad_norm": 3.3829616311130413, - "learning_rate": 1.0450778292805046e-06, - "loss": 0.6909, - "num_input_tokens_seen": 118138295, - "step": 5561 - }, - { - "epoch": 0.6687909577346239, - "flos": 17295874393920.0, - "grad_norm": 4.4256982888158936, - "learning_rate": 1.0443934560343267e-06, - "loss": 0.7799, - "num_input_tokens_seen": 118159425, - "step": 5562 - }, - { - "epoch": 0.668911200625263, - "flos": 16944633865440.0, - "grad_norm": 3.4624686890436167, - "learning_rate": 1.0437092277529034e-06, - "loss": 0.7536, - "num_input_tokens_seen": 118178400, - "step": 5563 - }, - { - "epoch": 0.6690314435159022, - "flos": 13542862175400.0, - "grad_norm": 4.82712759437514, - "learning_rate": 1.0430251445400292e-06, - "loss": 0.7203, - "num_input_tokens_seen": 118196165, - "step": 5564 - }, - { - "epoch": 
0.6691516864065412, - "flos": 23317357051680.0, - "grad_norm": 3.6940936604818386, - "learning_rate": 1.0423412064994787e-06, - "loss": 0.61, - "num_input_tokens_seen": 118216655, - "step": 5565 - }, - { - "epoch": 0.6692719292971803, - "flos": 25666578574800.0, - "grad_norm": 2.8301278288315626, - "learning_rate": 1.0416574137350064e-06, - "loss": 0.7224, - "num_input_tokens_seen": 118237080, - "step": 5566 - }, - { - "epoch": 0.6693921721878194, - "flos": 14946969470400.0, - "grad_norm": 6.260498080197629, - "learning_rate": 1.0409737663503428e-06, - "loss": 0.7899, - "num_input_tokens_seen": 118255180, - "step": 5567 - }, - { - "epoch": 0.6695124150784585, - "flos": 12108555769920.0, - "grad_norm": 33.544254962893795, - "learning_rate": 1.040290264449196e-06, - "loss": 0.8214, - "num_input_tokens_seen": 118273005, - "step": 5568 - }, - { - "epoch": 0.6696326579690975, - "flos": 19537628318280.0, - "grad_norm": 6.279507067827265, - "learning_rate": 1.0396069081352532e-06, - "loss": 0.62, - "num_input_tokens_seen": 118291880, - "step": 5569 - }, - { - "epoch": 0.6697529008597367, - "flos": 49376107991880.0, - "grad_norm": 0.8634132180818979, - "learning_rate": 1.0389236975121782e-06, - "loss": 0.5864, - "num_input_tokens_seen": 118346450, - "step": 5570 - }, - { - "epoch": 0.6698731437503758, - "flos": 15270923604720.0, - "grad_norm": 3.798356508923863, - "learning_rate": 1.0382406326836147e-06, - "loss": 0.6963, - "num_input_tokens_seen": 118365315, - "step": 5571 - }, - { - "epoch": 0.6699933866410148, - "flos": 14917118619480.0, - "grad_norm": 3.0746640339741127, - "learning_rate": 1.0375577137531828e-06, - "loss": 0.7241, - "num_input_tokens_seen": 118383595, - "step": 5572 - }, - { - "epoch": 0.670113629531654, - "flos": 21292184642760.0, - "grad_norm": 3.2903670308574857, - "learning_rate": 1.0368749408244802e-06, - "loss": 0.7004, - "num_input_tokens_seen": 118406235, - "step": 5573 - }, - { - "epoch": 0.670233872422293, - "flos": 14460278429880.0, - "grad_norm": 2.859401515149859, - "learning_rate": 1.0361923140010836e-06, - "loss": 0.7631, - "num_input_tokens_seen": 118424440, - "step": 5574 - }, - { - "epoch": 0.6703541153129321, - "flos": 17755152400440.0, - "grad_norm": 4.236759429404327, - "learning_rate": 1.0355098333865455e-06, - "loss": 0.6143, - "num_input_tokens_seen": 118443390, - "step": 5575 - }, - { - "epoch": 0.6704743582035713, - "flos": 19564534792920.0, - "grad_norm": 13.573313818757045, - "learning_rate": 1.0348274990844006e-06, - "loss": 0.6842, - "num_input_tokens_seen": 118465870, - "step": 5576 - }, - { - "epoch": 0.6705946010942103, - "flos": 17215471569600.0, - "grad_norm": 4.716611466011747, - "learning_rate": 1.034145311198155e-06, - "loss": 0.7083, - "num_input_tokens_seen": 118485605, - "step": 5577 - }, - { - "epoch": 0.6707148439848494, - "flos": 17619923508120.0, - "grad_norm": 5.2076099758100405, - "learning_rate": 1.0334632698312989e-06, - "loss": 0.6301, - "num_input_tokens_seen": 118506120, - "step": 5578 - }, - { - "epoch": 0.6708350868754885, - "flos": 16485894078240.0, - "grad_norm": 4.922147034892951, - "learning_rate": 1.032781375087295e-06, - "loss": 0.7373, - "num_input_tokens_seen": 118525740, - "step": 5579 - }, - { - "epoch": 0.6709553297661276, - "flos": 18483273533640.0, - "grad_norm": 5.396270537859776, - "learning_rate": 1.0320996270695891e-06, - "loss": 0.6551, - "num_input_tokens_seen": 118546530, - "step": 5580 - }, - { - "epoch": 0.6710755726567667, - "flos": 14946589550880.0, - "grad_norm": 3.244109170245613, - 
"learning_rate": 1.0314180258815998e-06, - "loss": 0.7128, - "num_input_tokens_seen": 118564890, - "step": 5581 - }, - { - "epoch": 0.6711958155474057, - "flos": 19051602136920.0, - "grad_norm": 3.4403986413763845, - "learning_rate": 1.0307365716267247e-06, - "loss": 0.7231, - "num_input_tokens_seen": 118585055, - "step": 5582 - }, - { - "epoch": 0.6713160584380449, - "flos": 14568347567880.0, - "grad_norm": 3.9117122454291087, - "learning_rate": 1.0300552644083423e-06, - "loss": 0.7613, - "num_input_tokens_seen": 118603700, - "step": 5583 - }, - { - "epoch": 0.6714363013286839, - "flos": 13299690784920.0, - "grad_norm": 10.022227570315463, - "learning_rate": 1.0293741043298036e-06, - "loss": 0.7003, - "num_input_tokens_seen": 118621770, - "step": 5584 - }, - { - "epoch": 0.671556544219323, - "flos": 18916056645000.0, - "grad_norm": 4.247297662021061, - "learning_rate": 1.0286930914944436e-06, - "loss": 0.7029, - "num_input_tokens_seen": 118641305, - "step": 5585 - }, - { - "epoch": 0.6716767871099621, - "flos": 11543361502680.0, - "grad_norm": 5.14711474732864, - "learning_rate": 1.0280122260055684e-06, - "loss": 0.7583, - "num_input_tokens_seen": 118656735, - "step": 5586 - }, - { - "epoch": 0.6717970300006012, - "flos": 14434068474360.0, - "grad_norm": 3.937438358853755, - "learning_rate": 1.0273315079664652e-06, - "loss": 0.7912, - "num_input_tokens_seen": 118674410, - "step": 5587 - }, - { - "epoch": 0.6719172728912403, - "flos": 18673961693160.0, - "grad_norm": 3.4378032203800823, - "learning_rate": 1.0266509374803992e-06, - "loss": 0.7232, - "num_input_tokens_seen": 118695290, - "step": 5588 - }, - { - "epoch": 0.6720375157818794, - "flos": 11564569184520.0, - "grad_norm": 5.217312552023592, - "learning_rate": 1.0259705146506123e-06, - "loss": 0.8274, - "num_input_tokens_seen": 118709905, - "step": 5589 - }, - { - "epoch": 0.6721577586725185, - "flos": 23502757997880.0, - "grad_norm": 6.801631832667673, - "learning_rate": 1.025290239580324e-06, - "loss": 0.7592, - "num_input_tokens_seen": 118730295, - "step": 5590 - }, - { - "epoch": 0.6722780015631575, - "flos": 15160163370120.0, - "grad_norm": 2.7883609348837957, - "learning_rate": 1.0246101123727313e-06, - "loss": 0.7415, - "num_input_tokens_seen": 118748995, - "step": 5591 - }, - { - "epoch": 0.6723982444537967, - "flos": 12327385142520.0, - "grad_norm": 4.576981437473774, - "learning_rate": 1.0239301331310085e-06, - "loss": 0.7741, - "num_input_tokens_seen": 118766335, - "step": 5592 - }, - { - "epoch": 0.6725184873444358, - "flos": 15108724917840.0, - "grad_norm": 8.61275597179483, - "learning_rate": 1.0232503019583088e-06, - "loss": 0.8716, - "num_input_tokens_seen": 118785665, - "step": 5593 - }, - { - "epoch": 0.6726387302350748, - "flos": 17372478023040.0, - "grad_norm": 3.1457890050598545, - "learning_rate": 1.0225706189577619e-06, - "loss": 0.6794, - "num_input_tokens_seen": 118803910, - "step": 5594 - }, - { - "epoch": 0.672758973125714, - "flos": 11052554667360.0, - "grad_norm": 3.6831541153012517, - "learning_rate": 1.021891084232475e-06, - "loss": 0.7204, - "num_input_tokens_seen": 118821565, - "step": 5595 - }, - { - "epoch": 0.672879216016353, - "flos": 13191526667040.0, - "grad_norm": 4.384010627162889, - "learning_rate": 1.0212116978855325e-06, - "loss": 0.7807, - "num_input_tokens_seen": 118839300, - "step": 5596 - }, - { - "epoch": 0.6729994589069921, - "flos": 17187108736800.0, - "grad_norm": 4.0228529422137385, - "learning_rate": 1.020532460019997e-06, - "loss": 0.7835, - "num_input_tokens_seen": 118858270, - 
"step": 5597 - }, - { - "epoch": 0.6731197017976313, - "flos": 19293063889560.0, - "grad_norm": 10.018511994331613, - "learning_rate": 1.0198533707389096e-06, - "loss": 0.6851, - "num_input_tokens_seen": 118878865, - "step": 5598 - }, - { - "epoch": 0.6732399446882703, - "flos": 15810762735360.0, - "grad_norm": 2.8743801277979206, - "learning_rate": 1.0191744301452853e-06, - "loss": 0.7173, - "num_input_tokens_seen": 118897885, - "step": 5599 - }, - { - "epoch": 0.6733601875789094, - "flos": 18965500519800.0, - "grad_norm": 4.246865631385109, - "learning_rate": 1.0184956383421208e-06, - "loss": 0.691, - "num_input_tokens_seen": 118916255, - "step": 5600 - }, - { - "epoch": 0.6734804304695485, - "flos": 16782530158440.0, - "grad_norm": 5.0404654508953755, - "learning_rate": 1.017816995432387e-06, - "loss": 0.6459, - "num_input_tokens_seen": 118935075, - "step": 5601 - }, - { - "epoch": 0.6736006733601876, - "flos": 13650962973360.0, - "grad_norm": 3.78127887809382, - "learning_rate": 1.0171385015190353e-06, - "loss": 0.7193, - "num_input_tokens_seen": 118954655, - "step": 5602 - }, - { - "epoch": 0.6737209162508266, - "flos": 14191055383680.0, - "grad_norm": 3.493308755652282, - "learning_rate": 1.0164601567049908e-06, - "loss": 0.7127, - "num_input_tokens_seen": 118972905, - "step": 5603 - }, - { - "epoch": 0.6738411591414658, - "flos": 14731622693400.0, - "grad_norm": 5.769822161758568, - "learning_rate": 1.015781961093158e-06, - "loss": 0.7887, - "num_input_tokens_seen": 118991945, - "step": 5604 - }, - { - "epoch": 0.6739614020321049, - "flos": 15838587348840.0, - "grad_norm": 3.0126035110608163, - "learning_rate": 1.0151039147864197e-06, - "loss": 0.7609, - "num_input_tokens_seen": 119011640, - "step": 5605 - }, - { - "epoch": 0.6740816449227439, - "flos": 14001475322760.0, - "grad_norm": 3.99584351592346, - "learning_rate": 1.0144260178876336e-06, - "loss": 0.6444, - "num_input_tokens_seen": 119030705, - "step": 5606 - }, - { - "epoch": 0.6742018878133831, - "flos": 15425143981680.0, - "grad_norm": 5.300033302922253, - "learning_rate": 1.0137482704996388e-06, - "loss": 0.6595, - "num_input_tokens_seen": 119044775, - "step": 5607 - }, - { - "epoch": 0.6743221307040221, - "flos": 17241681525120.0, - "grad_norm": 3.7240547363323246, - "learning_rate": 1.0130706727252461e-06, - "loss": 0.7801, - "num_input_tokens_seen": 119061550, - "step": 5608 - }, - { - "epoch": 0.6744423735946612, - "flos": 11838414584880.0, - "grad_norm": 6.911091898039306, - "learning_rate": 1.0123932246672468e-06, - "loss": 0.669, - "num_input_tokens_seen": 119075415, - "step": 5609 - }, - { - "epoch": 0.6745626164853004, - "flos": 42625712701920.0, - "grad_norm": 0.7942016863298537, - "learning_rate": 1.0117159264284114e-06, - "loss": 0.5743, - "num_input_tokens_seen": 119138305, - "step": 5610 - }, - { - "epoch": 0.6746828593759394, - "flos": 14973401045640.0, - "grad_norm": 5.618261242614016, - "learning_rate": 1.0110387781114837e-06, - "loss": 0.7594, - "num_input_tokens_seen": 119156640, - "step": 5611 - }, - { - "epoch": 0.6748031022665785, - "flos": 14028508437240.0, - "grad_norm": 4.577447730473798, - "learning_rate": 1.0103617798191872e-06, - "loss": 0.7462, - "num_input_tokens_seen": 119175835, - "step": 5612 - }, - { - "epoch": 0.6749233451572175, - "flos": 11058506739840.0, - "grad_norm": 12.5721847628769, - "learning_rate": 1.0096849316542217e-06, - "loss": 0.7929, - "num_input_tokens_seen": 119192105, - "step": 5613 - }, - { - "epoch": 0.6750435880478567, - "flos": 19423702087680.0, - "grad_norm": 
6.388909517706402, - "learning_rate": 1.0090082337192643e-06, - "loss": 0.7176, - "num_input_tokens_seen": 119211470, - "step": 5614 - }, - { - "epoch": 0.6751638309384957, - "flos": 17133928986720.0, - "grad_norm": 8.960374703065488, - "learning_rate": 1.0083316861169705e-06, - "loss": 0.7664, - "num_input_tokens_seen": 119229925, - "step": 5615 - }, - { - "epoch": 0.6752840738291348, - "flos": 17161468660560.0, - "grad_norm": 5.142633016940236, - "learning_rate": 1.0076552889499713e-06, - "loss": 0.6966, - "num_input_tokens_seen": 119250410, - "step": 5616 - }, - { - "epoch": 0.675404316719774, - "flos": 22263603806280.0, - "grad_norm": 7.489137539884589, - "learning_rate": 1.006979042320876e-06, - "loss": 0.7178, - "num_input_tokens_seen": 119270345, - "step": 5617 - }, - { - "epoch": 0.675524559610413, - "flos": 17295399494520.0, - "grad_norm": 3.8933508948578885, - "learning_rate": 1.0063029463322702e-06, - "loss": 0.6207, - "num_input_tokens_seen": 119290340, - "step": 5618 - }, - { - "epoch": 0.6756448025010521, - "flos": 15538152073440.0, - "grad_norm": 4.6454481754961705, - "learning_rate": 1.0056270010867164e-06, - "loss": 0.7337, - "num_input_tokens_seen": 119307630, - "step": 5619 - }, - { - "epoch": 0.6757650453916912, - "flos": 15832128717000.0, - "grad_norm": 6.610436897555449, - "learning_rate": 1.004951206686758e-06, - "loss": 0.7592, - "num_input_tokens_seen": 119325625, - "step": 5620 - }, - { - "epoch": 0.6758852882823303, - "flos": 15942952271520.0, - "grad_norm": 3.3005485136606962, - "learning_rate": 1.0042755632349087e-06, - "loss": 0.6955, - "num_input_tokens_seen": 119342235, - "step": 5621 - }, - { - "epoch": 0.6760055311729694, - "flos": 19860600993840.0, - "grad_norm": 9.157547686278507, - "learning_rate": 1.0036000708336653e-06, - "loss": 0.6109, - "num_input_tokens_seen": 119361085, - "step": 5622 - }, - { - "epoch": 0.6761257740636085, - "flos": 13133566263000.0, - "grad_norm": 3.3957775712066818, - "learning_rate": 1.0029247295854984e-06, - "loss": 0.7881, - "num_input_tokens_seen": 119377425, - "step": 5623 - }, - { - "epoch": 0.6762460169542476, - "flos": 11004377190960.0, - "grad_norm": 4.278448817367916, - "learning_rate": 1.0022495395928588e-06, - "loss": 0.7017, - "num_input_tokens_seen": 119395625, - "step": 5624 - }, - { - "epoch": 0.6763662598448866, - "flos": 50060167443480.0, - "grad_norm": 0.7877930353310527, - "learning_rate": 1.0015745009581697e-06, - "loss": 0.6298, - "num_input_tokens_seen": 119456950, - "step": 5625 - }, - { - "epoch": 0.6764865027355258, - "flos": 15082166702760.0, - "grad_norm": 3.5323182237294826, - "learning_rate": 1.0008996137838343e-06, - "loss": 0.6545, - "num_input_tokens_seen": 119475645, - "step": 5626 - }, - { - "epoch": 0.6766067456261649, - "flos": 15514379934840.0, - "grad_norm": 3.2183530988700335, - "learning_rate": 1.000224878172234e-06, - "loss": 0.7809, - "num_input_tokens_seen": 119494490, - "step": 5627 - }, - { - "epoch": 0.6767269885168039, - "flos": 14568822467280.0, - "grad_norm": 5.47038375428904, - "learning_rate": 9.99550294225724e-07, - "loss": 0.712, - "num_input_tokens_seen": 119513365, - "step": 5628 - }, - { - "epoch": 0.6768472314074431, - "flos": 15217015675560.0, - "grad_norm": 9.191510836634038, - "learning_rate": 9.988758620466402e-07, - "loss": 0.7075, - "num_input_tokens_seen": 119531955, - "step": 5629 - }, - { - "epoch": 0.6769674742980821, - "flos": 16972458478920.0, - "grad_norm": 3.2427110364203986, - "learning_rate": 9.982015817372917e-07, - "loss": 0.7436, - 
"num_input_tokens_seen": 119552115, - "step": 5630 - }, - { - "epoch": 0.6770877171887212, - "flos": 17754487541280.0, - "grad_norm": 4.008316930365888, - "learning_rate": 9.975274533999657e-07, - "loss": 0.7975, - "num_input_tokens_seen": 119571365, - "step": 5631 - }, - { - "epoch": 0.6772079600793603, - "flos": 13237899525720.0, - "grad_norm": 6.995471034193078, - "learning_rate": 9.96853477136929e-07, - "loss": 0.8088, - "num_input_tokens_seen": 119585830, - "step": 5632 - }, - { - "epoch": 0.6773282029699994, - "flos": 16428693513240.0, - "grad_norm": 5.108465364543182, - "learning_rate": 9.96179653050422e-07, - "loss": 0.7322, - "num_input_tokens_seen": 119605710, - "step": 5633 - }, - { - "epoch": 0.6774484458606385, - "flos": 13647227098080.0, - "grad_norm": 6.232004733278488, - "learning_rate": 9.955059812426635e-07, - "loss": 0.7148, - "num_input_tokens_seen": 119622960, - "step": 5634 - }, - { - "epoch": 0.6775686887512776, - "flos": 19050399058440.0, - "grad_norm": 3.83438992796637, - "learning_rate": 9.948324618158493e-07, - "loss": 0.8113, - "num_input_tokens_seen": 119643020, - "step": 5635 - }, - { - "epoch": 0.6776889316419167, - "flos": 9868954722840.0, - "grad_norm": 5.266173382284822, - "learning_rate": 9.941590948721502e-07, - "loss": 0.7592, - "num_input_tokens_seen": 119659940, - "step": 5636 - }, - { - "epoch": 0.6778091745325557, - "flos": 20239982735400.0, - "grad_norm": 2.9794366531923013, - "learning_rate": 9.934858805137188e-07, - "loss": 0.7438, - "num_input_tokens_seen": 119680310, - "step": 5637 - }, - { - "epoch": 0.6779294174231949, - "flos": 13677616168320.0, - "grad_norm": 5.990334354806336, - "learning_rate": 9.92812818842677e-07, - "loss": 0.7965, - "num_input_tokens_seen": 119699205, - "step": 5638 - }, - { - "epoch": 0.678049660313834, - "flos": 33762044317080.0, - "grad_norm": 7.374464112548493, - "learning_rate": 9.921399099611306e-07, - "loss": 0.6135, - "num_input_tokens_seen": 119720090, - "step": 5639 - }, - { - "epoch": 0.678169903204473, - "flos": 14595665622000.0, - "grad_norm": 4.710980821603254, - "learning_rate": 9.914671539711588e-07, - "loss": 0.6854, - "num_input_tokens_seen": 119739330, - "step": 5640 - }, - { - "epoch": 0.6782901460951122, - "flos": 15647076030360.0, - "grad_norm": 4.560418268060846, - "learning_rate": 9.90794550974817e-07, - "loss": 0.7605, - "num_input_tokens_seen": 119759445, - "step": 5641 - }, - { - "epoch": 0.6784103889857512, - "flos": 15675755462760.0, - "grad_norm": 3.795701712968954, - "learning_rate": 9.901221010741407e-07, - "loss": 0.7939, - "num_input_tokens_seen": 119778485, - "step": 5642 - }, - { - "epoch": 0.6785306318763903, - "flos": 23992615034400.0, - "grad_norm": 3.5558339711961935, - "learning_rate": 9.894498043711375e-07, - "loss": 0.7328, - "num_input_tokens_seen": 119799950, - "step": 5643 - }, - { - "epoch": 0.6786508747670293, - "flos": 18782980629960.0, - "grad_norm": 4.061645183128129, - "learning_rate": 9.887776609677962e-07, - "loss": 0.6821, - "num_input_tokens_seen": 119821040, - "step": 5644 - }, - { - "epoch": 0.6787711176576685, - "flos": 14001633622560.0, - "grad_norm": 3.1411560520585073, - "learning_rate": 9.88105670966079e-07, - "loss": 0.7114, - "num_input_tokens_seen": 119839220, - "step": 5645 - }, - { - "epoch": 0.6788913605483076, - "flos": 10162298167200.0, - "grad_norm": 3.35698284765003, - "learning_rate": 9.874338344679283e-07, - "loss": 0.7746, - "num_input_tokens_seen": 119854785, - "step": 5646 - }, - { - "epoch": 0.6790116034389466, - "flos": 16107620435280.0, - 
"grad_norm": 3.6727406881363787, - "learning_rate": 9.86762151575259e-07, - "loss": 0.7309, - "num_input_tokens_seen": 119874500, - "step": 5647 - }, - { - "epoch": 0.6791318463295858, - "flos": 15297070240320.0, - "grad_norm": 2.2550036636288944, - "learning_rate": 9.860906223899651e-07, - "loss": 0.7935, - "num_input_tokens_seen": 119893615, - "step": 5648 - }, - { - "epoch": 0.6792520892202248, - "flos": 20914575858960.0, - "grad_norm": 7.187805169302759, - "learning_rate": 9.854192470139184e-07, - "loss": 0.7375, - "num_input_tokens_seen": 119914815, - "step": 5649 - }, - { - "epoch": 0.6793723321108639, - "flos": 14623426915560.0, - "grad_norm": 4.857605050639968, - "learning_rate": 9.847480255489645e-07, - "loss": 0.7053, - "num_input_tokens_seen": 119933560, - "step": 5650 - }, - { - "epoch": 0.6794925750015031, - "flos": 19535285481240.0, - "grad_norm": 3.350000119565136, - "learning_rate": 9.840769580969295e-07, - "loss": 0.6756, - "num_input_tokens_seen": 119953720, - "step": 5651 - }, - { - "epoch": 0.6796128178921421, - "flos": 15784141200360.0, - "grad_norm": 4.805480196930969, - "learning_rate": 9.834060447596114e-07, - "loss": 0.7743, - "num_input_tokens_seen": 119972710, - "step": 5652 - }, - { - "epoch": 0.6797330607827812, - "flos": 16458734323920.0, - "grad_norm": 4.3427219409261255, - "learning_rate": 9.827352856387868e-07, - "loss": 0.7642, - "num_input_tokens_seen": 119992140, - "step": 5653 - }, - { - "epoch": 0.6798533036734203, - "flos": 48889005371880.0, - "grad_norm": 0.8457438963473344, - "learning_rate": 9.820646808362118e-07, - "loss": 0.6478, - "num_input_tokens_seen": 120058115, - "step": 5654 - }, - { - "epoch": 0.6799735465640594, - "flos": 11787577671840.0, - "grad_norm": 5.142814378711983, - "learning_rate": 9.813942304536154e-07, - "loss": 0.6939, - "num_input_tokens_seen": 120075805, - "step": 5655 - }, - { - "epoch": 0.6800937894546984, - "flos": 16187485040280.0, - "grad_norm": 3.292853338678537, - "learning_rate": 9.807239345927043e-07, - "loss": 0.6139, - "num_input_tokens_seen": 120095535, - "step": 5656 - }, - { - "epoch": 0.6802140323453376, - "flos": 23208781354320.0, - "grad_norm": 10.517549290524542, - "learning_rate": 9.80053793355162e-07, - "loss": 0.7083, - "num_input_tokens_seen": 120113950, - "step": 5657 - }, - { - "epoch": 0.6803342752359767, - "flos": 12921987021240.0, - "grad_norm": 3.702965455300248, - "learning_rate": 9.793838068426472e-07, - "loss": 0.7212, - "num_input_tokens_seen": 120131365, - "step": 5658 - }, - { - "epoch": 0.6804545181266157, - "flos": 8195656041600.0, - "grad_norm": 5.844389365403657, - "learning_rate": 9.78713975156799e-07, - "loss": 0.5965, - "num_input_tokens_seen": 120146950, - "step": 5659 - }, - { - "epoch": 0.6805747610172549, - "flos": 21534786153960.0, - "grad_norm": 3.7539653158155564, - "learning_rate": 9.780442983992273e-07, - "loss": 0.7082, - "num_input_tokens_seen": 120165185, - "step": 5660 - }, - { - "epoch": 0.680695003907894, - "flos": 27663008231400.0, - "grad_norm": 4.203398855143871, - "learning_rate": 9.773747766715238e-07, - "loss": 0.7002, - "num_input_tokens_seen": 120185725, - "step": 5661 - }, - { - "epoch": 0.680815246798533, - "flos": 16188909738480.0, - "grad_norm": 9.781890750653211, - "learning_rate": 9.767054100752536e-07, - "loss": 0.7857, - "num_input_tokens_seen": 120205395, - "step": 5662 - }, - { - "epoch": 0.6809354896891722, - "flos": 12543681718320.0, - "grad_norm": 3.065503263243738, - "learning_rate": 9.760361987119584e-07, - "loss": 0.7965, - 
"num_input_tokens_seen": 120222850, - "step": 5663 - }, - { - "epoch": 0.6810557325798112, - "flos": 9032986071360.0, - "grad_norm": 4.4265241681185366, - "learning_rate": 9.753671426831592e-07, - "loss": 0.657, - "num_input_tokens_seen": 120238585, - "step": 5664 - }, - { - "epoch": 0.6811759754704503, - "flos": 16209135961560.0, - "grad_norm": 5.31978565357895, - "learning_rate": 9.746982420903483e-07, - "loss": 0.7777, - "num_input_tokens_seen": 120256500, - "step": 5665 - }, - { - "epoch": 0.6812962183610894, - "flos": 12782452374360.0, - "grad_norm": 2.544975086982066, - "learning_rate": 9.740294970349993e-07, - "loss": 0.7371, - "num_input_tokens_seen": 120272635, - "step": 5666 - }, - { - "epoch": 0.6814164612517285, - "flos": 44423095969560.0, - "grad_norm": 0.9713943752663188, - "learning_rate": 9.733609076185594e-07, - "loss": 0.6711, - "num_input_tokens_seen": 120328760, - "step": 5667 - }, - { - "epoch": 0.6815367041423676, - "flos": 14109576120720.0, - "grad_norm": 4.030086378965571, - "learning_rate": 9.72692473942455e-07, - "loss": 0.8253, - "num_input_tokens_seen": 120345705, - "step": 5668 - }, - { - "epoch": 0.6816569470330067, - "flos": 16214011595400.0, - "grad_norm": 4.576388605889926, - "learning_rate": 9.720241961080849e-07, - "loss": 0.7609, - "num_input_tokens_seen": 120364740, - "step": 5669 - }, - { - "epoch": 0.6817771899236458, - "flos": 30498762495240.0, - "grad_norm": 4.722659279039041, - "learning_rate": 9.713560742168259e-07, - "loss": 0.6973, - "num_input_tokens_seen": 120387085, - "step": 5670 - }, - { - "epoch": 0.6818974328142848, - "flos": 15433185611520.0, - "grad_norm": 27.6971162582576, - "learning_rate": 9.706881083700333e-07, - "loss": 0.6995, - "num_input_tokens_seen": 120406490, - "step": 5671 - }, - { - "epoch": 0.682017675704924, - "flos": 14940542498520.0, - "grad_norm": 6.104882971839306, - "learning_rate": 9.700202986690357e-07, - "loss": 0.8005, - "num_input_tokens_seen": 120424510, - "step": 5672 - }, - { - "epoch": 0.682137918595563, - "flos": 14646945774480.0, - "grad_norm": 4.309501600722624, - "learning_rate": 9.693526452151413e-07, - "loss": 0.6432, - "num_input_tokens_seen": 120443280, - "step": 5673 - }, - { - "epoch": 0.6822581614862021, - "flos": 23261581184880.0, - "grad_norm": 104.49389747432622, - "learning_rate": 9.686851481096305e-07, - "loss": 0.7456, - "num_input_tokens_seen": 120464310, - "step": 5674 - }, - { - "epoch": 0.6823784043768413, - "flos": 17188185175440.0, - "grad_norm": 20.755951177369436, - "learning_rate": 9.68017807453762e-07, - "loss": 0.7204, - "num_input_tokens_seen": 120482775, - "step": 5675 - }, - { - "epoch": 0.6824986472674803, - "flos": 10275622858560.0, - "grad_norm": 3.6110224411340255, - "learning_rate": 9.673506233487721e-07, - "loss": 0.71, - "num_input_tokens_seen": 120500460, - "step": 5676 - }, - { - "epoch": 0.6826188901581194, - "flos": 15728017074000.0, - "grad_norm": 3.07727286844777, - "learning_rate": 9.666835958958717e-07, - "loss": 0.8483, - "num_input_tokens_seen": 120519500, - "step": 5677 - }, - { - "epoch": 0.6827391330487584, - "flos": 15213533079960.0, - "grad_norm": 4.266715221327435, - "learning_rate": 9.660167251962484e-07, - "loss": 0.7826, - "num_input_tokens_seen": 120537580, - "step": 5678 - }, - { - "epoch": 0.6828593759393976, - "flos": 15864227425080.0, - "grad_norm": 3.517167901450977, - "learning_rate": 9.653500113510654e-07, - "loss": 0.761, - "num_input_tokens_seen": 120556415, - "step": 5679 - }, - { - "epoch": 0.6829796188300367, - "flos": 18564721136640.0, - 
"grad_norm": 4.294132869702699, - "learning_rate": 9.646834544614627e-07, - "loss": 0.6546, - "num_input_tokens_seen": 120576635, - "step": 5680 - }, - { - "epoch": 0.6830998617206757, - "flos": 15136169611800.0, - "grad_norm": 4.952390598280739, - "learning_rate": 9.64017054628558e-07, - "loss": 0.7463, - "num_input_tokens_seen": 120595180, - "step": 5681 - }, - { - "epoch": 0.6832201046113149, - "flos": 15892431958080.0, - "grad_norm": 3.3005116730708814, - "learning_rate": 9.63350811953441e-07, - "loss": 0.7845, - "num_input_tokens_seen": 120615275, - "step": 5682 - }, - { - "epoch": 0.6833403475019539, - "flos": 14271489867960.0, - "grad_norm": 8.646567979110719, - "learning_rate": 9.626847265371826e-07, - "loss": 0.6896, - "num_input_tokens_seen": 120634315, - "step": 5683 - }, - { - "epoch": 0.683460590392593, - "flos": 14134741297560.0, - "grad_norm": 4.91321550356512, - "learning_rate": 9.620187984808262e-07, - "loss": 0.7606, - "num_input_tokens_seen": 120652835, - "step": 5684 - }, - { - "epoch": 0.6835808332832322, - "flos": 17047320810240.0, - "grad_norm": 4.343350398941845, - "learning_rate": 9.613530278853919e-07, - "loss": 0.8482, - "num_input_tokens_seen": 120672530, - "step": 5685 - }, - { - "epoch": 0.6837010761738712, - "flos": 15838207429320.0, - "grad_norm": 8.944013023107667, - "learning_rate": 9.60687414851879e-07, - "loss": 0.7239, - "num_input_tokens_seen": 120693255, - "step": 5686 - }, - { - "epoch": 0.6838213190645103, - "flos": 12813442983840.0, - "grad_norm": 3.838104816701125, - "learning_rate": 9.600219594812575e-07, - "loss": 0.746, - "num_input_tokens_seen": 120710915, - "step": 5687 - }, - { - "epoch": 0.6839415619551494, - "flos": 16918012330440.0, - "grad_norm": 3.648220276339188, - "learning_rate": 9.593566618744786e-07, - "loss": 0.7099, - "num_input_tokens_seen": 120730785, - "step": 5688 - }, - { - "epoch": 0.6840618048457885, - "flos": 16189352977920.0, - "grad_norm": 3.5263648887506354, - "learning_rate": 9.58691522132466e-07, - "loss": 0.7214, - "num_input_tokens_seen": 120749315, - "step": 5689 - }, - { - "epoch": 0.6841820477364275, - "flos": 16106132417160.0, - "grad_norm": 6.284999658164271, - "learning_rate": 9.58026540356123e-07, - "loss": 0.8387, - "num_input_tokens_seen": 120767300, - "step": 5690 - }, - { - "epoch": 0.6843022906270667, - "flos": 18241273561680.0, - "grad_norm": 5.730279358982633, - "learning_rate": 9.573617166463246e-07, - "loss": 0.845, - "num_input_tokens_seen": 120788235, - "step": 5691 - }, - { - "epoch": 0.6844225335177058, - "flos": 14591581487160.0, - "grad_norm": 5.056991971607561, - "learning_rate": 9.56697051103924e-07, - "loss": 0.5835, - "num_input_tokens_seen": 120805395, - "step": 5692 - }, - { - "epoch": 0.6845427764083448, - "flos": 18968286596280.0, - "grad_norm": 3.4462468616801996, - "learning_rate": 9.560325438297522e-07, - "loss": 0.7929, - "num_input_tokens_seen": 120823425, - "step": 5693 - }, - { - "epoch": 0.684663019298984, - "flos": 13785748626240.0, - "grad_norm": 4.562276338294612, - "learning_rate": 9.553681949246127e-07, - "loss": 0.8645, - "num_input_tokens_seen": 120840770, - "step": 5694 - }, - { - "epoch": 0.684783262189623, - "flos": 39919515706200.0, - "grad_norm": 5.28072395187749, - "learning_rate": 9.547040044892886e-07, - "loss": 0.751, - "num_input_tokens_seen": 120868005, - "step": 5695 - }, - { - "epoch": 0.6849035050802621, - "flos": 47159740864080.0, - "grad_norm": 0.8645151563367115, - "learning_rate": 9.540399726245354e-07, - "loss": 0.6305, - "num_input_tokens_seen": 
120924430, - "step": 5696 - }, - { - "epoch": 0.6850237479709013, - "flos": 18480075877680.0, - "grad_norm": 4.2670446695332345, - "learning_rate": 9.533760994310859e-07, - "loss": 0.6694, - "num_input_tokens_seen": 120944550, - "step": 5697 - }, - { - "epoch": 0.6851439908615403, - "flos": 14136482595360.0, - "grad_norm": 2.866789662879264, - "learning_rate": 9.527123850096508e-07, - "loss": 0.7337, - "num_input_tokens_seen": 120962630, - "step": 5698 - }, - { - "epoch": 0.6852642337521794, - "flos": 16969039203240.0, - "grad_norm": 3.6543482864001113, - "learning_rate": 9.520488294609142e-07, - "loss": 0.7004, - "num_input_tokens_seen": 120981130, - "step": 5699 - }, - { - "epoch": 0.6853844766428185, - "flos": 46179235292040.0, - "grad_norm": 0.9157622745450478, - "learning_rate": 9.513854328855368e-07, - "loss": 0.5901, - "num_input_tokens_seen": 121038725, - "step": 5700 - }, - { - "epoch": 0.6855047195334576, - "flos": 17157796105200.0, - "grad_norm": 3.268821648419694, - "learning_rate": 9.507221953841558e-07, - "loss": 0.7891, - "num_input_tokens_seen": 121056075, - "step": 5701 - }, - { - "epoch": 0.6856249624240967, - "flos": 15106097141160.0, - "grad_norm": 3.155410503880287, - "learning_rate": 9.500591170573824e-07, - "loss": 0.7609, - "num_input_tokens_seen": 121075815, - "step": 5702 - }, - { - "epoch": 0.6857452053147358, - "flos": 12458213300400.0, - "grad_norm": 3.3326946282411507, - "learning_rate": 9.493961980058078e-07, - "loss": 0.7331, - "num_input_tokens_seen": 121093130, - "step": 5703 - }, - { - "epoch": 0.6858654482053749, - "flos": 22642194048840.0, - "grad_norm": 4.257740601098794, - "learning_rate": 9.48733438329993e-07, - "loss": 0.6577, - "num_input_tokens_seen": 121113115, - "step": 5704 - }, - { - "epoch": 0.6859856910960139, - "flos": 21184337124480.0, - "grad_norm": 4.205319877369151, - "learning_rate": 9.480708381304807e-07, - "loss": 0.7203, - "num_input_tokens_seen": 121134130, - "step": 5705 - }, - { - "epoch": 0.6861059339866531, - "flos": 14136830854920.0, - "grad_norm": 3.944288305914432, - "learning_rate": 9.474083975077858e-07, - "loss": 0.8321, - "num_input_tokens_seen": 121150975, - "step": 5706 - }, - { - "epoch": 0.6862261768772921, - "flos": 16242596047920.0, - "grad_norm": 5.783278650617104, - "learning_rate": 9.467461165623994e-07, - "loss": 0.7811, - "num_input_tokens_seen": 121169745, - "step": 5707 - }, - { - "epoch": 0.6863464197679312, - "flos": 19266189074880.0, - "grad_norm": 5.515310478428383, - "learning_rate": 9.46083995394791e-07, - "loss": 0.78, - "num_input_tokens_seen": 121187275, - "step": 5708 - }, - { - "epoch": 0.6864666626585703, - "flos": 27798838662960.0, - "grad_norm": 4.357772477910966, - "learning_rate": 9.454220341054012e-07, - "loss": 0.6149, - "num_input_tokens_seen": 121211780, - "step": 5709 - }, - { - "epoch": 0.6865869055492094, - "flos": 14163958949280.0, - "grad_norm": 5.621064229096387, - "learning_rate": 9.447602327946512e-07, - "loss": 0.7967, - "num_input_tokens_seen": 121230140, - "step": 5710 - }, - { - "epoch": 0.6867071484398485, - "flos": 14892744941640.0, - "grad_norm": 5.018935967559256, - "learning_rate": 9.440985915629338e-07, - "loss": 0.7571, - "num_input_tokens_seen": 121247190, - "step": 5711 - }, - { - "epoch": 0.6868273913304875, - "flos": 11572325874720.0, - "grad_norm": 3.1186776865677084, - "learning_rate": 9.434371105106223e-07, - "loss": 0.7107, - "num_input_tokens_seen": 121264510, - "step": 5712 - }, - { - "epoch": 0.6869476342211267, - "flos": 17917066147680.0, - "grad_norm": 
3.664597982551191, - "learning_rate": 9.427757897380602e-07, - "loss": 0.6746, - "num_input_tokens_seen": 121283630, - "step": 5713 - }, - { - "epoch": 0.6870678771117658, - "flos": 13461952791720.0, - "grad_norm": 4.449475100988476, - "learning_rate": 9.421146293455695e-07, - "loss": 0.8315, - "num_input_tokens_seen": 121299090, - "step": 5714 - }, - { - "epoch": 0.6871881200024048, - "flos": 16242944307480.0, - "grad_norm": 3.6747501885242597, - "learning_rate": 9.414536294334489e-07, - "loss": 0.6747, - "num_input_tokens_seen": 121318830, - "step": 5715 - }, - { - "epoch": 0.687308362893044, - "flos": 16189226338080.0, - "grad_norm": 4.3163737714417225, - "learning_rate": 9.407927901019708e-07, - "loss": 0.6866, - "num_input_tokens_seen": 121337680, - "step": 5716 - }, - { - "epoch": 0.687428605783683, - "flos": 18344055486360.0, - "grad_norm": 3.879647206263523, - "learning_rate": 9.401321114513854e-07, - "loss": 0.761, - "num_input_tokens_seen": 121356295, - "step": 5717 - }, - { - "epoch": 0.6875488486743221, - "flos": 17404735030920.0, - "grad_norm": 3.822992738110677, - "learning_rate": 9.394715935819155e-07, - "loss": 0.7345, - "num_input_tokens_seen": 121376405, - "step": 5718 - }, - { - "epoch": 0.6876690915649613, - "flos": 18696910672800.0, - "grad_norm": 3.414056354578279, - "learning_rate": 9.388112365937608e-07, - "loss": 0.5952, - "num_input_tokens_seen": 121395590, - "step": 5719 - }, - { - "epoch": 0.6877893344556003, - "flos": 14191277003400.0, - "grad_norm": 4.37737710951281, - "learning_rate": 9.381510405870985e-07, - "loss": 0.7992, - "num_input_tokens_seen": 121414325, - "step": 5720 - }, - { - "epoch": 0.6879095773462394, - "flos": 13623803219040.0, - "grad_norm": 18.34808329620648, - "learning_rate": 9.374910056620791e-07, - "loss": 0.7472, - "num_input_tokens_seen": 121433110, - "step": 5721 - }, - { - "epoch": 0.6880298202368785, - "flos": 15267567648960.0, - "grad_norm": 3.8051364015048943, - "learning_rate": 9.368311319188293e-07, - "loss": 0.7933, - "num_input_tokens_seen": 121450645, - "step": 5722 - }, - { - "epoch": 0.6881500631275176, - "flos": 22128501553800.0, - "grad_norm": 3.7539865284730354, - "learning_rate": 9.361714194574515e-07, - "loss": 0.78, - "num_input_tokens_seen": 121472700, - "step": 5723 - }, - { - "epoch": 0.6882703060181566, - "flos": 48799362330600.0, - "grad_norm": 0.7347666325174387, - "learning_rate": 9.355118683780228e-07, - "loss": 0.59, - "num_input_tokens_seen": 121542490, - "step": 5724 - }, - { - "epoch": 0.6883905489087958, - "flos": 13291997414640.0, - "grad_norm": 4.6110116930027205, - "learning_rate": 9.348524787805987e-07, - "loss": 0.7738, - "num_input_tokens_seen": 121557400, - "step": 5725 - }, - { - "epoch": 0.6885107917994349, - "flos": 10214654758320.0, - "grad_norm": 15.968642023546394, - "learning_rate": 9.341932507652053e-07, - "loss": 0.8318, - "num_input_tokens_seen": 121571610, - "step": 5726 - }, - { - "epoch": 0.6886310346900739, - "flos": 21045657296520.0, - "grad_norm": 2.662109912360089, - "learning_rate": 9.335341844318489e-07, - "loss": 0.7582, - "num_input_tokens_seen": 121591470, - "step": 5727 - }, - { - "epoch": 0.6887512775807131, - "flos": 17970942416880.0, - "grad_norm": 4.2754727107812025, - "learning_rate": 9.328752798805091e-07, - "loss": 0.7182, - "num_input_tokens_seen": 121609660, - "step": 5728 - }, - { - "epoch": 0.6888715204713521, - "flos": 16400615620080.0, - "grad_norm": 3.708551736179426, - "learning_rate": 9.322165372111399e-07, - "loss": 0.7346, - "num_input_tokens_seen": 
121627525, - "step": 5729 - }, - { - "epoch": 0.6889917633619912, - "flos": 16134843509520.0, - "grad_norm": 3.9386407319956036, - "learning_rate": 9.315579565236747e-07, - "loss": 0.7462, - "num_input_tokens_seen": 121646350, - "step": 5730 - }, - { - "epoch": 0.6891120062526304, - "flos": 17537304486600.0, - "grad_norm": 4.104143997878518, - "learning_rate": 9.308995379180162e-07, - "loss": 0.7298, - "num_input_tokens_seen": 121665625, - "step": 5731 - }, - { - "epoch": 0.6892322491432694, - "flos": 47268759800880.0, - "grad_norm": 0.8016438745905494, - "learning_rate": 9.302412814940488e-07, - "loss": 0.6293, - "num_input_tokens_seen": 121728120, - "step": 5732 - }, - { - "epoch": 0.6893524920339085, - "flos": 16836438087600.0, - "grad_norm": 7.173293960255245, - "learning_rate": 9.295831873516276e-07, - "loss": 0.6852, - "num_input_tokens_seen": 121747115, - "step": 5733 - }, - { - "epoch": 0.6894727349245476, - "flos": 15647645909640.0, - "grad_norm": 5.874509101000217, - "learning_rate": 9.289252555905873e-07, - "loss": 0.7509, - "num_input_tokens_seen": 121766915, - "step": 5734 - }, - { - "epoch": 0.6895929778151867, - "flos": 14514851218200.0, - "grad_norm": 5.517578816371638, - "learning_rate": 9.282674863107334e-07, - "loss": 0.735, - "num_input_tokens_seen": 121784450, - "step": 5735 - }, - { - "epoch": 0.6897132207058257, - "flos": 13265882439000.0, - "grad_norm": 4.553083343420028, - "learning_rate": 9.276098796118488e-07, - "loss": 0.7521, - "num_input_tokens_seen": 121800655, - "step": 5736 - }, - { - "epoch": 0.6898334635964649, - "flos": 23910882491760.0, - "grad_norm": 3.108437401577971, - "learning_rate": 9.269524355936938e-07, - "loss": 0.6542, - "num_input_tokens_seen": 121823555, - "step": 5737 - }, - { - "epoch": 0.689953706487104, - "flos": 16700544336120.0, - "grad_norm": 4.8816146968334, - "learning_rate": 9.262951543560002e-07, - "loss": 0.8362, - "num_input_tokens_seen": 121842500, - "step": 5738 - }, - { - "epoch": 0.690073949377743, - "flos": 13515797400960.0, - "grad_norm": 7.626899607686758, - "learning_rate": 9.256380359984795e-07, - "loss": 0.8419, - "num_input_tokens_seen": 121859330, - "step": 5739 - }, - { - "epoch": 0.6901941922683821, - "flos": 25609314689880.0, - "grad_norm": 7.1034262113736135, - "learning_rate": 9.249810806208139e-07, - "loss": 0.7305, - "num_input_tokens_seen": 121878315, - "step": 5740 - }, - { - "epoch": 0.6903144351590212, - "flos": 11842055480280.0, - "grad_norm": 5.066249259687022, - "learning_rate": 9.243242883226627e-07, - "loss": 0.79, - "num_input_tokens_seen": 121897130, - "step": 5741 - }, - { - "epoch": 0.6904346780496603, - "flos": 20560644233880.0, - "grad_norm": 3.8392822848586676, - "learning_rate": 9.236676592036628e-07, - "loss": 0.6892, - "num_input_tokens_seen": 121916525, - "step": 5742 - }, - { - "epoch": 0.6905549209402994, - "flos": 17296855852680.0, - "grad_norm": 6.889048147240662, - "learning_rate": 9.230111933634228e-07, - "loss": 0.7165, - "num_input_tokens_seen": 121937840, - "step": 5743 - }, - { - "epoch": 0.6906751638309385, - "flos": 16920133547760.0, - "grad_norm": 4.52585822971225, - "learning_rate": 9.223548909015288e-07, - "loss": 0.7972, - "num_input_tokens_seen": 121959250, - "step": 5744 - }, - { - "epoch": 0.6907954067215776, - "flos": 20020646803440.0, - "grad_norm": 3.796657979908942, - "learning_rate": 9.216987519175407e-07, - "loss": 0.6955, - "num_input_tokens_seen": 121979145, - "step": 5745 - }, - { - "epoch": 0.6909156496122166, - "flos": 15864765644400.0, - "grad_norm": 
7.262713893032068, - "learning_rate": 9.210427765109942e-07, - "loss": 0.6753, - "num_input_tokens_seen": 121998540, - "step": 5746 - }, - { - "epoch": 0.6910358925028558, - "flos": 16509634556880.0, - "grad_norm": 7.357816689118048, - "learning_rate": 9.20386964781402e-07, - "loss": 0.7897, - "num_input_tokens_seen": 122016280, - "step": 5747 - }, - { - "epoch": 0.6911561353934949, - "flos": 16134653549760.0, - "grad_norm": 3.039095763193606, - "learning_rate": 9.197313168282472e-07, - "loss": 0.8181, - "num_input_tokens_seen": 122033445, - "step": 5748 - }, - { - "epoch": 0.6912763782841339, - "flos": 17727581066640.0, - "grad_norm": 4.333975573165916, - "learning_rate": 9.190758327509935e-07, - "loss": 0.6997, - "num_input_tokens_seen": 122051910, - "step": 5749 - }, - { - "epoch": 0.6913966211747731, - "flos": 38544662214240.0, - "grad_norm": 0.8862010772287016, - "learning_rate": 9.184205126490767e-07, - "loss": 0.6629, - "num_input_tokens_seen": 122100525, - "step": 5750 - }, - { - "epoch": 0.6915168640654121, - "flos": 49210205089680.0, - "grad_norm": 0.9473717069708295, - "learning_rate": 9.177653566219075e-07, - "loss": 0.6296, - "num_input_tokens_seen": 122154970, - "step": 5751 - }, - { - "epoch": 0.6916371069560512, - "flos": 13353851993760.0, - "grad_norm": 4.205702661326438, - "learning_rate": 9.171103647688744e-07, - "loss": 0.7296, - "num_input_tokens_seen": 122173430, - "step": 5752 - }, - { - "epoch": 0.6917573498466904, - "flos": 14352304271760.0, - "grad_norm": 3.813871168364159, - "learning_rate": 9.164555371893367e-07, - "loss": 0.6674, - "num_input_tokens_seen": 122193080, - "step": 5753 - }, - { - "epoch": 0.6918775927373294, - "flos": 10329815727360.0, - "grad_norm": 2.935614447409246, - "learning_rate": 9.158008739826333e-07, - "loss": 0.7413, - "num_input_tokens_seen": 122210400, - "step": 5754 - }, - { - "epoch": 0.6919978356279685, - "flos": 17563641081960.0, - "grad_norm": 4.266802167727652, - "learning_rate": 9.151463752480744e-07, - "loss": 0.8487, - "num_input_tokens_seen": 122228850, - "step": 5755 - }, - { - "epoch": 0.6921180785186076, - "flos": 17295684434160.0, - "grad_norm": 6.801950107767234, - "learning_rate": 9.144920410849493e-07, - "loss": 0.7886, - "num_input_tokens_seen": 122249805, - "step": 5756 - }, - { - "epoch": 0.6922383214092467, - "flos": 15460187066040.0, - "grad_norm": 3.7249166184892335, - "learning_rate": 9.138378715925176e-07, - "loss": 0.782, - "num_input_tokens_seen": 122268620, - "step": 5757 - }, - { - "epoch": 0.6923585642998857, - "flos": 15702915217080.0, - "grad_norm": 4.223555702872706, - "learning_rate": 9.131838668700167e-07, - "loss": 0.787, - "num_input_tokens_seen": 122288410, - "step": 5758 - }, - { - "epoch": 0.6924788071905249, - "flos": 15432362452560.0, - "grad_norm": 3.607414737151168, - "learning_rate": 9.125300270166598e-07, - "loss": 0.8511, - "num_input_tokens_seen": 122308735, - "step": 5759 - }, - { - "epoch": 0.692599050081164, - "flos": 19239915799440.0, - "grad_norm": 4.436195289186312, - "learning_rate": 9.118763521316324e-07, - "loss": 0.8528, - "num_input_tokens_seen": 122329030, - "step": 5760 - }, - { - "epoch": 0.692719292971803, - "flos": 15269403926640.0, - "grad_norm": 4.0965980930864205, - "learning_rate": 9.112228423140987e-07, - "loss": 0.7522, - "num_input_tokens_seen": 122347670, - "step": 5761 - }, - { - "epoch": 0.6928395358624422, - "flos": 18997155988440.0, - "grad_norm": 6.916235699689688, - "learning_rate": 9.105694976631932e-07, - "loss": 0.8614, - "num_input_tokens_seen": 
122365300, - "step": 5762 - }, - { - "epoch": 0.6929597787530812, - "flos": 17268777959520.0, - "grad_norm": 7.036685724393744, - "learning_rate": 9.099163182780283e-07, - "loss": 0.7122, - "num_input_tokens_seen": 122383175, - "step": 5763 - }, - { - "epoch": 0.6930800216437203, - "flos": 13322988024120.0, - "grad_norm": 12.270392435353408, - "learning_rate": 9.092633042576916e-07, - "loss": 0.481, - "num_input_tokens_seen": 122400160, - "step": 5764 - }, - { - "epoch": 0.6932002645343595, - "flos": 21399842201280.0, - "grad_norm": 5.539138255388169, - "learning_rate": 9.086104557012446e-07, - "loss": 0.5513, - "num_input_tokens_seen": 122420450, - "step": 5765 - }, - { - "epoch": 0.6933205074249985, - "flos": 17458484660280.0, - "grad_norm": 6.26342917609398, - "learning_rate": 9.079577727077239e-07, - "loss": 0.653, - "num_input_tokens_seen": 122439000, - "step": 5766 - }, - { - "epoch": 0.6934407503156376, - "flos": 17697603575880.0, - "grad_norm": 3.5000780641841835, - "learning_rate": 9.073052553761404e-07, - "loss": 0.6982, - "num_input_tokens_seen": 122458085, - "step": 5767 - }, - { - "epoch": 0.6935609932062767, - "flos": 15081723463320.0, - "grad_norm": 3.9229405388061007, - "learning_rate": 9.066529038054805e-07, - "loss": 0.7595, - "num_input_tokens_seen": 122477870, - "step": 5768 - }, - { - "epoch": 0.6936812360969158, - "flos": 13321689965760.0, - "grad_norm": 3.357285386396344, - "learning_rate": 9.060007180947071e-07, - "loss": 0.7276, - "num_input_tokens_seen": 122495645, - "step": 5769 - }, - { - "epoch": 0.6938014789875548, - "flos": 22989477082320.0, - "grad_norm": 4.293560511421072, - "learning_rate": 9.053486983427534e-07, - "loss": 0.7155, - "num_input_tokens_seen": 122516615, - "step": 5770 - }, - { - "epoch": 0.6939217218781939, - "flos": 12785143470960.0, - "grad_norm": 3.6758081027761853, - "learning_rate": 9.046968446485326e-07, - "loss": 0.6886, - "num_input_tokens_seen": 122534740, - "step": 5771 - }, - { - "epoch": 0.6940419647688331, - "flos": 13542735535560.0, - "grad_norm": 5.301083673495762, - "learning_rate": 9.040451571109295e-07, - "loss": 0.6899, - "num_input_tokens_seen": 122550080, - "step": 5772 - }, - { - "epoch": 0.6941622076594721, - "flos": 49347491879400.0, - "grad_norm": 0.9241548246846659, - "learning_rate": 9.033936358288042e-07, - "loss": 0.6406, - "num_input_tokens_seen": 122603535, - "step": 5773 - }, - { - "epoch": 0.6942824505501112, - "flos": 19482643950480.0, - "grad_norm": 4.48329771706049, - "learning_rate": 9.027422809009937e-07, - "loss": 0.8102, - "num_input_tokens_seen": 122623200, - "step": 5774 - }, - { - "epoch": 0.6944026934407503, - "flos": 15537867133800.0, - "grad_norm": 3.4698393889533037, - "learning_rate": 9.020910924263054e-07, - "loss": 0.8122, - "num_input_tokens_seen": 122641445, - "step": 5775 - }, - { - "epoch": 0.6945229363313894, - "flos": 52122404682840.0, - "grad_norm": 0.8627509304960191, - "learning_rate": 9.014400705035261e-07, - "loss": 0.6157, - "num_input_tokens_seen": 122698070, - "step": 5776 - }, - { - "epoch": 0.6946431792220285, - "flos": 13839593235480.0, - "grad_norm": 14.791315180224583, - "learning_rate": 9.00789215231414e-07, - "loss": 0.7645, - "num_input_tokens_seen": 122716185, - "step": 5777 - }, - { - "epoch": 0.6947634221126676, - "flos": 14865173607840.0, - "grad_norm": 3.030302705909249, - "learning_rate": 9.001385267087056e-07, - "loss": 0.8077, - "num_input_tokens_seen": 122735050, - "step": 5778 - }, - { - "epoch": 0.6948836650033067, - "flos": 15971346764280.0, - "grad_norm": 
3.199900720963611, - "learning_rate": 8.994880050341072e-07, - "loss": 0.6864, - "num_input_tokens_seen": 122754875, - "step": 5779 - }, - { - "epoch": 0.6950039078939457, - "flos": 17321007910800.0, - "grad_norm": 24.043313677968506, - "learning_rate": 8.988376503063026e-07, - "loss": 0.7647, - "num_input_tokens_seen": 122775855, - "step": 5780 - }, - { - "epoch": 0.6951241507845849, - "flos": 15940134535080.0, - "grad_norm": 4.517753759285804, - "learning_rate": 8.981874626239521e-07, - "loss": 0.8024, - "num_input_tokens_seen": 122794150, - "step": 5781 - }, - { - "epoch": 0.695244393675224, - "flos": 10653326622240.0, - "grad_norm": 4.918866306660291, - "learning_rate": 8.975374420856872e-07, - "loss": 0.8643, - "num_input_tokens_seen": 122810765, - "step": 5782 - }, - { - "epoch": 0.695364636565863, - "flos": 12300447007920.0, - "grad_norm": 3.99595485233314, - "learning_rate": 8.968875887901157e-07, - "loss": 0.7084, - "num_input_tokens_seen": 122827865, - "step": 5783 - }, - { - "epoch": 0.6954848794565022, - "flos": 14136830854920.0, - "grad_norm": 3.641334955630179, - "learning_rate": 8.9623790283582e-07, - "loss": 0.5933, - "num_input_tokens_seen": 122845465, - "step": 5784 - }, - { - "epoch": 0.6956051223471412, - "flos": 13867291209120.0, - "grad_norm": 9.655212149045012, - "learning_rate": 8.955883843213561e-07, - "loss": 0.745, - "num_input_tokens_seen": 122864200, - "step": 5785 - }, - { - "epoch": 0.6957253652377803, - "flos": 11733606422760.0, - "grad_norm": 3.803441025604828, - "learning_rate": 8.949390333452569e-07, - "loss": 0.8683, - "num_input_tokens_seen": 122881865, - "step": 5786 - }, - { - "epoch": 0.6958456081284194, - "flos": 21562832387160.0, - "grad_norm": 4.467971627264002, - "learning_rate": 8.942898500060279e-07, - "loss": 0.6588, - "num_input_tokens_seen": 122901300, - "step": 5787 - }, - { - "epoch": 0.6959658510190585, - "flos": 18429333944520.0, - "grad_norm": 5.000434665859977, - "learning_rate": 8.936408344021493e-07, - "loss": 0.6831, - "num_input_tokens_seen": 122917935, - "step": 5788 - }, - { - "epoch": 0.6960860939096976, - "flos": 31498639471440.0, - "grad_norm": 5.842519375561248, - "learning_rate": 8.929919866320765e-07, - "loss": 0.6923, - "num_input_tokens_seen": 122938470, - "step": 5789 - }, - { - "epoch": 0.6962063368003367, - "flos": 12813601283640.0, - "grad_norm": 4.517371239318101, - "learning_rate": 8.923433067942385e-07, - "loss": 0.7979, - "num_input_tokens_seen": 122956755, - "step": 5790 - }, - { - "epoch": 0.6963265796909758, - "flos": 15540811510080.0, - "grad_norm": 3.19475343472184, - "learning_rate": 8.916947949870417e-07, - "loss": 0.6681, - "num_input_tokens_seen": 122976140, - "step": 5791 - }, - { - "epoch": 0.6964468225816148, - "flos": 50755778289120.0, - "grad_norm": 0.8304820076717667, - "learning_rate": 8.910464513088615e-07, - "loss": 0.6245, - "num_input_tokens_seen": 123039900, - "step": 5792 - }, - { - "epoch": 0.696567065472254, - "flos": 13837788617760.0, - "grad_norm": 4.456944163054212, - "learning_rate": 8.903982758580542e-07, - "loss": 0.7724, - "num_input_tokens_seen": 123058560, - "step": 5793 - }, - { - "epoch": 0.696687308362893, - "flos": 16728590569320.0, - "grad_norm": 3.5887802923153753, - "learning_rate": 8.897502687329457e-07, - "loss": 0.7964, - "num_input_tokens_seen": 123078080, - "step": 5794 - }, - { - "epoch": 0.6968075512535321, - "flos": 18294769911360.0, - "grad_norm": 5.397649196808425, - "learning_rate": 8.891024300318382e-07, - "loss": 0.7872, - "num_input_tokens_seen": 123096370, - 
"step": 5795 - }, - { - "epoch": 0.6969277941441713, - "flos": 15376776545520.0, - "grad_norm": 3.5005199863542735, - "learning_rate": 8.884547598530103e-07, - "loss": 0.7444, - "num_input_tokens_seen": 123116660, - "step": 5796 - }, - { - "epoch": 0.6970480370348103, - "flos": 15783286381440.0, - "grad_norm": 6.4760167682282175, - "learning_rate": 8.8780725829471e-07, - "loss": 0.738, - "num_input_tokens_seen": 123134285, - "step": 5797 - }, - { - "epoch": 0.6971682799254494, - "flos": 16404858054720.0, - "grad_norm": 7.9277110375518784, - "learning_rate": 8.87159925455165e-07, - "loss": 0.7457, - "num_input_tokens_seen": 123153835, - "step": 5798 - }, - { - "epoch": 0.6972885228160886, - "flos": 14618076382320.0, - "grad_norm": 9.466261533566195, - "learning_rate": 8.865127614325738e-07, - "loss": 0.7209, - "num_input_tokens_seen": 123171985, - "step": 5799 - }, - { - "epoch": 0.6974087657067276, - "flos": 27827359795560.0, - "grad_norm": 3.128127554817185, - "learning_rate": 8.85865766325113e-07, - "loss": 0.6494, - "num_input_tokens_seen": 123195635, - "step": 5800 - }, - { - "epoch": 0.6975290085973667, - "flos": 21636048400560.0, - "grad_norm": 6.918387815511668, - "learning_rate": 8.852189402309287e-07, - "loss": 0.6957, - "num_input_tokens_seen": 123214540, - "step": 5801 - }, - { - "epoch": 0.6976492514880057, - "flos": 9356686926000.0, - "grad_norm": 4.380099349266891, - "learning_rate": 8.845722832481441e-07, - "loss": 0.7011, - "num_input_tokens_seen": 123229690, - "step": 5802 - }, - { - "epoch": 0.6977694943786449, - "flos": 17835966804240.0, - "grad_norm": 4.338775420865888, - "learning_rate": 8.83925795474858e-07, - "loss": 0.7557, - "num_input_tokens_seen": 123249535, - "step": 5803 - }, - { - "epoch": 0.6978897372692839, - "flos": 21939997931520.0, - "grad_norm": 6.509119825265026, - "learning_rate": 8.832794770091414e-07, - "loss": 0.5922, - "num_input_tokens_seen": 123270090, - "step": 5804 - }, - { - "epoch": 0.698009980159923, - "flos": 15918072034320.0, - "grad_norm": 4.583464441447184, - "learning_rate": 8.826333279490401e-07, - "loss": 0.8076, - "num_input_tokens_seen": 123290445, - "step": 5805 - }, - { - "epoch": 0.6981302230505622, - "flos": 14507664407280.0, - "grad_norm": 4.054785108034563, - "learning_rate": 8.819873483925748e-07, - "loss": 0.6627, - "num_input_tokens_seen": 123307285, - "step": 5806 - }, - { - "epoch": 0.6982504659412012, - "flos": 16241424629400.0, - "grad_norm": 6.918744552596376, - "learning_rate": 8.81341538437739e-07, - "loss": 0.7317, - "num_input_tokens_seen": 123325295, - "step": 5807 - }, - { - "epoch": 0.6983707088318403, - "flos": 26150736818520.0, - "grad_norm": 3.2866252649213057, - "learning_rate": 8.80695898182503e-07, - "loss": 0.6774, - "num_input_tokens_seen": 123345995, - "step": 5808 - }, - { - "epoch": 0.6984909517224794, - "flos": 48248948773320.0, - "grad_norm": 0.897748002558127, - "learning_rate": 8.800504277248093e-07, - "loss": 0.6779, - "num_input_tokens_seen": 123410465, - "step": 5809 - }, - { - "epoch": 0.6986111946131185, - "flos": 13538461440960.0, - "grad_norm": 3.5083738697697266, - "learning_rate": 8.794051271625753e-07, - "loss": 0.7311, - "num_input_tokens_seen": 123427820, - "step": 5810 - }, - { - "epoch": 0.6987314375037575, - "flos": 16863661161840.0, - "grad_norm": 4.844427442861076, - "learning_rate": 8.787599965936925e-07, - "loss": 0.8211, - "num_input_tokens_seen": 123448470, - "step": 5811 - }, - { - "epoch": 0.6988516803943967, - "flos": 28232096673720.0, - "grad_norm": 4.580696919819105, - 
"learning_rate": 8.781150361160261e-07, - "loss": 0.7158, - "num_input_tokens_seen": 123470100, - "step": 5812 - }, - { - "epoch": 0.6989719232850358, - "flos": 17646481723200.0, - "grad_norm": 89.98716813906024, - "learning_rate": 8.774702458274181e-07, - "loss": 0.7209, - "num_input_tokens_seen": 123490225, - "step": 5813 - }, - { - "epoch": 0.6990921661756748, - "flos": 10815651948960.0, - "grad_norm": 14.936701158686487, - "learning_rate": 8.768256258256799e-07, - "loss": 0.6758, - "num_input_tokens_seen": 123506570, - "step": 5814 - }, - { - "epoch": 0.699212409066314, - "flos": 14757895968840.0, - "grad_norm": 4.256833809180186, - "learning_rate": 8.76181176208602e-07, - "loss": 0.7342, - "num_input_tokens_seen": 123524390, - "step": 5815 - }, - { - "epoch": 0.699332651956953, - "flos": 14191182023520.0, - "grad_norm": 4.119148033717852, - "learning_rate": 8.755368970739461e-07, - "loss": 0.7162, - "num_input_tokens_seen": 123543470, - "step": 5816 - }, - { - "epoch": 0.6994528948475921, - "flos": 11760607877280.0, - "grad_norm": 4.559393883627469, - "learning_rate": 8.748927885194479e-07, - "loss": 0.5915, - "num_input_tokens_seen": 123561495, - "step": 5817 - }, - { - "epoch": 0.6995731377382313, - "flos": 47886184019400.0, - "grad_norm": 0.7387309647381376, - "learning_rate": 8.742488506428209e-07, - "loss": 0.5757, - "num_input_tokens_seen": 123620305, - "step": 5818 - }, - { - "epoch": 0.6996933806288703, - "flos": 18240925302120.0, - "grad_norm": 4.503640503665327, - "learning_rate": 8.736050835417466e-07, - "loss": 0.7728, - "num_input_tokens_seen": 123640065, - "step": 5819 - }, - { - "epoch": 0.6998136235195094, - "flos": 15190204180800.0, - "grad_norm": 9.746409219593708, - "learning_rate": 8.729614873138862e-07, - "loss": 0.5983, - "num_input_tokens_seen": 123657420, - "step": 5820 - }, - { - "epoch": 0.6999338664101485, - "flos": 17377638596520.0, - "grad_norm": 4.411726990332861, - "learning_rate": 8.723180620568716e-07, - "loss": 0.7678, - "num_input_tokens_seen": 123676395, - "step": 5821 - }, - { - "epoch": 0.7000541093007876, - "flos": 14514218019000.0, - "grad_norm": 5.303839052465488, - "learning_rate": 8.716748078683116e-07, - "loss": 0.8476, - "num_input_tokens_seen": 123692890, - "step": 5822 - }, - { - "epoch": 0.7001743521914267, - "flos": 21777545964960.0, - "grad_norm": 5.787483442707147, - "learning_rate": 8.710317248457855e-07, - "loss": 0.6698, - "num_input_tokens_seen": 123712225, - "step": 5823 - }, - { - "epoch": 0.7002945950820658, - "flos": 20157300393960.0, - "grad_norm": 5.318347898674728, - "learning_rate": 8.703888130868482e-07, - "loss": 0.7004, - "num_input_tokens_seen": 123733795, - "step": 5824 - }, - { - "epoch": 0.7004148379727049, - "flos": 16211162199000.0, - "grad_norm": 3.954271474453513, - "learning_rate": 8.697460726890307e-07, - "loss": 0.8023, - "num_input_tokens_seen": 123750660, - "step": 5825 - }, - { - "epoch": 0.7005350808633439, - "flos": 14187541128120.0, - "grad_norm": 4.942776335505384, - "learning_rate": 8.691035037498354e-07, - "loss": 0.8827, - "num_input_tokens_seen": 123766370, - "step": 5826 - }, - { - "epoch": 0.7006553237539831, - "flos": 17187425336400.0, - "grad_norm": 3.8513406376262345, - "learning_rate": 8.684611063667391e-07, - "loss": 0.7171, - "num_input_tokens_seen": 123786555, - "step": 5827 - }, - { - "epoch": 0.7007755666446221, - "flos": 22912873453200.0, - "grad_norm": 4.547670904285678, - "learning_rate": 8.678188806371935e-07, - "loss": 0.7453, - "num_input_tokens_seen": 123808310, - "step": 5828 - 
}, - { - "epoch": 0.7008958095352612, - "flos": 13245244636440.0, - "grad_norm": 3.170716473451559, - "learning_rate": 8.671768266586228e-07, - "loss": 0.842, - "num_input_tokens_seen": 123826155, - "step": 5829 - }, - { - "epoch": 0.7010160524259004, - "flos": 20129444120520.0, - "grad_norm": 4.294519062937502, - "learning_rate": 8.665349445284275e-07, - "loss": 0.7702, - "num_input_tokens_seen": 123845615, - "step": 5830 - }, - { - "epoch": 0.7011362953165394, - "flos": 17457883121040.0, - "grad_norm": 10.393218441031069, - "learning_rate": 8.658932343439799e-07, - "loss": 0.7972, - "num_input_tokens_seen": 123865120, - "step": 5831 - }, - { - "epoch": 0.7012565382071785, - "flos": 18183629757240.0, - "grad_norm": 3.8962787774705725, - "learning_rate": 8.65251696202627e-07, - "loss": 0.7488, - "num_input_tokens_seen": 123881220, - "step": 5832 - }, - { - "epoch": 0.7013767810978175, - "flos": 15648690688320.0, - "grad_norm": 6.660492167573403, - "learning_rate": 8.646103302016896e-07, - "loss": 0.8596, - "num_input_tokens_seen": 123899910, - "step": 5833 - }, - { - "epoch": 0.7014970239884567, - "flos": 12162242079360.0, - "grad_norm": 3.989405190629853, - "learning_rate": 8.639691364384614e-07, - "loss": 0.8664, - "num_input_tokens_seen": 123917255, - "step": 5834 - }, - { - "epoch": 0.7016172668790958, - "flos": 9114560314200.0, - "grad_norm": 4.405733335247774, - "learning_rate": 8.633281150102136e-07, - "loss": 0.7064, - "num_input_tokens_seen": 123933825, - "step": 5835 - }, - { - "epoch": 0.7017375097697348, - "flos": 12728734404960.0, - "grad_norm": 4.3356614583442825, - "learning_rate": 8.626872660141855e-07, - "loss": 0.6624, - "num_input_tokens_seen": 123951455, - "step": 5836 - }, - { - "epoch": 0.701857752660374, - "flos": 13513739503560.0, - "grad_norm": 4.992666970690576, - "learning_rate": 8.620465895475957e-07, - "loss": 0.7353, - "num_input_tokens_seen": 123969395, - "step": 5837 - }, - { - "epoch": 0.701977995551013, - "flos": 17890033033200.0, - "grad_norm": 4.465001594910545, - "learning_rate": 8.614060857076333e-07, - "loss": 0.7415, - "num_input_tokens_seen": 123989785, - "step": 5838 - }, - { - "epoch": 0.7020982384416521, - "flos": 16836723027240.0, - "grad_norm": 5.699099272115528, - "learning_rate": 8.60765754591462e-07, - "loss": 0.7338, - "num_input_tokens_seen": 124009200, - "step": 5839 - }, - { - "epoch": 0.7022184813322913, - "flos": 14946874490520.0, - "grad_norm": 2.8999471922703255, - "learning_rate": 8.601255962962211e-07, - "loss": 0.7216, - "num_input_tokens_seen": 124027930, - "step": 5840 - }, - { - "epoch": 0.7023387242229303, - "flos": 14459233651200.0, - "grad_norm": 6.311719487347555, - "learning_rate": 8.594856109190194e-07, - "loss": 0.7141, - "num_input_tokens_seen": 124044680, - "step": 5841 - }, - { - "epoch": 0.7024589671135694, - "flos": 24427804302720.0, - "grad_norm": 8.512044563377579, - "learning_rate": 8.588457985569446e-07, - "loss": 0.6889, - "num_input_tokens_seen": 124067310, - "step": 5842 - }, - { - "epoch": 0.7025792100042085, - "flos": 13947694033440.0, - "grad_norm": 4.556120710801732, - "learning_rate": 8.582061593070542e-07, - "loss": 0.6989, - "num_input_tokens_seen": 124087760, - "step": 5843 - }, - { - "epoch": 0.7026994528948476, - "flos": 13839308295840.0, - "grad_norm": 7.311039966338872, - "learning_rate": 8.57566693266383e-07, - "loss": 0.7434, - "num_input_tokens_seen": 124105170, - "step": 5844 - }, - { - "epoch": 0.7028196957854866, - "flos": 14271584847840.0, - "grad_norm": 4.845635816113541, - 
"learning_rate": 8.569274005319354e-07, - "loss": 0.6798, - "num_input_tokens_seen": 124123290, - "step": 5845 - }, - { - "epoch": 0.7029399386761258, - "flos": 15239394775920.0, - "grad_norm": 3.1101177612774844, - "learning_rate": 8.562882812006913e-07, - "loss": 0.7862, - "num_input_tokens_seen": 124140500, - "step": 5846 - }, - { - "epoch": 0.7030601815667649, - "flos": 16135445048760.0, - "grad_norm": 3.7110387527305044, - "learning_rate": 8.556493353696066e-07, - "loss": 0.7557, - "num_input_tokens_seen": 124159220, - "step": 5847 - }, - { - "epoch": 0.7031804244574039, - "flos": 19942048596840.0, - "grad_norm": 6.709902309383113, - "learning_rate": 8.550105631356077e-07, - "loss": 0.6579, - "num_input_tokens_seen": 124178665, - "step": 5848 - }, - { - "epoch": 0.7033006673480431, - "flos": 16375355463360.0, - "grad_norm": 19.687100845408033, - "learning_rate": 8.543719645955961e-07, - "loss": 0.75, - "num_input_tokens_seen": 124196715, - "step": 5849 - }, - { - "epoch": 0.7034209102386821, - "flos": 18104398351440.0, - "grad_norm": 23.640001489216456, - "learning_rate": 8.537335398464467e-07, - "loss": 0.7275, - "num_input_tokens_seen": 124216755, - "step": 5850 - }, - { - "epoch": 0.7035411531293212, - "flos": 16504980542760.0, - "grad_norm": 7.783654266645955, - "learning_rate": 8.53095288985007e-07, - "loss": 0.8377, - "num_input_tokens_seen": 124230210, - "step": 5851 - }, - { - "epoch": 0.7036613960199604, - "flos": 16594216495920.0, - "grad_norm": 5.596989210572645, - "learning_rate": 8.524572121081009e-07, - "loss": 0.8054, - "num_input_tokens_seen": 124250030, - "step": 5852 - }, - { - "epoch": 0.7037816389105994, - "flos": 16458860963760.0, - "grad_norm": 3.6128816290838377, - "learning_rate": 8.518193093125232e-07, - "loss": 0.6054, - "num_input_tokens_seen": 124268805, - "step": 5853 - }, - { - "epoch": 0.7039018818012385, - "flos": 19859461235280.0, - "grad_norm": 3.7429798537702523, - "learning_rate": 8.511815806950436e-07, - "loss": 0.7909, - "num_input_tokens_seen": 124289555, - "step": 5854 - }, - { - "epoch": 0.7040221246918776, - "flos": 12949336735320.0, - "grad_norm": 5.710162609878111, - "learning_rate": 8.505440263524044e-07, - "loss": 0.7588, - "num_input_tokens_seen": 124308120, - "step": 5855 - }, - { - "epoch": 0.7041423675825167, - "flos": 11859970526280.0, - "grad_norm": 5.932755994389526, - "learning_rate": 8.49906646381322e-07, - "loss": 0.849, - "num_input_tokens_seen": 124320675, - "step": 5856 - }, - { - "epoch": 0.7042626104731557, - "flos": 18672315375240.0, - "grad_norm": 7.891410078518525, - "learning_rate": 8.492694408784884e-07, - "loss": 0.7048, - "num_input_tokens_seen": 124340650, - "step": 5857 - }, - { - "epoch": 0.7043828533637949, - "flos": 13028884740720.0, - "grad_norm": 4.494660918926259, - "learning_rate": 8.486324099405642e-07, - "loss": 0.5885, - "num_input_tokens_seen": 124357215, - "step": 5858 - }, - { - "epoch": 0.704503096254434, - "flos": 21640449135000.0, - "grad_norm": 4.490606464302603, - "learning_rate": 8.479955536641887e-07, - "loss": 0.7438, - "num_input_tokens_seen": 124378430, - "step": 5859 - }, - { - "epoch": 0.704623339145073, - "flos": 22696640197320.0, - "grad_norm": 3.425383419656152, - "learning_rate": 8.473588721459716e-07, - "loss": 0.6418, - "num_input_tokens_seen": 124398060, - "step": 5860 - }, - { - "epoch": 0.7047435820357122, - "flos": 17511632750400.0, - "grad_norm": 5.137279036102224, - "learning_rate": 8.467223654824967e-07, - "loss": 0.6769, - "num_input_tokens_seen": 124417235, - "step": 5861 - }, 
- { - "epoch": 0.7048638249263512, - "flos": 34221037383960.0, - "grad_norm": 5.360437048843523, - "learning_rate": 8.460860337703233e-07, - "loss": 0.6106, - "num_input_tokens_seen": 124437560, - "step": 5862 - }, - { - "epoch": 0.7049840678169903, - "flos": 15864607344600.0, - "grad_norm": 5.34278173765006, - "learning_rate": 8.454498771059797e-07, - "loss": 0.6867, - "num_input_tokens_seen": 124456655, - "step": 5863 - }, - { - "epoch": 0.7051043107076294, - "flos": 13434064858320.0, - "grad_norm": 8.785266107166617, - "learning_rate": 8.448138955859725e-07, - "loss": 0.8092, - "num_input_tokens_seen": 124472960, - "step": 5864 - }, - { - "epoch": 0.7052245535982685, - "flos": 14110715879280.0, - "grad_norm": 9.441574732800593, - "learning_rate": 8.44178089306778e-07, - "loss": 0.8909, - "num_input_tokens_seen": 124490615, - "step": 5865 - }, - { - "epoch": 0.7053447964889076, - "flos": 13920375979320.0, - "grad_norm": 3.803299731062399, - "learning_rate": 8.4354245836485e-07, - "loss": 0.7636, - "num_input_tokens_seen": 124508780, - "step": 5866 - }, - { - "epoch": 0.7054650393795466, - "flos": 20075346231600.0, - "grad_norm": 3.422102411107735, - "learning_rate": 8.429070028566108e-07, - "loss": 0.7103, - "num_input_tokens_seen": 124529810, - "step": 5867 - }, - { - "epoch": 0.7055852822701858, - "flos": 11729174028360.0, - "grad_norm": 3.8609053940404907, - "learning_rate": 8.422717228784586e-07, - "loss": 0.7336, - "num_input_tokens_seen": 124546405, - "step": 5868 - }, - { - "epoch": 0.7057055251608249, - "flos": 8466240466080.0, - "grad_norm": 6.121124286977115, - "learning_rate": 8.416366185267663e-07, - "loss": 0.682, - "num_input_tokens_seen": 124563625, - "step": 5869 - }, - { - "epoch": 0.7058257680514639, - "flos": 16621312930320.0, - "grad_norm": 3.09917952231681, - "learning_rate": 8.410016898978778e-07, - "loss": 0.761, - "num_input_tokens_seen": 124580820, - "step": 5870 - }, - { - "epoch": 0.7059460109421031, - "flos": 12786124929720.0, - "grad_norm": 4.161754433701088, - "learning_rate": 8.403669370881115e-07, - "loss": 0.7873, - "num_input_tokens_seen": 124599275, - "step": 5871 - }, - { - "epoch": 0.7060662538327421, - "flos": 17237027511000.0, - "grad_norm": 4.356347786505033, - "learning_rate": 8.397323601937587e-07, - "loss": 0.7638, - "num_input_tokens_seen": 124618895, - "step": 5872 - }, - { - "epoch": 0.7061864967233812, - "flos": 22207732959600.0, - "grad_norm": 4.3101950857526266, - "learning_rate": 8.390979593110838e-07, - "loss": 0.7633, - "num_input_tokens_seen": 124640745, - "step": 5873 - }, - { - "epoch": 0.7063067396140204, - "flos": 15133225235520.0, - "grad_norm": 3.371111431053357, - "learning_rate": 8.384637345363262e-07, - "loss": 0.7882, - "num_input_tokens_seen": 124659655, - "step": 5874 - }, - { - "epoch": 0.7064269825046594, - "flos": 23691704859600.0, - "grad_norm": 3.524990238690219, - "learning_rate": 8.378296859656964e-07, - "loss": 0.7559, - "num_input_tokens_seen": 124680530, - "step": 5875 - }, - { - "epoch": 0.7065472253952985, - "flos": 22183264301880.0, - "grad_norm": 6.007378174320048, - "learning_rate": 8.371958136953792e-07, - "loss": 0.6644, - "num_input_tokens_seen": 124700280, - "step": 5876 - }, - { - "epoch": 0.7066674682859376, - "flos": 11814642446280.0, - "grad_norm": 54.99432676706298, - "learning_rate": 8.365621178215326e-07, - "loss": 0.6388, - "num_input_tokens_seen": 124716470, - "step": 5877 - }, - { - "epoch": 0.7067877111765767, - "flos": 10788745474320.0, - "grad_norm": 5.768803245158189, - "learning_rate": 
8.359285984402871e-07, - "loss": 0.7338, - "num_input_tokens_seen": 124733455, - "step": 5878 - }, - { - "epoch": 0.7069079540672157, - "flos": 18639583467960.0, - "grad_norm": 4.250098956582902, - "learning_rate": 8.352952556477489e-07, - "loss": 0.7284, - "num_input_tokens_seen": 124751085, - "step": 5879 - }, - { - "epoch": 0.7070281969578549, - "flos": 18024470426520.0, - "grad_norm": 3.669154941234846, - "learning_rate": 8.34662089539993e-07, - "loss": 0.7548, - "num_input_tokens_seen": 124770315, - "step": 5880 - }, - { - "epoch": 0.707148439848494, - "flos": 19591061348040.0, - "grad_norm": 5.487490385911727, - "learning_rate": 8.340291002130722e-07, - "loss": 0.7612, - "num_input_tokens_seen": 124789225, - "step": 5881 - }, - { - "epoch": 0.707268682739133, - "flos": 10977660676080.0, - "grad_norm": 4.866926109679505, - "learning_rate": 8.3339628776301e-07, - "loss": 0.7784, - "num_input_tokens_seen": 124807085, - "step": 5882 - }, - { - "epoch": 0.7073889256297722, - "flos": 25207142268480.0, - "grad_norm": 5.350533981057058, - "learning_rate": 8.327636522858033e-07, - "loss": 0.5489, - "num_input_tokens_seen": 124826410, - "step": 5883 - }, - { - "epoch": 0.7075091685204112, - "flos": 14676606665640.0, - "grad_norm": 5.36401113279412, - "learning_rate": 8.321311938774225e-07, - "loss": 0.749, - "num_input_tokens_seen": 124845220, - "step": 5884 - }, - { - "epoch": 0.7076294114110503, - "flos": 15189602641560.0, - "grad_norm": 3.5051089192879266, - "learning_rate": 8.314989126338104e-07, - "loss": 0.7744, - "num_input_tokens_seen": 124864950, - "step": 5885 - }, - { - "epoch": 0.7077496543016895, - "flos": 13110427323600.0, - "grad_norm": 4.79384455183501, - "learning_rate": 8.308668086508847e-07, - "loss": 0.836, - "num_input_tokens_seen": 124882750, - "step": 5886 - }, - { - "epoch": 0.7078698971923285, - "flos": 33469998931080.0, - "grad_norm": 5.372036849019798, - "learning_rate": 8.302348820245342e-07, - "loss": 0.7204, - "num_input_tokens_seen": 124905035, - "step": 5887 - }, - { - "epoch": 0.7079901400829676, - "flos": 19753355014800.0, - "grad_norm": 9.909929524776407, - "learning_rate": 8.296031328506232e-07, - "loss": 0.6804, - "num_input_tokens_seen": 124924505, - "step": 5888 - }, - { - "epoch": 0.7081103829736067, - "flos": 17887595216280.0, - "grad_norm": 3.9103234376329863, - "learning_rate": 8.289715612249857e-07, - "loss": 0.751, - "num_input_tokens_seen": 124944840, - "step": 5889 - }, - { - "epoch": 0.7082306258642458, - "flos": 13535263785000.0, - "grad_norm": 5.30527822270083, - "learning_rate": 8.283401672434305e-07, - "loss": 0.7531, - "num_input_tokens_seen": 124959785, - "step": 5890 - }, - { - "epoch": 0.7083508687548848, - "flos": 17188153515480.0, - "grad_norm": 5.570866839418198, - "learning_rate": 8.277089510017412e-07, - "loss": 0.6918, - "num_input_tokens_seen": 124980310, - "step": 5891 - }, - { - "epoch": 0.708471111645524, - "flos": 16405111334400.0, - "grad_norm": 2.9006620440658506, - "learning_rate": 8.270779125956719e-07, - "loss": 0.8001, - "num_input_tokens_seen": 125000410, - "step": 5892 - }, - { - "epoch": 0.7085913545361631, - "flos": 15297450159840.0, - "grad_norm": 7.69619804747684, - "learning_rate": 8.264470521209505e-07, - "loss": 0.7889, - "num_input_tokens_seen": 125018495, - "step": 5893 - }, - { - "epoch": 0.7087115974268021, - "flos": 10923309507480.0, - "grad_norm": 4.240323663629802, - "learning_rate": 8.258163696732785e-07, - "loss": 0.757, - "num_input_tokens_seen": 125035805, - "step": 5894 - }, - { - "epoch": 
0.7088318403174413, - "flos": 15752865651240.0, - "grad_norm": 4.5982573318053825, - "learning_rate": 8.251858653483288e-07, - "loss": 0.7491, - "num_input_tokens_seen": 125053690, - "step": 5895 - }, - { - "epoch": 0.7089520832080803, - "flos": 11295694397880.0, - "grad_norm": 4.420392869776276, - "learning_rate": 8.245555392417501e-07, - "loss": 0.841, - "num_input_tokens_seen": 125068065, - "step": 5896 - }, - { - "epoch": 0.7090723260987194, - "flos": 14919936355920.0, - "grad_norm": 5.95627332524699, - "learning_rate": 8.239253914491613e-07, - "loss": 0.7767, - "num_input_tokens_seen": 125086110, - "step": 5897 - }, - { - "epoch": 0.7091925689893585, - "flos": 18809728804800.0, - "grad_norm": 3.150278618010437, - "learning_rate": 8.232954220661556e-07, - "loss": 0.7332, - "num_input_tokens_seen": 125108565, - "step": 5898 - }, - { - "epoch": 0.7093128118799976, - "flos": 17727644386560.0, - "grad_norm": 6.888928539749941, - "learning_rate": 8.226656311882989e-07, - "loss": 0.6501, - "num_input_tokens_seen": 125127595, - "step": 5899 - }, - { - "epoch": 0.7094330547706367, - "flos": 12131821349160.0, - "grad_norm": 17.448808182292048, - "learning_rate": 8.22036018911129e-07, - "loss": 0.7459, - "num_input_tokens_seen": 125145345, - "step": 5900 - }, - { - "epoch": 0.7095532976612757, - "flos": 11863073202360.0, - "grad_norm": 8.459954568366673, - "learning_rate": 8.214065853301599e-07, - "loss": 0.795, - "num_input_tokens_seen": 125160595, - "step": 5901 - }, - { - "epoch": 0.7096735405519149, - "flos": 52156814568000.0, - "grad_norm": 0.76812840894745, - "learning_rate": 8.207773305408734e-07, - "loss": 0.5968, - "num_input_tokens_seen": 125227535, - "step": 5902 - }, - { - "epoch": 0.709793783442554, - "flos": 17294418035760.0, - "grad_norm": 9.068318781206257, - "learning_rate": 8.201482546387288e-07, - "loss": 0.7852, - "num_input_tokens_seen": 125246730, - "step": 5903 - }, - { - "epoch": 0.709914026333193, - "flos": 19049892499080.0, - "grad_norm": 2.55536775543943, - "learning_rate": 8.195193577191553e-07, - "loss": 0.8951, - "num_input_tokens_seen": 125268280, - "step": 5904 - }, - { - "epoch": 0.7100342692238322, - "flos": 18212055909960.0, - "grad_norm": 3.413805500195356, - "learning_rate": 8.188906398775579e-07, - "loss": 0.832, - "num_input_tokens_seen": 125288545, - "step": 5905 - }, - { - "epoch": 0.7101545121144712, - "flos": 18264032581560.0, - "grad_norm": 5.52213635692365, - "learning_rate": 8.18262101209311e-07, - "loss": 0.6711, - "num_input_tokens_seen": 125307475, - "step": 5906 - }, - { - "epoch": 0.7102747550051103, - "flos": 17403658592280.0, - "grad_norm": 3.536680518892313, - "learning_rate": 8.176337418097626e-07, - "loss": 0.7002, - "num_input_tokens_seen": 125327665, - "step": 5907 - }, - { - "epoch": 0.7103949978957494, - "flos": 11139004544040.0, - "grad_norm": 3.627666807854745, - "learning_rate": 8.170055617742364e-07, - "loss": 0.778, - "num_input_tokens_seen": 125344665, - "step": 5908 - }, - { - "epoch": 0.7105152407863885, - "flos": 16567499981040.0, - "grad_norm": 3.5342497111137763, - "learning_rate": 8.163775611980252e-07, - "loss": 0.6871, - "num_input_tokens_seen": 125363495, - "step": 5909 - }, - { - "epoch": 0.7106354836770276, - "flos": 12570936452520.0, - "grad_norm": 5.068015402586067, - "learning_rate": 8.157497401763982e-07, - "loss": 0.7799, - "num_input_tokens_seen": 125380880, - "step": 5910 - }, - { - "epoch": 0.7107557265676667, - "flos": 14757832648920.0, - "grad_norm": 5.142270088749658, - "learning_rate": 8.151220988045935e-07, 
- "loss": 0.7651, - "num_input_tokens_seen": 125399855, - "step": 5911 - }, - { - "epoch": 0.7108759694583058, - "flos": 15729853351680.0, - "grad_norm": 6.121831268886525, - "learning_rate": 8.144946371778234e-07, - "loss": 0.8178, - "num_input_tokens_seen": 125419685, - "step": 5912 - }, - { - "epoch": 0.7109962123489448, - "flos": 17620715007120.0, - "grad_norm": 4.9379081074541205, - "learning_rate": 8.138673553912751e-07, - "loss": 0.7701, - "num_input_tokens_seen": 125439965, - "step": 5913 - }, - { - "epoch": 0.711116455239584, - "flos": 22370469865800.0, - "grad_norm": 4.429614300266006, - "learning_rate": 8.132402535401059e-07, - "loss": 0.5597, - "num_input_tokens_seen": 125460940, - "step": 5914 - }, - { - "epoch": 0.711236698130223, - "flos": 18348519540720.0, - "grad_norm": 3.316506895373587, - "learning_rate": 8.126133317194465e-07, - "loss": 0.731, - "num_input_tokens_seen": 125480850, - "step": 5915 - }, - { - "epoch": 0.7113569410208621, - "flos": 17727707706480.0, - "grad_norm": 3.911818906348669, - "learning_rate": 8.11986590024401e-07, - "loss": 0.7215, - "num_input_tokens_seen": 125500310, - "step": 5916 - }, - { - "epoch": 0.7114771839115013, - "flos": 26039881604040.0, - "grad_norm": 5.80987653696323, - "learning_rate": 8.113600285500442e-07, - "loss": 0.676, - "num_input_tokens_seen": 125520240, - "step": 5917 - }, - { - "epoch": 0.7115974268021403, - "flos": 15427296858960.0, - "grad_norm": 4.0070607333587045, - "learning_rate": 8.107336473914268e-07, - "loss": 0.7304, - "num_input_tokens_seen": 125538590, - "step": 5918 - }, - { - "epoch": 0.7117176696927794, - "flos": 41819341621680.0, - "grad_norm": 0.8206867272664832, - "learning_rate": 8.101074466435694e-07, - "loss": 0.5988, - "num_input_tokens_seen": 125597785, - "step": 5919 - }, - { - "epoch": 0.7118379125834186, - "flos": 11599010729640.0, - "grad_norm": 4.40876535895895, - "learning_rate": 8.094814264014662e-07, - "loss": 0.6699, - "num_input_tokens_seen": 125616260, - "step": 5920 - }, - { - "epoch": 0.7119581554740576, - "flos": 14757579369240.0, - "grad_norm": 3.3603647592646273, - "learning_rate": 8.088555867600844e-07, - "loss": 0.8032, - "num_input_tokens_seen": 125632145, - "step": 5921 - }, - { - "epoch": 0.7120783983646967, - "flos": 25504949767200.0, - "grad_norm": 4.474761547363358, - "learning_rate": 8.08229927814362e-07, - "loss": 0.603, - "num_input_tokens_seen": 125654755, - "step": 5922 - }, - { - "epoch": 0.7121986412553358, - "flos": 19320603563400.0, - "grad_norm": 4.799642851576099, - "learning_rate": 8.076044496592134e-07, - "loss": 0.6285, - "num_input_tokens_seen": 125676325, - "step": 5923 - }, - { - "epoch": 0.7123188841459749, - "flos": 8061503587920.0, - "grad_norm": 6.0955613794197365, - "learning_rate": 8.069791523895204e-07, - "loss": 0.7628, - "num_input_tokens_seen": 125692495, - "step": 5924 - }, - { - "epoch": 0.7124391270366139, - "flos": 15214894458240.0, - "grad_norm": 3.568058963818719, - "learning_rate": 8.063540361001422e-07, - "loss": 0.7591, - "num_input_tokens_seen": 125710785, - "step": 5925 - }, - { - "epoch": 0.7125593699272531, - "flos": 12841204277400.0, - "grad_norm": 5.626090728125837, - "learning_rate": 8.057291008859069e-07, - "loss": 0.7767, - "num_input_tokens_seen": 125728665, - "step": 5926 - }, - { - "epoch": 0.7126796128178922, - "flos": 21018497542200.0, - "grad_norm": 3.257209703768133, - "learning_rate": 8.051043468416187e-07, - "loss": 0.6666, - "num_input_tokens_seen": 125749635, - "step": 5927 - }, - { - "epoch": 0.7127998557085312, - 
"flos": 11679761813520.0, - "grad_norm": 8.379776688470452, - "learning_rate": 8.044797740620506e-07, - "loss": 0.8043, - "num_input_tokens_seen": 125767960, - "step": 5928 - }, - { - "epoch": 0.7129200985991703, - "flos": 17133010847880.0, - "grad_norm": 5.02841371948468, - "learning_rate": 8.038553826419494e-07, - "loss": 0.7719, - "num_input_tokens_seen": 125786390, - "step": 5929 - }, - { - "epoch": 0.7130403414898094, - "flos": 15648690688320.0, - "grad_norm": 3.275687307206646, - "learning_rate": 8.032311726760364e-07, - "loss": 0.7933, - "num_input_tokens_seen": 125807330, - "step": 5930 - }, - { - "epoch": 0.7131605843804485, - "flos": 55313985678000.0, - "grad_norm": 12.52973940298983, - "learning_rate": 8.026071442590022e-07, - "loss": 0.6786, - "num_input_tokens_seen": 125833980, - "step": 5931 - }, - { - "epoch": 0.7132808272710875, - "flos": 13407190043640.0, - "grad_norm": 4.074806470897551, - "learning_rate": 8.019832974855134e-07, - "loss": 0.7952, - "num_input_tokens_seen": 125851660, - "step": 5932 - }, - { - "epoch": 0.7134010701617267, - "flos": 17021522434200.0, - "grad_norm": 7.943642469829352, - "learning_rate": 8.013596324502052e-07, - "loss": 0.8173, - "num_input_tokens_seen": 125869845, - "step": 5933 - }, - { - "epoch": 0.7135213130523658, - "flos": 17317145395680.0, - "grad_norm": 5.891915177531709, - "learning_rate": 8.007361492476872e-07, - "loss": 0.767, - "num_input_tokens_seen": 125888890, - "step": 5934 - }, - { - "epoch": 0.7136415559430048, - "flos": 18159889278600.0, - "grad_norm": 3.5072681709834166, - "learning_rate": 8.001128479725426e-07, - "loss": 0.7713, - "num_input_tokens_seen": 125910515, - "step": 5935 - }, - { - "epoch": 0.713761798833644, - "flos": 13354231913280.0, - "grad_norm": 5.950075024043651, - "learning_rate": 7.994897287193248e-07, - "loss": 0.7895, - "num_input_tokens_seen": 125929615, - "step": 5936 - }, - { - "epoch": 0.713882041724283, - "flos": 11327096586840.0, - "grad_norm": 7.318756296365247, - "learning_rate": 7.988667915825605e-07, - "loss": 0.8114, - "num_input_tokens_seen": 125946400, - "step": 5937 - }, - { - "epoch": 0.7140022846149221, - "flos": 17619923508120.0, - "grad_norm": 4.237221074559644, - "learning_rate": 7.982440366567491e-07, - "loss": 0.7306, - "num_input_tokens_seen": 125964610, - "step": 5938 - }, - { - "epoch": 0.7141225275055613, - "flos": 20454854613000.0, - "grad_norm": 3.188846528114924, - "learning_rate": 7.97621464036361e-07, - "loss": 0.7375, - "num_input_tokens_seen": 125986090, - "step": 5939 - }, - { - "epoch": 0.7142427703962003, - "flos": 14379590665920.0, - "grad_norm": 6.398071725751543, - "learning_rate": 7.969990738158417e-07, - "loss": 0.6635, - "num_input_tokens_seen": 126004220, - "step": 5940 - }, - { - "epoch": 0.7143630132868394, - "flos": 15378486183360.0, - "grad_norm": 4.315064041175213, - "learning_rate": 7.963768660896062e-07, - "loss": 0.8306, - "num_input_tokens_seen": 126022350, - "step": 5941 - }, - { - "epoch": 0.7144832561774785, - "flos": 17670348841680.0, - "grad_norm": 5.553185368490449, - "learning_rate": 7.957548409520432e-07, - "loss": 0.8147, - "num_input_tokens_seen": 126041295, - "step": 5942 - }, - { - "epoch": 0.7146034990681176, - "flos": 11895330210240.0, - "grad_norm": 4.41548001644609, - "learning_rate": 7.951329984975135e-07, - "loss": 0.8206, - "num_input_tokens_seen": 126057955, - "step": 5943 - }, - { - "epoch": 0.7147237419587567, - "flos": 51346454332800.0, - "grad_norm": 0.7229939264646509, - "learning_rate": 7.94511338820349e-07, - "loss": 0.5547, 
- "num_input_tokens_seen": 126119980, - "step": 5944 - }, - { - "epoch": 0.7148439848493958, - "flos": 16291185103800.0, - "grad_norm": 3.4330287637769543, - "learning_rate": 7.938898620148575e-07, - "loss": 0.7743, - "num_input_tokens_seen": 126137460, - "step": 5945 - }, - { - "epoch": 0.7149642277400349, - "flos": 13083584168880.0, - "grad_norm": 5.126715768315209, - "learning_rate": 7.932685681753135e-07, - "loss": 0.7036, - "num_input_tokens_seen": 126154460, - "step": 5946 - }, - { - "epoch": 0.7150844706306739, - "flos": 23259270007800.0, - "grad_norm": 4.888413584367241, - "learning_rate": 7.92647457395969e-07, - "loss": 0.6072, - "num_input_tokens_seen": 126176005, - "step": 5947 - }, - { - "epoch": 0.7152047135213131, - "flos": 7899304901040.0, - "grad_norm": 8.592765834981437, - "learning_rate": 7.920265297710444e-07, - "loss": 0.7306, - "num_input_tokens_seen": 126193115, - "step": 5948 - }, - { - "epoch": 0.7153249564119522, - "flos": 15351263109120.0, - "grad_norm": 5.901542604153814, - "learning_rate": 7.914057853947363e-07, - "loss": 0.7087, - "num_input_tokens_seen": 126212015, - "step": 5949 - }, - { - "epoch": 0.7154451993025912, - "flos": 17754645841080.0, - "grad_norm": 3.9132523907073082, - "learning_rate": 7.907852243612089e-07, - "loss": 0.622, - "num_input_tokens_seen": 126232140, - "step": 5950 - }, - { - "epoch": 0.7155654421932304, - "flos": 17079577818120.0, - "grad_norm": 5.675280848876384, - "learning_rate": 7.901648467646009e-07, - "loss": 0.7182, - "num_input_tokens_seen": 126250800, - "step": 5951 - }, - { - "epoch": 0.7156856850838694, - "flos": 16621597869960.0, - "grad_norm": 3.245888970958802, - "learning_rate": 7.895446526990244e-07, - "loss": 0.7096, - "num_input_tokens_seen": 126270535, - "step": 5952 - }, - { - "epoch": 0.7158059279745085, - "flos": 14514946198080.0, - "grad_norm": 4.818198855060797, - "learning_rate": 7.889246422585609e-07, - "loss": 0.7419, - "num_input_tokens_seen": 126289640, - "step": 5953 - }, - { - "epoch": 0.7159261708651476, - "flos": 17674622936280.0, - "grad_norm": 4.731548208214228, - "learning_rate": 7.883048155372675e-07, - "loss": 0.7304, - "num_input_tokens_seen": 126307875, - "step": 5954 - }, - { - "epoch": 0.7160464137557867, - "flos": 12381926270880.0, - "grad_norm": 5.425544310656262, - "learning_rate": 7.876851726291698e-07, - "loss": 0.6852, - "num_input_tokens_seen": 126325895, - "step": 5955 - }, - { - "epoch": 0.7161666566464258, - "flos": 18483716773080.0, - "grad_norm": 3.6391782055121467, - "learning_rate": 7.870657136282666e-07, - "loss": 0.7619, - "num_input_tokens_seen": 126344475, - "step": 5956 - }, - { - "epoch": 0.7162868995370649, - "flos": 19401449627160.0, - "grad_norm": 4.710634844975802, - "learning_rate": 7.86446438628531e-07, - "loss": 0.8079, - "num_input_tokens_seen": 126365265, - "step": 5957 - }, - { - "epoch": 0.716407142427704, - "flos": 51622547590320.0, - "grad_norm": 0.7856996847629096, - "learning_rate": 7.858273477239059e-07, - "loss": 0.5911, - "num_input_tokens_seen": 126433405, - "step": 5958 - }, - { - "epoch": 0.716527385318343, - "flos": 15162442887240.0, - "grad_norm": 4.83192526378055, - "learning_rate": 7.852084410083067e-07, - "loss": 0.6979, - "num_input_tokens_seen": 126451945, - "step": 5959 - }, - { - "epoch": 0.7166476282089821, - "flos": 18589759673640.0, - "grad_norm": 3.5052029220894494, - "learning_rate": 7.84589718575621e-07, - "loss": 0.6234, - "num_input_tokens_seen": 126472110, - "step": 5960 - }, - { - "epoch": 0.7167678710996213, - "flos": 
17673704797440.0, - "grad_norm": 4.514564680064765, - "learning_rate": 7.83971180519708e-07, - "loss": 0.6724, - "num_input_tokens_seen": 126490685, - "step": 5961 - }, - { - "epoch": 0.7168881139902603, - "flos": 22182789402480.0, - "grad_norm": 5.235919052815534, - "learning_rate": 7.833528269344008e-07, - "loss": 0.7323, - "num_input_tokens_seen": 126510310, - "step": 5962 - }, - { - "epoch": 0.7170083568808994, - "flos": 10622209372920.0, - "grad_norm": 8.640328317691406, - "learning_rate": 7.827346579135023e-07, - "loss": 0.7608, - "num_input_tokens_seen": 126527370, - "step": 5963 - }, - { - "epoch": 0.7171285997715385, - "flos": 17079704457960.0, - "grad_norm": 5.972075010550405, - "learning_rate": 7.821166735507885e-07, - "loss": 0.8231, - "num_input_tokens_seen": 126546120, - "step": 5964 - }, - { - "epoch": 0.7172488426621776, - "flos": 12056294158680.0, - "grad_norm": 5.519770201999875, - "learning_rate": 7.81498873940007e-07, - "loss": 0.672, - "num_input_tokens_seen": 126563055, - "step": 5965 - }, - { - "epoch": 0.7173690855528166, - "flos": 19455674155920.0, - "grad_norm": 5.12837164156126, - "learning_rate": 7.808812591748768e-07, - "loss": 0.748, - "num_input_tokens_seen": 126583155, - "step": 5966 - }, - { - "epoch": 0.7174893284434558, - "flos": 16674935919840.0, - "grad_norm": 4.834271416667137, - "learning_rate": 7.802638293490915e-07, - "loss": 0.6258, - "num_input_tokens_seen": 126602520, - "step": 5967 - }, - { - "epoch": 0.7176095713340949, - "flos": 17051784864600.0, - "grad_norm": 3.3518609756554745, - "learning_rate": 7.796465845563123e-07, - "loss": 0.7626, - "num_input_tokens_seen": 126621115, - "step": 5968 - }, - { - "epoch": 0.7177298142247339, - "flos": 18752433259920.0, - "grad_norm": 9.3316281895481, - "learning_rate": 7.790295248901766e-07, - "loss": 0.7884, - "num_input_tokens_seen": 126641965, - "step": 5969 - }, - { - "epoch": 0.7178500571153731, - "flos": 23238727185120.0, - "grad_norm": 5.443597641224139, - "learning_rate": 7.784126504442902e-07, - "loss": 0.6054, - "num_input_tokens_seen": 126664915, - "step": 5970 - }, - { - "epoch": 0.7179703000060121, - "flos": 14190612144240.0, - "grad_norm": 3.9218574918682902, - "learning_rate": 7.777959613122351e-07, - "loss": 0.6631, - "num_input_tokens_seen": 126684460, - "step": 5971 - }, - { - "epoch": 0.7180905428966512, - "flos": 21155942631720.0, - "grad_norm": 5.259511052602815, - "learning_rate": 7.771794575875604e-07, - "loss": 0.7574, - "num_input_tokens_seen": 126706050, - "step": 5972 - }, - { - "epoch": 0.7182107857872904, - "flos": 14649700191000.0, - "grad_norm": 4.645373243235652, - "learning_rate": 7.765631393637888e-07, - "loss": 0.7467, - "num_input_tokens_seen": 126723965, - "step": 5973 - }, - { - "epoch": 0.7183310286779294, - "flos": 16647997785240.0, - "grad_norm": 12.462333997961931, - "learning_rate": 7.75947006734417e-07, - "loss": 0.4523, - "num_input_tokens_seen": 126741465, - "step": 5974 - }, - { - "epoch": 0.7184512715685685, - "flos": 12510443251680.0, - "grad_norm": 5.467553928913383, - "learning_rate": 7.753310597929101e-07, - "loss": 0.8196, - "num_input_tokens_seen": 126757825, - "step": 5975 - }, - { - "epoch": 0.7185715144592076, - "flos": 48299943986160.0, - "grad_norm": 0.8163878151304385, - "learning_rate": 7.747152986327095e-07, - "loss": 0.5695, - "num_input_tokens_seen": 126818090, - "step": 5976 - }, - { - "epoch": 0.7186917573498467, - "flos": 11787894271440.0, - "grad_norm": 4.540073196896578, - "learning_rate": 7.740997233472228e-07, - "loss": 0.6653, - 
"num_input_tokens_seen": 126835430, - "step": 5977 - }, - { - "epoch": 0.7188120002404857, - "flos": 21454446649560.0, - "grad_norm": 4.6294035696716564, - "learning_rate": 7.734843340298329e-07, - "loss": 0.6935, - "num_input_tokens_seen": 126854975, - "step": 5978 - }, - { - "epoch": 0.7189322431311249, - "flos": 24532232545320.0, - "grad_norm": 3.851991084087507, - "learning_rate": 7.72869130773895e-07, - "loss": 0.733, - "num_input_tokens_seen": 126875295, - "step": 5979 - }, - { - "epoch": 0.719052486021764, - "flos": 45222632989800.0, - "grad_norm": 0.7930277158865351, - "learning_rate": 7.722541136727343e-07, - "loss": 0.6032, - "num_input_tokens_seen": 126931030, - "step": 5980 - }, - { - "epoch": 0.719172728912403, - "flos": 11355902659080.0, - "grad_norm": 5.7436651080378125, - "learning_rate": 7.716392828196483e-07, - "loss": 0.7864, - "num_input_tokens_seen": 126948550, - "step": 5981 - }, - { - "epoch": 0.7192929718030422, - "flos": 11322600872520.0, - "grad_norm": 5.071040167413052, - "learning_rate": 7.710246383079064e-07, - "loss": 0.752, - "num_input_tokens_seen": 126963655, - "step": 5982 - }, - { - "epoch": 0.7194132146936812, - "flos": 15991541327400.0, - "grad_norm": 8.711510718948102, - "learning_rate": 7.704101802307492e-07, - "loss": 0.9006, - "num_input_tokens_seen": 126975675, - "step": 5983 - }, - { - "epoch": 0.7195334575843203, - "flos": 20044957161360.0, - "grad_norm": 6.239130257368266, - "learning_rate": 7.697959086813912e-07, - "loss": 0.8621, - "num_input_tokens_seen": 126991560, - "step": 5984 - }, - { - "epoch": 0.7196537004749595, - "flos": 13704680942760.0, - "grad_norm": 4.3275791746727785, - "learning_rate": 7.691818237530145e-07, - "loss": 0.7901, - "num_input_tokens_seen": 127010140, - "step": 5985 - }, - { - "epoch": 0.7197739433655985, - "flos": 17967143221680.0, - "grad_norm": 8.022922577348538, - "learning_rate": 7.685679255387774e-07, - "loss": 0.7596, - "num_input_tokens_seen": 127028175, - "step": 5986 - }, - { - "epoch": 0.7198941862562376, - "flos": 13164366912720.0, - "grad_norm": 4.638214204395844, - "learning_rate": 7.679542141318065e-07, - "loss": 0.7466, - "num_input_tokens_seen": 127045000, - "step": 5987 - }, - { - "epoch": 0.7200144291468767, - "flos": 21291583103520.0, - "grad_norm": 4.113108748163456, - "learning_rate": 7.673406896252013e-07, - "loss": 0.7516, - "num_input_tokens_seen": 127066095, - "step": 5988 - }, - { - "epoch": 0.7201346720375158, - "flos": 18591785911080.0, - "grad_norm": 3.312766730168048, - "learning_rate": 7.667273521120347e-07, - "loss": 0.7711, - "num_input_tokens_seen": 127085375, - "step": 5989 - }, - { - "epoch": 0.7202549149281549, - "flos": 10437251666160.0, - "grad_norm": 14.384161907095292, - "learning_rate": 7.661142016853468e-07, - "loss": 0.7878, - "num_input_tokens_seen": 127102455, - "step": 5990 - }, - { - "epoch": 0.7203751578187939, - "flos": 16835773228440.0, - "grad_norm": 4.038453283056967, - "learning_rate": 7.655012384381543e-07, - "loss": 0.7297, - "num_input_tokens_seen": 127121660, - "step": 5991 - }, - { - "epoch": 0.7204954007094331, - "flos": 17346616327080.0, - "grad_norm": 4.39313628033134, - "learning_rate": 7.648884624634415e-07, - "loss": 0.7994, - "num_input_tokens_seen": 127139930, - "step": 5992 - }, - { - "epoch": 0.7206156436000721, - "flos": 11841485601000.0, - "grad_norm": 4.270503457828502, - "learning_rate": 7.642758738541683e-07, - "loss": 0.8747, - "num_input_tokens_seen": 127156230, - "step": 5993 - }, - { - "epoch": 0.7207358864907112, - "flos": 
40061144401800.0, - "grad_norm": 0.8176325299222883, - "learning_rate": 7.636634727032621e-07, - "loss": 0.6111, - "num_input_tokens_seen": 127213055, - "step": 5994 - }, - { - "epoch": 0.7208561293813504, - "flos": 13974758807880.0, - "grad_norm": 4.32518924894444, - "learning_rate": 7.630512591036231e-07, - "loss": 0.7773, - "num_input_tokens_seen": 127232085, - "step": 5995 - }, - { - "epoch": 0.7209763722719894, - "flos": 12948196976760.0, - "grad_norm": 5.902624238454505, - "learning_rate": 7.624392331481255e-07, - "loss": 0.629, - "num_input_tokens_seen": 127249460, - "step": 5996 - }, - { - "epoch": 0.7210966151626285, - "flos": 49268830352880.0, - "grad_norm": 0.7576516757480072, - "learning_rate": 7.618273949296115e-07, - "loss": 0.533, - "num_input_tokens_seen": 127308690, - "step": 5997 - }, - { - "epoch": 0.7212168580532676, - "flos": 15459522206880.0, - "grad_norm": 7.027297608763194, - "learning_rate": 7.612157445408987e-07, - "loss": 0.6755, - "num_input_tokens_seen": 127326220, - "step": 5998 - }, - { - "epoch": 0.7213371009439067, - "flos": 16350443566200.0, - "grad_norm": 11.544200045369102, - "learning_rate": 7.606042820747716e-07, - "loss": 0.7217, - "num_input_tokens_seen": 127342345, - "step": 5999 - }, - { - "epoch": 0.7214573438345457, - "flos": 13516272300360.0, - "grad_norm": 4.269182559168763, - "learning_rate": 7.599930076239889e-07, - "loss": 0.834, - "num_input_tokens_seen": 127359350, - "step": 6000 - }, - { - "epoch": 0.7215775867251849, - "flos": 26260230654720.0, - "grad_norm": 7.024921468384034, - "learning_rate": 7.593819212812818e-07, - "loss": 0.6956, - "num_input_tokens_seen": 127380650, - "step": 6001 - }, - { - "epoch": 0.721697829615824, - "flos": 14889610605600.0, - "grad_norm": 4.46659767230443, - "learning_rate": 7.587710231393508e-07, - "loss": 0.705, - "num_input_tokens_seen": 127398725, - "step": 6002 - }, - { - "epoch": 0.721818072506463, - "flos": 14784549163800.0, - "grad_norm": 5.96288870578089, - "learning_rate": 7.581603132908685e-07, - "loss": 0.8166, - "num_input_tokens_seen": 127416415, - "step": 6003 - }, - { - "epoch": 0.7219383153971022, - "flos": 13272689330400.0, - "grad_norm": 5.595487186333018, - "learning_rate": 7.575497918284795e-07, - "loss": 0.7617, - "num_input_tokens_seen": 127433680, - "step": 6004 - }, - { - "epoch": 0.7220585582877412, - "flos": 12646716922680.0, - "grad_norm": 7.177146287558016, - "learning_rate": 7.569394588447984e-07, - "loss": 0.7366, - "num_input_tokens_seen": 127450415, - "step": 6005 - }, - { - "epoch": 0.7221788011783803, - "flos": 12375879218520.0, - "grad_norm": 5.637492062717441, - "learning_rate": 7.563293144324146e-07, - "loss": 0.7572, - "num_input_tokens_seen": 127465685, - "step": 6006 - }, - { - "epoch": 0.7222990440690195, - "flos": 19266790614120.0, - "grad_norm": 5.493935102137096, - "learning_rate": 7.557193586838834e-07, - "loss": 0.7895, - "num_input_tokens_seen": 127480770, - "step": 6007 - }, - { - "epoch": 0.7224192869596585, - "flos": 12839399659680.0, - "grad_norm": 10.257629532355264, - "learning_rate": 7.551095916917371e-07, - "loss": 0.6848, - "num_input_tokens_seen": 127497820, - "step": 6008 - }, - { - "epoch": 0.7225395298502976, - "flos": 9383878340280.0, - "grad_norm": 5.181887612493696, - "learning_rate": 7.545000135484758e-07, - "loss": 0.6494, - "num_input_tokens_seen": 127514975, - "step": 6009 - }, - { - "epoch": 0.7226597727409367, - "flos": 21751715928960.0, - "grad_norm": 10.595349396457287, - "learning_rate": 7.538906243465714e-07, - "loss": 0.62, - 
"num_input_tokens_seen": 127534830, - "step": 6010 - }, - { - "epoch": 0.7227800156315758, - "flos": 10005038434080.0, - "grad_norm": 4.6028126700646865, - "learning_rate": 7.5328142417847e-07, - "loss": 0.7691, - "num_input_tokens_seen": 127551315, - "step": 6011 - }, - { - "epoch": 0.7229002585222148, - "flos": 14837760573840.0, - "grad_norm": 3.7864854366516583, - "learning_rate": 7.526724131365838e-07, - "loss": 0.6824, - "num_input_tokens_seen": 127571990, - "step": 6012 - }, - { - "epoch": 0.723020501412854, - "flos": 12084372051840.0, - "grad_norm": 6.949025246304642, - "learning_rate": 7.520635913133017e-07, - "loss": 0.6834, - "num_input_tokens_seen": 127590340, - "step": 6013 - }, - { - "epoch": 0.7231407443034931, - "flos": 20940754154520.0, - "grad_norm": 3.4152486386132717, - "learning_rate": 7.514549588009798e-07, - "loss": 0.8051, - "num_input_tokens_seen": 127610935, - "step": 6014 - }, - { - "epoch": 0.7232609871941321, - "flos": 22021540514400.0, - "grad_norm": 3.231511566967739, - "learning_rate": 7.508465156919492e-07, - "loss": 0.7007, - "num_input_tokens_seen": 127634165, - "step": 6015 - }, - { - "epoch": 0.7233812300847713, - "flos": 12138185001120.0, - "grad_norm": 7.428625887367252, - "learning_rate": 7.502382620785083e-07, - "loss": 0.5929, - "num_input_tokens_seen": 127650435, - "step": 6016 - }, - { - "epoch": 0.7235014729754103, - "flos": 49594145865480.0, - "grad_norm": 0.8969869098558737, - "learning_rate": 7.496301980529289e-07, - "loss": 0.6849, - "num_input_tokens_seen": 127713365, - "step": 6017 - }, - { - "epoch": 0.7236217158660494, - "flos": 19752943435320.0, - "grad_norm": 7.937079951668604, - "learning_rate": 7.490223237074547e-07, - "loss": 0.7318, - "num_input_tokens_seen": 127732795, - "step": 6018 - }, - { - "epoch": 0.7237419587566886, - "flos": 21588282503640.0, - "grad_norm": 3.854037652133157, - "learning_rate": 7.484146391342989e-07, - "loss": 0.6436, - "num_input_tokens_seen": 127752310, - "step": 6019 - }, - { - "epoch": 0.7238622016473276, - "flos": 13002199885800.0, - "grad_norm": 4.882838867116309, - "learning_rate": 7.478071444256484e-07, - "loss": 0.5442, - "num_input_tokens_seen": 127769790, - "step": 6020 - }, - { - "epoch": 0.7239824445379667, - "flos": 18861800456280.0, - "grad_norm": 7.036669731964691, - "learning_rate": 7.471998396736579e-07, - "loss": 0.7637, - "num_input_tokens_seen": 127789890, - "step": 6021 - }, - { - "epoch": 0.7241026874286057, - "flos": 16944918805080.0, - "grad_norm": 4.476810042375885, - "learning_rate": 7.465927249704549e-07, - "loss": 0.7429, - "num_input_tokens_seen": 127807495, - "step": 6022 - }, - { - "epoch": 0.7242229303192449, - "flos": 14812088837640.0, - "grad_norm": 9.433694842884831, - "learning_rate": 7.459858004081398e-07, - "loss": 0.7518, - "num_input_tokens_seen": 127825185, - "step": 6023 - }, - { - "epoch": 0.724343173209884, - "flos": 45935055274200.0, - "grad_norm": 0.6733216967222403, - "learning_rate": 7.453790660787815e-07, - "loss": 0.5925, - "num_input_tokens_seen": 127893000, - "step": 6024 - }, - { - "epoch": 0.724463416100523, - "flos": 25720201564320.0, - "grad_norm": 9.364085036030573, - "learning_rate": 7.447725220744214e-07, - "loss": 0.6226, - "num_input_tokens_seen": 127914965, - "step": 6025 - }, - { - "epoch": 0.7245836589911622, - "flos": 15999899556840.0, - "grad_norm": 3.654181653423018, - "learning_rate": 7.441661684870717e-07, - "loss": 0.7407, - "num_input_tokens_seen": 127934940, - "step": 6026 - }, - { - "epoch": 0.7247039018818012, - "flos": 
16840015663080.0, - "grad_norm": 4.90405924292371, - "learning_rate": 7.435600054087152e-07, - "loss": 0.8024, - "num_input_tokens_seen": 127956825, - "step": 6027 - }, - { - "epoch": 0.7248241447724403, - "flos": 23289944017680.0, - "grad_norm": 4.609717404853828, - "learning_rate": 7.42954032931308e-07, - "loss": 0.732, - "num_input_tokens_seen": 127977585, - "step": 6028 - }, - { - "epoch": 0.7249443876630794, - "flos": 25639228860720.0, - "grad_norm": 3.772633216078213, - "learning_rate": 7.423482511467733e-07, - "loss": 0.7328, - "num_input_tokens_seen": 127998075, - "step": 6029 - }, - { - "epoch": 0.7250646305537185, - "flos": 19320761863200.0, - "grad_norm": 4.590188650605172, - "learning_rate": 7.417426601470099e-07, - "loss": 0.6406, - "num_input_tokens_seen": 128018155, - "step": 6030 - }, - { - "epoch": 0.7251848734443576, - "flos": 22075638403320.0, - "grad_norm": 8.464997871520332, - "learning_rate": 7.411372600238841e-07, - "loss": 0.7625, - "num_input_tokens_seen": 128038490, - "step": 6031 - }, - { - "epoch": 0.7253051163349967, - "flos": 12975420051000.0, - "grad_norm": 5.50103420591026, - "learning_rate": 7.405320508692346e-07, - "loss": 0.714, - "num_input_tokens_seen": 128056950, - "step": 6032 - }, - { - "epoch": 0.7254253592256358, - "flos": 9168658203120.0, - "grad_norm": 3.459643808894758, - "learning_rate": 7.399270327748727e-07, - "loss": 0.7291, - "num_input_tokens_seen": 128074330, - "step": 6033 - }, - { - "epoch": 0.7255456021162748, - "flos": 19942460176320.0, - "grad_norm": 4.333682902003466, - "learning_rate": 7.39322205832577e-07, - "loss": 0.7292, - "num_input_tokens_seen": 128094940, - "step": 6034 - }, - { - "epoch": 0.725665845006914, - "flos": 15567876284520.0, - "grad_norm": 3.809543025728988, - "learning_rate": 7.387175701341009e-07, - "loss": 0.7936, - "num_input_tokens_seen": 128113330, - "step": 6035 - }, - { - "epoch": 0.7257860878975531, - "flos": 11679096954360.0, - "grad_norm": 6.532880807632152, - "learning_rate": 7.381131257711659e-07, - "loss": 0.7062, - "num_input_tokens_seen": 128130155, - "step": 6036 - }, - { - "epoch": 0.7259063307881921, - "flos": 8790099620520.0, - "grad_norm": 5.0412388559639645, - "learning_rate": 7.375088728354677e-07, - "loss": 0.8237, - "num_input_tokens_seen": 128144905, - "step": 6037 - }, - { - "epoch": 0.7260265736788313, - "flos": 22343341771440.0, - "grad_norm": 4.302267124538953, - "learning_rate": 7.369048114186691e-07, - "loss": 0.6496, - "num_input_tokens_seen": 128165670, - "step": 6038 - }, - { - "epoch": 0.7261468165694703, - "flos": 15459870466440.0, - "grad_norm": 3.3415534389526043, - "learning_rate": 7.363009416124055e-07, - "loss": 0.8238, - "num_input_tokens_seen": 128184715, - "step": 6039 - }, - { - "epoch": 0.7262670594601094, - "flos": 16320845994960.0, - "grad_norm": 4.028857800316474, - "learning_rate": 7.356972635082852e-07, - "loss": 0.6127, - "num_input_tokens_seen": 128203290, - "step": 6040 - }, - { - "epoch": 0.7263873023507486, - "flos": 18562378299600.0, - "grad_norm": 4.7765308103299455, - "learning_rate": 7.35093777197884e-07, - "loss": 0.7431, - "num_input_tokens_seen": 128223080, - "step": 6041 - }, - { - "epoch": 0.7265075452413876, - "flos": 17484947895480.0, - "grad_norm": 4.460389753340955, - "learning_rate": 7.344904827727525e-07, - "loss": 0.8403, - "num_input_tokens_seen": 128239980, - "step": 6042 - }, - { - "epoch": 0.7266277881320267, - "flos": 21070315914000.0, - "grad_norm": 4.257384224447015, - "learning_rate": 7.338873803244076e-07, - "loss": 0.698, - 
"num_input_tokens_seen": 128254935, - "step": 6043 - }, - { - "epoch": 0.7267480310226658, - "flos": 18213797207760.0, - "grad_norm": 5.054319766315829, - "learning_rate": 7.332844699443401e-07, - "loss": 0.7844, - "num_input_tokens_seen": 128273255, - "step": 6044 - }, - { - "epoch": 0.7268682739133049, - "flos": 19941826977120.0, - "grad_norm": 4.647340789415369, - "learning_rate": 7.326817517240121e-07, - "loss": 0.736, - "num_input_tokens_seen": 128294680, - "step": 6045 - }, - { - "epoch": 0.7269885168039439, - "flos": 24611052371640.0, - "grad_norm": 4.053588718307814, - "learning_rate": 7.320792257548545e-07, - "loss": 0.822, - "num_input_tokens_seen": 128315575, - "step": 6046 - }, - { - "epoch": 0.7271087596945831, - "flos": 17806590852720.0, - "grad_norm": 3.96174327471888, - "learning_rate": 7.314768921282704e-07, - "loss": 0.7506, - "num_input_tokens_seen": 128335950, - "step": 6047 - }, - { - "epoch": 0.7272290025852222, - "flos": 17430501747000.0, - "grad_norm": 6.243713879791042, - "learning_rate": 7.30874750935633e-07, - "loss": 0.6984, - "num_input_tokens_seen": 128355355, - "step": 6048 - }, - { - "epoch": 0.7273492454758612, - "flos": 12186362477520.0, - "grad_norm": 3.4983134974813446, - "learning_rate": 7.30272802268286e-07, - "loss": 0.7736, - "num_input_tokens_seen": 128372070, - "step": 6049 - }, - { - "epoch": 0.7274694883665004, - "flos": 20557288278120.0, - "grad_norm": 3.1242613873215848, - "learning_rate": 7.29671046217547e-07, - "loss": 0.7498, - "num_input_tokens_seen": 128390900, - "step": 6050 - }, - { - "epoch": 0.7275897312571394, - "flos": 22290953520360.0, - "grad_norm": 9.7071986592707, - "learning_rate": 7.290694828746988e-07, - "loss": 0.8027, - "num_input_tokens_seen": 128410285, - "step": 6051 - }, - { - "epoch": 0.7277099741477785, - "flos": 14025469081080.0, - "grad_norm": 4.802248253407305, - "learning_rate": 7.284681123310004e-07, - "loss": 0.8439, - "num_input_tokens_seen": 128428720, - "step": 6052 - }, - { - "epoch": 0.7278302170384175, - "flos": 15108788237760.0, - "grad_norm": 3.4878539093815952, - "learning_rate": 7.27866934677678e-07, - "loss": 0.7767, - "num_input_tokens_seen": 128448110, - "step": 6053 - }, - { - "epoch": 0.7279504599290567, - "flos": 13942596779880.0, - "grad_norm": 3.709511316200166, - "learning_rate": 7.272659500059297e-07, - "loss": 0.7596, - "num_input_tokens_seen": 128465170, - "step": 6054 - }, - { - "epoch": 0.7280707028196958, - "flos": 13920407639280.0, - "grad_norm": 4.778042184674626, - "learning_rate": 7.266651584069264e-07, - "loss": 0.7895, - "num_input_tokens_seen": 128482555, - "step": 6055 - }, - { - "epoch": 0.7281909457103348, - "flos": 27340985354640.0, - "grad_norm": 7.376270249453226, - "learning_rate": 7.260645599718045e-07, - "loss": 0.5647, - "num_input_tokens_seen": 128508630, - "step": 6056 - }, - { - "epoch": 0.728311188600974, - "flos": 15108091718640.0, - "grad_norm": 8.44106641206261, - "learning_rate": 7.254641547916767e-07, - "loss": 0.6605, - "num_input_tokens_seen": 128525845, - "step": 6057 - }, - { - "epoch": 0.728431431491613, - "flos": 21157209030120.0, - "grad_norm": 11.046037224022568, - "learning_rate": 7.248639429576226e-07, - "loss": 0.6734, - "num_input_tokens_seen": 128545020, - "step": 6058 - }, - { - "epoch": 0.7285516743822521, - "flos": 19048467800880.0, - "grad_norm": 3.196872022819438, - "learning_rate": 7.242639245606959e-07, - "loss": 0.7116, - "num_input_tokens_seen": 128564530, - "step": 6059 - }, - { - "epoch": 0.7286719172728913, - "flos": 11950156278240.0, - 
"grad_norm": 7.007820665233537, - "learning_rate": 7.236640996919168e-07, - "loss": 0.8067, - "num_input_tokens_seen": 128583295, - "step": 6060 - }, - { - "epoch": 0.7287921601635303, - "flos": 16108126994640.0, - "grad_norm": 4.985836185584362, - "learning_rate": 7.230644684422782e-07, - "loss": 0.6894, - "num_input_tokens_seen": 128603245, - "step": 6061 - }, - { - "epoch": 0.7289124030541694, - "flos": 18018455034120.0, - "grad_norm": 4.3243856975027795, - "learning_rate": 7.224650309027451e-07, - "loss": 0.808, - "num_input_tokens_seen": 128622715, - "step": 6062 - }, - { - "epoch": 0.7290326459448085, - "flos": 15645334732560.0, - "grad_norm": 4.508480134972473, - "learning_rate": 7.218657871642506e-07, - "loss": 0.6674, - "num_input_tokens_seen": 128641240, - "step": 6063 - }, - { - "epoch": 0.7291528888354476, - "flos": 13569230430720.0, - "grad_norm": 3.9914790232175394, - "learning_rate": 7.212667373177012e-07, - "loss": 0.5991, - "num_input_tokens_seen": 128655955, - "step": 6064 - }, - { - "epoch": 0.7292731317260867, - "flos": 13837946917560.0, - "grad_norm": 4.823551320227368, - "learning_rate": 7.206678814539704e-07, - "loss": 0.7395, - "num_input_tokens_seen": 128673975, - "step": 6065 - }, - { - "epoch": 0.7293933746167258, - "flos": 15404347879320.0, - "grad_norm": 3.768431950903626, - "learning_rate": 7.20069219663904e-07, - "loss": 0.7221, - "num_input_tokens_seen": 128693580, - "step": 6066 - }, - { - "epoch": 0.7295136175073649, - "flos": 16429801611840.0, - "grad_norm": 3.094332553462219, - "learning_rate": 7.1947075203832e-07, - "loss": 0.7702, - "num_input_tokens_seen": 128713280, - "step": 6067 - }, - { - "epoch": 0.7296338603980039, - "flos": 41354048222760.0, - "grad_norm": 0.8702714400133894, - "learning_rate": 7.188724786680049e-07, - "loss": 0.6053, - "num_input_tokens_seen": 128773470, - "step": 6068 - }, - { - "epoch": 0.7297541032886431, - "flos": 18483748433040.0, - "grad_norm": 3.2797487730251773, - "learning_rate": 7.182743996437162e-07, - "loss": 0.7324, - "num_input_tokens_seen": 128792725, - "step": 6069 - }, - { - "epoch": 0.7298743461792822, - "flos": 19400753108040.0, - "grad_norm": 9.644401119894681, - "learning_rate": 7.176765150561819e-07, - "loss": 0.6672, - "num_input_tokens_seen": 128811050, - "step": 6070 - }, - { - "epoch": 0.7299945890699212, - "flos": 14294977066920.0, - "grad_norm": 8.320186875518763, - "learning_rate": 7.170788249961002e-07, - "loss": 0.7687, - "num_input_tokens_seen": 128829280, - "step": 6071 - }, - { - "epoch": 0.7301148319605604, - "flos": 16781991939120.0, - "grad_norm": 6.713367666076533, - "learning_rate": 7.164813295541418e-07, - "loss": 0.8595, - "num_input_tokens_seen": 128848565, - "step": 6072 - }, - { - "epoch": 0.7302350748511994, - "flos": 18587765096160.0, - "grad_norm": 4.232144940394739, - "learning_rate": 7.15884028820944e-07, - "loss": 0.6811, - "num_input_tokens_seen": 128867340, - "step": 6073 - }, - { - "epoch": 0.7303553177418385, - "flos": 20401231623480.0, - "grad_norm": 5.670501794963957, - "learning_rate": 7.152869228871185e-07, - "loss": 0.5729, - "num_input_tokens_seen": 128889545, - "step": 6074 - }, - { - "epoch": 0.7304755606324776, - "flos": 17890507932600.0, - "grad_norm": 6.883870453772094, - "learning_rate": 7.146900118432457e-07, - "loss": 0.7041, - "num_input_tokens_seen": 128909010, - "step": 6075 - }, - { - "epoch": 0.7305958035231167, - "flos": 17456490082800.0, - "grad_norm": 3.9084030931397487, - "learning_rate": 7.140932957798753e-07, - "loss": 0.8462, - 
"num_input_tokens_seen": 128927170, - "step": 6076 - }, - { - "epoch": 0.7307160464137558, - "flos": 12192156250200.0, - "grad_norm": 5.535227062388873, - "learning_rate": 7.134967747875309e-07, - "loss": 0.6935, - "num_input_tokens_seen": 128945100, - "step": 6077 - }, - { - "epoch": 0.7308362893043949, - "flos": 15945263448600.0, - "grad_norm": 3.778568294193164, - "learning_rate": 7.129004489567014e-07, - "loss": 0.8071, - "num_input_tokens_seen": 128962300, - "step": 6078 - }, - { - "epoch": 0.730956532195034, - "flos": 7737359493840.0, - "grad_norm": 6.62643992772308, - "learning_rate": 7.123043183778512e-07, - "loss": 0.7645, - "num_input_tokens_seen": 128979350, - "step": 6079 - }, - { - "epoch": 0.731076775085673, - "flos": 14460531709560.0, - "grad_norm": 3.048203555251126, - "learning_rate": 7.117083831414114e-07, - "loss": 0.6348, - "num_input_tokens_seen": 128998345, - "step": 6080 - }, - { - "epoch": 0.7311970179763122, - "flos": 14945069872800.0, - "grad_norm": 3.606718596850222, - "learning_rate": 7.11112643337787e-07, - "loss": 0.688, - "num_input_tokens_seen": 129017110, - "step": 6081 - }, - { - "epoch": 0.7313172608669513, - "flos": 13514436022680.0, - "grad_norm": 6.447724454004344, - "learning_rate": 7.10517099057349e-07, - "loss": 0.7548, - "num_input_tokens_seen": 129033780, - "step": 6082 - }, - { - "epoch": 0.7314375037575903, - "flos": 11788084231200.0, - "grad_norm": 5.483671050797939, - "learning_rate": 7.099217503904411e-07, - "loss": 0.569, - "num_input_tokens_seen": 129051355, - "step": 6083 - }, - { - "epoch": 0.7315577466482295, - "flos": 13110648943320.0, - "grad_norm": 12.607855609551097, - "learning_rate": 7.093265974273788e-07, - "loss": 0.8938, - "num_input_tokens_seen": 129068970, - "step": 6084 - }, - { - "epoch": 0.7316779895388685, - "flos": 13434381457920.0, - "grad_norm": 3.729812452494915, - "learning_rate": 7.087316402584447e-07, - "loss": 0.7119, - "num_input_tokens_seen": 129087515, - "step": 6085 - }, - { - "epoch": 0.7317982324295076, - "flos": 13080861412320.0, - "grad_norm": 4.889068453313629, - "learning_rate": 7.081368789738953e-07, - "loss": 0.8429, - "num_input_tokens_seen": 129104435, - "step": 6086 - }, - { - "epoch": 0.7319184753201466, - "flos": 19964301057360.0, - "grad_norm": 4.806527729715945, - "learning_rate": 7.075423136639537e-07, - "loss": 0.7665, - "num_input_tokens_seen": 129123410, - "step": 6087 - }, - { - "epoch": 0.7320387182107858, - "flos": 27527209459800.0, - "grad_norm": 4.713613177296231, - "learning_rate": 7.069479444188149e-07, - "loss": 0.7228, - "num_input_tokens_seen": 129143720, - "step": 6088 - }, - { - "epoch": 0.7321589611014249, - "flos": 13030119479160.0, - "grad_norm": 5.931246707059149, - "learning_rate": 7.063537713286453e-07, - "loss": 0.8083, - "num_input_tokens_seen": 129161120, - "step": 6089 - }, - { - "epoch": 0.7322792039920639, - "flos": 19128838965240.0, - "grad_norm": 8.560923633122096, - "learning_rate": 7.057597944835803e-07, - "loss": 0.7955, - "num_input_tokens_seen": 129180115, - "step": 6090 - }, - { - "epoch": 0.7323994468827031, - "flos": 18587226876840.0, - "grad_norm": 18.12077503289991, - "learning_rate": 7.051660139737253e-07, - "loss": 0.7325, - "num_input_tokens_seen": 129198055, - "step": 6091 - }, - { - "epoch": 0.7325196897733421, - "flos": 19726226920440.0, - "grad_norm": 6.433190245675978, - "learning_rate": 7.045724298891565e-07, - "loss": 0.7475, - "num_input_tokens_seen": 129217245, - "step": 6092 - }, - { - "epoch": 0.7326399326639812, - "flos": 18888960210600.0, - 
"grad_norm": 6.201490518388156, - "learning_rate": 7.039790423199192e-07, - "loss": 0.6827, - "num_input_tokens_seen": 129236605, - "step": 6093 - }, - { - "epoch": 0.7327601755546204, - "flos": 15378391203480.0, - "grad_norm": 12.213551383702603, - "learning_rate": 7.033858513560322e-07, - "loss": 0.7584, - "num_input_tokens_seen": 129252620, - "step": 6094 - }, - { - "epoch": 0.7328804184452594, - "flos": 11868233775840.0, - "grad_norm": 4.988995267210134, - "learning_rate": 7.027928570874794e-07, - "loss": 0.7452, - "num_input_tokens_seen": 129270530, - "step": 6095 - }, - { - "epoch": 0.7330006613358985, - "flos": 13029422960040.0, - "grad_norm": 3.6678695584048713, - "learning_rate": 7.022000596042194e-07, - "loss": 0.8344, - "num_input_tokens_seen": 129287350, - "step": 6096 - }, - { - "epoch": 0.7331209042265376, - "flos": 16458797643840.0, - "grad_norm": 8.545820711654452, - "learning_rate": 7.016074589961784e-07, - "loss": 0.7989, - "num_input_tokens_seen": 129305635, - "step": 6097 - }, - { - "epoch": 0.7332411471171767, - "flos": 24289187794680.0, - "grad_norm": 6.659439683776366, - "learning_rate": 7.01015055353253e-07, - "loss": 0.6564, - "num_input_tokens_seen": 129327780, - "step": 6098 - }, - { - "epoch": 0.7333613900078157, - "flos": 16643881990440.0, - "grad_norm": 9.28858775857924, - "learning_rate": 7.004228487653123e-07, - "loss": 0.7566, - "num_input_tokens_seen": 129348305, - "step": 6099 - }, - { - "epoch": 0.7334816328984549, - "flos": 16350791825760.0, - "grad_norm": 6.985405830930851, - "learning_rate": 6.998308393221906e-07, - "loss": 0.769, - "num_input_tokens_seen": 129366430, - "step": 6100 - }, - { - "epoch": 0.733601875789094, - "flos": 15158643692040.0, - "grad_norm": 13.107381566815164, - "learning_rate": 6.992390271136977e-07, - "loss": 0.6971, - "num_input_tokens_seen": 129381860, - "step": 6101 - }, - { - "epoch": 0.733722118679733, - "flos": 16512199013640.0, - "grad_norm": 6.939007019769482, - "learning_rate": 6.986474122296094e-07, - "loss": 0.8467, - "num_input_tokens_seen": 129400695, - "step": 6102 - }, - { - "epoch": 0.7338423615703722, - "flos": 14676828285360.0, - "grad_norm": 4.27987203210634, - "learning_rate": 6.980559947596751e-07, - "loss": 0.7068, - "num_input_tokens_seen": 129418955, - "step": 6103 - }, - { - "epoch": 0.7339626044610112, - "flos": 15863245966320.0, - "grad_norm": 3.9961296669100093, - "learning_rate": 6.974647747936109e-07, - "loss": 0.7424, - "num_input_tokens_seen": 129437060, - "step": 6104 - }, - { - "epoch": 0.7340828473516503, - "flos": 11112731268600.0, - "grad_norm": 5.039783514913837, - "learning_rate": 6.968737524211039e-07, - "loss": 0.8101, - "num_input_tokens_seen": 129453590, - "step": 6105 - }, - { - "epoch": 0.7342030902422895, - "flos": 16783005057840.0, - "grad_norm": 5.4359723279977095, - "learning_rate": 6.962829277318132e-07, - "loss": 0.7698, - "num_input_tokens_seen": 129472905, - "step": 6106 - }, - { - "epoch": 0.7343233331329285, - "flos": 18942203280600.0, - "grad_norm": 4.26507743702541, - "learning_rate": 6.956923008153652e-07, - "loss": 0.8088, - "num_input_tokens_seen": 129492390, - "step": 6107 - }, - { - "epoch": 0.7344435760235676, - "flos": 13488890926320.0, - "grad_norm": 4.961310809650124, - "learning_rate": 6.951018717613593e-07, - "loss": 0.8256, - "num_input_tokens_seen": 129511125, - "step": 6108 - }, - { - "epoch": 0.7345638189142067, - "flos": 12868047432120.0, - "grad_norm": 3.6185558523959775, - "learning_rate": 6.945116406593614e-07, - "loss": 0.7536, - 
"num_input_tokens_seen": 129529700, - "step": 6109 - }, - { - "epoch": 0.7346840618048458, - "flos": 14806200085080.0, - "grad_norm": 12.978418482753458, - "learning_rate": 6.939216075989089e-07, - "loss": 0.7137, - "num_input_tokens_seen": 129547350, - "step": 6110 - }, - { - "epoch": 0.7348043046954849, - "flos": 21292279622640.0, - "grad_norm": 3.4852012031082884, - "learning_rate": 6.933317726695109e-07, - "loss": 0.6477, - "num_input_tokens_seen": 129568300, - "step": 6111 - }, - { - "epoch": 0.734924547586124, - "flos": 13083425869080.0, - "grad_norm": 6.54963629491769, - "learning_rate": 6.92742135960644e-07, - "loss": 0.7631, - "num_input_tokens_seen": 129585720, - "step": 6112 - }, - { - "epoch": 0.7350447904767631, - "flos": 46879821242760.0, - "grad_norm": 0.8816349987683759, - "learning_rate": 6.921526975617556e-07, - "loss": 0.5898, - "num_input_tokens_seen": 129644900, - "step": 6113 - }, - { - "epoch": 0.7351650333674021, - "flos": 15783824600760.0, - "grad_norm": 4.561093884486013, - "learning_rate": 6.915634575622631e-07, - "loss": 0.7286, - "num_input_tokens_seen": 129663135, - "step": 6114 - }, - { - "epoch": 0.7352852762580413, - "flos": 13272214431000.0, - "grad_norm": 4.180769792702094, - "learning_rate": 6.909744160515532e-07, - "loss": 0.7011, - "num_input_tokens_seen": 129680995, - "step": 6115 - }, - { - "epoch": 0.7354055191486804, - "flos": 28609135578240.0, - "grad_norm": 4.759606134484231, - "learning_rate": 6.903855731189849e-07, - "loss": 0.6522, - "num_input_tokens_seen": 129703350, - "step": 6116 - }, - { - "epoch": 0.7355257620393194, - "flos": 11868898635000.0, - "grad_norm": 4.052928270964103, - "learning_rate": 6.897969288538825e-07, - "loss": 0.7903, - "num_input_tokens_seen": 129721015, - "step": 6117 - }, - { - "epoch": 0.7356460049299585, - "flos": 13218971361000.0, - "grad_norm": 3.9480761224892467, - "learning_rate": 6.892084833455452e-07, - "loss": 0.7943, - "num_input_tokens_seen": 129740305, - "step": 6118 - }, - { - "epoch": 0.7357662478205976, - "flos": 15595320978480.0, - "grad_norm": 3.210076655248027, - "learning_rate": 6.886202366832384e-07, - "loss": 0.8285, - "num_input_tokens_seen": 129761710, - "step": 6119 - }, - { - "epoch": 0.7358864907112367, - "flos": 10356848841840.0, - "grad_norm": 24.647481657377924, - "learning_rate": 6.880321889561987e-07, - "loss": 0.7157, - "num_input_tokens_seen": 129779405, - "step": 6120 - }, - { - "epoch": 0.7360067336018757, - "flos": 16323822031200.0, - "grad_norm": 4.8905563593425905, - "learning_rate": 6.874443402536338e-07, - "loss": 0.6319, - "num_input_tokens_seen": 129798215, - "step": 6121 - }, - { - "epoch": 0.7361269764925149, - "flos": 18724956906000.0, - "grad_norm": 6.344574672671101, - "learning_rate": 6.868566906647177e-07, - "loss": 0.8029, - "num_input_tokens_seen": 129818885, - "step": 6122 - }, - { - "epoch": 0.736247219383154, - "flos": 14893219841040.0, - "grad_norm": 6.761698158096173, - "learning_rate": 6.862692402785984e-07, - "loss": 0.8131, - "num_input_tokens_seen": 129838855, - "step": 6123 - }, - { - "epoch": 0.736367462273793, - "flos": 51876356727360.0, - "grad_norm": 0.6970102392782709, - "learning_rate": 6.856819891843899e-07, - "loss": 0.5066, - "num_input_tokens_seen": 129903280, - "step": 6124 - }, - { - "epoch": 0.7364877051644322, - "flos": 16399159261920.0, - "grad_norm": 4.739890332462296, - "learning_rate": 6.8509493747118e-07, - "loss": 0.7057, - "num_input_tokens_seen": 129921810, - "step": 6125 - }, - { - "epoch": 0.7366079480550712, - "flos": 
8790384560160.0, - "grad_norm": 5.742130699655297, - "learning_rate": 6.845080852280221e-07, - "loss": 0.8543, - "num_input_tokens_seen": 129938600, - "step": 6126 - }, - { - "epoch": 0.7367281909457103, - "flos": 10950722541480.0, - "grad_norm": 5.662553004675693, - "learning_rate": 6.839214325439409e-07, - "loss": 0.7305, - "num_input_tokens_seen": 129956015, - "step": 6127 - }, - { - "epoch": 0.7368484338363495, - "flos": 17212273913640.0, - "grad_norm": 5.464868879499049, - "learning_rate": 6.833349795079327e-07, - "loss": 0.7024, - "num_input_tokens_seen": 129974845, - "step": 6128 - }, - { - "epoch": 0.7369686767269885, - "flos": 20104215623760.0, - "grad_norm": 3.5894088398651594, - "learning_rate": 6.827487262089613e-07, - "loss": 0.6636, - "num_input_tokens_seen": 129995070, - "step": 6129 - }, - { - "epoch": 0.7370889196176276, - "flos": 51840807083640.0, - "grad_norm": 0.8725933287910893, - "learning_rate": 6.821626727359606e-07, - "loss": 0.5913, - "num_input_tokens_seen": 130060350, - "step": 6130 - }, - { - "epoch": 0.7372091625082667, - "flos": 13164588532440.0, - "grad_norm": 14.951625896315068, - "learning_rate": 6.815768191778348e-07, - "loss": 0.7492, - "num_input_tokens_seen": 130078150, - "step": 6131 - }, - { - "epoch": 0.7373294053989058, - "flos": 24772427899560.0, - "grad_norm": 4.58233636358924, - "learning_rate": 6.809911656234569e-07, - "loss": 0.7171, - "num_input_tokens_seen": 130099845, - "step": 6132 - }, - { - "epoch": 0.7374496482895448, - "flos": 15729188492520.0, - "grad_norm": 4.197605295047724, - "learning_rate": 6.804057121616707e-07, - "loss": 0.7652, - "num_input_tokens_seen": 130117770, - "step": 6133 - }, - { - "epoch": 0.737569891180184, - "flos": 18268180036320.0, - "grad_norm": 3.810801768808604, - "learning_rate": 6.798204588812888e-07, - "loss": 0.7046, - "num_input_tokens_seen": 130136905, - "step": 6134 - }, - { - "epoch": 0.7376901340708231, - "flos": 15105843861480.0, - "grad_norm": 4.559919071483239, - "learning_rate": 6.792354058710937e-07, - "loss": 0.7418, - "num_input_tokens_seen": 130154095, - "step": 6135 - }, - { - "epoch": 0.7378103769614621, - "flos": 17431071626280.0, - "grad_norm": 4.105220652886604, - "learning_rate": 6.786505532198374e-07, - "loss": 0.6426, - "num_input_tokens_seen": 130172760, - "step": 6136 - }, - { - "epoch": 0.7379306198521013, - "flos": 16270262361600.0, - "grad_norm": 5.994068392669214, - "learning_rate": 6.780659010162411e-07, - "loss": 0.8399, - "num_input_tokens_seen": 130191430, - "step": 6137 - }, - { - "epoch": 0.7380508627427403, - "flos": 10842463443720.0, - "grad_norm": 13.58302715557119, - "learning_rate": 6.774814493489975e-07, - "loss": 0.8236, - "num_input_tokens_seen": 130208825, - "step": 6138 - }, - { - "epoch": 0.7381711056333794, - "flos": 15861631308360.0, - "grad_norm": 3.8991793918476705, - "learning_rate": 6.768971983067655e-07, - "loss": 0.6587, - "num_input_tokens_seen": 130228875, - "step": 6139 - }, - { - "epoch": 0.7382913485240186, - "flos": 38600121481440.0, - "grad_norm": 1.042031828136115, - "learning_rate": 6.763131479781772e-07, - "loss": 0.6956, - "num_input_tokens_seen": 130278355, - "step": 6140 - }, - { - "epoch": 0.7384115914146576, - "flos": 15945738348000.0, - "grad_norm": 3.5800815879859136, - "learning_rate": 6.757292984518316e-07, - "loss": 0.7572, - "num_input_tokens_seen": 130297475, - "step": 6141 - }, - { - "epoch": 0.7385318343052967, - "flos": 45328106011080.0, - "grad_norm": 0.8365315260797803, - "learning_rate": 6.751456498162981e-07, - "loss": 0.6059, 
- "num_input_tokens_seen": 130356230, - "step": 6142 - }, - { - "epoch": 0.7386520771959358, - "flos": 12403703832000.0, - "grad_norm": 6.428572306557247, - "learning_rate": 6.745622021601174e-07, - "loss": 0.8412, - "num_input_tokens_seen": 130372975, - "step": 6143 - }, - { - "epoch": 0.7387723200865749, - "flos": 13704807582600.0, - "grad_norm": 5.360581153849537, - "learning_rate": 6.739789555717954e-07, - "loss": 0.6796, - "num_input_tokens_seen": 130389670, - "step": 6144 - }, - { - "epoch": 0.738892562977214, - "flos": 16483139661720.0, - "grad_norm": 8.85137675293303, - "learning_rate": 6.733959101398124e-07, - "loss": 0.7702, - "num_input_tokens_seen": 130407520, - "step": 6145 - }, - { - "epoch": 0.7390128058678531, - "flos": 15724914397920.0, - "grad_norm": 6.730288511443296, - "learning_rate": 6.728130659526143e-07, - "loss": 0.8043, - "num_input_tokens_seen": 130425050, - "step": 6146 - }, - { - "epoch": 0.7391330487584922, - "flos": 18889308470160.0, - "grad_norm": 4.534010053127941, - "learning_rate": 6.7223042309862e-07, - "loss": 0.6861, - "num_input_tokens_seen": 130444970, - "step": 6147 - }, - { - "epoch": 0.7392532916491312, - "flos": 20806506720960.0, - "grad_norm": 3.5893005690711215, - "learning_rate": 6.716479816662144e-07, - "loss": 0.7202, - "num_input_tokens_seen": 130466420, - "step": 6148 - }, - { - "epoch": 0.7393735345397703, - "flos": 17268176420280.0, - "grad_norm": 3.811141725818191, - "learning_rate": 6.710657417437531e-07, - "loss": 0.7135, - "num_input_tokens_seen": 130485845, - "step": 6149 - }, - { - "epoch": 0.7394937774304094, - "flos": 14595855581760.0, - "grad_norm": 20.26500005059909, - "learning_rate": 6.704837034195628e-07, - "loss": 0.7643, - "num_input_tokens_seen": 130504030, - "step": 6150 - }, - { - "epoch": 0.7396140203210485, - "flos": 17188596754920.0, - "grad_norm": 5.240774131841265, - "learning_rate": 6.699018667819376e-07, - "loss": 0.8358, - "num_input_tokens_seen": 130523150, - "step": 6151 - }, - { - "epoch": 0.7397342632116876, - "flos": 18725685085080.0, - "grad_norm": 2.8816319144252924, - "learning_rate": 6.693202319191415e-07, - "loss": 0.7128, - "num_input_tokens_seen": 130544605, - "step": 6152 - }, - { - "epoch": 0.7398545061023267, - "flos": 18133647663120.0, - "grad_norm": 4.068318118189754, - "learning_rate": 6.687387989194084e-07, - "loss": 0.7214, - "num_input_tokens_seen": 130563840, - "step": 6153 - }, - { - "epoch": 0.7399747489929658, - "flos": 12030654082440.0, - "grad_norm": 4.885294095950689, - "learning_rate": 6.681575678709404e-07, - "loss": 0.7793, - "num_input_tokens_seen": 130582250, - "step": 6154 - }, - { - "epoch": 0.7400949918836048, - "flos": 17646545043120.0, - "grad_norm": 4.243748948289519, - "learning_rate": 6.67576538861911e-07, - "loss": 0.6897, - "num_input_tokens_seen": 130600545, - "step": 6155 - }, - { - "epoch": 0.740215234774244, - "flos": 15948999323880.0, - "grad_norm": 11.787732513820579, - "learning_rate": 6.669957119804612e-07, - "loss": 0.8022, - "num_input_tokens_seen": 130621900, - "step": 6156 - }, - { - "epoch": 0.7403354776648831, - "flos": 13676856329280.0, - "grad_norm": 5.30102520509926, - "learning_rate": 6.66415087314702e-07, - "loss": 0.685, - "num_input_tokens_seen": 130636575, - "step": 6157 - }, - { - "epoch": 0.7404557205555221, - "flos": 12327353482560.0, - "grad_norm": 4.51313454348447, - "learning_rate": 6.65834664952714e-07, - "loss": 0.7064, - "num_input_tokens_seen": 130653745, - "step": 6158 - }, - { - "epoch": 0.7405759634461613, - "flos": 15513145196400.0, - 
"grad_norm": 3.647998595538622, - "learning_rate": 6.652544449825457e-07, - "loss": 0.7409, - "num_input_tokens_seen": 130673720, - "step": 6159 - }, - { - "epoch": 0.7406962063368003, - "flos": 14969063631120.0, - "grad_norm": 6.981203455757025, - "learning_rate": 6.646744274922182e-07, - "loss": 0.7464, - "num_input_tokens_seen": 130691885, - "step": 6160 - }, - { - "epoch": 0.7408164492274394, - "flos": 14460468389640.0, - "grad_norm": 16.4509621382288, - "learning_rate": 6.640946125697171e-07, - "loss": 0.7316, - "num_input_tokens_seen": 130709135, - "step": 6161 - }, - { - "epoch": 0.7409366921180786, - "flos": 21426622036080.0, - "grad_norm": 4.349249343120352, - "learning_rate": 6.635150003030017e-07, - "loss": 0.7482, - "num_input_tokens_seen": 130727380, - "step": 6162 - }, - { - "epoch": 0.7410569350087176, - "flos": 16782910077960.0, - "grad_norm": 6.43943239115558, - "learning_rate": 6.629355907799981e-07, - "loss": 0.8425, - "num_input_tokens_seen": 130746905, - "step": 6163 - }, - { - "epoch": 0.7411771778993567, - "flos": 22340302415280.0, - "grad_norm": 3.20025193295578, - "learning_rate": 6.623563840886015e-07, - "loss": 0.6843, - "num_input_tokens_seen": 130767550, - "step": 6164 - }, - { - "epoch": 0.7412974207899958, - "flos": 15000877399560.0, - "grad_norm": 3.7079179223944356, - "learning_rate": 6.617773803166795e-07, - "loss": 0.6862, - "num_input_tokens_seen": 130785595, - "step": 6165 - }, - { - "epoch": 0.7414176636806349, - "flos": 16161749984160.0, - "grad_norm": 6.264599038313613, - "learning_rate": 6.611985795520634e-07, - "loss": 0.808, - "num_input_tokens_seen": 130803860, - "step": 6166 - }, - { - "epoch": 0.7415379065712739, - "flos": 18429682204080.0, - "grad_norm": 3.8240544066608857, - "learning_rate": 6.606199818825588e-07, - "loss": 0.7587, - "num_input_tokens_seen": 130824035, - "step": 6167 - }, - { - "epoch": 0.7416581494619131, - "flos": 12299148949560.0, - "grad_norm": 4.852942032567457, - "learning_rate": 6.600415873959377e-07, - "loss": 0.7988, - "num_input_tokens_seen": 130841630, - "step": 6168 - }, - { - "epoch": 0.7417783923525522, - "flos": 20859781450920.0, - "grad_norm": 8.621470738245865, - "learning_rate": 6.594633961799437e-07, - "loss": 0.6292, - "num_input_tokens_seen": 130860390, - "step": 6169 - }, - { - "epoch": 0.7418986352431912, - "flos": 14676923265240.0, - "grad_norm": 3.448763340989538, - "learning_rate": 6.588854083222857e-07, - "loss": 0.8042, - "num_input_tokens_seen": 130879545, - "step": 6170 - }, - { - "epoch": 0.7420188781338304, - "flos": 13326438959760.0, - "grad_norm": 16.102437165411192, - "learning_rate": 6.583076239106444e-07, - "loss": 0.7781, - "num_input_tokens_seen": 130897770, - "step": 6171 - }, - { - "epoch": 0.7421391210244694, - "flos": 10005355033680.0, - "grad_norm": 4.950918963893081, - "learning_rate": 6.577300430326707e-07, - "loss": 0.7414, - "num_input_tokens_seen": 130912435, - "step": 6172 - }, - { - "epoch": 0.7422593639151085, - "flos": 11625948864240.0, - "grad_norm": 4.1892580355946025, - "learning_rate": 6.571526657759821e-07, - "loss": 0.7122, - "num_input_tokens_seen": 130927895, - "step": 6173 - }, - { - "epoch": 0.7423796068057477, - "flos": 22099378881960.0, - "grad_norm": 6.608301675278917, - "learning_rate": 6.565754922281663e-07, - "loss": 0.6975, - "num_input_tokens_seen": 130949860, - "step": 6174 - }, - { - "epoch": 0.7424998496963867, - "flos": 15000750759720.0, - "grad_norm": 6.144832654618959, - "learning_rate": 6.559985224767801e-07, - "loss": 0.7789, - 
"num_input_tokens_seen": 130967455, - "step": 6175 - }, - { - "epoch": 0.7426200925870258, - "flos": 15999551297280.0, - "grad_norm": 6.473506339865235, - "learning_rate": 6.55421756609349e-07, - "loss": 0.7423, - "num_input_tokens_seen": 130985430, - "step": 6176 - }, - { - "epoch": 0.7427403354776649, - "flos": 19374574812480.0, - "grad_norm": 3.3033614868853074, - "learning_rate": 6.54845194713369e-07, - "loss": 0.7786, - "num_input_tokens_seen": 131006100, - "step": 6177 - }, - { - "epoch": 0.742860578368304, - "flos": 14538844976520.0, - "grad_norm": 5.667420542815829, - "learning_rate": 6.542688368763034e-07, - "loss": 0.7905, - "num_input_tokens_seen": 131024225, - "step": 6178 - }, - { - "epoch": 0.742980821258943, - "flos": 18186954053040.0, - "grad_norm": 4.195114273133503, - "learning_rate": 6.536926831855854e-07, - "loss": 0.7608, - "num_input_tokens_seen": 131043110, - "step": 6179 - }, - { - "epoch": 0.7431010641495821, - "flos": 18484065032640.0, - "grad_norm": 6.424906439786478, - "learning_rate": 6.531167337286165e-07, - "loss": 0.7147, - "num_input_tokens_seen": 131062850, - "step": 6180 - }, - { - "epoch": 0.7432213070402213, - "flos": 15918768553440.0, - "grad_norm": 3.681355934367516, - "learning_rate": 6.52540988592768e-07, - "loss": 0.7852, - "num_input_tokens_seen": 131083590, - "step": 6181 - }, - { - "epoch": 0.7433415499308603, - "flos": 10761617379960.0, - "grad_norm": 5.598581113897663, - "learning_rate": 6.519654478653814e-07, - "loss": 0.8188, - "num_input_tokens_seen": 131101675, - "step": 6182 - }, - { - "epoch": 0.7434617928214994, - "flos": 49518650334960.0, - "grad_norm": 1.4870532318571623, - "learning_rate": 6.51390111633763e-07, - "loss": 0.5694, - "num_input_tokens_seen": 131166670, - "step": 6183 - }, - { - "epoch": 0.7435820357121385, - "flos": 20073889873440.0, - "grad_norm": 3.557334501232926, - "learning_rate": 6.508149799851932e-07, - "loss": 0.7453, - "num_input_tokens_seen": 131188055, - "step": 6184 - }, - { - "epoch": 0.7437022786027776, - "flos": 17565572339520.0, - "grad_norm": 11.368106304146826, - "learning_rate": 6.502400530069183e-07, - "loss": 0.6111, - "num_input_tokens_seen": 131207660, - "step": 6185 - }, - { - "epoch": 0.7438225214934167, - "flos": 15995308862640.0, - "grad_norm": 3.0572311854621503, - "learning_rate": 6.496653307861535e-07, - "loss": 0.6702, - "num_input_tokens_seen": 131228050, - "step": 6186 - }, - { - "epoch": 0.7439427643840558, - "flos": 14784802443480.0, - "grad_norm": 7.67950619302129, - "learning_rate": 6.490908134100857e-07, - "loss": 0.6383, - "num_input_tokens_seen": 131246235, - "step": 6187 - }, - { - "epoch": 0.7440630072746949, - "flos": 15243447250800.0, - "grad_norm": 4.892897982620827, - "learning_rate": 6.48516500965866e-07, - "loss": 0.6795, - "num_input_tokens_seen": 131265890, - "step": 6188 - }, - { - "epoch": 0.7441832501653339, - "flos": 19427469622920.0, - "grad_norm": 4.016859762739499, - "learning_rate": 6.479423935406192e-07, - "loss": 0.8045, - "num_input_tokens_seen": 131285595, - "step": 6189 - }, - { - "epoch": 0.7443034930559731, - "flos": 50588007449280.0, - "grad_norm": 0.9253679471600611, - "learning_rate": 6.473684912214357e-07, - "loss": 0.702, - "num_input_tokens_seen": 131348875, - "step": 6190 - }, - { - "epoch": 0.7444237359466122, - "flos": 13618231066080.0, - "grad_norm": 6.193812069548192, - "learning_rate": 6.467947940953778e-07, - "loss": 0.6914, - "num_input_tokens_seen": 131367120, - "step": 6191 - }, - { - "epoch": 0.7445439788372512, - "flos": 16699309597680.0, 
- "grad_norm": 4.83646124990938, - "learning_rate": 6.462213022494732e-07, - "loss": 0.7111, - "num_input_tokens_seen": 131386085, - "step": 6192 - }, - { - "epoch": 0.7446642217278904, - "flos": 49437266051880.0, - "grad_norm": 0.8017125366660102, - "learning_rate": 6.456480157707201e-07, - "loss": 0.6436, - "num_input_tokens_seen": 131450580, - "step": 6193 - }, - { - "epoch": 0.7447844646185294, - "flos": 12702809389080.0, - "grad_norm": 11.66542097434932, - "learning_rate": 6.450749347460866e-07, - "loss": 0.8342, - "num_input_tokens_seen": 131467275, - "step": 6194 - }, - { - "epoch": 0.7449047075091685, - "flos": 19510816823520.0, - "grad_norm": 4.7570190770342275, - "learning_rate": 6.445020592625083e-07, - "loss": 0.7791, - "num_input_tokens_seen": 131487645, - "step": 6195 - }, - { - "epoch": 0.7450249503998077, - "flos": 10299838236600.0, - "grad_norm": 6.4303463196225445, - "learning_rate": 6.4392938940689e-07, - "loss": 0.7914, - "num_input_tokens_seen": 131502780, - "step": 6196 - }, - { - "epoch": 0.7451451932904467, - "flos": 14322991640160.0, - "grad_norm": 11.213455183324964, - "learning_rate": 6.433569252661049e-07, - "loss": 0.682, - "num_input_tokens_seen": 131520500, - "step": 6197 - }, - { - "epoch": 0.7452654361810858, - "flos": 9060652385040.0, - "grad_norm": 5.145061955055771, - "learning_rate": 6.427846669269952e-07, - "loss": 0.6878, - "num_input_tokens_seen": 131537840, - "step": 6198 - }, - { - "epoch": 0.7453856790717249, - "flos": 16188878078520.0, - "grad_norm": 5.491706664306824, - "learning_rate": 6.422126144763729e-07, - "loss": 0.8, - "num_input_tokens_seen": 131556950, - "step": 6199 - }, - { - "epoch": 0.745505921962364, - "flos": 14622350476920.0, - "grad_norm": 4.457482491467932, - "learning_rate": 6.416407680010174e-07, - "loss": 0.7521, - "num_input_tokens_seen": 131571030, - "step": 6200 - }, - { - "epoch": 0.745626164853003, - "flos": 18075972198720.0, - "grad_norm": 5.447135368422914, - "learning_rate": 6.410691275876774e-07, - "loss": 0.7931, - "num_input_tokens_seen": 131590170, - "step": 6201 - }, - { - "epoch": 0.7457464077436422, - "flos": 10572955457880.0, - "grad_norm": 7.042336095406414, - "learning_rate": 6.404976933230704e-07, - "loss": 0.7403, - "num_input_tokens_seen": 131606410, - "step": 6202 - }, - { - "epoch": 0.7458666506342813, - "flos": 24991067312400.0, - "grad_norm": 4.140367953401096, - "learning_rate": 6.399264652938813e-07, - "loss": 0.7138, - "num_input_tokens_seen": 131627035, - "step": 6203 - }, - { - "epoch": 0.7459868935249203, - "flos": 17781140736240.0, - "grad_norm": 4.757793457794919, - "learning_rate": 6.393554435867679e-07, - "loss": 0.728, - "num_input_tokens_seen": 131647605, - "step": 6204 - }, - { - "epoch": 0.7461071364155595, - "flos": 16027185951000.0, - "grad_norm": 4.129268864901216, - "learning_rate": 6.387846282883502e-07, - "loss": 0.8114, - "num_input_tokens_seen": 131663855, - "step": 6205 - }, - { - "epoch": 0.7462273793061985, - "flos": 16752552667680.0, - "grad_norm": 3.5071614432265563, - "learning_rate": 6.38214019485223e-07, - "loss": 0.7562, - "num_input_tokens_seen": 131682400, - "step": 6206 - }, - { - "epoch": 0.7463476221968376, - "flos": 14590283428800.0, - "grad_norm": 3.532575375454027, - "learning_rate": 6.376436172639461e-07, - "loss": 0.7, - "num_input_tokens_seen": 131699965, - "step": 6207 - }, - { - "epoch": 0.7464678650874768, - "flos": 12273350573520.0, - "grad_norm": 6.20524687713167, - "learning_rate": 6.370734217110487e-07, - "loss": 0.6288, - "num_input_tokens_seen": 
131718430, - "step": 6208 - }, - { - "epoch": 0.7465881079781158, - "flos": 35410431101160.0, - "grad_norm": 5.08272534262568, - "learning_rate": 6.36503432913031e-07, - "loss": 0.6302, - "num_input_tokens_seen": 131741295, - "step": 6209 - }, - { - "epoch": 0.7467083508687549, - "flos": 14374968311760.0, - "grad_norm": 6.159656801943158, - "learning_rate": 6.359336509563569e-07, - "loss": 0.6703, - "num_input_tokens_seen": 131757035, - "step": 6210 - }, - { - "epoch": 0.7468285937593939, - "flos": 13057057613760.0, - "grad_norm": 5.036163395079965, - "learning_rate": 6.353640759274641e-07, - "loss": 0.7925, - "num_input_tokens_seen": 131775645, - "step": 6211 - }, - { - "epoch": 0.7469488366500331, - "flos": 16938333533400.0, - "grad_norm": 4.383520929265045, - "learning_rate": 6.347947079127556e-07, - "loss": 0.7266, - "num_input_tokens_seen": 131793265, - "step": 6212 - }, - { - "epoch": 0.7470690795406721, - "flos": 12165503055240.0, - "grad_norm": 5.056132326772605, - "learning_rate": 6.342255469986053e-07, - "loss": 0.7476, - "num_input_tokens_seen": 131811730, - "step": 6213 - }, - { - "epoch": 0.7471893224313112, - "flos": 18457538477520.0, - "grad_norm": 12.654816421509613, - "learning_rate": 6.336565932713533e-07, - "loss": 0.7539, - "num_input_tokens_seen": 131830875, - "step": 6214 - }, - { - "epoch": 0.7473095653219504, - "flos": 16483741200960.0, - "grad_norm": 4.693616817936693, - "learning_rate": 6.330878468173088e-07, - "loss": 0.7646, - "num_input_tokens_seen": 131850660, - "step": 6215 - }, - { - "epoch": 0.7474298082125894, - "flos": 13218749741280.0, - "grad_norm": 4.067793932741437, - "learning_rate": 6.32519307722752e-07, - "loss": 0.7169, - "num_input_tokens_seen": 131868275, - "step": 6216 - }, - { - "epoch": 0.7475500511032285, - "flos": 45767569374000.0, - "grad_norm": 0.7928183652595443, - "learning_rate": 6.31950976073929e-07, - "loss": 0.5702, - "num_input_tokens_seen": 131922085, - "step": 6217 - }, - { - "epoch": 0.7476702939938676, - "flos": 12975704990640.0, - "grad_norm": 4.0954060007866495, - "learning_rate": 6.31382851957055e-07, - "loss": 0.7842, - "num_input_tokens_seen": 131938625, - "step": 6218 - }, - { - "epoch": 0.7477905368845067, - "flos": 20481634447800.0, - "grad_norm": 5.7092279257620095, - "learning_rate": 6.308149354583143e-07, - "loss": 0.689, - "num_input_tokens_seen": 131957750, - "step": 6219 - }, - { - "epoch": 0.7479107797751458, - "flos": 19699035506160.0, - "grad_norm": 8.788294671321648, - "learning_rate": 6.302472266638586e-07, - "loss": 0.7946, - "num_input_tokens_seen": 131978010, - "step": 6220 - }, - { - "epoch": 0.7480310226657849, - "flos": 24932790308760.0, - "grad_norm": 11.197655868450422, - "learning_rate": 6.296797256598101e-07, - "loss": 0.6917, - "num_input_tokens_seen": 131999210, - "step": 6221 - }, - { - "epoch": 0.748151265556424, - "flos": 18186384173760.0, - "grad_norm": 3.8319513621737724, - "learning_rate": 6.291124325322576e-07, - "loss": 0.7983, - "num_input_tokens_seen": 132019055, - "step": 6222 - }, - { - "epoch": 0.748271508447063, - "flos": 28231178534880.0, - "grad_norm": 3.1369756496020638, - "learning_rate": 6.285453473672595e-07, - "loss": 0.6142, - "num_input_tokens_seen": 132041345, - "step": 6223 - }, - { - "epoch": 0.7483917513377022, - "flos": 15755335128120.0, - "grad_norm": 5.772875850133509, - "learning_rate": 6.279784702508415e-07, - "loss": 0.7262, - "num_input_tokens_seen": 132061815, - "step": 6224 - }, - { - "epoch": 0.7485119942283412, - "flos": 45937588071000.0, - "grad_norm": 
0.8358344255163246, - "learning_rate": 6.274118012689979e-07, - "loss": 0.6365, - "num_input_tokens_seen": 132123435, - "step": 6225 - }, - { - "epoch": 0.7486322371189803, - "flos": 21969342223080.0, - "grad_norm": 3.351218466007428, - "learning_rate": 6.268453405076943e-07, - "loss": 0.669, - "num_input_tokens_seen": 132145550, - "step": 6226 - }, - { - "epoch": 0.7487524800096195, - "flos": 13836490559400.0, - "grad_norm": 5.320007763620319, - "learning_rate": 6.262790880528592e-07, - "loss": 0.8116, - "num_input_tokens_seen": 132162890, - "step": 6227 - }, - { - "epoch": 0.7488727229002585, - "flos": 13650456414000.0, - "grad_norm": 6.82196423432462, - "learning_rate": 6.257130439903951e-07, - "loss": 0.7736, - "num_input_tokens_seen": 132179105, - "step": 6228 - }, - { - "epoch": 0.7489929657908976, - "flos": 17296064353680.0, - "grad_norm": 5.91040280591137, - "learning_rate": 6.251472084061695e-07, - "loss": 0.7874, - "num_input_tokens_seen": 132197745, - "step": 6229 - }, - { - "epoch": 0.7491132086815367, - "flos": 15021705161880.0, - "grad_norm": 3.709422748004477, - "learning_rate": 6.245815813860191e-07, - "loss": 0.8849, - "num_input_tokens_seen": 132212975, - "step": 6230 - }, - { - "epoch": 0.7492334515721758, - "flos": 16836944646960.0, - "grad_norm": 6.304279980730508, - "learning_rate": 6.240161630157495e-07, - "loss": 0.6717, - "num_input_tokens_seen": 132232050, - "step": 6231 - }, - { - "epoch": 0.7493536944628149, - "flos": 11949364779240.0, - "grad_norm": 4.559041423765886, - "learning_rate": 6.23450953381133e-07, - "loss": 0.6753, - "num_input_tokens_seen": 132249860, - "step": 6232 - }, - { - "epoch": 0.749473937353454, - "flos": 11164391340600.0, - "grad_norm": 7.2255190864127705, - "learning_rate": 6.228859525679131e-07, - "loss": 0.6693, - "num_input_tokens_seen": 132263995, - "step": 6233 - }, - { - "epoch": 0.7495941802440931, - "flos": 13838010237480.0, - "grad_norm": 7.423089949899359, - "learning_rate": 6.223211606617986e-07, - "loss": 0.7898, - "num_input_tokens_seen": 132282135, - "step": 6234 - }, - { - "epoch": 0.7497144231347321, - "flos": 16459145903400.0, - "grad_norm": 5.048004685328708, - "learning_rate": 6.217565777484701e-07, - "loss": 0.8189, - "num_input_tokens_seen": 132300950, - "step": 6235 - }, - { - "epoch": 0.7498346660253713, - "flos": 17754994100640.0, - "grad_norm": 3.738892699553494, - "learning_rate": 6.211922039135722e-07, - "loss": 0.7852, - "num_input_tokens_seen": 132320815, - "step": 6236 - }, - { - "epoch": 0.7499549089160104, - "flos": 17860910361360.0, - "grad_norm": 4.413502669490371, - "learning_rate": 6.206280392427201e-07, - "loss": 0.7923, - "num_input_tokens_seen": 132340120, - "step": 6237 - }, - { - "epoch": 0.7500751518066494, - "flos": 25017752167320.0, - "grad_norm": 3.8450550872071037, - "learning_rate": 6.200640838214983e-07, - "loss": 0.7189, - "num_input_tokens_seen": 132362615, - "step": 6238 - }, - { - "epoch": 0.7501953946972886, - "flos": 13758683851800.0, - "grad_norm": 4.875493733761661, - "learning_rate": 6.195003377354578e-07, - "loss": 0.6573, - "num_input_tokens_seen": 132381605, - "step": 6239 - }, - { - "epoch": 0.7503156375879276, - "flos": 14803952227920.0, - "grad_norm": 6.229846963977533, - "learning_rate": 6.189368010701183e-07, - "loss": 0.7048, - "num_input_tokens_seen": 132398385, - "step": 6240 - }, - { - "epoch": 0.7504358804785667, - "flos": 9789280077600.0, - "grad_norm": 8.79077544071745, - "learning_rate": 6.183734739109683e-07, - "loss": 0.7457, - "num_input_tokens_seen": 132415925, - 
"step": 6241 - }, - { - "epoch": 0.7505561233692057, - "flos": 21616328736840.0, - "grad_norm": 5.721047195397263, - "learning_rate": 6.178103563434629e-07, - "loss": 0.6764, - "num_input_tokens_seen": 132434645, - "step": 6242 - }, - { - "epoch": 0.7506763662598449, - "flos": 14838267133200.0, - "grad_norm": 3.556164109947667, - "learning_rate": 6.172474484530283e-07, - "loss": 0.8345, - "num_input_tokens_seen": 132453100, - "step": 6243 - }, - { - "epoch": 0.750796609150484, - "flos": 27366372151200.0, - "grad_norm": 6.1061825423482485, - "learning_rate": 6.166847503250563e-07, - "loss": 0.7376, - "num_input_tokens_seen": 132475060, - "step": 6244 - }, - { - "epoch": 0.750916852041123, - "flos": 14325397797120.0, - "grad_norm": 5.204793393361366, - "learning_rate": 6.161222620449078e-07, - "loss": 0.7708, - "num_input_tokens_seen": 132493555, - "step": 6245 - }, - { - "epoch": 0.7510370949317622, - "flos": 18397330216320.0, - "grad_norm": 6.080944829107434, - "learning_rate": 6.155599836979117e-07, - "loss": 0.7803, - "num_input_tokens_seen": 132511960, - "step": 6246 - }, - { - "epoch": 0.7511573378224012, - "flos": 13920502619160.0, - "grad_norm": 3.863283595131682, - "learning_rate": 6.149979153693649e-07, - "loss": 0.7984, - "num_input_tokens_seen": 132528935, - "step": 6247 - }, - { - "epoch": 0.7512775807130403, - "flos": 14568315907920.0, - "grad_norm": 17.372640984754053, - "learning_rate": 6.144360571445343e-07, - "loss": 0.7419, - "num_input_tokens_seen": 132547800, - "step": 6248 - }, - { - "epoch": 0.7513978236036795, - "flos": 15161967987840.0, - "grad_norm": 15.702794131309034, - "learning_rate": 6.138744091086509e-07, - "loss": 0.7873, - "num_input_tokens_seen": 132567105, - "step": 6249 - }, - { - "epoch": 0.7515180664943185, - "flos": 20211683222520.0, - "grad_norm": 5.355983183545926, - "learning_rate": 6.133129713469183e-07, - "loss": 0.7101, - "num_input_tokens_seen": 132586030, - "step": 6250 - }, - { - "epoch": 0.7516383093849576, - "flos": 24829850084280.0, - "grad_norm": 8.32742277981661, - "learning_rate": 6.127517439445053e-07, - "loss": 0.6204, - "num_input_tokens_seen": 132606595, - "step": 6251 - }, - { - "epoch": 0.7517585522755967, - "flos": 21826388300520.0, - "grad_norm": 6.542185120984163, - "learning_rate": 6.121907269865498e-07, - "loss": 0.8052, - "num_input_tokens_seen": 132625805, - "step": 6252 - }, - { - "epoch": 0.7518787951662358, - "flos": 51480036907200.0, - "grad_norm": 0.9606965546027709, - "learning_rate": 6.116299205581577e-07, - "loss": 0.721, - "num_input_tokens_seen": 132680355, - "step": 6253 - }, - { - "epoch": 0.7519990380568748, - "flos": 25125726325440.0, - "grad_norm": 3.654736458269835, - "learning_rate": 6.110693247444018e-07, - "loss": 0.6603, - "num_input_tokens_seen": 132701910, - "step": 6254 - }, - { - "epoch": 0.752119280947514, - "flos": 15891608799120.0, - "grad_norm": 6.320172773969258, - "learning_rate": 6.105089396303258e-07, - "loss": 0.8091, - "num_input_tokens_seen": 132720020, - "step": 6255 - }, - { - "epoch": 0.7522395238381531, - "flos": 24044750005800.0, - "grad_norm": 4.418781829676179, - "learning_rate": 6.099487653009383e-07, - "loss": 0.743, - "num_input_tokens_seen": 132739085, - "step": 6256 - }, - { - "epoch": 0.7523597667287921, - "flos": 17186823797160.0, - "grad_norm": 5.030606056847856, - "learning_rate": 6.093888018412192e-07, - "loss": 0.815, - "num_input_tokens_seen": 132754995, - "step": 6257 - }, - { - "epoch": 0.7524800096194313, - "flos": 49661350977840.0, - "grad_norm": 0.7409721864507449, - 
"learning_rate": 6.088290493361125e-07, - "loss": 0.5685, - "num_input_tokens_seen": 132819600, - "step": 6258 - }, - { - "epoch": 0.7526002525100703, - "flos": 9438546108480.0, - "grad_norm": 6.9358207801095055, - "learning_rate": 6.082695078705322e-07, - "loss": 0.6917, - "num_input_tokens_seen": 132836800, - "step": 6259 - }, - { - "epoch": 0.7527204954007094, - "flos": 15648500728560.0, - "grad_norm": 15.036027494080155, - "learning_rate": 6.077101775293618e-07, - "loss": 0.6684, - "num_input_tokens_seen": 132855345, - "step": 6260 - }, - { - "epoch": 0.7528407382913486, - "flos": 13834527641880.0, - "grad_norm": 8.512387393425078, - "learning_rate": 6.071510583974504e-07, - "loss": 0.8164, - "num_input_tokens_seen": 132870250, - "step": 6261 - }, - { - "epoch": 0.7529609811819876, - "flos": 11085761474040.0, - "grad_norm": 5.427382780027808, - "learning_rate": 6.065921505596161e-07, - "loss": 0.6988, - "num_input_tokens_seen": 132888250, - "step": 6262 - }, - { - "epoch": 0.7530812240726267, - "flos": 14136799194960.0, - "grad_norm": 3.0785660016318976, - "learning_rate": 6.060334541006445e-07, - "loss": 0.7626, - "num_input_tokens_seen": 132906465, - "step": 6263 - }, - { - "epoch": 0.7532014669632658, - "flos": 20349033332160.0, - "grad_norm": 3.9324707125223695, - "learning_rate": 6.05474969105289e-07, - "loss": 0.6741, - "num_input_tokens_seen": 132929175, - "step": 6264 - }, - { - "epoch": 0.7533217098539049, - "flos": 10275876138240.0, - "grad_norm": 6.619720082429963, - "learning_rate": 6.049166956582725e-07, - "loss": 0.7206, - "num_input_tokens_seen": 132947160, - "step": 6265 - }, - { - "epoch": 0.753441952744544, - "flos": 19371947035800.0, - "grad_norm": 4.967556408205263, - "learning_rate": 6.043586338442841e-07, - "loss": 0.8583, - "num_input_tokens_seen": 132965935, - "step": 6266 - }, - { - "epoch": 0.7535621956351831, - "flos": 17484947895480.0, - "grad_norm": 4.021217341031021, - "learning_rate": 6.038007837479815e-07, - "loss": 0.7173, - "num_input_tokens_seen": 132986760, - "step": 6267 - }, - { - "epoch": 0.7536824385258222, - "flos": 15943142231280.0, - "grad_norm": 5.3952360473129986, - "learning_rate": 6.032431454539897e-07, - "loss": 0.616, - "num_input_tokens_seen": 133005325, - "step": 6268 - }, - { - "epoch": 0.7538026814164612, - "flos": 21209438981400.0, - "grad_norm": 6.020164936632368, - "learning_rate": 6.026857190469014e-07, - "loss": 0.7985, - "num_input_tokens_seen": 133026800, - "step": 6269 - }, - { - "epoch": 0.7539229243071004, - "flos": 15431919213120.0, - "grad_norm": 5.420938108557101, - "learning_rate": 6.0212850461128e-07, - "loss": 0.7212, - "num_input_tokens_seen": 133045640, - "step": 6270 - }, - { - "epoch": 0.7540431671977395, - "flos": 11031441965400.0, - "grad_norm": 5.538023513606263, - "learning_rate": 6.015715022316516e-07, - "loss": 0.7258, - "num_input_tokens_seen": 133063340, - "step": 6271 - }, - { - "epoch": 0.7541634100883785, - "flos": 13704649282800.0, - "grad_norm": 5.9764115457856155, - "learning_rate": 6.010147119925154e-07, - "loss": 0.7585, - "num_input_tokens_seen": 133080815, - "step": 6272 - }, - { - "epoch": 0.7542836529790176, - "flos": 15054658688880.0, - "grad_norm": 22.312610102363152, - "learning_rate": 6.004581339783348e-07, - "loss": 0.6345, - "num_input_tokens_seen": 133098855, - "step": 6273 - }, - { - "epoch": 0.7544038958696567, - "flos": 13944274757760.0, - "grad_norm": 10.727882165157842, - "learning_rate": 5.999017682735425e-07, - "loss": 0.6578, - "num_input_tokens_seen": 133114965, - "step": 6274 - 
}, - { - "epoch": 0.7545241387602958, - "flos": 23290577216880.0, - "grad_norm": 3.6568951159888714, - "learning_rate": 5.993456149625387e-07, - "loss": 0.6502, - "num_input_tokens_seen": 133135835, - "step": 6275 - }, - { - "epoch": 0.7546443816509348, - "flos": 14833011579840.0, - "grad_norm": 7.74170536268382, - "learning_rate": 5.987896741296909e-07, - "loss": 0.8125, - "num_input_tokens_seen": 133153295, - "step": 6276 - }, - { - "epoch": 0.754764624541574, - "flos": 17349908962920.0, - "grad_norm": 4.388069902668271, - "learning_rate": 5.982339458593361e-07, - "loss": 0.7647, - "num_input_tokens_seen": 133172955, - "step": 6277 - }, - { - "epoch": 0.7548848674322131, - "flos": 18564277897200.0, - "grad_norm": 4.531014903316601, - "learning_rate": 5.976784302357767e-07, - "loss": 0.8383, - "num_input_tokens_seen": 133193240, - "step": 6278 - }, - { - "epoch": 0.7550051103228521, - "flos": 14298586302360.0, - "grad_norm": 5.4752899302179685, - "learning_rate": 5.971231273432855e-07, - "loss": 0.7119, - "num_input_tokens_seen": 133212445, - "step": 6279 - }, - { - "epoch": 0.7551253532134913, - "flos": 47292436959600.0, - "grad_norm": 0.8260252982622828, - "learning_rate": 5.965680372661e-07, - "loss": 0.5761, - "num_input_tokens_seen": 133269730, - "step": 6280 - }, - { - "epoch": 0.7552455961041303, - "flos": 19105256786400.0, - "grad_norm": 4.496703742015592, - "learning_rate": 5.960131600884266e-07, - "loss": 0.5371, - "num_input_tokens_seen": 133288720, - "step": 6281 - }, - { - "epoch": 0.7553658389947694, - "flos": 17943402743040.0, - "grad_norm": 4.99627126895824, - "learning_rate": 5.954584958944413e-07, - "loss": 0.7436, - "num_input_tokens_seen": 133307105, - "step": 6282 - }, - { - "epoch": 0.7554860818854086, - "flos": 15946308227280.0, - "grad_norm": 4.408102309021069, - "learning_rate": 5.949040447682854e-07, - "loss": 0.794, - "num_input_tokens_seen": 133326650, - "step": 6283 - }, - { - "epoch": 0.7556063247760476, - "flos": 11922173364960.0, - "grad_norm": 4.863690879146773, - "learning_rate": 5.943498067940686e-07, - "loss": 0.6669, - "num_input_tokens_seen": 133343395, - "step": 6284 - }, - { - "epoch": 0.7557265676666867, - "flos": 19968986731440.0, - "grad_norm": 5.002924254736705, - "learning_rate": 5.937957820558686e-07, - "loss": 0.793, - "num_input_tokens_seen": 133362460, - "step": 6285 - }, - { - "epoch": 0.7558468105573258, - "flos": 45842526685200.0, - "grad_norm": 0.8381733806280773, - "learning_rate": 5.932419706377296e-07, - "loss": 0.6624, - "num_input_tokens_seen": 133420485, - "step": 6286 - }, - { - "epoch": 0.7559670534479649, - "flos": 24418812874080.0, - "grad_norm": 7.993349648979446, - "learning_rate": 5.92688372623666e-07, - "loss": 0.7215, - "num_input_tokens_seen": 133438910, - "step": 6287 - }, - { - "epoch": 0.7560872963386039, - "flos": 10221809909280.0, - "grad_norm": 27.129538160837026, - "learning_rate": 5.921349880976574e-07, - "loss": 0.7254, - "num_input_tokens_seen": 133456465, - "step": 6288 - }, - { - "epoch": 0.7562075392292431, - "flos": 14919619756320.0, - "grad_norm": 3.763763474130219, - "learning_rate": 5.915818171436515e-07, - "loss": 0.7932, - "num_input_tokens_seen": 133475520, - "step": 6289 - }, - { - "epoch": 0.7563277821198822, - "flos": 14893473120720.0, - "grad_norm": 17.698441534883003, - "learning_rate": 5.910288598455642e-07, - "loss": 0.7296, - "num_input_tokens_seen": 133494590, - "step": 6290 - }, - { - "epoch": 0.7564480250105212, - "flos": 13569736990080.0, - "grad_norm": 5.088341398277517, - 
"learning_rate": 5.90476116287278e-07, - "loss": 0.7164, - "num_input_tokens_seen": 133511910, - "step": 6291 - }, - { - "epoch": 0.7565682679011604, - "flos": 15514126655160.0, - "grad_norm": 5.284137545889759, - "learning_rate": 5.899235865526456e-07, - "loss": 0.6612, - "num_input_tokens_seen": 133530925, - "step": 6292 - }, - { - "epoch": 0.7566885107917994, - "flos": 14946811170600.0, - "grad_norm": 3.3018295880526662, - "learning_rate": 5.893712707254825e-07, - "loss": 0.8035, - "num_input_tokens_seen": 133548105, - "step": 6293 - }, - { - "epoch": 0.7568087536824385, - "flos": 13893532824600.0, - "grad_norm": 9.400105950590357, - "learning_rate": 5.888191688895769e-07, - "loss": 0.6312, - "num_input_tokens_seen": 133565085, - "step": 6294 - }, - { - "epoch": 0.7569289965730777, - "flos": 11080157661120.0, - "grad_norm": 4.945869788035779, - "learning_rate": 5.882672811286813e-07, - "loss": 0.5882, - "num_input_tokens_seen": 133581085, - "step": 6295 - }, - { - "epoch": 0.7570492394637167, - "flos": 15182764090200.0, - "grad_norm": 4.317753784284048, - "learning_rate": 5.877156075265166e-07, - "loss": 0.6779, - "num_input_tokens_seen": 133597070, - "step": 6296 - }, - { - "epoch": 0.7571694823543558, - "flos": 11404903294440.0, - "grad_norm": 6.433156742709564, - "learning_rate": 5.871641481667715e-07, - "loss": 0.6799, - "num_input_tokens_seen": 133611235, - "step": 6297 - }, - { - "epoch": 0.7572897252449949, - "flos": 18617457647280.0, - "grad_norm": 11.187851884305417, - "learning_rate": 5.866129031331011e-07, - "loss": 0.8252, - "num_input_tokens_seen": 133630610, - "step": 6298 - }, - { - "epoch": 0.757409968135634, - "flos": 17781425675880.0, - "grad_norm": 7.154906034245783, - "learning_rate": 5.8606187250913e-07, - "loss": 0.8034, - "num_input_tokens_seen": 133648380, - "step": 6299 - }, - { - "epoch": 0.757530211026273, - "flos": 17673736457400.0, - "grad_norm": 5.927619503108547, - "learning_rate": 5.855110563784482e-07, - "loss": 0.8182, - "num_input_tokens_seen": 133666635, - "step": 6300 - }, - { - "epoch": 0.7576504539169122, - "flos": 17539140764280.0, - "grad_norm": 3.3839708419788126, - "learning_rate": 5.849604548246156e-07, - "loss": 0.6266, - "num_input_tokens_seen": 133687465, - "step": 6301 - }, - { - "epoch": 0.7577706968075513, - "flos": 15540558230400.0, - "grad_norm": 11.549788462365473, - "learning_rate": 5.844100679311565e-07, - "loss": 0.7858, - "num_input_tokens_seen": 133706145, - "step": 6302 - }, - { - "epoch": 0.7578909396981903, - "flos": 13353915313680.0, - "grad_norm": 5.128762329592518, - "learning_rate": 5.838598957815637e-07, - "loss": 0.7541, - "num_input_tokens_seen": 133723095, - "step": 6303 - }, - { - "epoch": 0.7580111825888295, - "flos": 18591437651520.0, - "grad_norm": 3.0814544176614067, - "learning_rate": 5.833099384592996e-07, - "loss": 0.8441, - "num_input_tokens_seen": 133743390, - "step": 6304 - }, - { - "epoch": 0.7581314254794685, - "flos": 17403500292480.0, - "grad_norm": 5.360725432882013, - "learning_rate": 5.827601960477913e-07, - "loss": 0.6915, - "num_input_tokens_seen": 133761035, - "step": 6305 - }, - { - "epoch": 0.7582516683701076, - "flos": 16135096789200.0, - "grad_norm": 3.8822224895979667, - "learning_rate": 5.822106686304344e-07, - "loss": 0.7012, - "num_input_tokens_seen": 133780045, - "step": 6306 - }, - { - "epoch": 0.7583719112607467, - "flos": 23236036088520.0, - "grad_norm": 4.350972490017228, - "learning_rate": 5.816613562905919e-07, - "loss": 0.5583, - "num_input_tokens_seen": 133800950, - "step": 6307 - 
}, - { - "epoch": 0.7584921541513858, - "flos": 24285325279560.0, - "grad_norm": 2.8784979397420316, - "learning_rate": 5.811122591115933e-07, - "loss": 0.6956, - "num_input_tokens_seen": 133821655, - "step": 6308 - }, - { - "epoch": 0.7586123970420249, - "flos": 17075525343240.0, - "grad_norm": 4.298958789827572, - "learning_rate": 5.805633771767376e-07, - "loss": 0.6787, - "num_input_tokens_seen": 133838770, - "step": 6309 - }, - { - "epoch": 0.7587326399326639, - "flos": 13381929886920.0, - "grad_norm": 2.7425070899725683, - "learning_rate": 5.800147105692888e-07, - "loss": 0.759, - "num_input_tokens_seen": 133858065, - "step": 6310 - }, - { - "epoch": 0.7588528828233031, - "flos": 12597937907040.0, - "grad_norm": 5.250532395752146, - "learning_rate": 5.794662593724795e-07, - "loss": 0.7774, - "num_input_tokens_seen": 133876790, - "step": 6311 - }, - { - "epoch": 0.7589731257139422, - "flos": 12922588560480.0, - "grad_norm": 30.309364770206543, - "learning_rate": 5.789180236695091e-07, - "loss": 0.7388, - "num_input_tokens_seen": 133893365, - "step": 6312 - }, - { - "epoch": 0.7590933686045812, - "flos": 11625410644920.0, - "grad_norm": 5.14639653921724, - "learning_rate": 5.78370003543544e-07, - "loss": 0.8444, - "num_input_tokens_seen": 133911840, - "step": 6313 - }, - { - "epoch": 0.7592136114952204, - "flos": 15404791118760.0, - "grad_norm": 5.0948191263814255, - "learning_rate": 5.778221990777203e-07, - "loss": 0.8174, - "num_input_tokens_seen": 133929300, - "step": 6314 - }, - { - "epoch": 0.7593338543858594, - "flos": 18534585346080.0, - "grad_norm": 8.309179575021888, - "learning_rate": 5.772746103551372e-07, - "loss": 0.8149, - "num_input_tokens_seen": 133944415, - "step": 6315 - }, - { - "epoch": 0.7594540972764985, - "flos": 23371233320880.0, - "grad_norm": 3.617237593301895, - "learning_rate": 5.767272374588648e-07, - "loss": 0.7127, - "num_input_tokens_seen": 133965540, - "step": 6316 - }, - { - "epoch": 0.7595743401671377, - "flos": 27637653094800.0, - "grad_norm": 8.428475534110635, - "learning_rate": 5.76180080471939e-07, - "loss": 0.7722, - "num_input_tokens_seen": 133988430, - "step": 6317 - }, - { - "epoch": 0.7596945830577767, - "flos": 13346095303560.0, - "grad_norm": 4.7755803419625416, - "learning_rate": 5.756331394773631e-07, - "loss": 0.703, - "num_input_tokens_seen": 134004365, - "step": 6318 - }, - { - "epoch": 0.7598148259484158, - "flos": 16266653126160.0, - "grad_norm": 6.451193458089085, - "learning_rate": 5.750864145581071e-07, - "loss": 0.7541, - "num_input_tokens_seen": 134023305, - "step": 6319 - }, - { - "epoch": 0.7599350688390549, - "flos": 19914572242920.0, - "grad_norm": 6.2531419104672015, - "learning_rate": 5.745399057971085e-07, - "loss": 0.8404, - "num_input_tokens_seen": 134044160, - "step": 6320 - }, - { - "epoch": 0.760055311729694, - "flos": 11329249464120.0, - "grad_norm": 6.802798020894486, - "learning_rate": 5.739936132772738e-07, - "loss": 0.731, - "num_input_tokens_seen": 134062445, - "step": 6321 - }, - { - "epoch": 0.760175554620333, - "flos": 18429903823800.0, - "grad_norm": 4.249712317210747, - "learning_rate": 5.734475370814733e-07, - "loss": 0.736, - "num_input_tokens_seen": 134081845, - "step": 6322 - }, - { - "epoch": 0.7602957975109722, - "flos": 17836726643280.0, - "grad_norm": 2.8750408003855936, - "learning_rate": 5.729016772925483e-07, - "loss": 0.7691, - "num_input_tokens_seen": 134103140, - "step": 6323 - }, - { - "epoch": 0.7604160404016113, - "flos": 18457506817560.0, - "grad_norm": 3.704386950802307, - 
"learning_rate": 5.723560339933038e-07, - "loss": 0.6981, - "num_input_tokens_seen": 134123195, - "step": 6324 - }, - { - "epoch": 0.7605362832922503, - "flos": 21912869837160.0, - "grad_norm": 4.4900674345054385, - "learning_rate": 5.71810607266513e-07, - "loss": 0.6275, - "num_input_tokens_seen": 134141500, - "step": 6325 - }, - { - "epoch": 0.7606565261828895, - "flos": 10113360851760.0, - "grad_norm": 4.6275874967459085, - "learning_rate": 5.712653971949184e-07, - "loss": 0.5835, - "num_input_tokens_seen": 134159340, - "step": 6326 - }, - { - "epoch": 0.7607767690735285, - "flos": 13542292296120.0, - "grad_norm": 7.013272645105142, - "learning_rate": 5.707204038612268e-07, - "loss": 0.753, - "num_input_tokens_seen": 134176490, - "step": 6327 - }, - { - "epoch": 0.7608970119641676, - "flos": 15297545139720.0, - "grad_norm": 4.003944359574732, - "learning_rate": 5.701756273481138e-07, - "loss": 0.7155, - "num_input_tokens_seen": 134193630, - "step": 6328 - }, - { - "epoch": 0.7610172548548068, - "flos": 17432401344600.0, - "grad_norm": 5.1912052755727895, - "learning_rate": 5.696310677382212e-07, - "loss": 0.7203, - "num_input_tokens_seen": 134214745, - "step": 6329 - }, - { - "epoch": 0.7611374977454458, - "flos": 49030629576120.0, - "grad_norm": 0.8275007378151361, - "learning_rate": 5.690867251141576e-07, - "loss": 0.6395, - "num_input_tokens_seen": 134281120, - "step": 6330 - }, - { - "epoch": 0.7612577406360849, - "flos": 11382777473760.0, - "grad_norm": 12.4422389831206, - "learning_rate": 5.685425995585013e-07, - "loss": 0.8943, - "num_input_tokens_seen": 134298765, - "step": 6331 - }, - { - "epoch": 0.761377983526724, - "flos": 44614073560080.0, - "grad_norm": 0.8037036696113518, - "learning_rate": 5.679986911537935e-07, - "loss": 0.613, - "num_input_tokens_seen": 134366015, - "step": 6332 - }, - { - "epoch": 0.7614982264173631, - "flos": 26287168789320.0, - "grad_norm": 3.8174145400318755, - "learning_rate": 5.674549999825462e-07, - "loss": 0.6587, - "num_input_tokens_seen": 134388550, - "step": 6333 - }, - { - "epoch": 0.7616184693080021, - "flos": 50088973515720.0, - "grad_norm": 0.9957330387211926, - "learning_rate": 5.669115261272363e-07, - "loss": 0.7648, - "num_input_tokens_seen": 134448590, - "step": 6334 - }, - { - "epoch": 0.7617387121986413, - "flos": 15000814079640.0, - "grad_norm": 8.879789107971584, - "learning_rate": 5.663682696703081e-07, - "loss": 0.7082, - "num_input_tokens_seen": 134466575, - "step": 6335 - }, - { - "epoch": 0.7618589550892804, - "flos": 13596516824880.0, - "grad_norm": 3.7120262074860784, - "learning_rate": 5.658252306941746e-07, - "loss": 0.8202, - "num_input_tokens_seen": 134485615, - "step": 6336 - }, - { - "epoch": 0.7619791979799194, - "flos": 12729620883840.0, - "grad_norm": 3.927331054861245, - "learning_rate": 5.65282409281212e-07, - "loss": 0.7406, - "num_input_tokens_seen": 134502800, - "step": 6337 - }, - { - "epoch": 0.7620994408705585, - "flos": 10275876138240.0, - "grad_norm": 3.734818946913377, - "learning_rate": 5.64739805513768e-07, - "loss": 0.6847, - "num_input_tokens_seen": 134520065, - "step": 6338 - }, - { - "epoch": 0.7622196837611976, - "flos": 52145100382800.0, - "grad_norm": 0.8111191165832388, - "learning_rate": 5.641974194741541e-07, - "loss": 0.5792, - "num_input_tokens_seen": 134575470, - "step": 6339 - }, - { - "epoch": 0.7623399266518367, - "flos": 44727457080000.0, - "grad_norm": 0.7436725415838848, - "learning_rate": 5.636552512446502e-07, - "loss": 0.6423, - "num_input_tokens_seen": 134636245, - "step": 6340 
- }, - { - "epoch": 0.7624601695424758, - "flos": 19401892866600.0, - "grad_norm": 3.839369347760826, - "learning_rate": 5.631133009075027e-07, - "loss": 0.7732, - "num_input_tokens_seen": 134655150, - "step": 6341 - }, - { - "epoch": 0.7625804124331149, - "flos": 13974948767640.0, - "grad_norm": 4.319362542840003, - "learning_rate": 5.625715685449242e-07, - "loss": 0.6816, - "num_input_tokens_seen": 134672975, - "step": 6342 - }, - { - "epoch": 0.762700655323754, - "flos": 19212534425400.0, - "grad_norm": 2.911780724506889, - "learning_rate": 5.620300542390966e-07, - "loss": 0.7073, - "num_input_tokens_seen": 134693740, - "step": 6343 - }, - { - "epoch": 0.762820898214393, - "flos": 16377761620320.0, - "grad_norm": 7.630254717133619, - "learning_rate": 5.614887580721659e-07, - "loss": 0.8364, - "num_input_tokens_seen": 134713605, - "step": 6344 - }, - { - "epoch": 0.7629411411050322, - "flos": 11431841429040.0, - "grad_norm": 6.098545789135923, - "learning_rate": 5.609476801262481e-07, - "loss": 0.7153, - "num_input_tokens_seen": 134728185, - "step": 6345 - }, - { - "epoch": 0.7630613839956712, - "flos": 10003898675520.0, - "grad_norm": 7.115778795137475, - "learning_rate": 5.604068204834223e-07, - "loss": 0.617, - "num_input_tokens_seen": 134744800, - "step": 6346 - }, - { - "epoch": 0.7631816268863103, - "flos": 10594891318800.0, - "grad_norm": 5.687031150189232, - "learning_rate": 5.598661792257367e-07, - "loss": 0.7321, - "num_input_tokens_seen": 134761565, - "step": 6347 - }, - { - "epoch": 0.7633018697769495, - "flos": 13920439299240.0, - "grad_norm": 11.95839280879065, - "learning_rate": 5.593257564352071e-07, - "loss": 0.7512, - "num_input_tokens_seen": 134779725, - "step": 6348 - }, - { - "epoch": 0.7634221126675885, - "flos": 16133640431040.0, - "grad_norm": 4.338148938096345, - "learning_rate": 5.58785552193815e-07, - "loss": 0.7407, - "num_input_tokens_seen": 134799690, - "step": 6349 - }, - { - "epoch": 0.7635423555582276, - "flos": 21557608493760.0, - "grad_norm": 3.6557940337346473, - "learning_rate": 5.582455665835086e-07, - "loss": 0.7401, - "num_input_tokens_seen": 134819705, - "step": 6350 - }, - { - "epoch": 0.7636625984488667, - "flos": 12975103451400.0, - "grad_norm": 6.500200415235858, - "learning_rate": 5.577057996862036e-07, - "loss": 0.7105, - "num_input_tokens_seen": 134837050, - "step": 6351 - }, - { - "epoch": 0.7637828413395058, - "flos": 17378620055280.0, - "grad_norm": 4.039896364299132, - "learning_rate": 5.571662515837814e-07, - "loss": 0.738, - "num_input_tokens_seen": 134858730, - "step": 6352 - }, - { - "epoch": 0.7639030842301449, - "flos": 26665157492640.0, - "grad_norm": 4.9688156411495505, - "learning_rate": 5.566269223580926e-07, - "loss": 0.8188, - "num_input_tokens_seen": 134880160, - "step": 6353 - }, - { - "epoch": 0.764023327120784, - "flos": 21185033643600.0, - "grad_norm": 3.1943963936402993, - "learning_rate": 5.560878120909511e-07, - "loss": 0.7377, - "num_input_tokens_seen": 134902480, - "step": 6354 - }, - { - "epoch": 0.7641435700114231, - "flos": 47768558064840.0, - "grad_norm": 0.9200792441240235, - "learning_rate": 5.55548920864141e-07, - "loss": 0.6423, - "num_input_tokens_seen": 134962855, - "step": 6355 - }, - { - "epoch": 0.7642638129020621, - "flos": 12272622394440.0, - "grad_norm": 5.095234660396598, - "learning_rate": 5.550102487594113e-07, - "loss": 0.7635, - "num_input_tokens_seen": 134981245, - "step": 6356 - }, - { - "epoch": 0.7643840557927013, - "flos": 22317733355160.0, - "grad_norm": 4.165577224720759, - 
"learning_rate": 5.54471795858477e-07, - "loss": 0.701, - "num_input_tokens_seen": 135001035, - "step": 6357 - }, - { - "epoch": 0.7645042986833404, - "flos": 12381957930840.0, - "grad_norm": 3.789030660908433, - "learning_rate": 5.539335622430235e-07, - "loss": 0.8073, - "num_input_tokens_seen": 135019375, - "step": 6358 - }, - { - "epoch": 0.7646245415739794, - "flos": 12624876041640.0, - "grad_norm": 9.092266158337148, - "learning_rate": 5.533955479946975e-07, - "loss": 0.7295, - "num_input_tokens_seen": 135037875, - "step": 6359 - }, - { - "epoch": 0.7647447844646186, - "flos": 51921399867720.0, - "grad_norm": 0.9075986429728874, - "learning_rate": 5.528577531951173e-07, - "loss": 0.6659, - "num_input_tokens_seen": 135098000, - "step": 6360 - }, - { - "epoch": 0.7648650273552576, - "flos": 12894510667320.0, - "grad_norm": 4.90756460667125, - "learning_rate": 5.523201779258653e-07, - "loss": 0.73, - "num_input_tokens_seen": 135116695, - "step": 6361 - }, - { - "epoch": 0.7649852702458967, - "flos": 16214296535040.0, - "grad_norm": 5.287058147609232, - "learning_rate": 5.517828222684912e-07, - "loss": 0.8286, - "num_input_tokens_seen": 135137070, - "step": 6362 - }, - { - "epoch": 0.7651055131365359, - "flos": 51510457637400.0, - "grad_norm": 0.7806029119489553, - "learning_rate": 5.512456863045117e-07, - "loss": 0.5968, - "num_input_tokens_seen": 135197480, - "step": 6363 - }, - { - "epoch": 0.7652257560271749, - "flos": 14217866878440.0, - "grad_norm": 7.139388163021055, - "learning_rate": 5.507087701154089e-07, - "loss": 0.7241, - "num_input_tokens_seen": 135217120, - "step": 6364 - }, - { - "epoch": 0.765345998917814, - "flos": 11625442304880.0, - "grad_norm": 4.205794950586849, - "learning_rate": 5.50172073782634e-07, - "loss": 0.7275, - "num_input_tokens_seen": 135234820, - "step": 6365 - }, - { - "epoch": 0.7654662418084531, - "flos": 17323065808200.0, - "grad_norm": 4.227762274115651, - "learning_rate": 5.496355973876023e-07, - "loss": 0.8632, - "num_input_tokens_seen": 135253795, - "step": 6366 - }, - { - "epoch": 0.7655864846990922, - "flos": 30498857475120.0, - "grad_norm": 5.569490990109959, - "learning_rate": 5.490993410116984e-07, - "loss": 0.6877, - "num_input_tokens_seen": 135276505, - "step": 6367 - }, - { - "epoch": 0.7657067275897312, - "flos": 31741589242200.0, - "grad_norm": 4.286874682143773, - "learning_rate": 5.485633047362704e-07, - "loss": 0.6898, - "num_input_tokens_seen": 135298230, - "step": 6368 - }, - { - "epoch": 0.7658269704803703, - "flos": 12625129321320.0, - "grad_norm": 5.93596267875599, - "learning_rate": 5.480274886426341e-07, - "loss": 0.7683, - "num_input_tokens_seen": 135314590, - "step": 6369 - }, - { - "epoch": 0.7659472133710095, - "flos": 9114465334320.0, - "grad_norm": 5.719840857686153, - "learning_rate": 5.474918928120744e-07, - "loss": 0.7567, - "num_input_tokens_seen": 135330805, - "step": 6370 - }, - { - "epoch": 0.7660674562616485, - "flos": 16617672034920.0, - "grad_norm": 4.83999192670669, - "learning_rate": 5.469565173258392e-07, - "loss": 0.8647, - "num_input_tokens_seen": 135349040, - "step": 6371 - }, - { - "epoch": 0.7661876991522876, - "flos": 12436182459600.0, - "grad_norm": 3.864164770171261, - "learning_rate": 5.464213622651454e-07, - "loss": 0.6228, - "num_input_tokens_seen": 135366575, - "step": 6372 - }, - { - "epoch": 0.7663079420429267, - "flos": 14676859945320.0, - "grad_norm": 9.446579765327519, - "learning_rate": 5.458864277111753e-07, - "loss": 0.8245, - "num_input_tokens_seen": 135384130, - "step": 6373 - }, - { - 
"epoch": 0.7664281849335658, - "flos": 9195343058040.0, - "grad_norm": 6.178360170742239, - "learning_rate": 5.453517137450769e-07, - "loss": 0.6864, - "num_input_tokens_seen": 135400425, - "step": 6374 - }, - { - "epoch": 0.7665484278242048, - "flos": 16350031986720.0, - "grad_norm": 11.081928670877026, - "learning_rate": 5.448172204479684e-07, - "loss": 0.7445, - "num_input_tokens_seen": 135419425, - "step": 6375 - }, - { - "epoch": 0.766668670714844, - "flos": 17291252039760.0, - "grad_norm": 6.483358648229501, - "learning_rate": 5.442829479009294e-07, - "loss": 0.733, - "num_input_tokens_seen": 135437925, - "step": 6376 - }, - { - "epoch": 0.7667889136054831, - "flos": 14190517164360.0, - "grad_norm": 3.5114649595011196, - "learning_rate": 5.437488961850103e-07, - "loss": 0.7038, - "num_input_tokens_seen": 135457445, - "step": 6377 - }, - { - "epoch": 0.7669091564961221, - "flos": 19695489590640.0, - "grad_norm": 2.8512532941840183, - "learning_rate": 5.432150653812258e-07, - "loss": 0.7418, - "num_input_tokens_seen": 135477200, - "step": 6378 - }, - { - "epoch": 0.7670293993867613, - "flos": 8979078142200.0, - "grad_norm": 4.524630848700985, - "learning_rate": 5.42681455570557e-07, - "loss": 0.8149, - "num_input_tokens_seen": 135493450, - "step": 6379 - }, - { - "epoch": 0.7671496422774003, - "flos": 15919275112800.0, - "grad_norm": 6.170530844037391, - "learning_rate": 5.42148066833954e-07, - "loss": 0.639, - "num_input_tokens_seen": 135512415, - "step": 6380 - }, - { - "epoch": 0.7672698851680394, - "flos": 15405804237480.0, - "grad_norm": 6.080106044647589, - "learning_rate": 5.416148992523289e-07, - "loss": 0.7384, - "num_input_tokens_seen": 135530710, - "step": 6381 - }, - { - "epoch": 0.7673901280586786, - "flos": 12376195818120.0, - "grad_norm": 3.4431749144083486, - "learning_rate": 5.410819529065644e-07, - "loss": 0.7692, - "num_input_tokens_seen": 135548385, - "step": 6382 - }, - { - "epoch": 0.7675103709493176, - "flos": 21454794909120.0, - "grad_norm": 9.069487305001642, - "learning_rate": 5.405492278775079e-07, - "loss": 0.6378, - "num_input_tokens_seen": 135567885, - "step": 6383 - }, - { - "epoch": 0.7676306138399567, - "flos": 21292121322840.0, - "grad_norm": 3.7744717967366443, - "learning_rate": 5.400167242459732e-07, - "loss": 0.7842, - "num_input_tokens_seen": 135586565, - "step": 6384 - }, - { - "epoch": 0.7677508567305958, - "flos": 16512863872800.0, - "grad_norm": 7.244894137138901, - "learning_rate": 5.394844420927405e-07, - "loss": 0.7943, - "num_input_tokens_seen": 135605895, - "step": 6385 - }, - { - "epoch": 0.7678710996212349, - "flos": 18619388904840.0, - "grad_norm": 4.839199167944644, - "learning_rate": 5.389523814985562e-07, - "loss": 0.7054, - "num_input_tokens_seen": 135625035, - "step": 6386 - }, - { - "epoch": 0.767991342511874, - "flos": 19614421907160.0, - "grad_norm": 3.952728130939949, - "learning_rate": 5.384205425441344e-07, - "loss": 0.7425, - "num_input_tokens_seen": 135645665, - "step": 6387 - }, - { - "epoch": 0.7681115854025131, - "flos": 19320951822960.0, - "grad_norm": 3.8977740072910927, - "learning_rate": 5.378889253101537e-07, - "loss": 0.8286, - "num_input_tokens_seen": 135665940, - "step": 6388 - }, - { - "epoch": 0.7682318282931522, - "flos": 17024751750120.0, - "grad_norm": 4.970273917803039, - "learning_rate": 5.373575298772617e-07, - "loss": 0.7914, - "num_input_tokens_seen": 135684780, - "step": 6389 - }, - { - "epoch": 0.7683520711837912, - "flos": 51677057058720.0, - "grad_norm": 0.7215623979663405, - "learning_rate": 
5.368263563260689e-07, - "loss": 0.6154, - "num_input_tokens_seen": 135749635, - "step": 6390 - }, - { - "epoch": 0.7684723140744304, - "flos": 13596865084440.0, - "grad_norm": 3.314765731105566, - "learning_rate": 5.362954047371537e-07, - "loss": 0.6156, - "num_input_tokens_seen": 135768465, - "step": 6391 - }, - { - "epoch": 0.7685925569650695, - "flos": 20130045659760.0, - "grad_norm": 3.6677792046173177, - "learning_rate": 5.357646751910627e-07, - "loss": 0.7146, - "num_input_tokens_seen": 135789365, - "step": 6392 - }, - { - "epoch": 0.7687127998557085, - "flos": 17971290676440.0, - "grad_norm": 4.948581091456983, - "learning_rate": 5.352341677683061e-07, - "loss": 0.7834, - "num_input_tokens_seen": 135810385, - "step": 6393 - }, - { - "epoch": 0.7688330427463477, - "flos": 18430315403280.0, - "grad_norm": 3.9143992144027413, - "learning_rate": 5.347038825493617e-07, - "loss": 0.7682, - "num_input_tokens_seen": 135831635, - "step": 6394 - }, - { - "epoch": 0.7689532856369867, - "flos": 15510390779880.0, - "grad_norm": 3.7257955789640453, - "learning_rate": 5.341738196146732e-07, - "loss": 0.6532, - "num_input_tokens_seen": 135849700, - "step": 6395 - }, - { - "epoch": 0.7690735285276258, - "flos": 18402965689200.0, - "grad_norm": 3.6205854172257466, - "learning_rate": 5.336439790446503e-07, - "loss": 0.707, - "num_input_tokens_seen": 135868520, - "step": 6396 - }, - { - "epoch": 0.769193771418265, - "flos": 40327196960640.0, - "grad_norm": 4.14233708523461, - "learning_rate": 5.331143609196711e-07, - "loss": 0.6012, - "num_input_tokens_seen": 135892055, - "step": 6397 - }, - { - "epoch": 0.769314014308904, - "flos": 27448674573120.0, - "grad_norm": 4.607229373495528, - "learning_rate": 5.325849653200758e-07, - "loss": 0.7611, - "num_input_tokens_seen": 135915725, - "step": 6398 - }, - { - "epoch": 0.7694342571995431, - "flos": 15082040062920.0, - "grad_norm": 2.8240195978419416, - "learning_rate": 5.32055792326175e-07, - "loss": 0.7472, - "num_input_tokens_seen": 135933870, - "step": 6399 - }, - { - "epoch": 0.7695545000901821, - "flos": 17728594185360.0, - "grad_norm": 5.32219627965145, - "learning_rate": 5.315268420182437e-07, - "loss": 0.7117, - "num_input_tokens_seen": 135952265, - "step": 6400 - }, - { - "epoch": 0.7696747429808213, - "flos": 20535953956440.0, - "grad_norm": 6.763103905188286, - "learning_rate": 5.309981144765221e-07, - "loss": 0.7427, - "num_input_tokens_seen": 135972130, - "step": 6401 - }, - { - "epoch": 0.7697949858714603, - "flos": 8330948253840.0, - "grad_norm": 5.154455624094723, - "learning_rate": 5.304696097812196e-07, - "loss": 0.7348, - "num_input_tokens_seen": 135988450, - "step": 6402 - }, - { - "epoch": 0.7699152287620994, - "flos": 19564313173200.0, - "grad_norm": 6.466071269885318, - "learning_rate": 5.299413280125078e-07, - "loss": 0.5732, - "num_input_tokens_seen": 136006480, - "step": 6403 - }, - { - "epoch": 0.7700354716527386, - "flos": 12057528897120.0, - "grad_norm": 4.384518323691456, - "learning_rate": 5.294132692505284e-07, - "loss": 0.7136, - "num_input_tokens_seen": 136024610, - "step": 6404 - }, - { - "epoch": 0.7701557145433776, - "flos": 14053768593960.0, - "grad_norm": 4.270436685642815, - "learning_rate": 5.288854335753861e-07, - "loss": 0.7643, - "num_input_tokens_seen": 136042590, - "step": 6405 - }, - { - "epoch": 0.7702759574340167, - "flos": 23262625963560.0, - "grad_norm": 3.7784004508576134, - "learning_rate": 5.283578210671551e-07, - "loss": 0.7485, - "num_input_tokens_seen": 136064550, - "step": 6406 - }, - { - "epoch": 
0.7703962003246558, - "flos": 12246317459040.0, - "grad_norm": 4.093815460846497, - "learning_rate": 5.278304318058719e-07, - "loss": 0.7474, - "num_input_tokens_seen": 136082125, - "step": 6407 - }, - { - "epoch": 0.7705164432152949, - "flos": 26260198994760.0, - "grad_norm": 4.366880457257619, - "learning_rate": 5.273032658715411e-07, - "loss": 0.7769, - "num_input_tokens_seen": 136104655, - "step": 6408 - }, - { - "epoch": 0.7706366861059339, - "flos": 17105281214280.0, - "grad_norm": 6.525644917972398, - "learning_rate": 5.267763233441347e-07, - "loss": 0.757, - "num_input_tokens_seen": 136125005, - "step": 6409 - }, - { - "epoch": 0.7707569289965731, - "flos": 16782815098080.0, - "grad_norm": 9.153886196983455, - "learning_rate": 5.26249604303588e-07, - "loss": 0.6793, - "num_input_tokens_seen": 136143230, - "step": 6410 - }, - { - "epoch": 0.7708771718872122, - "flos": 12705627125520.0, - "grad_norm": 4.878144448319155, - "learning_rate": 5.257231088298057e-07, - "loss": 0.7636, - "num_input_tokens_seen": 136161360, - "step": 6411 - }, - { - "epoch": 0.7709974147778512, - "flos": 53279288112480.0, - "grad_norm": 0.840504940492723, - "learning_rate": 5.25196837002655e-07, - "loss": 0.5601, - "num_input_tokens_seen": 136220790, - "step": 6412 - }, - { - "epoch": 0.7711176576684904, - "flos": 29312059874640.0, - "grad_norm": 4.5355553529440185, - "learning_rate": 5.24670788901971e-07, - "loss": 0.6821, - "num_input_tokens_seen": 136243600, - "step": 6413 - }, - { - "epoch": 0.7712379005591294, - "flos": 27178090148640.0, - "grad_norm": 4.200906501308885, - "learning_rate": 5.241449646075557e-07, - "loss": 0.6547, - "num_input_tokens_seen": 136266545, - "step": 6414 - }, - { - "epoch": 0.7713581434497685, - "flos": 16668477288000.0, - "grad_norm": 7.081022296958225, - "learning_rate": 5.236193641991762e-07, - "loss": 0.7085, - "num_input_tokens_seen": 136284195, - "step": 6415 - }, - { - "epoch": 0.7714783863404077, - "flos": 17646893302680.0, - "grad_norm": 5.744419209178701, - "learning_rate": 5.23093987756565e-07, - "loss": 0.6815, - "num_input_tokens_seen": 136302610, - "step": 6416 - }, - { - "epoch": 0.7715986292310467, - "flos": 15400896943680.0, - "grad_norm": 3.587805566833352, - "learning_rate": 5.225688353594217e-07, - "loss": 0.745, - "num_input_tokens_seen": 136321960, - "step": 6417 - }, - { - "epoch": 0.7717188721216858, - "flos": 15054753668760.0, - "grad_norm": 5.105451004476142, - "learning_rate": 5.220439070874108e-07, - "loss": 0.7567, - "num_input_tokens_seen": 136340920, - "step": 6418 - }, - { - "epoch": 0.7718391150123249, - "flos": 19240738958400.0, - "grad_norm": 4.325822679191398, - "learning_rate": 5.215192030201652e-07, - "loss": 0.6971, - "num_input_tokens_seen": 136361630, - "step": 6419 - }, - { - "epoch": 0.771959357902964, - "flos": 16130822694600.0, - "grad_norm": 3.2365607648260863, - "learning_rate": 5.209947232372798e-07, - "loss": 0.844, - "num_input_tokens_seen": 136378840, - "step": 6420 - }, - { - "epoch": 0.772079600793603, - "flos": 22345241369040.0, - "grad_norm": 8.912404072420507, - "learning_rate": 5.204704678183196e-07, - "loss": 0.7898, - "num_input_tokens_seen": 136397295, - "step": 6421 - }, - { - "epoch": 0.7721998436842422, - "flos": 9411797933640.0, - "grad_norm": 4.897106616513569, - "learning_rate": 5.19946436842813e-07, - "loss": 0.8353, - "num_input_tokens_seen": 136414145, - "step": 6422 - }, - { - "epoch": 0.7723200865748813, - "flos": 23965676899800.0, - "grad_norm": 4.449341140148831, - "learning_rate": 5.194226303902546e-07, 
- "loss": 0.6779, - "num_input_tokens_seen": 136433600, - "step": 6423 - }, - { - "epoch": 0.7724403294655203, - "flos": 15432805692000.0, - "grad_norm": 5.760458018848333, - "learning_rate": 5.188990485401072e-07, - "loss": 0.6887, - "num_input_tokens_seen": 136452525, - "step": 6424 - }, - { - "epoch": 0.7725605723561595, - "flos": 16161781644120.0, - "grad_norm": 3.4273435886123025, - "learning_rate": 5.183756913717954e-07, - "loss": 0.8369, - "num_input_tokens_seen": 136472020, - "step": 6425 - }, - { - "epoch": 0.7726808152467985, - "flos": 25342181201040.0, - "grad_norm": 3.7563403888306306, - "learning_rate": 5.178525589647136e-07, - "loss": 0.7169, - "num_input_tokens_seen": 136493380, - "step": 6426 - }, - { - "epoch": 0.7728010581374376, - "flos": 16320877654920.0, - "grad_norm": 4.456560489490115, - "learning_rate": 5.173296513982197e-07, - "loss": 0.7691, - "num_input_tokens_seen": 136511625, - "step": 6427 - }, - { - "epoch": 0.7729213010280768, - "flos": 19888647227040.0, - "grad_norm": 9.272031373755075, - "learning_rate": 5.168069687516398e-07, - "loss": 0.6406, - "num_input_tokens_seen": 136531115, - "step": 6428 - }, - { - "epoch": 0.7730415439187158, - "flos": 13245529576080.0, - "grad_norm": 3.3056930563259503, - "learning_rate": 5.16284511104263e-07, - "loss": 0.6969, - "num_input_tokens_seen": 136549970, - "step": 6429 - }, - { - "epoch": 0.7731617868093549, - "flos": 8655060687960.0, - "grad_norm": 5.589831636790247, - "learning_rate": 5.157622785353457e-07, - "loss": 0.7632, - "num_input_tokens_seen": 136567805, - "step": 6430 - }, - { - "epoch": 0.7732820296999939, - "flos": 47332197378000.0, - "grad_norm": 0.6707757340654712, - "learning_rate": 5.152402711241113e-07, - "loss": 0.6129, - "num_input_tokens_seen": 136635430, - "step": 6431 - }, - { - "epoch": 0.7734022725906331, - "flos": 18538574501040.0, - "grad_norm": 4.93181969043723, - "learning_rate": 5.147184889497465e-07, - "loss": 0.8228, - "num_input_tokens_seen": 136654620, - "step": 6432 - }, - { - "epoch": 0.7735225154812722, - "flos": 12651402596760.0, - "grad_norm": 5.768909707550222, - "learning_rate": 5.141969320914072e-07, - "loss": 0.7839, - "num_input_tokens_seen": 136671845, - "step": 6433 - }, - { - "epoch": 0.7736427583719112, - "flos": 23960896245840.0, - "grad_norm": 8.184464452487006, - "learning_rate": 5.136756006282113e-07, - "loss": 0.6045, - "num_input_tokens_seen": 136690230, - "step": 6434 - }, - { - "epoch": 0.7737630012625504, - "flos": 14510228864040.0, - "grad_norm": 4.260270891128392, - "learning_rate": 5.131544946392446e-07, - "loss": 0.8482, - "num_input_tokens_seen": 136705230, - "step": 6435 - }, - { - "epoch": 0.7738832441531894, - "flos": 26471524956840.0, - "grad_norm": 15.802920609959651, - "learning_rate": 5.126336142035592e-07, - "loss": 0.6271, - "num_input_tokens_seen": 136724985, - "step": 6436 - }, - { - "epoch": 0.7740034870438285, - "flos": 9733694170560.0, - "grad_norm": 5.631239926819254, - "learning_rate": 5.121129594001721e-07, - "loss": 0.7134, - "num_input_tokens_seen": 136738970, - "step": 6437 - }, - { - "epoch": 0.7741237299344677, - "flos": 16158235728600.0, - "grad_norm": 3.331202413995621, - "learning_rate": 5.115925303080661e-07, - "loss": 0.8002, - "num_input_tokens_seen": 136758400, - "step": 6438 - }, - { - "epoch": 0.7742439728251067, - "flos": 14514376318800.0, - "grad_norm": 3.148646042498258, - "learning_rate": 5.110723270061899e-07, - "loss": 0.7699, - "num_input_tokens_seen": 136774610, - "step": 6439 - }, - { - "epoch": 0.7743642157157458, - 
"flos": 12164901516000.0, - "grad_norm": 5.574819402805921, - "learning_rate": 5.105523495734572e-07, - "loss": 0.7833, - "num_input_tokens_seen": 136791730, - "step": 6440 - }, - { - "epoch": 0.7744844586063849, - "flos": 14839945111080.0, - "grad_norm": 3.1108428710769487, - "learning_rate": 5.100325980887499e-07, - "loss": 0.728, - "num_input_tokens_seen": 136811375, - "step": 6441 - }, - { - "epoch": 0.774604701497024, - "flos": 16810101492240.0, - "grad_norm": 3.62665112480405, - "learning_rate": 5.095130726309116e-07, - "loss": 0.8142, - "num_input_tokens_seen": 136831270, - "step": 6442 - }, - { - "epoch": 0.774724944387663, - "flos": 44435475013920.0, - "grad_norm": 0.8601702738354086, - "learning_rate": 5.089937732787559e-07, - "loss": 0.6572, - "num_input_tokens_seen": 136895550, - "step": 6443 - }, - { - "epoch": 0.7748451872783022, - "flos": 19618126122480.0, - "grad_norm": 5.13065751589503, - "learning_rate": 5.084747001110592e-07, - "loss": 0.6437, - "num_input_tokens_seen": 136914895, - "step": 6444 - }, - { - "epoch": 0.7749654301689413, - "flos": 22266073283160.0, - "grad_norm": 2.7587559628821894, - "learning_rate": 5.07955853206564e-07, - "loss": 0.6843, - "num_input_tokens_seen": 136939320, - "step": 6445 - }, - { - "epoch": 0.7750856730595803, - "flos": 31769192235960.0, - "grad_norm": 5.2712218652797125, - "learning_rate": 5.074372326439807e-07, - "loss": 0.6901, - "num_input_tokens_seen": 136962050, - "step": 6446 - }, - { - "epoch": 0.7752059159502195, - "flos": 12868047432120.0, - "grad_norm": 3.3105831718142955, - "learning_rate": 5.069188385019814e-07, - "loss": 0.7201, - "num_input_tokens_seen": 136979470, - "step": 6447 - }, - { - "epoch": 0.7753261588408585, - "flos": 9195533017800.0, - "grad_norm": 12.82436807425248, - "learning_rate": 5.064006708592077e-07, - "loss": 0.577, - "num_input_tokens_seen": 136995435, - "step": 6448 - }, - { - "epoch": 0.7754464017314976, - "flos": 12165408075360.0, - "grad_norm": 3.6504706980667367, - "learning_rate": 5.058827297942641e-07, - "loss": 0.7388, - "num_input_tokens_seen": 137010260, - "step": 6449 - }, - { - "epoch": 0.7755666446221368, - "flos": 14406465480600.0, - "grad_norm": 8.731445694780357, - "learning_rate": 5.053650153857237e-07, - "loss": 0.7152, - "num_input_tokens_seen": 137028990, - "step": 6450 - }, - { - "epoch": 0.7756868875127758, - "flos": 13647132118200.0, - "grad_norm": 5.675601124120439, - "learning_rate": 5.048475277121214e-07, - "loss": 0.6888, - "num_input_tokens_seen": 137045925, - "step": 6451 - }, - { - "epoch": 0.7758071304034149, - "flos": 20834362994400.0, - "grad_norm": 4.403611495660342, - "learning_rate": 5.043302668519598e-07, - "loss": 0.7587, - "num_input_tokens_seen": 137064980, - "step": 6452 - }, - { - "epoch": 0.775927373294054, - "flos": 15055165248240.0, - "grad_norm": 4.569605272136713, - "learning_rate": 5.038132328837079e-07, - "loss": 0.7104, - "num_input_tokens_seen": 137083090, - "step": 6453 - }, - { - "epoch": 0.7760476161846931, - "flos": 16483867840800.0, - "grad_norm": 10.298110818289986, - "learning_rate": 5.032964258857993e-07, - "loss": 0.7238, - "num_input_tokens_seen": 137102905, - "step": 6454 - }, - { - "epoch": 0.7761678590753321, - "flos": 35818397295240.0, - "grad_norm": 3.285768431460237, - "learning_rate": 5.027798459366329e-07, - "loss": 0.6675, - "num_input_tokens_seen": 137127990, - "step": 6455 - }, - { - "epoch": 0.7762881019659713, - "flos": 19185912890400.0, - "grad_norm": 5.3478552681648255, - "learning_rate": 5.02263493114573e-07, - "loss": 
0.6247, - "num_input_tokens_seen": 137149505, - "step": 6456 - }, - { - "epoch": 0.7764083448566104, - "flos": 15048991556040.0, - "grad_norm": 7.6606843524135915, - "learning_rate": 5.017473674979502e-07, - "loss": 0.7482, - "num_input_tokens_seen": 137165250, - "step": 6457 - }, - { - "epoch": 0.7765285877472494, - "flos": 49619216062440.0, - "grad_norm": 5.527302858450773, - "learning_rate": 5.01231469165061e-07, - "loss": 0.5916, - "num_input_tokens_seen": 137220795, - "step": 6458 - }, - { - "epoch": 0.7766488306378886, - "flos": 45216649257360.0, - "grad_norm": 0.948907275174996, - "learning_rate": 5.007157981941663e-07, - "loss": 0.6072, - "num_input_tokens_seen": 137285875, - "step": 6459 - }, - { - "epoch": 0.7767690735285276, - "flos": 46401515600280.0, - "grad_norm": 1.1664000217810577, - "learning_rate": 5.002003546634928e-07, - "loss": 0.6915, - "num_input_tokens_seen": 137341695, - "step": 6460 - }, - { - "epoch": 0.7768893164191667, - "flos": 15270448705320.0, - "grad_norm": 3.6041954495083695, - "learning_rate": 4.996851386512331e-07, - "loss": 0.7514, - "num_input_tokens_seen": 137360120, - "step": 6461 - }, - { - "epoch": 0.7770095593098058, - "flos": 15135979652040.0, - "grad_norm": 3.625637129993661, - "learning_rate": 4.991701502355444e-07, - "loss": 0.8193, - "num_input_tokens_seen": 137380305, - "step": 6462 - }, - { - "epoch": 0.7771298022004449, - "flos": 18105918029520.0, - "grad_norm": 2.457018150209763, - "learning_rate": 4.986553894945518e-07, - "loss": 0.7549, - "num_input_tokens_seen": 137401235, - "step": 6463 - }, - { - "epoch": 0.777250045091084, - "flos": 18321613066080.0, - "grad_norm": 9.89935775369023, - "learning_rate": 4.981408565063416e-07, - "loss": 0.8619, - "num_input_tokens_seen": 137420900, - "step": 6464 - }, - { - "epoch": 0.777370287981723, - "flos": 14703133220760.0, - "grad_norm": 4.447682906834309, - "learning_rate": 4.976265513489701e-07, - "loss": 0.7418, - "num_input_tokens_seen": 137440590, - "step": 6465 - }, - { - "epoch": 0.7774905308723622, - "flos": 15885530086800.0, - "grad_norm": 4.474281781916429, - "learning_rate": 4.971124741004562e-07, - "loss": 0.8018, - "num_input_tokens_seen": 137459310, - "step": 6466 - }, - { - "epoch": 0.7776107737630013, - "flos": 11679476873880.0, - "grad_norm": 5.176729902743808, - "learning_rate": 4.965986248387846e-07, - "loss": 0.7495, - "num_input_tokens_seen": 137477345, - "step": 6467 - }, - { - "epoch": 0.7777310166536403, - "flos": 18160110898320.0, - "grad_norm": 2.78030207906963, - "learning_rate": 4.960850036419073e-07, - "loss": 0.7597, - "num_input_tokens_seen": 137496165, - "step": 6468 - }, - { - "epoch": 0.7778512595442795, - "flos": 12595690049880.0, - "grad_norm": 3.330878895885465, - "learning_rate": 4.955716105877378e-07, - "loss": 0.774, - "num_input_tokens_seen": 137514655, - "step": 6469 - }, - { - "epoch": 0.7779715024349185, - "flos": 12948101996880.0, - "grad_norm": 2.9543201107174144, - "learning_rate": 4.950584457541598e-07, - "loss": 0.8249, - "num_input_tokens_seen": 137532840, - "step": 6470 - }, - { - "epoch": 0.7780917453255576, - "flos": 17809345269240.0, - "grad_norm": 4.477806038421563, - "learning_rate": 4.945455092190183e-07, - "loss": 0.8117, - "num_input_tokens_seen": 137553815, - "step": 6471 - }, - { - "epoch": 0.7782119882161967, - "flos": 41533849847400.0, - "grad_norm": 0.7870195209718998, - "learning_rate": 4.940328010601271e-07, - "loss": 0.5697, - "num_input_tokens_seen": 137618450, - "step": 6472 - }, - { - "epoch": 0.7783322311068358, - "flos": 
34440658255560.0, - "grad_norm": 3.379548151862079, - "learning_rate": 4.935203213552621e-07, - "loss": 0.7461, - "num_input_tokens_seen": 137641910, - "step": 6473 - }, - { - "epoch": 0.7784524739974749, - "flos": 13916608444080.0, - "grad_norm": 5.080531656273819, - "learning_rate": 4.930080701821662e-07, - "loss": 0.6444, - "num_input_tokens_seen": 137659095, - "step": 6474 - }, - { - "epoch": 0.778572716888114, - "flos": 18160300858080.0, - "grad_norm": 4.012172060485983, - "learning_rate": 4.92496047618548e-07, - "loss": 0.7528, - "num_input_tokens_seen": 137678575, - "step": 6475 - }, - { - "epoch": 0.7786929597787531, - "flos": 14671446092160.0, - "grad_norm": 4.396955928315279, - "learning_rate": 4.919842537420811e-07, - "loss": 0.7672, - "num_input_tokens_seen": 137695410, - "step": 6476 - }, - { - "epoch": 0.7788132026693921, - "flos": 16000121176560.0, - "grad_norm": 4.509551402109372, - "learning_rate": 4.91472688630404e-07, - "loss": 0.7845, - "num_input_tokens_seen": 137715870, - "step": 6477 - }, - { - "epoch": 0.7789334455600313, - "flos": 8088600022320.0, - "grad_norm": 4.342195081618404, - "learning_rate": 4.909613523611202e-07, - "loss": 0.7305, - "num_input_tokens_seen": 137732470, - "step": 6478 - }, - { - "epoch": 0.7790536884506704, - "flos": 21049804751280.0, - "grad_norm": 4.117019794675039, - "learning_rate": 4.904502450117991e-07, - "loss": 0.7235, - "num_input_tokens_seen": 137753150, - "step": 6479 - }, - { - "epoch": 0.7791739313413094, - "flos": 8007437358960.0, - "grad_norm": 4.641957306908386, - "learning_rate": 4.899393666599762e-07, - "loss": 0.7097, - "num_input_tokens_seen": 137769445, - "step": 6480 - }, - { - "epoch": 0.7792941742319486, - "flos": 10676117302080.0, - "grad_norm": 3.895419556102578, - "learning_rate": 4.894287173831506e-07, - "loss": 0.7073, - "num_input_tokens_seen": 137785125, - "step": 6481 - }, - { - "epoch": 0.7794144171225876, - "flos": 17026366408080.0, - "grad_norm": 3.3654350092541785, - "learning_rate": 4.889182972587877e-07, - "loss": 0.8262, - "num_input_tokens_seen": 137804140, - "step": 6482 - }, - { - "epoch": 0.7795346600132267, - "flos": 15729853351680.0, - "grad_norm": 6.291932999319043, - "learning_rate": 4.884081063643177e-07, - "loss": 0.6572, - "num_input_tokens_seen": 137822520, - "step": 6483 - }, - { - "epoch": 0.7796549029038659, - "flos": 51659264161200.0, - "grad_norm": 0.8827295517563898, - "learning_rate": 4.878981447771353e-07, - "loss": 0.5589, - "num_input_tokens_seen": 137876620, - "step": 6484 - }, - { - "epoch": 0.7797751457945049, - "flos": 17567123677560.0, - "grad_norm": 4.963182316914975, - "learning_rate": 4.873884125746035e-07, - "loss": 0.7193, - "num_input_tokens_seen": 137898015, - "step": 6485 - }, - { - "epoch": 0.779895388685144, - "flos": 16594026536160.0, - "grad_norm": 4.249275370138378, - "learning_rate": 4.868789098340456e-07, - "loss": 0.7042, - "num_input_tokens_seen": 137915640, - "step": 6486 - }, - { - "epoch": 0.7800156315757831, - "flos": 17403531952440.0, - "grad_norm": 18.99320701834729, - "learning_rate": 4.863696366327543e-07, - "loss": 0.7002, - "num_input_tokens_seen": 137934530, - "step": 6487 - }, - { - "epoch": 0.7801358744664222, - "flos": 19372516915080.0, - "grad_norm": 4.112153365069396, - "learning_rate": 4.85860593047986e-07, - "loss": 0.7759, - "num_input_tokens_seen": 137954315, - "step": 6488 - }, - { - "epoch": 0.7802561173570612, - "flos": 19293190529400.0, - "grad_norm": 4.333760142703713, - "learning_rate": 4.853517791569613e-07, - "loss": 0.7297, - 
"num_input_tokens_seen": 137976215, - "step": 6489 - }, - { - "epoch": 0.7803763602477004, - "flos": 29662857163680.0, - "grad_norm": 7.396509388573363, - "learning_rate": 4.848431950368684e-07, - "loss": 0.6515, - "num_input_tokens_seen": 137998495, - "step": 6490 - }, - { - "epoch": 0.7804966031383395, - "flos": 49403426046000.0, - "grad_norm": 0.7506962564332612, - "learning_rate": 4.843348407648569e-07, - "loss": 0.5771, - "num_input_tokens_seen": 138059495, - "step": 6491 - }, - { - "epoch": 0.7806168460289785, - "flos": 12941326765440.0, - "grad_norm": 4.402009479115605, - "learning_rate": 4.838267164180457e-07, - "loss": 0.8122, - "num_input_tokens_seen": 138074885, - "step": 6492 - }, - { - "epoch": 0.7807370889196176, - "flos": 17534233470480.0, - "grad_norm": 5.349618110217859, - "learning_rate": 4.833188220735156e-07, - "loss": 0.8242, - "num_input_tokens_seen": 138094275, - "step": 6493 - }, - { - "epoch": 0.7808573318102567, - "flos": 13866246430440.0, - "grad_norm": 4.850242742382507, - "learning_rate": 4.828111578083152e-07, - "loss": 0.7366, - "num_input_tokens_seen": 138110900, - "step": 6494 - }, - { - "epoch": 0.7809775747008958, - "flos": 17559493627200.0, - "grad_norm": 5.191643955595368, - "learning_rate": 4.823037236994556e-07, - "loss": 0.797, - "num_input_tokens_seen": 138128785, - "step": 6495 - }, - { - "epoch": 0.7810978175915348, - "flos": 50539291753560.0, - "grad_norm": 0.7803693767285734, - "learning_rate": 4.817965198239136e-07, - "loss": 0.5837, - "num_input_tokens_seen": 138194965, - "step": 6496 - }, - { - "epoch": 0.781218060482174, - "flos": 14349074955840.0, - "grad_norm": 5.463628321549257, - "learning_rate": 4.812895462586331e-07, - "loss": 0.7063, - "num_input_tokens_seen": 138212510, - "step": 6497 - }, - { - "epoch": 0.7813383033728131, - "flos": 18779213094720.0, - "grad_norm": 5.0625162006636995, - "learning_rate": 4.807828030805207e-07, - "loss": 0.8033, - "num_input_tokens_seen": 138231220, - "step": 6498 - }, - { - "epoch": 0.7814585462634521, - "flos": 14974192544640.0, - "grad_norm": 11.256836911775029, - "learning_rate": 4.802762903664495e-07, - "loss": 0.6651, - "num_input_tokens_seen": 138250120, - "step": 6499 - }, - { - "epoch": 0.7815787891540913, - "flos": 16319073037200.0, - "grad_norm": 5.9145626929947435, - "learning_rate": 4.797700081932565e-07, - "loss": 0.705, - "num_input_tokens_seen": 138267705, - "step": 6500 - }, - { - "epoch": 0.7816990320447303, - "flos": 16539042168360.0, - "grad_norm": 4.669906354532971, - "learning_rate": 4.792639566377442e-07, - "loss": 0.7996, - "num_input_tokens_seen": 138284835, - "step": 6501 - }, - { - "epoch": 0.7818192749353694, - "flos": 18267356877360.0, - "grad_norm": 3.2964173491971387, - "learning_rate": 4.78758135776681e-07, - "loss": 0.7604, - "num_input_tokens_seen": 138304410, - "step": 6502 - }, - { - "epoch": 0.7819395178260086, - "flos": 17377258677000.0, - "grad_norm": 5.881095159359139, - "learning_rate": 4.782525456867989e-07, - "loss": 0.7682, - "num_input_tokens_seen": 138322985, - "step": 6503 - }, - { - "epoch": 0.7820597607166476, - "flos": 16998731754360.0, - "grad_norm": 2.8604379147941823, - "learning_rate": 4.777471864447959e-07, - "loss": 0.8173, - "num_input_tokens_seen": 138343445, - "step": 6504 - }, - { - "epoch": 0.7821800036072867, - "flos": 16323695391360.0, - "grad_norm": 5.60043481092416, - "learning_rate": 4.772420581273344e-07, - "loss": 0.7754, - "num_input_tokens_seen": 138360650, - "step": 6505 - }, - { - "epoch": 0.7823002464979258, - "flos": 
15757393025520.0, - "grad_norm": 7.97192176078382, - "learning_rate": 4.7673716081104134e-07, - "loss": 0.7563, - "num_input_tokens_seen": 138380545, - "step": 6506 - }, - { - "epoch": 0.7824204893885649, - "flos": 17971227356520.0, - "grad_norm": 3.1752944329176427, - "learning_rate": 4.762324945725109e-07, - "loss": 0.8288, - "num_input_tokens_seen": 138399710, - "step": 6507 - }, - { - "epoch": 0.782540732279204, - "flos": 20101904446680.0, - "grad_norm": 7.708844582039455, - "learning_rate": 4.7572805948829844e-07, - "loss": 0.7499, - "num_input_tokens_seen": 138419690, - "step": 6508 - }, - { - "epoch": 0.7826609751698431, - "flos": 17836220083920.0, - "grad_norm": 3.5073201061183776, - "learning_rate": 4.7522385563492795e-07, - "loss": 0.6933, - "num_input_tokens_seen": 138439710, - "step": 6509 - }, - { - "epoch": 0.7827812180604822, - "flos": 17566363838520.0, - "grad_norm": 3.167500151985725, - "learning_rate": 4.747198830888863e-07, - "loss": 0.6763, - "num_input_tokens_seen": 138459300, - "step": 6510 - }, - { - "epoch": 0.7829014609511212, - "flos": 20131217078280.0, - "grad_norm": 6.495228703490319, - "learning_rate": 4.742161419266251e-07, - "loss": 0.6731, - "num_input_tokens_seen": 138478180, - "step": 6511 - }, - { - "epoch": 0.7830217038417604, - "flos": 21426337096440.0, - "grad_norm": 4.585978675150545, - "learning_rate": 4.7371263222456304e-07, - "loss": 0.6203, - "num_input_tokens_seen": 138495220, - "step": 6512 - }, - { - "epoch": 0.7831419467323995, - "flos": 44926471809000.0, - "grad_norm": 0.8268031601034785, - "learning_rate": 4.7320935405908004e-07, - "loss": 0.6339, - "num_input_tokens_seen": 138555810, - "step": 6513 - }, - { - "epoch": 0.7832621896230385, - "flos": 14379527346000.0, - "grad_norm": 4.911260876642321, - "learning_rate": 4.7270630750652475e-07, - "loss": 0.8114, - "num_input_tokens_seen": 138571485, - "step": 6514 - }, - { - "epoch": 0.7833824325136777, - "flos": 18321613066080.0, - "grad_norm": 5.524654443213525, - "learning_rate": 4.7220349264320746e-07, - "loss": 0.7966, - "num_input_tokens_seen": 138590290, - "step": 6515 - }, - { - "epoch": 0.7835026754043167, - "flos": 50736121945320.0, - "grad_norm": 1.0450831208641136, - "learning_rate": 4.71700909545407e-07, - "loss": 0.584, - "num_input_tokens_seen": 138652955, - "step": 6516 - }, - { - "epoch": 0.7836229182949558, - "flos": 14514218019000.0, - "grad_norm": 4.883192953928092, - "learning_rate": 4.711985582893627e-07, - "loss": 0.7497, - "num_input_tokens_seen": 138671195, - "step": 6517 - }, - { - "epoch": 0.783743161185595, - "flos": 16809341653200.0, - "grad_norm": 6.251510132501393, - "learning_rate": 4.706964389512811e-07, - "loss": 0.7021, - "num_input_tokens_seen": 138690950, - "step": 6518 - }, - { - "epoch": 0.783863404076234, - "flos": 9033271011000.0, - "grad_norm": 4.409673564705853, - "learning_rate": 4.701945516073345e-07, - "loss": 0.8568, - "num_input_tokens_seen": 138708145, - "step": 6519 - }, - { - "epoch": 0.7839836469668731, - "flos": 17755374020160.0, - "grad_norm": 4.408645952433823, - "learning_rate": 4.696928963336577e-07, - "loss": 0.7412, - "num_input_tokens_seen": 138727295, - "step": 6520 - }, - { - "epoch": 0.7841038898575122, - "flos": 45793652689680.0, - "grad_norm": 0.8737611174524214, - "learning_rate": 4.6919147320635224e-07, - "loss": 0.6252, - "num_input_tokens_seen": 138789725, - "step": 6521 - }, - { - "epoch": 0.7842241327481513, - "flos": 14758054268640.0, - "grad_norm": 3.8350554343622396, - "learning_rate": 4.6869028230148286e-07, - "loss": 
0.7014, - "num_input_tokens_seen": 138807240, - "step": 6522 - }, - { - "epoch": 0.7843443756387903, - "flos": 20590210145160.0, - "grad_norm": 7.263959139923256, - "learning_rate": 4.6818932369507957e-07, - "loss": 0.583, - "num_input_tokens_seen": 138826460, - "step": 6523 - }, - { - "epoch": 0.7844646185294295, - "flos": 15593737980480.0, - "grad_norm": 7.814962746597893, - "learning_rate": 4.676885974631386e-07, - "loss": 0.8674, - "num_input_tokens_seen": 138844540, - "step": 6524 - }, - { - "epoch": 0.7845848614200686, - "flos": 17320184751840.0, - "grad_norm": 3.9210024163127555, - "learning_rate": 4.67188103681619e-07, - "loss": 0.7937, - "num_input_tokens_seen": 138864045, - "step": 6525 - }, - { - "epoch": 0.7847051043107076, - "flos": 17132061049080.0, - "grad_norm": 4.0835488450665, - "learning_rate": 4.666878424264453e-07, - "loss": 0.6859, - "num_input_tokens_seen": 138883720, - "step": 6526 - }, - { - "epoch": 0.7848253472013467, - "flos": 13890398488560.0, - "grad_norm": 5.979694657711453, - "learning_rate": 4.661878137735069e-07, - "loss": 0.7169, - "num_input_tokens_seen": 138901630, - "step": 6527 - }, - { - "epoch": 0.7849455900919858, - "flos": 15487251840480.0, - "grad_norm": 4.67383730704541, - "learning_rate": 4.656880177986571e-07, - "loss": 0.7351, - "num_input_tokens_seen": 138919895, - "step": 6528 - }, - { - "epoch": 0.7850658329826249, - "flos": 14271426548040.0, - "grad_norm": 5.142033253630835, - "learning_rate": 4.6518845457771607e-07, - "loss": 0.7942, - "num_input_tokens_seen": 138938475, - "step": 6529 - }, - { - "epoch": 0.7851860758732639, - "flos": 9060589065120.0, - "grad_norm": 6.1236165878299476, - "learning_rate": 4.646891241864652e-07, - "loss": 0.7733, - "num_input_tokens_seen": 138956760, - "step": 6530 - }, - { - "epoch": 0.7853063187639031, - "flos": 16805035898640.0, - "grad_norm": 4.398325618859036, - "learning_rate": 4.6419002670065397e-07, - "loss": 0.7149, - "num_input_tokens_seen": 138976060, - "step": 6531 - }, - { - "epoch": 0.7854265616545422, - "flos": 12651244296960.0, - "grad_norm": 5.700627864589752, - "learning_rate": 4.6369116219599445e-07, - "loss": 0.828, - "num_input_tokens_seen": 138991765, - "step": 6532 - }, - { - "epoch": 0.7855468045451812, - "flos": 17454558825240.0, - "grad_norm": 3.8940720818367036, - "learning_rate": 4.631925307481637e-07, - "loss": 0.7677, - "num_input_tokens_seen": 139011300, - "step": 6533 - }, - { - "epoch": 0.7856670474358204, - "flos": 18808367426520.0, - "grad_norm": 9.487570813383416, - "learning_rate": 4.6269413243280533e-07, - "loss": 0.7246, - "num_input_tokens_seen": 139030440, - "step": 6534 - }, - { - "epoch": 0.7857872903264594, - "flos": 13240622282280.0, - "grad_norm": 3.408011444735227, - "learning_rate": 4.621959673255236e-07, - "loss": 0.7111, - "num_input_tokens_seen": 139046460, - "step": 6535 - }, - { - "epoch": 0.7859075332170985, - "flos": 10458997567320.0, - "grad_norm": 7.2989295174587125, - "learning_rate": 4.6169803550189135e-07, - "loss": 0.8782, - "num_input_tokens_seen": 139061875, - "step": 6536 - }, - { - "epoch": 0.7860277761077377, - "flos": 14513743119600.0, - "grad_norm": 3.519105807214817, - "learning_rate": 4.6120033703744355e-07, - "loss": 0.7477, - "num_input_tokens_seen": 139080490, - "step": 6537 - }, - { - "epoch": 0.7861480189983767, - "flos": 19348111577280.0, - "grad_norm": 7.453818490895444, - "learning_rate": 4.607028720076822e-07, - "loss": 0.7653, - "num_input_tokens_seen": 139096890, - "step": 6538 - }, - { - "epoch": 0.7862682618890158, - "flos": 
17748883728360.0, - "grad_norm": 4.609662994181014, - "learning_rate": 4.6020564048807074e-07, - "loss": 0.7146, - "num_input_tokens_seen": 139114285, - "step": 6539 - }, - { - "epoch": 0.7863885047796549, - "flos": 35003573005680.0, - "grad_norm": 3.8488082477935546, - "learning_rate": 4.5970864255403883e-07, - "loss": 0.6993, - "num_input_tokens_seen": 139135530, - "step": 6540 - }, - { - "epoch": 0.786508747670294, - "flos": 17862809958960.0, - "grad_norm": 3.306255716993152, - "learning_rate": 4.59211878280982e-07, - "loss": 0.8079, - "num_input_tokens_seen": 139154765, - "step": 6541 - }, - { - "epoch": 0.786628990560933, - "flos": 13164936792000.0, - "grad_norm": 6.722427851927102, - "learning_rate": 4.587153477442578e-07, - "loss": 0.6765, - "num_input_tokens_seen": 139170800, - "step": 6542 - }, - { - "epoch": 0.7867492334515722, - "flos": 18943058099520.0, - "grad_norm": 4.202765807505726, - "learning_rate": 4.582190510191899e-07, - "loss": 0.7924, - "num_input_tokens_seen": 139189180, - "step": 6543 - }, - { - "epoch": 0.7868694763422113, - "flos": 12083992132320.0, - "grad_norm": 3.8931178184414867, - "learning_rate": 4.5772298818106625e-07, - "loss": 0.8527, - "num_input_tokens_seen": 139204690, - "step": 6544 - }, - { - "epoch": 0.7869897192328503, - "flos": 21560742829800.0, - "grad_norm": 5.287569317013873, - "learning_rate": 4.572271593051384e-07, - "loss": 0.7029, - "num_input_tokens_seen": 139221765, - "step": 6545 - }, - { - "epoch": 0.7871099621234895, - "flos": 12489742129200.0, - "grad_norm": 3.1794508487449464, - "learning_rate": 4.567315644666245e-07, - "loss": 0.7616, - "num_input_tokens_seen": 139240280, - "step": 6546 - }, - { - "epoch": 0.7872302050141285, - "flos": 17160392221920.0, - "grad_norm": 7.993466570424874, - "learning_rate": 4.5623620374070507e-07, - "loss": 0.8384, - "num_input_tokens_seen": 139259315, - "step": 6547 - }, - { - "epoch": 0.7873504479047676, - "flos": 48634820806680.0, - "grad_norm": 0.8279854638924562, - "learning_rate": 4.557410772025263e-07, - "loss": 0.6202, - "num_input_tokens_seen": 139320985, - "step": 6548 - }, - { - "epoch": 0.7874706907954068, - "flos": 17025891508680.0, - "grad_norm": 4.38758152725681, - "learning_rate": 4.5524618492719803e-07, - "loss": 0.6326, - "num_input_tokens_seen": 139339925, - "step": 6549 - }, - { - "epoch": 0.7875909336860458, - "flos": 21103269441000.0, - "grad_norm": 3.0621993248146047, - "learning_rate": 4.54751526989795e-07, - "loss": 0.7709, - "num_input_tokens_seen": 139361485, - "step": 6550 - }, - { - "epoch": 0.7877111765766849, - "flos": 13650361434120.0, - "grad_norm": 6.285006638115994, - "learning_rate": 4.5425710346535775e-07, - "loss": 0.7907, - "num_input_tokens_seen": 139379150, - "step": 6551 - }, - { - "epoch": 0.787831419467324, - "flos": 20232795924480.0, - "grad_norm": 6.125791537462667, - "learning_rate": 4.537629144288877e-07, - "loss": 0.803, - "num_input_tokens_seen": 139396325, - "step": 6552 - }, - { - "epoch": 0.7879516623579631, - "flos": 13245497916120.0, - "grad_norm": 4.574025893604693, - "learning_rate": 4.5326895995535477e-07, - "loss": 0.7277, - "num_input_tokens_seen": 139414945, - "step": 6553 - }, - { - "epoch": 0.7880719052486022, - "flos": 14865490207440.0, - "grad_norm": 5.110180899148433, - "learning_rate": 4.527752401196907e-07, - "loss": 0.8246, - "num_input_tokens_seen": 139432680, - "step": 6554 - }, - { - "epoch": 0.7881921481392413, - "flos": 15838049129520.0, - "grad_norm": 4.68342677845615, - "learning_rate": 4.5228175499679254e-07, - "loss": 
0.6554, - "num_input_tokens_seen": 139451985, - "step": 6555 - }, - { - "epoch": 0.7883123910298804, - "flos": 50560562755320.0, - "grad_norm": 0.8668468252937661, - "learning_rate": 4.5178850466152174e-07, - "loss": 0.5563, - "num_input_tokens_seen": 139510535, - "step": 6556 - }, - { - "epoch": 0.7884326339205194, - "flos": 14109987700200.0, - "grad_norm": 9.075711426815642, - "learning_rate": 4.512954891887031e-07, - "loss": 0.801, - "num_input_tokens_seen": 139528555, - "step": 6557 - }, - { - "epoch": 0.7885528768111585, - "flos": 12975103451400.0, - "grad_norm": 5.6736502903499, - "learning_rate": 4.5080270865312806e-07, - "loss": 0.8221, - "num_input_tokens_seen": 139545470, - "step": 6558 - }, - { - "epoch": 0.7886731197017977, - "flos": 13732157296680.0, - "grad_norm": 8.781360340443326, - "learning_rate": 4.5031016312954985e-07, - "loss": 0.6934, - "num_input_tokens_seen": 139563505, - "step": 6559 - }, - { - "epoch": 0.7887933625924367, - "flos": 24505832630040.0, - "grad_norm": 7.580387619293636, - "learning_rate": 4.498178526926886e-07, - "loss": 0.7326, - "num_input_tokens_seen": 139584090, - "step": 6560 - }, - { - "epoch": 0.7889136054830758, - "flos": 12408642785760.0, - "grad_norm": 5.48738489399582, - "learning_rate": 4.4932577741722635e-07, - "loss": 0.7195, - "num_input_tokens_seen": 139602340, - "step": 6561 - }, - { - "epoch": 0.7890338483737149, - "flos": 21589390602240.0, - "grad_norm": 6.594703930737302, - "learning_rate": 4.4883393737780985e-07, - "loss": 0.7407, - "num_input_tokens_seen": 139623010, - "step": 6562 - }, - { - "epoch": 0.789154091264354, - "flos": 14593037845320.0, - "grad_norm": 8.04467174403271, - "learning_rate": 4.4834233264905254e-07, - "loss": 0.7718, - "num_input_tokens_seen": 139639745, - "step": 6563 - }, - { - "epoch": 0.789274334154993, - "flos": 10572068979000.0, - "grad_norm": 4.954156714007768, - "learning_rate": 4.478509633055294e-07, - "loss": 0.6909, - "num_input_tokens_seen": 139657175, - "step": 6564 - }, - { - "epoch": 0.7893945770456322, - "flos": 15965774611320.0, - "grad_norm": 6.111292545438136, - "learning_rate": 4.473598294217813e-07, - "loss": 0.7707, - "num_input_tokens_seen": 139672320, - "step": 6565 - }, - { - "epoch": 0.7895148199362713, - "flos": 15162601187040.0, - "grad_norm": 3.734830820523812, - "learning_rate": 4.468689310723124e-07, - "loss": 0.7085, - "num_input_tokens_seen": 139689855, - "step": 6566 - }, - { - "epoch": 0.7896350628269103, - "flos": 12165344755440.0, - "grad_norm": 5.057972142973573, - "learning_rate": 4.463782683315913e-07, - "loss": 0.7673, - "num_input_tokens_seen": 139708580, - "step": 6567 - }, - { - "epoch": 0.7897553057175495, - "flos": 16567120061520.0, - "grad_norm": 5.065507160489338, - "learning_rate": 4.458878412740523e-07, - "loss": 0.7146, - "num_input_tokens_seen": 139727080, - "step": 6568 - }, - { - "epoch": 0.7898755486081885, - "flos": 10571847359280.0, - "grad_norm": 45.63223936572969, - "learning_rate": 4.453976499740919e-07, - "loss": 0.7481, - "num_input_tokens_seen": 139744445, - "step": 6569 - }, - { - "epoch": 0.7899957914988276, - "flos": 12570841472640.0, - "grad_norm": 3.03466840085727, - "learning_rate": 4.4490769450607215e-07, - "loss": 0.7705, - "num_input_tokens_seen": 139761790, - "step": 6570 - }, - { - "epoch": 0.7901160343894668, - "flos": 30362235544560.0, - "grad_norm": 11.594797585757481, - "learning_rate": 4.4441797494431845e-07, - "loss": 0.7155, - "num_input_tokens_seen": 139783315, - "step": 6571 - }, - { - "epoch": 0.7902362772801058, - "flos": 
12273698833080.0, - "grad_norm": 4.52281677520008, - "learning_rate": 4.439284913631207e-07, - "loss": 0.7618, - "num_input_tokens_seen": 139800245, - "step": 6572 - }, - { - "epoch": 0.7903565201707449, - "flos": 19888235647560.0, - "grad_norm": 3.0424609954264668, - "learning_rate": 4.434392438367347e-07, - "loss": 0.8167, - "num_input_tokens_seen": 139819390, - "step": 6573 - }, - { - "epoch": 0.790476763061384, - "flos": 22773212166480.0, - "grad_norm": 4.039613077501108, - "learning_rate": 4.4295023243937677e-07, - "loss": 0.721, - "num_input_tokens_seen": 139839315, - "step": 6574 - }, - { - "epoch": 0.7905970059520231, - "flos": 16160325285960.0, - "grad_norm": 2.5181820591199386, - "learning_rate": 4.4246145724523123e-07, - "loss": 0.7948, - "num_input_tokens_seen": 139856780, - "step": 6575 - }, - { - "epoch": 0.7907172488426621, - "flos": 15027530594520.0, - "grad_norm": 5.605304399267471, - "learning_rate": 4.41972918328444e-07, - "loss": 0.7452, - "num_input_tokens_seen": 139873935, - "step": 6576 - }, - { - "epoch": 0.7908374917333013, - "flos": 22075575083400.0, - "grad_norm": 3.988100830540535, - "learning_rate": 4.4148461576312646e-07, - "loss": 0.7715, - "num_input_tokens_seen": 139893320, - "step": 6577 - }, - { - "epoch": 0.7909577346239404, - "flos": 15351896308320.0, - "grad_norm": 6.103663435710211, - "learning_rate": 4.4099654962335343e-07, - "loss": 0.7323, - "num_input_tokens_seen": 139913490, - "step": 6578 - }, - { - "epoch": 0.7910779775145794, - "flos": 19238079521760.0, - "grad_norm": 3.7766601196349034, - "learning_rate": 4.405087199831636e-07, - "loss": 0.7341, - "num_input_tokens_seen": 139933450, - "step": 6579 - }, - { - "epoch": 0.7911982204052186, - "flos": 16512262333560.0, - "grad_norm": 3.605532760299884, - "learning_rate": 4.400211269165619e-07, - "loss": 0.6603, - "num_input_tokens_seen": 139949625, - "step": 6580 - }, - { - "epoch": 0.7913184632958576, - "flos": 16918012330440.0, - "grad_norm": 4.580363380642464, - "learning_rate": 4.3953377049751416e-07, - "loss": 0.7611, - "num_input_tokens_seen": 139969770, - "step": 6581 - }, - { - "epoch": 0.7914387061864967, - "flos": 8924568673800.0, - "grad_norm": 5.234849230791295, - "learning_rate": 4.390466507999537e-07, - "loss": 0.758, - "num_input_tokens_seen": 139985240, - "step": 6582 - }, - { - "epoch": 0.7915589490771359, - "flos": 12840792697920.0, - "grad_norm": 7.487750922633213, - "learning_rate": 4.385597678977748e-07, - "loss": 0.7476, - "num_input_tokens_seen": 140003795, - "step": 6583 - }, - { - "epoch": 0.7916791919677749, - "flos": 18752116660320.0, - "grad_norm": 3.0011109838101615, - "learning_rate": 4.3807312186483726e-07, - "loss": 0.7312, - "num_input_tokens_seen": 140024235, - "step": 6584 - }, - { - "epoch": 0.791799434858414, - "flos": 13759348710960.0, - "grad_norm": 7.395356871251977, - "learning_rate": 4.375867127749655e-07, - "loss": 0.7695, - "num_input_tokens_seen": 140042230, - "step": 6585 - }, - { - "epoch": 0.7919196777490531, - "flos": 18915740045400.0, - "grad_norm": 7.047688304040425, - "learning_rate": 4.3710054070194744e-07, - "loss": 0.6611, - "num_input_tokens_seen": 140061645, - "step": 6586 - }, - { - "epoch": 0.7920399206396922, - "flos": 8655187327800.0, - "grad_norm": 7.926200934551358, - "learning_rate": 4.3661460571953455e-07, - "loss": 0.6367, - "num_input_tokens_seen": 140078100, - "step": 6587 - }, - { - "epoch": 0.7921601635303313, - "flos": 15783729620880.0, - "grad_norm": 3.7396534919355955, - "learning_rate": 4.36128907901443e-07, - "loss": 
0.6733, - "num_input_tokens_seen": 140097415, - "step": 6588 - }, - { - "epoch": 0.7922804064209703, - "flos": 13218749741280.0, - "grad_norm": 6.534559661576526, - "learning_rate": 4.356434473213519e-07, - "loss": 0.7098, - "num_input_tokens_seen": 140114585, - "step": 6589 - }, - { - "epoch": 0.7924006493116095, - "flos": 15837669210000.0, - "grad_norm": 4.0040110504577155, - "learning_rate": 4.351582240529068e-07, - "loss": 0.7681, - "num_input_tokens_seen": 140135135, - "step": 6590 - }, - { - "epoch": 0.7925208922022485, - "flos": 47363821186680.0, - "grad_norm": 0.6984997254373151, - "learning_rate": 4.346732381697149e-07, - "loss": 0.601, - "num_input_tokens_seen": 140198985, - "step": 6591 - }, - { - "epoch": 0.7926411350928876, - "flos": 12350365782120.0, - "grad_norm": 3.4171748519963403, - "learning_rate": 4.3418848974534825e-07, - "loss": 0.797, - "num_input_tokens_seen": 140215645, - "step": 6592 - }, - { - "epoch": 0.7927613779835267, - "flos": 25315939585560.0, - "grad_norm": 5.823264050798509, - "learning_rate": 4.3370397885334276e-07, - "loss": 0.6693, - "num_input_tokens_seen": 140235995, - "step": 6593 - }, - { - "epoch": 0.7928816208741658, - "flos": 13838041897440.0, - "grad_norm": 4.9721276660834075, - "learning_rate": 4.3321970556719777e-07, - "loss": 0.7423, - "num_input_tokens_seen": 140254010, - "step": 6594 - }, - { - "epoch": 0.7930018637648049, - "flos": 13595218766520.0, - "grad_norm": 7.576201330424179, - "learning_rate": 4.3273566996037856e-07, - "loss": 0.6874, - "num_input_tokens_seen": 140270425, - "step": 6595 - }, - { - "epoch": 0.793122106655444, - "flos": 17966826622080.0, - "grad_norm": 4.003640902030564, - "learning_rate": 4.322518721063113e-07, - "loss": 0.7871, - "num_input_tokens_seen": 140288695, - "step": 6596 - }, - { - "epoch": 0.7932423495460831, - "flos": 25286911893600.0, - "grad_norm": 3.8619323165154245, - "learning_rate": 4.3176831207838906e-07, - "loss": 0.6834, - "num_input_tokens_seen": 140311825, - "step": 6597 - }, - { - "epoch": 0.7933625924367221, - "flos": 19780483109160.0, - "grad_norm": 4.2881285270673075, - "learning_rate": 4.3128498994996685e-07, - "loss": 0.7272, - "num_input_tokens_seen": 140331020, - "step": 6598 - }, - { - "epoch": 0.7934828353273613, - "flos": 21695686782480.0, - "grad_norm": 12.232341884867397, - "learning_rate": 4.308019057943646e-07, - "loss": 0.6917, - "num_input_tokens_seen": 140352465, - "step": 6599 - }, - { - "epoch": 0.7936030782180004, - "flos": 20989564830120.0, - "grad_norm": 3.357832159567928, - "learning_rate": 4.3031905968486535e-07, - "loss": 0.7236, - "num_input_tokens_seen": 140373015, - "step": 6600 - }, - { - "epoch": 0.7937233211086394, - "flos": 11943855946200.0, - "grad_norm": 4.628492943705504, - "learning_rate": 4.298364516947162e-07, - "loss": 0.6639, - "num_input_tokens_seen": 140389965, - "step": 6601 - }, - { - "epoch": 0.7938435639992786, - "flos": 16103947879920.0, - "grad_norm": 3.8159956618470297, - "learning_rate": 4.293540818971295e-07, - "loss": 0.6449, - "num_input_tokens_seen": 140407490, - "step": 6602 - }, - { - "epoch": 0.7939638068899176, - "flos": 16239999931200.0, - "grad_norm": 4.50165018978067, - "learning_rate": 4.2887195036527934e-07, - "loss": 0.7476, - "num_input_tokens_seen": 140426015, - "step": 6603 - }, - { - "epoch": 0.7940840497805567, - "flos": 12945252600480.0, - "grad_norm": 4.615904654035488, - "learning_rate": 4.28390057172306e-07, - "loss": 0.6968, - "num_input_tokens_seen": 140442240, - "step": 6604 - }, - { - "epoch": 0.7942042926711959, - 
"flos": 17430755026680.0, - "grad_norm": 3.2590606274509346, - "learning_rate": 4.279084023913111e-07, - "loss": 0.7026, - "num_input_tokens_seen": 140459835, - "step": 6605 - }, - { - "epoch": 0.7943245355618349, - "flos": 14055731511480.0, - "grad_norm": 17.974606722162413, - "learning_rate": 4.2742698609536096e-07, - "loss": 0.6775, - "num_input_tokens_seen": 140477865, - "step": 6606 - }, - { - "epoch": 0.794444778452474, - "flos": 18320188367880.0, - "grad_norm": 8.065234762843867, - "learning_rate": 4.2694580835748706e-07, - "loss": 0.7665, - "num_input_tokens_seen": 140497445, - "step": 6607 - }, - { - "epoch": 0.7945650213431131, - "flos": 16998573454560.0, - "grad_norm": 6.55662055263907, - "learning_rate": 4.264648692506836e-07, - "loss": 0.7245, - "num_input_tokens_seen": 140515955, - "step": 6608 - }, - { - "epoch": 0.7946852642337522, - "flos": 19100761072080.0, - "grad_norm": 3.479206031683217, - "learning_rate": 4.2598416884790824e-07, - "loss": 0.6987, - "num_input_tokens_seen": 140534725, - "step": 6609 - }, - { - "epoch": 0.7948055071243912, - "flos": 17404418431320.0, - "grad_norm": 5.692207819326209, - "learning_rate": 4.255037072220828e-07, - "loss": 0.7897, - "num_input_tokens_seen": 140555815, - "step": 6610 - }, - { - "epoch": 0.7949257500150304, - "flos": 16080365701080.0, - "grad_norm": 6.783688998759065, - "learning_rate": 4.2502348444609293e-07, - "loss": 0.7035, - "num_input_tokens_seen": 140575155, - "step": 6611 - }, - { - "epoch": 0.7950459929056695, - "flos": 18887503852440.0, - "grad_norm": 5.179394559789922, - "learning_rate": 4.2454350059278844e-07, - "loss": 0.6681, - "num_input_tokens_seen": 140595935, - "step": 6612 - }, - { - "epoch": 0.7951662357963085, - "flos": 16210813939440.0, - "grad_norm": 8.869328365594448, - "learning_rate": 4.240637557349824e-07, - "loss": 0.8319, - "num_input_tokens_seen": 140612870, - "step": 6613 - }, - { - "epoch": 0.7952864786869477, - "flos": 18049445643600.0, - "grad_norm": 3.0542560087271178, - "learning_rate": 4.235842499454516e-07, - "loss": 0.6404, - "num_input_tokens_seen": 140632505, - "step": 6614 - }, - { - "epoch": 0.7954067215775867, - "flos": 15968877287400.0, - "grad_norm": 4.027015768879931, - "learning_rate": 4.2310498329693687e-07, - "loss": 0.8054, - "num_input_tokens_seen": 140653125, - "step": 6615 - }, - { - "epoch": 0.7955269644682258, - "flos": 17619606908520.0, - "grad_norm": 4.576989146904207, - "learning_rate": 4.2262595586214164e-07, - "loss": 0.7967, - "num_input_tokens_seen": 140673940, - "step": 6616 - }, - { - "epoch": 0.795647207358865, - "flos": 18322942784400.0, - "grad_norm": 6.130937773272237, - "learning_rate": 4.221471677137358e-07, - "loss": 0.7549, - "num_input_tokens_seen": 140694475, - "step": 6617 - }, - { - "epoch": 0.795767450249504, - "flos": 10654023141360.0, - "grad_norm": 18.937945571542418, - "learning_rate": 4.216686189243492e-07, - "loss": 0.7001, - "num_input_tokens_seen": 140712985, - "step": 6618 - }, - { - "epoch": 0.7958876931401431, - "flos": 13539316259880.0, - "grad_norm": 4.502812036761397, - "learning_rate": 4.211903095665785e-07, - "loss": 0.7148, - "num_input_tokens_seen": 140732090, - "step": 6619 - }, - { - "epoch": 0.7960079360307821, - "flos": 15757203065760.0, - "grad_norm": 6.493743820114059, - "learning_rate": 4.2071223971298277e-07, - "loss": 0.7377, - "num_input_tokens_seen": 140748995, - "step": 6620 - }, - { - "epoch": 0.7961281789214213, - "flos": 18671048976840.0, - "grad_norm": 25.57532699838301, - "learning_rate": 4.2023440943608433e-07, - 
"loss": 0.5909, - "num_input_tokens_seen": 140768680, - "step": 6621 - }, - { - "epoch": 0.7962484218120603, - "flos": 16053459226440.0, - "grad_norm": 7.741221963396494, - "learning_rate": 4.1975681880837023e-07, - "loss": 0.7694, - "num_input_tokens_seen": 140788405, - "step": 6622 - }, - { - "epoch": 0.7963686647026994, - "flos": 13782329350560.0, - "grad_norm": 11.307657628028664, - "learning_rate": 4.192794679022895e-07, - "loss": 0.8141, - "num_input_tokens_seen": 140806450, - "step": 6623 - }, - { - "epoch": 0.7964889075933386, - "flos": 21805465558320.0, - "grad_norm": 3.189890993222969, - "learning_rate": 4.1880235679025743e-07, - "loss": 0.7123, - "num_input_tokens_seen": 140826265, - "step": 6624 - }, - { - "epoch": 0.7966091504839776, - "flos": 21637979658120.0, - "grad_norm": 3.8400615579916213, - "learning_rate": 4.1832548554464986e-07, - "loss": 0.6111, - "num_input_tokens_seen": 140844280, - "step": 6625 - }, - { - "epoch": 0.7967293933746167, - "flos": 49615353547320.0, - "grad_norm": 0.7738015060044356, - "learning_rate": 4.178488542378098e-07, - "loss": 0.5977, - "num_input_tokens_seen": 140901580, - "step": 6626 - }, - { - "epoch": 0.7968496362652558, - "flos": 18725020225920.0, - "grad_norm": 4.94891910503976, - "learning_rate": 4.173724629420401e-07, - "loss": 0.8795, - "num_input_tokens_seen": 140922660, - "step": 6627 - }, - { - "epoch": 0.7969698791558949, - "flos": 10544845904760.0, - "grad_norm": 9.425700307878737, - "learning_rate": 4.168963117296087e-07, - "loss": 0.6724, - "num_input_tokens_seen": 140939715, - "step": 6628 - }, - { - "epoch": 0.797090122046534, - "flos": 16189068038280.0, - "grad_norm": 4.111874592542302, - "learning_rate": 4.1642040067274876e-07, - "loss": 0.7308, - "num_input_tokens_seen": 140959105, - "step": 6629 - }, - { - "epoch": 0.7972103649371731, - "flos": 14538211777320.0, - "grad_norm": 3.4151092949687616, - "learning_rate": 4.1594472984365493e-07, - "loss": 0.7141, - "num_input_tokens_seen": 140977510, - "step": 6630 - }, - { - "epoch": 0.7973306078278122, - "flos": 26498083171920.0, - "grad_norm": 4.44221525448593, - "learning_rate": 4.154692993144862e-07, - "loss": 0.762, - "num_input_tokens_seen": 140997000, - "step": 6631 - }, - { - "epoch": 0.7974508507184512, - "flos": 15621372634200.0, - "grad_norm": 3.7761569441814578, - "learning_rate": 4.1499410915736476e-07, - "loss": 0.6919, - "num_input_tokens_seen": 141015650, - "step": 6632 - }, - { - "epoch": 0.7975710936090904, - "flos": 50331321747240.0, - "grad_norm": 0.8161744514191703, - "learning_rate": 4.145191594443762e-07, - "loss": 0.6998, - "num_input_tokens_seen": 141079725, - "step": 6633 - }, - { - "epoch": 0.7976913364997295, - "flos": 16459114243440.0, - "grad_norm": 4.818873359100632, - "learning_rate": 4.140444502475713e-07, - "loss": 0.6945, - "num_input_tokens_seen": 141098995, - "step": 6634 - }, - { - "epoch": 0.7978115793903685, - "flos": 11108298874200.0, - "grad_norm": 10.349144764823867, - "learning_rate": 4.1356998163896216e-07, - "loss": 0.6871, - "num_input_tokens_seen": 141115765, - "step": 6635 - }, - { - "epoch": 0.7979318222810077, - "flos": 14406718760280.0, - "grad_norm": 5.158173358431246, - "learning_rate": 4.130957536905255e-07, - "loss": 0.7233, - "num_input_tokens_seen": 141133500, - "step": 6636 - }, - { - "epoch": 0.7980520651716467, - "flos": 11328869544600.0, - "grad_norm": 4.503617210410458, - "learning_rate": 4.1262176647420134e-07, - "loss": 0.6865, - "num_input_tokens_seen": 141151385, - "step": 6637 - }, - { - "epoch": 
0.7981723080622858, - "flos": 16323600411480.0, - "grad_norm": 6.672110274229182, - "learning_rate": 4.121480200618923e-07, - "loss": 0.7821, - "num_input_tokens_seen": 141170760, - "step": 6638 - }, - { - "epoch": 0.798292550952925, - "flos": 16782530158440.0, - "grad_norm": 3.3557282281985112, - "learning_rate": 4.116745145254674e-07, - "loss": 0.7887, - "num_input_tokens_seen": 141190015, - "step": 6639 - }, - { - "epoch": 0.798412793843564, - "flos": 47552641408560.0, - "grad_norm": 0.7899227551830852, - "learning_rate": 4.1120124993675476e-07, - "loss": 0.5969, - "num_input_tokens_seen": 141254165, - "step": 6640 - }, - { - "epoch": 0.7985330367342031, - "flos": 9868004924040.0, - "grad_norm": 3.9225616461904154, - "learning_rate": 4.107282263675498e-07, - "loss": 0.6022, - "num_input_tokens_seen": 141271555, - "step": 6641 - }, - { - "epoch": 0.7986532796248422, - "flos": 49920094577280.0, - "grad_norm": 0.7489090773753727, - "learning_rate": 4.1025544388960907e-07, - "loss": 0.5332, - "num_input_tokens_seen": 141332315, - "step": 6642 - }, - { - "epoch": 0.7987735225154813, - "flos": 16431606229560.0, - "grad_norm": 4.720158246830509, - "learning_rate": 4.097829025746538e-07, - "loss": 0.6926, - "num_input_tokens_seen": 141353580, - "step": 6643 - }, - { - "epoch": 0.7988937654061203, - "flos": 50782178204400.0, - "grad_norm": 0.7286374396785288, - "learning_rate": 4.0931060249436757e-07, - "loss": 0.6165, - "num_input_tokens_seen": 141417140, - "step": 6644 - }, - { - "epoch": 0.7990140082967595, - "flos": 15210683683560.0, - "grad_norm": 8.282983217061416, - "learning_rate": 4.088385437203978e-07, - "loss": 0.6838, - "num_input_tokens_seen": 141433870, - "step": 6645 - }, - { - "epoch": 0.7991342511873986, - "flos": 13862953794600.0, - "grad_norm": 4.764605703942151, - "learning_rate": 4.083667263243564e-07, - "loss": 0.7522, - "num_input_tokens_seen": 141451935, - "step": 6646 - }, - { - "epoch": 0.7992544940780376, - "flos": 15219421832520.0, - "grad_norm": 3.6731557357512123, - "learning_rate": 4.0789515037781653e-07, - "loss": 0.704, - "num_input_tokens_seen": 141472380, - "step": 6647 - }, - { - "epoch": 0.7993747369686768, - "flos": 9168151643760.0, - "grad_norm": 3.77393282528009, - "learning_rate": 4.0742381595231755e-07, - "loss": 0.8091, - "num_input_tokens_seen": 141488825, - "step": 6648 - }, - { - "epoch": 0.7994949798593158, - "flos": 14672047631400.0, - "grad_norm": 9.396382678791294, - "learning_rate": 4.06952723119359e-07, - "loss": 0.7741, - "num_input_tokens_seen": 141508420, - "step": 6649 - }, - { - "epoch": 0.7996152227499549, - "flos": 28419713817120.0, - "grad_norm": 4.4841691345385755, - "learning_rate": 4.0648187195040504e-07, - "loss": 0.6519, - "num_input_tokens_seen": 141530345, - "step": 6650 - }, - { - "epoch": 0.799735465640594, - "flos": 51802313063640.0, - "grad_norm": 0.9465538575617581, - "learning_rate": 4.060112625168848e-07, - "loss": 0.7176, - "num_input_tokens_seen": 141595175, - "step": 6651 - }, - { - "epoch": 0.7998557085312331, - "flos": 17752777903440.0, - "grad_norm": 5.047647800860635, - "learning_rate": 4.055408948901886e-07, - "loss": 0.7214, - "num_input_tokens_seen": 141616295, - "step": 6652 - }, - { - "epoch": 0.7999759514218722, - "flos": 20212443061560.0, - "grad_norm": 2.9641398723922374, - "learning_rate": 4.050707691416708e-07, - "loss": 0.6994, - "num_input_tokens_seen": 141637325, - "step": 6653 - }, - { - "epoch": 0.8000961943125112, - "flos": 49653151048200.0, - "grad_norm": 0.7222878831783198, - "learning_rate": 
4.046008853426495e-07, - "loss": 0.6144, - "num_input_tokens_seen": 141700360, - "step": 6654 - }, - { - "epoch": 0.8002164372031504, - "flos": 21078484183680.0, - "grad_norm": 5.895773987308522, - "learning_rate": 4.0413124356440464e-07, - "loss": 0.613, - "num_input_tokens_seen": 141724125, - "step": 6655 - }, - { - "epoch": 0.8003366800937894, - "flos": 12867509212800.0, - "grad_norm": 11.639749337869674, - "learning_rate": 4.0366184387818223e-07, - "loss": 0.8052, - "num_input_tokens_seen": 141742305, - "step": 6656 - }, - { - "epoch": 0.8004569229844285, - "flos": 19051507157040.0, - "grad_norm": 4.258746403456665, - "learning_rate": 4.0319268635518797e-07, - "loss": 0.8331, - "num_input_tokens_seen": 141762600, - "step": 6657 - }, - { - "epoch": 0.8005771658750677, - "flos": 15215495997480.0, - "grad_norm": 4.482962675028995, - "learning_rate": 4.027237710665943e-07, - "loss": 0.7453, - "num_input_tokens_seen": 141780785, - "step": 6658 - }, - { - "epoch": 0.8006974087657067, - "flos": 18916278264720.0, - "grad_norm": 5.298306702079614, - "learning_rate": 4.022550980835344e-07, - "loss": 0.6833, - "num_input_tokens_seen": 141802750, - "step": 6659 - }, - { - "epoch": 0.8008176516563458, - "flos": 12516110384520.0, - "grad_norm": 4.339607074371656, - "learning_rate": 4.017866674771051e-07, - "loss": 0.7942, - "num_input_tokens_seen": 141819955, - "step": 6660 - }, - { - "epoch": 0.8009378945469849, - "flos": 17728245925800.0, - "grad_norm": 4.626599215767962, - "learning_rate": 4.013184793183688e-07, - "loss": 0.7269, - "num_input_tokens_seen": 141841770, - "step": 6661 - }, - { - "epoch": 0.801058137437624, - "flos": 14456415914760.0, - "grad_norm": 3.0450260127417508, - "learning_rate": 4.008505336783472e-07, - "loss": 0.7168, - "num_input_tokens_seen": 141859215, - "step": 6662 - }, - { - "epoch": 0.801178380328263, - "flos": 13622125241160.0, - "grad_norm": 9.591043792659631, - "learning_rate": 4.003828306280284e-07, - "loss": 0.8045, - "num_input_tokens_seen": 141876610, - "step": 6663 - }, - { - "epoch": 0.8012986232189022, - "flos": 11437191962280.0, - "grad_norm": 3.6994120981348066, - "learning_rate": 3.999153702383626e-07, - "loss": 0.7665, - "num_input_tokens_seen": 141894220, - "step": 6664 - }, - { - "epoch": 0.8014188661095413, - "flos": 20967185729760.0, - "grad_norm": 4.271707857736831, - "learning_rate": 3.9944815258026263e-07, - "loss": 0.7148, - "num_input_tokens_seen": 141915760, - "step": 6665 - }, - { - "epoch": 0.8015391090001803, - "flos": 21504143804040.0, - "grad_norm": 16.694464326840688, - "learning_rate": 3.989811777246057e-07, - "loss": 0.8198, - "num_input_tokens_seen": 141935650, - "step": 6666 - }, - { - "epoch": 0.8016593518908195, - "flos": 51916999133280.0, - "grad_norm": 0.8974731909379249, - "learning_rate": 3.985144457422305e-07, - "loss": 0.6923, - "num_input_tokens_seen": 141989655, - "step": 6667 - }, - { - "epoch": 0.8017795947814585, - "flos": 19073506337880.0, - "grad_norm": 4.818268724446216, - "learning_rate": 3.9804795670394096e-07, - "loss": 0.7406, - "num_input_tokens_seen": 142009500, - "step": 6668 - }, - { - "epoch": 0.8018998376720976, - "flos": 16158647308080.0, - "grad_norm": 4.546138743144981, - "learning_rate": 3.975817106805022e-07, - "loss": 0.7, - "num_input_tokens_seen": 142027920, - "step": 6669 - }, - { - "epoch": 0.8020200805627368, - "flos": 25396152450120.0, - "grad_norm": 5.524859859018326, - "learning_rate": 3.97115707742645e-07, - "loss": 0.6197, - "num_input_tokens_seen": 142048315, - "step": 6670 - }, - { - 
"epoch": 0.8021403234533758, - "flos": 14703544800240.0, - "grad_norm": 11.897915543787885, - "learning_rate": 3.966499479610599e-07, - "loss": 0.6394, - "num_input_tokens_seen": 142066130, - "step": 6671 - }, - { - "epoch": 0.8022605663440149, - "flos": 20347228714440.0, - "grad_norm": 5.149376559252063, - "learning_rate": 3.9618443140640225e-07, - "loss": 0.6393, - "num_input_tokens_seen": 142084760, - "step": 6672 - }, - { - "epoch": 0.802380809234654, - "flos": 44402268207240.0, - "grad_norm": 0.7422804924781129, - "learning_rate": 3.957191581492918e-07, - "loss": 0.5383, - "num_input_tokens_seen": 142145240, - "step": 6673 - }, - { - "epoch": 0.8025010521252931, - "flos": 10973228281680.0, - "grad_norm": 4.957441789417096, - "learning_rate": 3.952541282603097e-07, - "loss": 0.6841, - "num_input_tokens_seen": 142160065, - "step": 6674 - }, - { - "epoch": 0.8026212950159322, - "flos": 16215626253360.0, - "grad_norm": 4.166963922705581, - "learning_rate": 3.9478934181000013e-07, - "loss": 0.8253, - "num_input_tokens_seen": 142179810, - "step": 6675 - }, - { - "epoch": 0.8027415379065713, - "flos": 12894573987240.0, - "grad_norm": 5.595832650115236, - "learning_rate": 3.943247988688714e-07, - "loss": 0.8279, - "num_input_tokens_seen": 142198225, - "step": 6676 - }, - { - "epoch": 0.8028617807972104, - "flos": 16079289262440.0, - "grad_norm": 11.755192684669654, - "learning_rate": 3.938604995073933e-07, - "loss": 0.7131, - "num_input_tokens_seen": 142216415, - "step": 6677 - }, - { - "epoch": 0.8029820236878494, - "flos": 19371408816480.0, - "grad_norm": 3.090239411495706, - "learning_rate": 3.9339644379600157e-07, - "loss": 0.6471, - "num_input_tokens_seen": 142235965, - "step": 6678 - }, - { - "epoch": 0.8031022665784886, - "flos": 12895017226680.0, - "grad_norm": 5.689214091807574, - "learning_rate": 3.929326318050907e-07, - "loss": 0.7006, - "num_input_tokens_seen": 142253355, - "step": 6679 - }, - { - "epoch": 0.8032225094691277, - "flos": 11247390281640.0, - "grad_norm": 3.4176057284896504, - "learning_rate": 3.924690636050225e-07, - "loss": 0.7733, - "num_input_tokens_seen": 142270485, - "step": 6680 - }, - { - "epoch": 0.8033427523597667, - "flos": 19187812488000.0, - "grad_norm": 3.7551479608106746, - "learning_rate": 3.9200573926611915e-07, - "loss": 0.7075, - "num_input_tokens_seen": 142291620, - "step": 6681 - }, - { - "epoch": 0.8034629952504058, - "flos": 15595067698800.0, - "grad_norm": 3.306861252699377, - "learning_rate": 3.9154265885866613e-07, - "loss": 0.712, - "num_input_tokens_seen": 142310650, - "step": 6682 - }, - { - "epoch": 0.8035832381410449, - "flos": 15836592771360.0, - "grad_norm": 6.530271027856778, - "learning_rate": 3.9107982245291394e-07, - "loss": 0.7262, - "num_input_tokens_seen": 142328495, - "step": 6683 - }, - { - "epoch": 0.803703481031684, - "flos": 14998028003160.0, - "grad_norm": 8.210976525240277, - "learning_rate": 3.9061723011907245e-07, - "loss": 0.7468, - "num_input_tokens_seen": 142347570, - "step": 6684 - }, - { - "epoch": 0.803823723922323, - "flos": 16726944251400.0, - "grad_norm": 4.465038972793457, - "learning_rate": 3.901548819273179e-07, - "loss": 0.7639, - "num_input_tokens_seen": 142367305, - "step": 6685 - }, - { - "epoch": 0.8039439668129622, - "flos": 15622734012480.0, - "grad_norm": 3.887345858642762, - "learning_rate": 3.896927779477881e-07, - "loss": 0.6769, - "num_input_tokens_seen": 142386285, - "step": 6686 - }, - { - "epoch": 0.8040642097036013, - "flos": 17428063930080.0, - "grad_norm": 7.587814460283765, - 
"learning_rate": 3.892309182505833e-07, - "loss": 0.6673, - "num_input_tokens_seen": 142403820, - "step": 6687 - }, - { - "epoch": 0.8041844525942403, - "flos": 18997250968320.0, - "grad_norm": 5.018476679712127, - "learning_rate": 3.887693029057675e-07, - "loss": 0.8438, - "num_input_tokens_seen": 142423050, - "step": 6688 - }, - { - "epoch": 0.8043046954848795, - "flos": 18456145439280.0, - "grad_norm": 2.6172009823579967, - "learning_rate": 3.8830793198336684e-07, - "loss": 0.8041, - "num_input_tokens_seen": 142442360, - "step": 6689 - }, - { - "epoch": 0.8044249383755185, - "flos": 30688025956560.0, - "grad_norm": 21.032091597202715, - "learning_rate": 3.878468055533721e-07, - "loss": 0.6919, - "num_input_tokens_seen": 142464620, - "step": 6690 - }, - { - "epoch": 0.8045451812661576, - "flos": 15081818443200.0, - "grad_norm": 8.91164601475972, - "learning_rate": 3.8738592368573464e-07, - "loss": 0.8253, - "num_input_tokens_seen": 142481895, - "step": 6691 - }, - { - "epoch": 0.8046654241567968, - "flos": 21801254783640.0, - "grad_norm": 4.815896557200768, - "learning_rate": 3.8692528645037137e-07, - "loss": 0.8652, - "num_input_tokens_seen": 142500795, - "step": 6692 - }, - { - "epoch": 0.8047856670474358, - "flos": 12890521512360.0, - "grad_norm": 4.953960552799286, - "learning_rate": 3.8646489391715907e-07, - "loss": 0.7765, - "num_input_tokens_seen": 142514810, - "step": 6693 - }, - { - "epoch": 0.8049059099380749, - "flos": 12482491998360.0, - "grad_norm": 9.388381156761207, - "learning_rate": 3.8600474615593903e-07, - "loss": 0.8669, - "num_input_tokens_seen": 142529145, - "step": 6694 - }, - { - "epoch": 0.805026152828714, - "flos": 45858320513880.0, - "grad_norm": 0.8328297325197919, - "learning_rate": 3.8554484323651605e-07, - "loss": 0.6495, - "num_input_tokens_seen": 142590735, - "step": 6695 - }, - { - "epoch": 0.8051463957193531, - "flos": 15864069125280.0, - "grad_norm": 3.57287932766606, - "learning_rate": 3.85085185228657e-07, - "loss": 0.776, - "num_input_tokens_seen": 142609425, - "step": 6696 - }, - { - "epoch": 0.8052666386099921, - "flos": 23533400347800.0, - "grad_norm": 4.139527373310829, - "learning_rate": 3.8462577220209114e-07, - "loss": 0.7287, - "num_input_tokens_seen": 142629520, - "step": 6697 - }, - { - "epoch": 0.8053868815006313, - "flos": 49520676572400.0, - "grad_norm": 0.6779195219314678, - "learning_rate": 3.8416660422651127e-07, - "loss": 0.6001, - "num_input_tokens_seen": 142698890, - "step": 6698 - }, - { - "epoch": 0.8055071243912704, - "flos": 17453419066680.0, - "grad_norm": 6.570788571486064, - "learning_rate": 3.837076813715723e-07, - "loss": 0.672, - "num_input_tokens_seen": 142718495, - "step": 6699 - }, - { - "epoch": 0.8056273672819094, - "flos": 15594719439240.0, - "grad_norm": 3.7826439676160626, - "learning_rate": 3.832490037068941e-07, - "loss": 0.739, - "num_input_tokens_seen": 142737005, - "step": 6700 - }, - { - "epoch": 0.8057476101725486, - "flos": 18882438258840.0, - "grad_norm": 5.9326386477235316, - "learning_rate": 3.827905713020554e-07, - "loss": 0.7433, - "num_input_tokens_seen": 142754370, - "step": 6701 - }, - { - "epoch": 0.8058678530631876, - "flos": 17970910756920.0, - "grad_norm": 17.539182558052342, - "learning_rate": 3.823323842266017e-07, - "loss": 0.6668, - "num_input_tokens_seen": 142773485, - "step": 6702 - }, - { - "epoch": 0.8059880959538267, - "flos": 18132507904560.0, - "grad_norm": 4.987342835820666, - "learning_rate": 3.818744425500393e-07, - "loss": 0.7101, - "num_input_tokens_seen": 142791220, - "step": 
6703 - }, - { - "epoch": 0.8061083388444659, - "flos": 16239398391960.0, - "grad_norm": 3.0963156494277375, - "learning_rate": 3.8141674634183675e-07, - "loss": 0.7971, - "num_input_tokens_seen": 142809970, - "step": 6704 - }, - { - "epoch": 0.8062285817351049, - "flos": 22047813789840.0, - "grad_norm": 3.127875569066251, - "learning_rate": 3.809592956714278e-07, - "loss": 0.6316, - "num_input_tokens_seen": 142832925, - "step": 6705 - }, - { - "epoch": 0.806348824625744, - "flos": 16673606201520.0, - "grad_norm": 3.277684045159041, - "learning_rate": 3.805020906082057e-07, - "loss": 0.7262, - "num_input_tokens_seen": 142851220, - "step": 6706 - }, - { - "epoch": 0.8064690675163831, - "flos": 17134182266400.0, - "grad_norm": 5.290219967745094, - "learning_rate": 3.8004513122152917e-07, - "loss": 0.8033, - "num_input_tokens_seen": 142869250, - "step": 6707 - }, - { - "epoch": 0.8065893104070222, - "flos": 17619828528240.0, - "grad_norm": 4.497950405025531, - "learning_rate": 3.79588417580718e-07, - "loss": 0.662, - "num_input_tokens_seen": 142887080, - "step": 6708 - }, - { - "epoch": 0.8067095532976613, - "flos": 16320054495960.0, - "grad_norm": 3.5946926041760605, - "learning_rate": 3.791319497550558e-07, - "loss": 0.7385, - "num_input_tokens_seen": 142904630, - "step": 6709 - }, - { - "epoch": 0.8068297961883004, - "flos": 12490248688560.0, - "grad_norm": 4.950848269750134, - "learning_rate": 3.78675727813788e-07, - "loss": 0.703, - "num_input_tokens_seen": 142921915, - "step": 6710 - }, - { - "epoch": 0.8069500390789395, - "flos": 16108728533880.0, - "grad_norm": 11.26128008435025, - "learning_rate": 3.782197518261225e-07, - "loss": 0.7133, - "num_input_tokens_seen": 142941075, - "step": 6711 - }, - { - "epoch": 0.8070702819695785, - "flos": 14055098312280.0, - "grad_norm": 5.285064944587329, - "learning_rate": 3.777640218612319e-07, - "loss": 0.9555, - "num_input_tokens_seen": 142958780, - "step": 6712 - }, - { - "epoch": 0.8071905248602176, - "flos": 15757519665360.0, - "grad_norm": 4.460433797908825, - "learning_rate": 3.773085379882488e-07, - "loss": 0.6991, - "num_input_tokens_seen": 142977555, - "step": 6713 - }, - { - "epoch": 0.8073107677508568, - "flos": 27394133444760.0, - "grad_norm": 3.4203843305703288, - "learning_rate": 3.768533002762715e-07, - "loss": 0.7475, - "num_input_tokens_seen": 143000810, - "step": 6714 - }, - { - "epoch": 0.8074310106414958, - "flos": 20806095141480.0, - "grad_norm": 4.26500320094571, - "learning_rate": 3.763983087943572e-07, - "loss": 0.7486, - "num_input_tokens_seen": 143019920, - "step": 6715 - }, - { - "epoch": 0.8075512535321349, - "flos": 17783451913320.0, - "grad_norm": 3.2016364383677116, - "learning_rate": 3.759435636115282e-07, - "loss": 0.7854, - "num_input_tokens_seen": 143040425, - "step": 6716 - }, - { - "epoch": 0.807671496422774, - "flos": 19077685452600.0, - "grad_norm": 4.167007635788391, - "learning_rate": 3.7548906479676967e-07, - "loss": 0.7227, - "num_input_tokens_seen": 143059740, - "step": 6717 - }, - { - "epoch": 0.8077917393134131, - "flos": 17375232439560.0, - "grad_norm": 3.220796249459097, - "learning_rate": 3.7503481241902855e-07, - "loss": 0.6943, - "num_input_tokens_seen": 143079435, - "step": 6718 - }, - { - "epoch": 0.8079119822040521, - "flos": 13431373761720.0, - "grad_norm": 3.8325665301317002, - "learning_rate": 3.745808065472145e-07, - "loss": 0.7832, - "num_input_tokens_seen": 143096450, - "step": 6719 - }, - { - "epoch": 0.8080322250946913, - "flos": 17294671315440.0, - "grad_norm": 5.130019916101653, - 
"learning_rate": 3.741270472501994e-07, - "loss": 0.7542, - "num_input_tokens_seen": 143116810, - "step": 6720 - }, - { - "epoch": 0.8081524679853304, - "flos": 16702032354240.0, - "grad_norm": 8.947279907828046, - "learning_rate": 3.736735345968183e-07, - "loss": 0.7189, - "num_input_tokens_seen": 143136140, - "step": 6721 - }, - { - "epoch": 0.8082727108759694, - "flos": 12868015772160.0, - "grad_norm": 2.7850387141825963, - "learning_rate": 3.7322026865586986e-07, - "loss": 0.7754, - "num_input_tokens_seen": 143154895, - "step": 6722 - }, - { - "epoch": 0.8083929537666086, - "flos": 19023935823240.0, - "grad_norm": 4.406126196454435, - "learning_rate": 3.7276724949611206e-07, - "loss": 0.7129, - "num_input_tokens_seen": 143174725, - "step": 6723 - }, - { - "epoch": 0.8085131966572476, - "flos": 19860949253400.0, - "grad_norm": 4.47557261975026, - "learning_rate": 3.723144771862694e-07, - "loss": 0.7269, - "num_input_tokens_seen": 143195085, - "step": 6724 - }, - { - "epoch": 0.8086334395478867, - "flos": 17566363838520.0, - "grad_norm": 4.230191912975754, - "learning_rate": 3.718619517950263e-07, - "loss": 0.7459, - "num_input_tokens_seen": 143215400, - "step": 6725 - }, - { - "epoch": 0.8087536824385259, - "flos": 14913952623480.0, - "grad_norm": 4.173207273884879, - "learning_rate": 3.714096733910301e-07, - "loss": 0.7501, - "num_input_tokens_seen": 143232645, - "step": 6726 - }, - { - "epoch": 0.8088739253291649, - "flos": 18995129751000.0, - "grad_norm": 7.965892084791443, - "learning_rate": 3.709576420428926e-07, - "loss": 0.6815, - "num_input_tokens_seen": 143253165, - "step": 6727 - }, - { - "epoch": 0.808994168219804, - "flos": 20832495056760.0, - "grad_norm": 6.951068428300924, - "learning_rate": 3.7050585781918463e-07, - "loss": 0.7175, - "num_input_tokens_seen": 143273185, - "step": 6728 - }, - { - "epoch": 0.8091144111104431, - "flos": 12705975385080.0, - "grad_norm": 5.014618674601306, - "learning_rate": 3.700543207884428e-07, - "loss": 0.6611, - "num_input_tokens_seen": 143289815, - "step": 6729 - }, - { - "epoch": 0.8092346540010822, - "flos": 23608136039280.0, - "grad_norm": 4.880389242439054, - "learning_rate": 3.6960303101916466e-07, - "loss": 0.6973, - "num_input_tokens_seen": 143309450, - "step": 6730 - }, - { - "epoch": 0.8093548968917212, - "flos": 42769014884040.0, - "grad_norm": 0.7868364869162074, - "learning_rate": 3.6915198857981047e-07, - "loss": 0.5767, - "num_input_tokens_seen": 143374370, - "step": 6731 - }, - { - "epoch": 0.8094751397823604, - "flos": 20077435788960.0, - "grad_norm": 8.460242245499774, - "learning_rate": 3.687011935388027e-07, - "loss": 0.666, - "num_input_tokens_seen": 143396985, - "step": 6732 - }, - { - "epoch": 0.8095953826729995, - "flos": 17619986828040.0, - "grad_norm": 7.227271660727787, - "learning_rate": 3.6825064596452646e-07, - "loss": 0.7087, - "num_input_tokens_seen": 143417050, - "step": 6733 - }, - { - "epoch": 0.8097156255636385, - "flos": 17538792504720.0, - "grad_norm": 7.317874856775948, - "learning_rate": 3.678003459253305e-07, - "loss": 0.687, - "num_input_tokens_seen": 143437620, - "step": 6734 - }, - { - "epoch": 0.8098358684542777, - "flos": 15946023287640.0, - "grad_norm": 3.1279952381517306, - "learning_rate": 3.673502934895236e-07, - "loss": 0.7242, - "num_input_tokens_seen": 143456845, - "step": 6735 - }, - { - "epoch": 0.8099561113449167, - "flos": 50744385194880.0, - "grad_norm": 0.6747349925285608, - "learning_rate": 3.669004887253802e-07, - "loss": 0.5822, - "num_input_tokens_seen": 143522855, - "step": 
6736 - }, - { - "epoch": 0.8100763542355558, - "flos": 17268334720080.0, - "grad_norm": 8.472610325774571, - "learning_rate": 3.664509317011335e-07, - "loss": 0.7774, - "num_input_tokens_seen": 143542910, - "step": 6737 - }, - { - "epoch": 0.810196597126195, - "flos": 23236637627760.0, - "grad_norm": 4.465570132089995, - "learning_rate": 3.6600162248498134e-07, - "loss": 0.7132, - "num_input_tokens_seen": 143566260, - "step": 6738 - }, - { - "epoch": 0.810316840016834, - "flos": 18241210241760.0, - "grad_norm": 3.562988783115029, - "learning_rate": 3.6555256114508426e-07, - "loss": 0.7492, - "num_input_tokens_seen": 143585775, - "step": 6739 - }, - { - "epoch": 0.8104370829074731, - "flos": 20509142461680.0, - "grad_norm": 4.081213238537419, - "learning_rate": 3.651037477495642e-07, - "loss": 0.7142, - "num_input_tokens_seen": 143606945, - "step": 6740 - }, - { - "epoch": 0.8105573257981122, - "flos": 18048400864920.0, - "grad_norm": 38.06781219344386, - "learning_rate": 3.6465518236650584e-07, - "loss": 0.6543, - "num_input_tokens_seen": 143626810, - "step": 6741 - }, - { - "epoch": 0.8106775686887513, - "flos": 19320445263600.0, - "grad_norm": 3.559339359362895, - "learning_rate": 3.642068650639558e-07, - "loss": 0.7743, - "num_input_tokens_seen": 143646275, - "step": 6742 - }, - { - "epoch": 0.8107978115793903, - "flos": 19996209805680.0, - "grad_norm": 4.199943160626133, - "learning_rate": 3.6375879590992334e-07, - "loss": 0.6355, - "num_input_tokens_seen": 143666340, - "step": 6743 - }, - { - "epoch": 0.8109180544700295, - "flos": 18265900519200.0, - "grad_norm": 5.990678221655702, - "learning_rate": 3.6331097497238173e-07, - "loss": 0.7925, - "num_input_tokens_seen": 143685505, - "step": 6744 - }, - { - "epoch": 0.8110382973606686, - "flos": 15432520752360.0, - "grad_norm": 4.151053715412773, - "learning_rate": 3.628634023192627e-07, - "loss": 0.7792, - "num_input_tokens_seen": 143705470, - "step": 6745 - }, - { - "epoch": 0.8111585402513076, - "flos": 11220642106800.0, - "grad_norm": 6.606057202314699, - "learning_rate": 3.624160780184644e-07, - "loss": 0.7366, - "num_input_tokens_seen": 143722405, - "step": 6746 - }, - { - "epoch": 0.8112787831419467, - "flos": 17645373624600.0, - "grad_norm": 3.5385348366637857, - "learning_rate": 3.6196900213784496e-07, - "loss": 0.7359, - "num_input_tokens_seen": 143741440, - "step": 6747 - }, - { - "epoch": 0.8113990260325858, - "flos": 14971944687480.0, - "grad_norm": 7.877072742706931, - "learning_rate": 3.6152217474522527e-07, - "loss": 0.8523, - "num_input_tokens_seen": 143757975, - "step": 6748 - }, - { - "epoch": 0.8115192689232249, - "flos": 18241811781000.0, - "grad_norm": 3.9055697649906325, - "learning_rate": 3.6107559590838975e-07, - "loss": 0.7175, - "num_input_tokens_seen": 143776680, - "step": 6749 - }, - { - "epoch": 0.811639511813864, - "flos": 17616567552360.0, - "grad_norm": 6.732243671941654, - "learning_rate": 3.606292656950822e-07, - "loss": 0.6234, - "num_input_tokens_seen": 143794810, - "step": 6750 - }, - { - "epoch": 0.8117597547045031, - "flos": 16971698639880.0, - "grad_norm": 5.1942761757172695, - "learning_rate": 3.601831841730121e-07, - "loss": 0.8424, - "num_input_tokens_seen": 143812450, - "step": 6751 - }, - { - "epoch": 0.8118799975951422, - "flos": 16864610960640.0, - "grad_norm": 2.8467021332734297, - "learning_rate": 3.5973735140984916e-07, - "loss": 0.7247, - "num_input_tokens_seen": 143832340, - "step": 6752 - }, - { - "epoch": 0.8120002404857812, - "flos": 18047704345800.0, - "grad_norm": 
5.678278899219746, - "learning_rate": 3.5929176747322607e-07, - "loss": 0.7574, - "num_input_tokens_seen": 143851165, - "step": 6753 - }, - { - "epoch": 0.8121204833764204, - "flos": 42310781656200.0, - "grad_norm": 0.9493668680049946, - "learning_rate": 3.588464324307372e-07, - "loss": 0.5659, - "num_input_tokens_seen": 143914510, - "step": 6754 - }, - { - "epoch": 0.8122407262670595, - "flos": 14218373437800.0, - "grad_norm": 2.8415911518105226, - "learning_rate": 3.584013463499391e-07, - "loss": 0.74, - "num_input_tokens_seen": 143932850, - "step": 6755 - }, - { - "epoch": 0.8123609691576985, - "flos": 41577056710080.0, - "grad_norm": 0.7237075993532003, - "learning_rate": 3.579565092983521e-07, - "loss": 0.6446, - "num_input_tokens_seen": 143993690, - "step": 6756 - }, - { - "epoch": 0.8124812120483377, - "flos": 15082293342600.0, - "grad_norm": 5.164146157191614, - "learning_rate": 3.575119213434565e-07, - "loss": 0.8294, - "num_input_tokens_seen": 144011925, - "step": 6757 - }, - { - "epoch": 0.8126014549389767, - "flos": 16459082583480.0, - "grad_norm": 3.7212141832416195, - "learning_rate": 3.5706758255269765e-07, - "loss": 0.8, - "num_input_tokens_seen": 144030100, - "step": 6758 - }, - { - "epoch": 0.8127216978296158, - "flos": 17046845910840.0, - "grad_norm": 2.72180249963988, - "learning_rate": 3.566234929934795e-07, - "loss": 0.687, - "num_input_tokens_seen": 144049020, - "step": 6759 - }, - { - "epoch": 0.812841940720255, - "flos": 18429460584360.0, - "grad_norm": 2.9570960739468304, - "learning_rate": 3.561796527331706e-07, - "loss": 0.71, - "num_input_tokens_seen": 144070415, - "step": 6760 - }, - { - "epoch": 0.812962183610894, - "flos": 19534367342400.0, - "grad_norm": 4.320240537516627, - "learning_rate": 3.5573606183910163e-07, - "loss": 0.7538, - "num_input_tokens_seen": 144090140, - "step": 6761 - }, - { - "epoch": 0.8130824265015331, - "flos": 18289514358000.0, - "grad_norm": 2.821570203168348, - "learning_rate": 3.5529272037856493e-07, - "loss": 0.7628, - "num_input_tokens_seen": 144108075, - "step": 6762 - }, - { - "epoch": 0.8132026693921722, - "flos": 49863527211480.0, - "grad_norm": 0.7544264110754878, - "learning_rate": 3.548496284188149e-07, - "loss": 0.5651, - "num_input_tokens_seen": 144168000, - "step": 6763 - }, - { - "epoch": 0.8133229122828113, - "flos": 14240150998920.0, - "grad_norm": 7.678328835339971, - "learning_rate": 3.544067860270681e-07, - "loss": 0.7773, - "num_input_tokens_seen": 144185295, - "step": 6764 - }, - { - "epoch": 0.8134431551734503, - "flos": 15109009857480.0, - "grad_norm": 4.042738755912825, - "learning_rate": 3.539641932705029e-07, - "loss": 0.6859, - "num_input_tokens_seen": 144203495, - "step": 6765 - }, - { - "epoch": 0.8135633980640895, - "flos": 15730074971400.0, - "grad_norm": 3.8281075613442086, - "learning_rate": 3.53521850216262e-07, - "loss": 0.7265, - "num_input_tokens_seen": 144222785, - "step": 6766 - }, - { - "epoch": 0.8136836409547286, - "flos": 15024269618640.0, - "grad_norm": 6.339473391542872, - "learning_rate": 3.530797569314461e-07, - "loss": 0.7561, - "num_input_tokens_seen": 144241530, - "step": 6767 - }, - { - "epoch": 0.8138038838453676, - "flos": 14835259437000.0, - "grad_norm": 5.033443944023025, - "learning_rate": 3.5263791348312235e-07, - "loss": 0.7535, - "num_input_tokens_seen": 144260445, - "step": 6768 - }, - { - "epoch": 0.8139241267360068, - "flos": 21859721747040.0, - "grad_norm": 3.785110646574486, - "learning_rate": 3.521963199383171e-07, - "loss": 0.688, - "num_input_tokens_seen": 
144283120, - "step": 6769 - }, - { - "epoch": 0.8140443696266458, - "flos": 14401463206920.0, - "grad_norm": 4.153005839662697, - "learning_rate": 3.517549763640197e-07, - "loss": 0.7371, - "num_input_tokens_seen": 144300480, - "step": 6770 - }, - { - "epoch": 0.8141646125172849, - "flos": 19913559124200.0, - "grad_norm": 5.209584119017106, - "learning_rate": 3.513138828271829e-07, - "loss": 0.6962, - "num_input_tokens_seen": 144320070, - "step": 6771 - }, - { - "epoch": 0.8142848554079241, - "flos": 29173380046680.0, - "grad_norm": 3.7765514091104726, - "learning_rate": 3.508730393947179e-07, - "loss": 0.6963, - "num_input_tokens_seen": 144343045, - "step": 6772 - }, - { - "epoch": 0.8144050982985631, - "flos": 16270325681520.0, - "grad_norm": 3.165221804881402, - "learning_rate": 3.504324461335024e-07, - "loss": 0.6937, - "num_input_tokens_seen": 144362875, - "step": 6773 - }, - { - "epoch": 0.8145253411892022, - "flos": 16862838002880.0, - "grad_norm": 3.2975635261576706, - "learning_rate": 3.499921031103732e-07, - "loss": 0.8644, - "num_input_tokens_seen": 144383365, - "step": 6774 - }, - { - "epoch": 0.8146455840798413, - "flos": 18187365632520.0, - "grad_norm": 5.809615208921536, - "learning_rate": 3.4955201039212987e-07, - "loss": 0.7631, - "num_input_tokens_seen": 144404005, - "step": 6775 - }, - { - "epoch": 0.8147658269704804, - "flos": 14595633962040.0, - "grad_norm": 5.169476370644833, - "learning_rate": 3.4911216804553465e-07, - "loss": 0.6291, - "num_input_tokens_seen": 144422625, - "step": 6776 - }, - { - "epoch": 0.8148860698611194, - "flos": 15486238721760.0, - "grad_norm": 3.737332118930227, - "learning_rate": 3.4867257613731017e-07, - "loss": 0.688, - "num_input_tokens_seen": 144441540, - "step": 6777 - }, - { - "epoch": 0.8150063127517585, - "flos": 14323181599920.0, - "grad_norm": 3.8716049087816473, - "learning_rate": 3.4823323473414343e-07, - "loss": 0.8344, - "num_input_tokens_seen": 144460780, - "step": 6778 - }, - { - "epoch": 0.8151265556423977, - "flos": 16567120061520.0, - "grad_norm": 12.909994916730215, - "learning_rate": 3.477941439026812e-07, - "loss": 0.7307, - "num_input_tokens_seen": 144478720, - "step": 6779 - }, - { - "epoch": 0.8152467985330367, - "flos": 13110743923200.0, - "grad_norm": 3.2994866174113455, - "learning_rate": 3.473553037095349e-07, - "loss": 0.7054, - "num_input_tokens_seen": 144497465, - "step": 6780 - }, - { - "epoch": 0.8153670414236758, - "flos": 18291318975720.0, - "grad_norm": 5.21682191969138, - "learning_rate": 3.469167142212743e-07, - "loss": 0.8156, - "num_input_tokens_seen": 144519030, - "step": 6781 - }, - { - "epoch": 0.8154872843143149, - "flos": 22804107796080.0, - "grad_norm": 6.728854649477786, - "learning_rate": 3.4647837550443337e-07, - "loss": 0.6141, - "num_input_tokens_seen": 144537315, - "step": 6782 - }, - { - "epoch": 0.815607527204954, - "flos": 14164402188720.0, - "grad_norm": 5.3624050816669655, - "learning_rate": 3.460402876255086e-07, - "loss": 0.7288, - "num_input_tokens_seen": 144554425, - "step": 6783 - }, - { - "epoch": 0.815727770095593, - "flos": 19158784796040.0, - "grad_norm": 4.482767807321217, - "learning_rate": 3.456024506509574e-07, - "loss": 0.6914, - "num_input_tokens_seen": 144575065, - "step": 6784 - }, - { - "epoch": 0.8158480129862322, - "flos": 18564562836840.0, - "grad_norm": 5.253504355562677, - "learning_rate": 3.4516486464719873e-07, - "loss": 0.7287, - "num_input_tokens_seen": 144594175, - "step": 6785 - }, - { - "epoch": 0.8159682558768713, - "flos": 25289064770880.0, - 
"grad_norm": 5.795694318357028, - "learning_rate": 3.4472752968061445e-07, - "loss": 0.6146, - "num_input_tokens_seen": 144618325, - "step": 6786 - }, - { - "epoch": 0.8160884987675103, - "flos": 13616679728040.0, - "grad_norm": 7.792516189475157, - "learning_rate": 3.442904458175475e-07, - "loss": 0.7247, - "num_input_tokens_seen": 144635365, - "step": 6787 - }, - { - "epoch": 0.8162087416581495, - "flos": 23073774081720.0, - "grad_norm": 4.028550064470869, - "learning_rate": 3.438536131243044e-07, - "loss": 0.7417, - "num_input_tokens_seen": 144656245, - "step": 6788 - }, - { - "epoch": 0.8163289845487885, - "flos": 27634708718520.0, - "grad_norm": 3.421173859969581, - "learning_rate": 3.434170316671503e-07, - "loss": 0.6048, - "num_input_tokens_seen": 144680995, - "step": 6789 - }, - { - "epoch": 0.8164492274394276, - "flos": 10139982386760.0, - "grad_norm": 6.39208997429651, - "learning_rate": 3.4298070151231583e-07, - "loss": 0.875, - "num_input_tokens_seen": 144696115, - "step": 6790 - }, - { - "epoch": 0.8165694703300668, - "flos": 21265151528280.0, - "grad_norm": 4.901135542724186, - "learning_rate": 3.425446227259916e-07, - "loss": 0.586, - "num_input_tokens_seen": 144716800, - "step": 6791 - }, - { - "epoch": 0.8166897132207058, - "flos": 18402332490000.0, - "grad_norm": 12.085743700975296, - "learning_rate": 3.421087953743296e-07, - "loss": 0.8069, - "num_input_tokens_seen": 144736285, - "step": 6792 - }, - { - "epoch": 0.8168099561113449, - "flos": 16944633865440.0, - "grad_norm": 4.0497638303868015, - "learning_rate": 3.416732195234464e-07, - "loss": 0.7784, - "num_input_tokens_seen": 144756060, - "step": 6793 - }, - { - "epoch": 0.816930199001984, - "flos": 13436344375440.0, - "grad_norm": 2.654985553249559, - "learning_rate": 3.4123789523941613e-07, - "loss": 0.7784, - "num_input_tokens_seen": 144775605, - "step": 6794 - }, - { - "epoch": 0.8170504418926231, - "flos": 15540526570440.0, - "grad_norm": 3.6870360001704263, - "learning_rate": 3.4080282258827884e-07, - "loss": 0.6242, - "num_input_tokens_seen": 144793700, - "step": 6795 - }, - { - "epoch": 0.8171706847832622, - "flos": 13947915653160.0, - "grad_norm": 5.667869141095791, - "learning_rate": 3.403680016360342e-07, - "loss": 0.6959, - "num_input_tokens_seen": 144812025, - "step": 6796 - }, - { - "epoch": 0.8172909276739013, - "flos": 15702440317680.0, - "grad_norm": 4.733643327331563, - "learning_rate": 3.3993343244864403e-07, - "loss": 0.656, - "num_input_tokens_seen": 144831335, - "step": 6797 - }, - { - "epoch": 0.8174111705645404, - "flos": 20238811316880.0, - "grad_norm": 3.650401858847749, - "learning_rate": 3.394991150920323e-07, - "loss": 0.7089, - "num_input_tokens_seen": 144854175, - "step": 6798 - }, - { - "epoch": 0.8175314134551794, - "flos": 10221999869040.0, - "grad_norm": 4.001170225626307, - "learning_rate": 3.3906504963208396e-07, - "loss": 0.7278, - "num_input_tokens_seen": 144870590, - "step": 6799 - }, - { - "epoch": 0.8176516563458186, - "flos": 16618241914200.0, - "grad_norm": 4.592382318258756, - "learning_rate": 3.3863123613464774e-07, - "loss": 0.6417, - "num_input_tokens_seen": 144889210, - "step": 6800 - }, - { - "epoch": 0.8177718992364577, - "flos": 16054314045360.0, - "grad_norm": 5.131363988021458, - "learning_rate": 3.381976746655317e-07, - "loss": 0.7249, - "num_input_tokens_seen": 144908685, - "step": 6801 - }, - { - "epoch": 0.8178921421270967, - "flos": 16107493795440.0, - "grad_norm": 5.231498269373321, - "learning_rate": 3.3776436529050756e-07, - "loss": 0.6583, - 
"num_input_tokens_seen": 144927955, - "step": 6802 - }, - { - "epoch": 0.8180123850177359, - "flos": 24370983657240.0, - "grad_norm": 7.849483021034555, - "learning_rate": 3.373313080753073e-07, - "loss": 0.7095, - "num_input_tokens_seen": 144951735, - "step": 6803 - }, - { - "epoch": 0.8181326279083749, - "flos": 16162161563640.0, - "grad_norm": 6.650151427416398, - "learning_rate": 3.3689850308562527e-07, - "loss": 0.7671, - "num_input_tokens_seen": 144971900, - "step": 6804 - }, - { - "epoch": 0.818252870799014, - "flos": 11436463783200.0, - "grad_norm": 5.096981965701998, - "learning_rate": 3.364659503871183e-07, - "loss": 0.7608, - "num_input_tokens_seen": 144989555, - "step": 6805 - }, - { - "epoch": 0.8183731136896532, - "flos": 13704744262680.0, - "grad_norm": 4.013007472381365, - "learning_rate": 3.3603365004540417e-07, - "loss": 0.8177, - "num_input_tokens_seen": 145007570, - "step": 6806 - }, - { - "epoch": 0.8184933565802922, - "flos": 19455832455720.0, - "grad_norm": 5.599351859050749, - "learning_rate": 3.356016021260624e-07, - "loss": 0.7486, - "num_input_tokens_seen": 145027620, - "step": 6807 - }, - { - "epoch": 0.8186135994709313, - "flos": 12786631489080.0, - "grad_norm": 11.625210011543878, - "learning_rate": 3.35169806694634e-07, - "loss": 0.6259, - "num_input_tokens_seen": 145045590, - "step": 6808 - }, - { - "epoch": 0.8187338423615703, - "flos": 46800083277600.0, - "grad_norm": 0.7422497371879978, - "learning_rate": 3.3473826381662186e-07, - "loss": 0.6141, - "num_input_tokens_seen": 145116450, - "step": 6809 - }, - { - "epoch": 0.8188540852522095, - "flos": 12786504849240.0, - "grad_norm": 12.789406111755396, - "learning_rate": 3.3430697355749216e-07, - "loss": 0.8024, - "num_input_tokens_seen": 145133860, - "step": 6810 - }, - { - "epoch": 0.8189743281428485, - "flos": 10464633040200.0, - "grad_norm": 4.3504356692272745, - "learning_rate": 3.3387593598266907e-07, - "loss": 0.7244, - "num_input_tokens_seen": 145150190, - "step": 6811 - }, - { - "epoch": 0.8190945710334876, - "flos": 18374096297040.0, - "grad_norm": 3.3997574270893676, - "learning_rate": 3.3344515115754225e-07, - "loss": 0.7698, - "num_input_tokens_seen": 145168890, - "step": 6812 - }, - { - "epoch": 0.8192148139241268, - "flos": 15729916671600.0, - "grad_norm": 5.051452261715692, - "learning_rate": 3.33014619147461e-07, - "loss": 0.7755, - "num_input_tokens_seen": 145186635, - "step": 6813 - }, - { - "epoch": 0.8193350568147658, - "flos": 17539457363880.0, - "grad_norm": 6.637879430123556, - "learning_rate": 3.325843400177362e-07, - "loss": 0.6896, - "num_input_tokens_seen": 145207695, - "step": 6814 - }, - { - "epoch": 0.8194552997054049, - "flos": 15028702013040.0, - "grad_norm": 3.817789254712208, - "learning_rate": 3.32154313833642e-07, - "loss": 0.7164, - "num_input_tokens_seen": 145227570, - "step": 6815 - }, - { - "epoch": 0.819575542596044, - "flos": 19078476951600.0, - "grad_norm": 4.093319003880506, - "learning_rate": 3.3172454066041164e-07, - "loss": 0.5726, - "num_input_tokens_seen": 145246795, - "step": 6816 - }, - { - "epoch": 0.8196957854866831, - "flos": 21346979050800.0, - "grad_norm": 6.037955711968041, - "learning_rate": 3.3129502056324234e-07, - "loss": 0.7371, - "num_input_tokens_seen": 145267880, - "step": 6817 - }, - { - "epoch": 0.8198160283773221, - "flos": 50905819551360.0, - "grad_norm": 0.7885394109360259, - "learning_rate": 3.3086575360729165e-07, - "loss": 0.6155, - "num_input_tokens_seen": 145325135, - "step": 6818 - }, - { - "epoch": 0.8199362712679613, - "flos": 
12111658446000.0, - "grad_norm": 4.608425236498949, - "learning_rate": 3.3043673985767906e-07, - "loss": 0.6972, - "num_input_tokens_seen": 145343920, - "step": 6819 - }, - { - "epoch": 0.8200565141586004, - "flos": 15914621098680.0, - "grad_norm": 3.198298246169008, - "learning_rate": 3.3000797937948564e-07, - "loss": 0.7541, - "num_input_tokens_seen": 145361935, - "step": 6820 - }, - { - "epoch": 0.8201767570492394, - "flos": 51479277068160.0, - "grad_norm": 0.9332300858497173, - "learning_rate": 3.295794722377534e-07, - "loss": 0.6701, - "num_input_tokens_seen": 145425260, - "step": 6821 - }, - { - "epoch": 0.8202969999398786, - "flos": 16917442451160.0, - "grad_norm": 3.3396779872515725, - "learning_rate": 3.291512184974876e-07, - "loss": 0.7818, - "num_input_tokens_seen": 145445370, - "step": 6822 - }, - { - "epoch": 0.8204172428305176, - "flos": 20698342603080.0, - "grad_norm": 6.162859316984082, - "learning_rate": 3.2872321822365346e-07, - "loss": 0.6526, - "num_input_tokens_seen": 145465305, - "step": 6823 - }, - { - "epoch": 0.8205374857211567, - "flos": 15270986924640.0, - "grad_norm": 8.791014106240148, - "learning_rate": 3.282954714811783e-07, - "loss": 0.7175, - "num_input_tokens_seen": 145483930, - "step": 6824 - }, - { - "epoch": 0.8206577286117959, - "flos": 9546678566400.0, - "grad_norm": 5.13692222743377, - "learning_rate": 3.2786797833495093e-07, - "loss": 0.6933, - "num_input_tokens_seen": 145499005, - "step": 6825 - }, - { - "epoch": 0.8207779715024349, - "flos": 18511509726600.0, - "grad_norm": 4.326603480868574, - "learning_rate": 3.274407388498213e-07, - "loss": 0.7142, - "num_input_tokens_seen": 145516855, - "step": 6826 - }, - { - "epoch": 0.820898214393074, - "flos": 14326125976200.0, - "grad_norm": 3.718393132372832, - "learning_rate": 3.270137530906021e-07, - "loss": 0.7316, - "num_input_tokens_seen": 145535810, - "step": 6827 - }, - { - "epoch": 0.8210184572837131, - "flos": 11355744359280.0, - "grad_norm": 3.872287662366047, - "learning_rate": 3.265870211220665e-07, - "loss": 0.8148, - "num_input_tokens_seen": 145553365, - "step": 6828 - }, - { - "epoch": 0.8211387001743522, - "flos": 15216889035720.0, - "grad_norm": 6.743003893033493, - "learning_rate": 3.2616054300894934e-07, - "loss": 0.7943, - "num_input_tokens_seen": 145572535, - "step": 6829 - }, - { - "epoch": 0.8212589430649913, - "flos": 20315699885640.0, - "grad_norm": 7.295709096161956, - "learning_rate": 3.2573431881594693e-07, - "loss": 0.8387, - "num_input_tokens_seen": 145591800, - "step": 6830 - }, - { - "epoch": 0.8213791859556304, - "flos": 16431827849280.0, - "grad_norm": 5.086741862876572, - "learning_rate": 3.2530834860771663e-07, - "loss": 0.6328, - "num_input_tokens_seen": 145610900, - "step": 6831 - }, - { - "epoch": 0.8214994288462695, - "flos": 12138596580600.0, - "grad_norm": 3.5928594398003657, - "learning_rate": 3.248826324488794e-07, - "loss": 0.7188, - "num_input_tokens_seen": 145627915, - "step": 6832 - }, - { - "epoch": 0.8216196717369085, - "flos": 18478017980280.0, - "grad_norm": 6.1584527581817055, - "learning_rate": 3.244571704040138e-07, - "loss": 0.8614, - "num_input_tokens_seen": 145647795, - "step": 6833 - }, - { - "epoch": 0.8217399146275477, - "flos": 18589728013680.0, - "grad_norm": 5.228928741675723, - "learning_rate": 3.2403196253766374e-07, - "loss": 0.7403, - "num_input_tokens_seen": 145666595, - "step": 6834 - }, - { - "epoch": 0.8218601575181868, - "flos": 18780764432760.0, - "grad_norm": 6.788112841736534, - "learning_rate": 3.2360700891433254e-07, - "loss": 
0.7724, - "num_input_tokens_seen": 145685340, - "step": 6835 - }, - { - "epoch": 0.8219804004088258, - "flos": 49891256845080.0, - "grad_norm": 0.8264599741932342, - "learning_rate": 3.231823095984847e-07, - "loss": 0.5848, - "num_input_tokens_seen": 145739700, - "step": 6836 - }, - { - "epoch": 0.822100643299465, - "flos": 14218088498160.0, - "grad_norm": 13.94124481116251, - "learning_rate": 3.2275786465454814e-07, - "loss": 0.7419, - "num_input_tokens_seen": 145756070, - "step": 6837 - }, - { - "epoch": 0.822220886190104, - "flos": 18079043214840.0, - "grad_norm": 4.334080093791463, - "learning_rate": 3.2233367414690917e-07, - "loss": 0.7447, - "num_input_tokens_seen": 145777980, - "step": 6838 - }, - { - "epoch": 0.8223411290807431, - "flos": 20401136643600.0, - "grad_norm": 6.863849544716854, - "learning_rate": 3.219097381399183e-07, - "loss": 0.8205, - "num_input_tokens_seen": 145794875, - "step": 6839 - }, - { - "epoch": 0.8224613719713821, - "flos": 16995977337840.0, - "grad_norm": 3.913583570101507, - "learning_rate": 3.2148605669788584e-07, - "loss": 0.7957, - "num_input_tokens_seen": 145814485, - "step": 6840 - }, - { - "epoch": 0.8225816148620213, - "flos": 11436907022640.0, - "grad_norm": 8.220645816428302, - "learning_rate": 3.2106262988508405e-07, - "loss": 0.7501, - "num_input_tokens_seen": 145832255, - "step": 6841 - }, - { - "epoch": 0.8227018577526604, - "flos": 13515575781240.0, - "grad_norm": 4.437366322397824, - "learning_rate": 3.206394577657465e-07, - "loss": 0.721, - "num_input_tokens_seen": 145849755, - "step": 6842 - }, - { - "epoch": 0.8228221006432994, - "flos": 16269882442080.0, - "grad_norm": 4.3337641037281545, - "learning_rate": 3.202165404040675e-07, - "loss": 0.6945, - "num_input_tokens_seen": 145867395, - "step": 6843 - }, - { - "epoch": 0.8229423435339386, - "flos": 17646956622600.0, - "grad_norm": 5.2792710428173955, - "learning_rate": 3.1979387786420396e-07, - "loss": 0.7227, - "num_input_tokens_seen": 145887355, - "step": 6844 - }, - { - "epoch": 0.8230625864245776, - "flos": 17484884575560.0, - "grad_norm": 4.552638644893909, - "learning_rate": 3.1937147021027346e-07, - "loss": 0.8088, - "num_input_tokens_seen": 145905530, - "step": 6845 - }, - { - "epoch": 0.8231828293152167, - "flos": 12084245412000.0, - "grad_norm": 6.14443595215753, - "learning_rate": 3.189493175063547e-07, - "loss": 0.7519, - "num_input_tokens_seen": 145922485, - "step": 6846 - }, - { - "epoch": 0.8233030722058559, - "flos": 13785685306320.0, - "grad_norm": 4.126322678714906, - "learning_rate": 3.1852741981648776e-07, - "loss": 0.6592, - "num_input_tokens_seen": 145940855, - "step": 6847 - }, - { - "epoch": 0.8234233150964949, - "flos": 20859908090760.0, - "grad_norm": 8.42206342553593, - "learning_rate": 3.1810577720467404e-07, - "loss": 0.6769, - "num_input_tokens_seen": 145962305, - "step": 6848 - }, - { - "epoch": 0.823543557987134, - "flos": 24802183770600.0, - "grad_norm": 3.709124377171074, - "learning_rate": 3.176843897348769e-07, - "loss": 0.5574, - "num_input_tokens_seen": 145985220, - "step": 6849 - }, - { - "epoch": 0.8236638008777731, - "flos": 12462677354760.0, - "grad_norm": 7.411968872186112, - "learning_rate": 3.1726325747102034e-07, - "loss": 0.7379, - "num_input_tokens_seen": 146003315, - "step": 6850 - }, - { - "epoch": 0.8237840437684122, - "flos": 45430693484640.0, - "grad_norm": 3.9704281120176694, - "learning_rate": 3.1684238047698974e-07, - "loss": 0.6306, - "num_input_tokens_seen": 146031305, - "step": 6851 - }, - { - "epoch": 0.8239042866590512, - 
"flos": 20023781139480.0, - "grad_norm": 8.885637382696716, - "learning_rate": 3.1642175881663155e-07, - "loss": 0.5067, - "num_input_tokens_seen": 146050755, - "step": 6852 - }, - { - "epoch": 0.8240245295496904, - "flos": 15892178678400.0, - "grad_norm": 7.667305325430124, - "learning_rate": 3.160013925537537e-07, - "loss": 0.8312, - "num_input_tokens_seen": 146071310, - "step": 6853 - }, - { - "epoch": 0.8241447724403295, - "flos": 14621147398440.0, - "grad_norm": 5.008552675104056, - "learning_rate": 3.155812817521266e-07, - "loss": 0.7346, - "num_input_tokens_seen": 146091405, - "step": 6854 - }, - { - "epoch": 0.8242650153309685, - "flos": 16296630616920.0, - "grad_norm": 8.055763920102537, - "learning_rate": 3.151614264754787e-07, - "loss": 0.7593, - "num_input_tokens_seen": 146109070, - "step": 6855 - }, - { - "epoch": 0.8243852582216077, - "flos": 16324075310880.0, - "grad_norm": 13.657528745372058, - "learning_rate": 3.147418267875035e-07, - "loss": 0.7825, - "num_input_tokens_seen": 146126920, - "step": 6856 - }, - { - "epoch": 0.8245055011122467, - "flos": 18052516659720.0, - "grad_norm": 6.405173606693636, - "learning_rate": 3.1432248275185315e-07, - "loss": 0.6328, - "num_input_tokens_seen": 146147150, - "step": 6857 - }, - { - "epoch": 0.8246257440028858, - "flos": 12754026221640.0, - "grad_norm": 5.113424860115657, - "learning_rate": 3.139033944321412e-07, - "loss": 0.7648, - "num_input_tokens_seen": 146164230, - "step": 6858 - }, - { - "epoch": 0.824745986893525, - "flos": 18322404565080.0, - "grad_norm": 4.076232003105276, - "learning_rate": 3.1348456189194507e-07, - "loss": 0.779, - "num_input_tokens_seen": 146184410, - "step": 6859 - }, - { - "epoch": 0.824866229784164, - "flos": 13706517220440.0, - "grad_norm": 4.1050652361017805, - "learning_rate": 3.1306598519479876e-07, - "loss": 0.8196, - "num_input_tokens_seen": 146203950, - "step": 6860 - }, - { - "epoch": 0.8249864726748031, - "flos": 17457978100920.0, - "grad_norm": 2.979617656015214, - "learning_rate": 3.1264766440420177e-07, - "loss": 0.7672, - "num_input_tokens_seen": 146226140, - "step": 6861 - }, - { - "epoch": 0.8251067155654422, - "flos": 14838837012480.0, - "grad_norm": 5.552391725984775, - "learning_rate": 3.122295995836124e-07, - "loss": 0.6672, - "num_input_tokens_seen": 146245730, - "step": 6862 - }, - { - "epoch": 0.8252269584560813, - "flos": 18322024645560.0, - "grad_norm": 3.4008230671676656, - "learning_rate": 3.118117907964508e-07, - "loss": 0.7538, - "num_input_tokens_seen": 146267395, - "step": 6863 - }, - { - "epoch": 0.8253472013467203, - "flos": 12489425529600.0, - "grad_norm": 4.016872108450122, - "learning_rate": 3.1139423810609856e-07, - "loss": 0.7999, - "num_input_tokens_seen": 146283810, - "step": 6864 - }, - { - "epoch": 0.8254674442373595, - "flos": 16400995539600.0, - "grad_norm": 3.612194858850118, - "learning_rate": 3.1097694157589714e-07, - "loss": 0.7355, - "num_input_tokens_seen": 146303415, - "step": 6865 - }, - { - "epoch": 0.8255876871279986, - "flos": 18156470002920.0, - "grad_norm": 7.240310508012858, - "learning_rate": 3.105599012691511e-07, - "loss": 0.7323, - "num_input_tokens_seen": 146321565, - "step": 6866 - }, - { - "epoch": 0.8257079300186376, - "flos": 20132198537040.0, - "grad_norm": 2.4234550982093954, - "learning_rate": 3.101431172491249e-07, - "loss": 0.8086, - "num_input_tokens_seen": 146342830, - "step": 6867 - }, - { - "epoch": 0.8258281729092768, - "flos": 12003431008200.0, - "grad_norm": 4.723098872971768, - "learning_rate": 3.097265895790444e-07, - 
"loss": 0.7063, - "num_input_tokens_seen": 146360760, - "step": 6868 - }, - { - "epoch": 0.8259484157999158, - "flos": 15675597162960.0, - "grad_norm": 3.2870681769180012, - "learning_rate": 3.093103183220962e-07, - "loss": 0.8177, - "num_input_tokens_seen": 146380525, - "step": 6869 - }, - { - "epoch": 0.8260686586905549, - "flos": 42980942385360.0, - "grad_norm": 0.9343758024894815, - "learning_rate": 3.0889430354142796e-07, - "loss": 0.6384, - "num_input_tokens_seen": 146441755, - "step": 6870 - }, - { - "epoch": 0.826188901581194, - "flos": 20185219987320.0, - "grad_norm": 6.938655722638549, - "learning_rate": 3.084785453001497e-07, - "loss": 0.6893, - "num_input_tokens_seen": 146462390, - "step": 6871 - }, - { - "epoch": 0.8263091444718331, - "flos": 17349813983040.0, - "grad_norm": 4.428041720902781, - "learning_rate": 3.080630436613314e-07, - "loss": 0.7967, - "num_input_tokens_seen": 146479880, - "step": 6872 - }, - { - "epoch": 0.8264293873624722, - "flos": 12516521964000.0, - "grad_norm": 4.822351651913402, - "learning_rate": 3.076477986880039e-07, - "loss": 0.8351, - "num_input_tokens_seen": 146497395, - "step": 6873 - }, - { - "epoch": 0.8265496302531112, - "flos": 17647938081360.0, - "grad_norm": 3.474115569270467, - "learning_rate": 3.0723281044315986e-07, - "loss": 0.6815, - "num_input_tokens_seen": 146519070, - "step": 6874 - }, - { - "epoch": 0.8266698731437504, - "flos": 10247956544880.0, - "grad_norm": 5.676678097695299, - "learning_rate": 3.068180789897521e-07, - "loss": 0.7421, - "num_input_tokens_seen": 146537200, - "step": 6875 - }, - { - "epoch": 0.8267901160343895, - "flos": 22591072196160.0, - "grad_norm": 3.4248680367768825, - "learning_rate": 3.064036043906966e-07, - "loss": 0.8074, - "num_input_tokens_seen": 146560360, - "step": 6876 - }, - { - "epoch": 0.8269103589250285, - "flos": 29878235600640.0, - "grad_norm": 4.208887170788814, - "learning_rate": 3.059893867088668e-07, - "loss": 0.6665, - "num_input_tokens_seen": 146584225, - "step": 6877 - }, - { - "epoch": 0.8270306018156677, - "flos": 22210297416360.0, - "grad_norm": 5.830188375737274, - "learning_rate": 3.055754260071004e-07, - "loss": 0.6579, - "num_input_tokens_seen": 146606240, - "step": 6878 - }, - { - "epoch": 0.8271508447063067, - "flos": 18482260414920.0, - "grad_norm": 4.301822879225563, - "learning_rate": 3.051617223481948e-07, - "loss": 0.7274, - "num_input_tokens_seen": 146627280, - "step": 6879 - }, - { - "epoch": 0.8272710875969458, - "flos": 12813632943600.0, - "grad_norm": 5.012574976790793, - "learning_rate": 3.047482757949078e-07, - "loss": 0.7408, - "num_input_tokens_seen": 146644630, - "step": 6880 - }, - { - "epoch": 0.827391330487585, - "flos": 14433498595080.0, - "grad_norm": 11.347540608274787, - "learning_rate": 3.043350864099605e-07, - "loss": 0.8366, - "num_input_tokens_seen": 146662910, - "step": 6881 - }, - { - "epoch": 0.827511573378224, - "flos": 12272084175120.0, - "grad_norm": 4.637568125262968, - "learning_rate": 3.039221542560315e-07, - "loss": 0.796, - "num_input_tokens_seen": 146679195, - "step": 6882 - }, - { - "epoch": 0.8276318162688631, - "flos": 13407633283080.0, - "grad_norm": 5.1408674652521515, - "learning_rate": 3.0350947939576356e-07, - "loss": 0.7323, - "num_input_tokens_seen": 146698070, - "step": 6883 - }, - { - "epoch": 0.8277520591595022, - "flos": 14135216196960.0, - "grad_norm": 5.018431071673953, - "learning_rate": 3.0309706189175876e-07, - "loss": 0.7085, - "num_input_tokens_seen": 146717625, - "step": 6884 - }, - { - "epoch": 0.8278723020501413, 
- "flos": 50083116423120.0, - "grad_norm": 0.7876317110876067, - "learning_rate": 3.0268490180658045e-07, - "loss": 0.5863, - "num_input_tokens_seen": 146780125, - "step": 6885 - }, - { - "epoch": 0.8279925449407803, - "flos": 13271676211680.0, - "grad_norm": 4.759476294037597, - "learning_rate": 3.0227299920275305e-07, - "loss": 0.7651, - "num_input_tokens_seen": 146796160, - "step": 6886 - }, - { - "epoch": 0.8281127878314195, - "flos": 15081818443200.0, - "grad_norm": 3.9675949698741557, - "learning_rate": 3.018613541427613e-07, - "loss": 0.8365, - "num_input_tokens_seen": 146815400, - "step": 6887 - }, - { - "epoch": 0.8282330307220586, - "flos": 13137523758000.0, - "grad_norm": 3.868294112502346, - "learning_rate": 3.0144996668905243e-07, - "loss": 0.722, - "num_input_tokens_seen": 146832500, - "step": 6888 - }, - { - "epoch": 0.8283532736126976, - "flos": 14676764965440.0, - "grad_norm": 4.663437179997937, - "learning_rate": 3.010388369040331e-07, - "loss": 0.8126, - "num_input_tokens_seen": 146850880, - "step": 6889 - }, - { - "epoch": 0.8284735165033368, - "flos": 23397854855880.0, - "grad_norm": 3.015807151234558, - "learning_rate": 3.0062796485007156e-07, - "loss": 0.8211, - "num_input_tokens_seen": 146871540, - "step": 6890 - }, - { - "epoch": 0.8285937593939758, - "flos": 19536900139200.0, - "grad_norm": 11.184028299931644, - "learning_rate": 3.002173505894965e-07, - "loss": 0.6391, - "num_input_tokens_seen": 146890410, - "step": 6891 - }, - { - "epoch": 0.8287140022846149, - "flos": 14757864308880.0, - "grad_norm": 5.195754504224007, - "learning_rate": 2.998069941845973e-07, - "loss": 0.6007, - "num_input_tokens_seen": 146909200, - "step": 6892 - }, - { - "epoch": 0.8288342451752541, - "flos": 52181536505400.0, - "grad_norm": 0.7624022327343369, - "learning_rate": 2.993968956976258e-07, - "loss": 0.6102, - "num_input_tokens_seen": 146976665, - "step": 6893 - }, - { - "epoch": 0.8289544880658931, - "flos": 17998323790920.0, - "grad_norm": 5.021509444219802, - "learning_rate": 2.9898705519079313e-07, - "loss": 0.682, - "num_input_tokens_seen": 146995490, - "step": 6894 - }, - { - "epoch": 0.8290747309565322, - "flos": 16296978876480.0, - "grad_norm": 3.2511789677716845, - "learning_rate": 2.985774727262715e-07, - "loss": 0.729, - "num_input_tokens_seen": 147014055, - "step": 6895 - }, - { - "epoch": 0.8291949738471713, - "flos": 17023453691760.0, - "grad_norm": 5.2181107363664125, - "learning_rate": 2.981681483661949e-07, - "loss": 0.7944, - "num_input_tokens_seen": 147033360, - "step": 6896 - }, - { - "epoch": 0.8293152167378104, - "flos": 38707869528480.0, - "grad_norm": 3.3615126794646573, - "learning_rate": 2.9775908217265633e-07, - "loss": 0.6905, - "num_input_tokens_seen": 147058315, - "step": 6897 - }, - { - "epoch": 0.8294354596284494, - "flos": 46706129990400.0, - "grad_norm": 0.8169394764047827, - "learning_rate": 2.9735027420771253e-07, - "loss": 0.5142, - "num_input_tokens_seen": 147118370, - "step": 6898 - }, - { - "epoch": 0.8295557025190886, - "flos": 18186795753240.0, - "grad_norm": 6.597033221119675, - "learning_rate": 2.969417245333774e-07, - "loss": 0.6961, - "num_input_tokens_seen": 147137470, - "step": 6899 - }, - { - "epoch": 0.8296759454097277, - "flos": 18402522449760.0, - "grad_norm": 8.513900162186522, - "learning_rate": 2.9653343321162915e-07, - "loss": 0.7643, - "num_input_tokens_seen": 147156700, - "step": 6900 - }, - { - "epoch": 0.8297961883003667, - "flos": 17672691678720.0, - "grad_norm": 4.024272469110366, - "learning_rate": 
2.9612540030440446e-07, - "loss": 0.6328, - "num_input_tokens_seen": 147176965, - "step": 6901 - }, - { - "epoch": 0.8299164311910058, - "flos": 49733870472120.0, - "grad_norm": 0.8367147664635128, - "learning_rate": 2.9571762587360206e-07, - "loss": 0.6364, - "num_input_tokens_seen": 147233070, - "step": 6902 - }, - { - "epoch": 0.8300366740816449, - "flos": 18484096692600.0, - "grad_norm": 3.6169929204886806, - "learning_rate": 2.953101099810806e-07, - "loss": 0.7255, - "num_input_tokens_seen": 147252395, - "step": 6903 - }, - { - "epoch": 0.830156916972284, - "flos": 13164746832240.0, - "grad_norm": 4.944370559730894, - "learning_rate": 2.9490285268865965e-07, - "loss": 0.8246, - "num_input_tokens_seen": 147269605, - "step": 6904 - }, - { - "epoch": 0.830277159862923, - "flos": 19293475469040.0, - "grad_norm": 5.530487870119866, - "learning_rate": 2.9449585405812085e-07, - "loss": 0.7873, - "num_input_tokens_seen": 147286705, - "step": 6905 - }, - { - "epoch": 0.8303974027535622, - "flos": 14569360686600.0, - "grad_norm": 3.5010799376735844, - "learning_rate": 2.940891141512043e-07, - "loss": 0.7275, - "num_input_tokens_seen": 147304445, - "step": 6906 - }, - { - "epoch": 0.8305176456442013, - "flos": 12516870223560.0, - "grad_norm": 4.352962467311347, - "learning_rate": 2.9368263302961385e-07, - "loss": 0.6962, - "num_input_tokens_seen": 147322865, - "step": 6907 - }, - { - "epoch": 0.8306378885348403, - "flos": 18778991475000.0, - "grad_norm": 5.988657084177971, - "learning_rate": 2.9327641075501075e-07, - "loss": 0.7849, - "num_input_tokens_seen": 147341575, - "step": 6908 - }, - { - "epoch": 0.8307581314254795, - "flos": 24936747803760.0, - "grad_norm": 5.415161021366687, - "learning_rate": 2.9287044738901866e-07, - "loss": 0.6387, - "num_input_tokens_seen": 147359280, - "step": 6909 - }, - { - "epoch": 0.8308783743161186, - "flos": 12810213667920.0, - "grad_norm": 3.172054642724981, - "learning_rate": 2.9246474299322274e-07, - "loss": 0.9051, - "num_input_tokens_seen": 147374560, - "step": 6910 - }, - { - "epoch": 0.8309986172067576, - "flos": 51188529740520.0, - "grad_norm": 0.9045361064412161, - "learning_rate": 2.920592976291678e-07, - "loss": 0.6554, - "num_input_tokens_seen": 147431610, - "step": 6911 - }, - { - "epoch": 0.8311188600973968, - "flos": 16323885351120.0, - "grad_norm": 5.011056691278598, - "learning_rate": 2.916541113583595e-07, - "loss": 0.7924, - "num_input_tokens_seen": 147449830, - "step": 6912 - }, - { - "epoch": 0.8312391029880358, - "flos": 13704206043360.0, - "grad_norm": 5.546347665569493, - "learning_rate": 2.912491842422642e-07, - "loss": 0.6452, - "num_input_tokens_seen": 147467255, - "step": 6913 - }, - { - "epoch": 0.8313593458786749, - "flos": 14893346480880.0, - "grad_norm": 5.30977264406078, - "learning_rate": 2.9084451634230857e-07, - "loss": 0.6876, - "num_input_tokens_seen": 147486275, - "step": 6914 - }, - { - "epoch": 0.831479588769314, - "flos": 23587086657240.0, - "grad_norm": 9.51987105723019, - "learning_rate": 2.9044010771988125e-07, - "loss": 0.7092, - "num_input_tokens_seen": 147505810, - "step": 6915 - }, - { - "epoch": 0.8315998316599531, - "flos": 33253734015240.0, - "grad_norm": 3.286647752884097, - "learning_rate": 2.900359584363303e-07, - "loss": 0.7226, - "num_input_tokens_seen": 147528635, - "step": 6916 - }, - { - "epoch": 0.8317200745505922, - "flos": 13402789309200.0, - "grad_norm": 5.919141153463812, - "learning_rate": 2.8963206855296494e-07, - "loss": 0.824, - "num_input_tokens_seen": 147544595, - "step": 6917 - }, - { - 
"epoch": 0.8318403174412313, - "flos": 17727581066640.0, - "grad_norm": 3.254882958999022, - "learning_rate": 2.892284381310548e-07, - "loss": 0.761, - "num_input_tokens_seen": 147565730, - "step": 6918 - }, - { - "epoch": 0.8319605603318704, - "flos": 16404509795160.0, - "grad_norm": 5.312939075949945, - "learning_rate": 2.888250672318302e-07, - "loss": 0.6976, - "num_input_tokens_seen": 147582850, - "step": 6919 - }, - { - "epoch": 0.8320808032225094, - "flos": 27502360882560.0, - "grad_norm": 5.071396722060345, - "learning_rate": 2.884219559164831e-07, - "loss": 0.6813, - "num_input_tokens_seen": 147605715, - "step": 6920 - }, - { - "epoch": 0.8322010461131486, - "flos": 9276759001080.0, - "grad_norm": 3.613020603985903, - "learning_rate": 2.880191042461635e-07, - "loss": 0.7986, - "num_input_tokens_seen": 147621375, - "step": 6921 - }, - { - "epoch": 0.8323212890037877, - "flos": 11517689766480.0, - "grad_norm": 3.5799928812137343, - "learning_rate": 2.876165122819849e-07, - "loss": 0.7911, - "num_input_tokens_seen": 147639075, - "step": 6922 - }, - { - "epoch": 0.8324415318944267, - "flos": 15885941666280.0, - "grad_norm": 5.038939860426217, - "learning_rate": 2.872141800850201e-07, - "loss": 0.7776, - "num_input_tokens_seen": 147655970, - "step": 6923 - }, - { - "epoch": 0.8325617747850659, - "flos": 25121737170480.0, - "grad_norm": 4.186090716170493, - "learning_rate": 2.868121077163024e-07, - "loss": 0.7207, - "num_input_tokens_seen": 147675245, - "step": 6924 - }, - { - "epoch": 0.8326820176757049, - "flos": 13407823242840.0, - "grad_norm": 8.65336257757213, - "learning_rate": 2.864102952368257e-07, - "loss": 0.7093, - "num_input_tokens_seen": 147692890, - "step": 6925 - }, - { - "epoch": 0.832802260566344, - "flos": 26449082536560.0, - "grad_norm": 6.16505816677402, - "learning_rate": 2.860087427075444e-07, - "loss": 0.5766, - "num_input_tokens_seen": 147716860, - "step": 6926 - }, - { - "epoch": 0.8329225034569832, - "flos": 10354727624520.0, - "grad_norm": 5.3068562766726375, - "learning_rate": 2.856074501893744e-07, - "loss": 0.8398, - "num_input_tokens_seen": 147731780, - "step": 6927 - }, - { - "epoch": 0.8330427463476222, - "flos": 13191716626800.0, - "grad_norm": 3.1735937413455146, - "learning_rate": 2.8520641774319054e-07, - "loss": 0.8079, - "num_input_tokens_seen": 147749590, - "step": 6928 - }, - { - "epoch": 0.8331629892382613, - "flos": 13164588532440.0, - "grad_norm": 3.7535754642565986, - "learning_rate": 2.848056454298309e-07, - "loss": 0.7323, - "num_input_tokens_seen": 147766635, - "step": 6929 - }, - { - "epoch": 0.8332832321289004, - "flos": 12732850199760.0, - "grad_norm": 5.981066075350203, - "learning_rate": 2.844051333100905e-07, - "loss": 0.6323, - "num_input_tokens_seen": 147783900, - "step": 6930 - }, - { - "epoch": 0.8334034750195395, - "flos": 10976869177080.0, - "grad_norm": 3.5242573868957634, - "learning_rate": 2.840048814447269e-07, - "loss": 0.8245, - "num_input_tokens_seen": 147801785, - "step": 6931 - }, - { - "epoch": 0.8335237179101785, - "flos": 14190548824320.0, - "grad_norm": 7.236987571166929, - "learning_rate": 2.836048898944587e-07, - "loss": 0.7178, - "num_input_tokens_seen": 147819930, - "step": 6932 - }, - { - "epoch": 0.8336439608008177, - "flos": 15918483613800.0, - "grad_norm": 4.044574805547057, - "learning_rate": 2.832051587199642e-07, - "loss": 0.705, - "num_input_tokens_seen": 147836905, - "step": 6933 - }, - { - "epoch": 0.8337642036914568, - "flos": 44001710443800.0, - "grad_norm": 0.79840016735014, - "learning_rate": 
2.828056879818821e-07, - "loss": 0.5915, - "num_input_tokens_seen": 147895700, - "step": 6934 - }, - { - "epoch": 0.8338844465820958, - "flos": 19914983822400.0, - "grad_norm": 4.851747272799882, - "learning_rate": 2.824064777408117e-07, - "loss": 0.8199, - "num_input_tokens_seen": 147915210, - "step": 6935 - }, - { - "epoch": 0.8340046894727349, - "flos": 22371166384920.0, - "grad_norm": 2.8913537009080854, - "learning_rate": 2.8200752805731263e-07, - "loss": 0.7493, - "num_input_tokens_seen": 147937920, - "step": 6936 - }, - { - "epoch": 0.834124932363374, - "flos": 19888393947360.0, - "grad_norm": 9.475709478946394, - "learning_rate": 2.8160883899190625e-07, - "loss": 0.7925, - "num_input_tokens_seen": 147960910, - "step": 6937 - }, - { - "epoch": 0.8342451752540131, - "flos": 17996234233560.0, - "grad_norm": 3.808372890230803, - "learning_rate": 2.8121041060507234e-07, - "loss": 0.7295, - "num_input_tokens_seen": 147979660, - "step": 6938 - }, - { - "epoch": 0.8343654181446521, - "flos": 19510120304400.0, - "grad_norm": 4.55577323104059, - "learning_rate": 2.808122429572528e-07, - "loss": 0.7013, - "num_input_tokens_seen": 147999585, - "step": 6939 - }, - { - "epoch": 0.8344856610352913, - "flos": 15189729281400.0, - "grad_norm": 8.974998503873483, - "learning_rate": 2.804143361088489e-07, - "loss": 0.7366, - "num_input_tokens_seen": 148018485, - "step": 6940 - }, - { - "epoch": 0.8346059039259304, - "flos": 19124754830400.0, - "grad_norm": 5.6522182194941175, - "learning_rate": 2.8001669012022277e-07, - "loss": 0.7405, - "num_input_tokens_seen": 148036175, - "step": 6941 - }, - { - "epoch": 0.8347261468165694, - "flos": 21296268777600.0, - "grad_norm": 3.4800297624038787, - "learning_rate": 2.7961930505169795e-07, - "loss": 0.6792, - "num_input_tokens_seen": 148060140, - "step": 6942 - }, - { - "epoch": 0.8348463897072086, - "flos": 19348364856960.0, - "grad_norm": 5.895602847425385, - "learning_rate": 2.792221809635558e-07, - "loss": 0.7367, - "num_input_tokens_seen": 148080490, - "step": 6943 - }, - { - "epoch": 0.8349666325978476, - "flos": 17106832552320.0, - "grad_norm": 2.608659931815962, - "learning_rate": 2.788253179160411e-07, - "loss": 0.7361, - "num_input_tokens_seen": 148101370, - "step": 6944 - }, - { - "epoch": 0.8350868754884867, - "flos": 9357478425000.0, - "grad_norm": 5.74709080617098, - "learning_rate": 2.7842871596935725e-07, - "loss": 0.6348, - "num_input_tokens_seen": 148119605, - "step": 6945 - }, - { - "epoch": 0.8352071183791259, - "flos": 19429020960960.0, - "grad_norm": 3.584283850524449, - "learning_rate": 2.780323751836682e-07, - "loss": 0.6749, - "num_input_tokens_seen": 148140540, - "step": 6946 - }, - { - "epoch": 0.8353273612697649, - "flos": 15108946537560.0, - "grad_norm": 2.774164926473573, - "learning_rate": 2.7763629561909876e-07, - "loss": 0.7727, - "num_input_tokens_seen": 148161090, - "step": 6947 - }, - { - "epoch": 0.835447604160404, - "flos": 14432327176560.0, - "grad_norm": 4.72862292061249, - "learning_rate": 2.772404773357335e-07, - "loss": 0.7548, - "num_input_tokens_seen": 148180215, - "step": 6948 - }, - { - "epoch": 0.8355678470510431, - "flos": 17156561366760.0, - "grad_norm": 3.3523129065505346, - "learning_rate": 2.7684492039361853e-07, - "loss": 0.7579, - "num_input_tokens_seen": 148199160, - "step": 6949 - }, - { - "epoch": 0.8356880899416822, - "flos": 15513303496200.0, - "grad_norm": 3.5444023487244722, - "learning_rate": 2.764496248527586e-07, - "loss": 0.826, - "num_input_tokens_seen": 148217855, - "step": 6950 - }, - { - 
"epoch": 0.8358083328323213, - "flos": 20562765451200.0, - "grad_norm": 4.2108127101320445, - "learning_rate": 2.760545907731211e-07, - "loss": 0.7498, - "num_input_tokens_seen": 148238150, - "step": 6951 - }, - { - "epoch": 0.8359285757229604, - "flos": 20375084987880.0, - "grad_norm": 6.504707392474987, - "learning_rate": 2.75659818214631e-07, - "loss": 0.6593, - "num_input_tokens_seen": 148258975, - "step": 6952 - }, - { - "epoch": 0.8360488186135995, - "flos": 15676736921520.0, - "grad_norm": 3.5189883271674436, - "learning_rate": 2.752653072371749e-07, - "loss": 0.7675, - "num_input_tokens_seen": 148278130, - "step": 6953 - }, - { - "epoch": 0.8361690615042385, - "flos": 20262330175800.0, - "grad_norm": 6.090602590129213, - "learning_rate": 2.7487105790060105e-07, - "loss": 0.7368, - "num_input_tokens_seen": 148297260, - "step": 6954 - }, - { - "epoch": 0.8362893043948777, - "flos": 28825653773760.0, - "grad_norm": 4.198383138153478, - "learning_rate": 2.7447707026471587e-07, - "loss": 0.6817, - "num_input_tokens_seen": 148319955, - "step": 6955 - }, - { - "epoch": 0.8364095472855168, - "flos": 18155203604520.0, - "grad_norm": 3.115777559667982, - "learning_rate": 2.740833443892874e-07, - "loss": 0.792, - "num_input_tokens_seen": 148337845, - "step": 6956 - }, - { - "epoch": 0.8365297901761558, - "flos": 16644230250000.0, - "grad_norm": 4.533185874186263, - "learning_rate": 2.7368988033404327e-07, - "loss": 0.7805, - "num_input_tokens_seen": 148355080, - "step": 6957 - }, - { - "epoch": 0.836650033066795, - "flos": 20827144523520.0, - "grad_norm": 3.29114558928285, - "learning_rate": 2.732966781586712e-07, - "loss": 0.8373, - "num_input_tokens_seen": 148374545, - "step": 6958 - }, - { - "epoch": 0.836770275957434, - "flos": 16269945762000.0, - "grad_norm": 5.083774276536616, - "learning_rate": 2.729037379228205e-07, - "loss": 0.6527, - "num_input_tokens_seen": 148394450, - "step": 6959 - }, - { - "epoch": 0.8368905188480731, - "flos": 16458671004000.0, - "grad_norm": 2.863562014606812, - "learning_rate": 2.725110596860998e-07, - "loss": 0.7932, - "num_input_tokens_seen": 148414850, - "step": 6960 - }, - { - "epoch": 0.8370107617387123, - "flos": 9708117414240.0, - "grad_norm": 4.569019623822596, - "learning_rate": 2.7211864350807776e-07, - "loss": 0.6933, - "num_input_tokens_seen": 148432770, - "step": 6961 - }, - { - "epoch": 0.8371310046293513, - "flos": 18507678871440.0, - "grad_norm": 4.129161469539345, - "learning_rate": 2.717264894482836e-07, - "loss": 0.7201, - "num_input_tokens_seen": 148452830, - "step": 6962 - }, - { - "epoch": 0.8372512475199904, - "flos": 14461164908760.0, - "grad_norm": 4.2347060633667635, - "learning_rate": 2.7133459756620646e-07, - "loss": 0.7911, - "num_input_tokens_seen": 148469745, - "step": 6963 - }, - { - "epoch": 0.8373714904106295, - "flos": 14164022269200.0, - "grad_norm": 2.8684538245804134, - "learning_rate": 2.7094296792129733e-07, - "loss": 0.7121, - "num_input_tokens_seen": 148489065, - "step": 6964 - }, - { - "epoch": 0.8374917333012686, - "flos": 10896339712920.0, - "grad_norm": 4.128961452667281, - "learning_rate": 2.7055160057296424e-07, - "loss": 0.7447, - "num_input_tokens_seen": 148506025, - "step": 6965 - }, - { - "epoch": 0.8376119761919076, - "flos": 22260089550720.0, - "grad_norm": 4.35732841989932, - "learning_rate": 2.7016049558057896e-07, - "loss": 0.7058, - "num_input_tokens_seen": 148527705, - "step": 6966 - }, - { - "epoch": 0.8377322190825467, - "flos": 21588250843680.0, - "grad_norm": 4.008887907010691, - "learning_rate": 
2.6976965300347074e-07, - "loss": 0.7018, - "num_input_tokens_seen": 148550035, - "step": 6967 - }, - { - "epoch": 0.8378524619731859, - "flos": 19563268394520.0, - "grad_norm": 4.958653971244916, - "learning_rate": 2.693790729009309e-07, - "loss": 0.6711, - "num_input_tokens_seen": 148571365, - "step": 6968 - }, - { - "epoch": 0.8379727048638249, - "flos": 15135346452840.0, - "grad_norm": 4.775896253906294, - "learning_rate": 2.6898875533220946e-07, - "loss": 0.8607, - "num_input_tokens_seen": 148590390, - "step": 6969 - }, - { - "epoch": 0.838092947754464, - "flos": 14676733305480.0, - "grad_norm": 3.5715647698726545, - "learning_rate": 2.685987003565171e-07, - "loss": 0.808, - "num_input_tokens_seen": 148608150, - "step": 6970 - }, - { - "epoch": 0.8382131906451031, - "flos": 13218116542080.0, - "grad_norm": 7.199182718303755, - "learning_rate": 2.6820890803302566e-07, - "loss": 0.7325, - "num_input_tokens_seen": 148623395, - "step": 6971 - }, - { - "epoch": 0.8383334335357422, - "flos": 12462930634440.0, - "grad_norm": 9.071605659189549, - "learning_rate": 2.6781937842086557e-07, - "loss": 0.8043, - "num_input_tokens_seen": 148641905, - "step": 6972 - }, - { - "epoch": 0.8384536764263812, - "flos": 15136106291880.0, - "grad_norm": 6.911625629883049, - "learning_rate": 2.6743011157912933e-07, - "loss": 0.6696, - "num_input_tokens_seen": 148661345, - "step": 6973 - }, - { - "epoch": 0.8385739193170204, - "flos": 21265404807960.0, - "grad_norm": 2.71224416414052, - "learning_rate": 2.6704110756686725e-07, - "loss": 0.629, - "num_input_tokens_seen": 148681890, - "step": 6974 - }, - { - "epoch": 0.8386941622076595, - "flos": 17158524284280.0, - "grad_norm": 5.049081042685922, - "learning_rate": 2.6665236644309085e-07, - "loss": 0.8254, - "num_input_tokens_seen": 148701920, - "step": 6975 - }, - { - "epoch": 0.8388144050982985, - "flos": 16836691367280.0, - "grad_norm": 3.507127452813182, - "learning_rate": 2.662638882667727e-07, - "loss": 0.7751, - "num_input_tokens_seen": 148720580, - "step": 6976 - }, - { - "epoch": 0.8389346479889377, - "flos": 17782312154760.0, - "grad_norm": 4.0868376402278335, - "learning_rate": 2.658756730968443e-07, - "loss": 0.7154, - "num_input_tokens_seen": 148738765, - "step": 6977 - }, - { - "epoch": 0.8390548908795767, - "flos": 15513683415720.0, - "grad_norm": 3.8811739067935864, - "learning_rate": 2.654877209921975e-07, - "loss": 0.8769, - "num_input_tokens_seen": 148756020, - "step": 6978 - }, - { - "epoch": 0.8391751337702158, - "flos": 26179796170440.0, - "grad_norm": 6.01410257470329, - "learning_rate": 2.651000320116843e-07, - "loss": 0.6138, - "num_input_tokens_seen": 148776625, - "step": 6979 - }, - { - "epoch": 0.839295376660855, - "flos": 15595352638440.0, - "grad_norm": 3.5852198994336346, - "learning_rate": 2.647126062141163e-07, - "loss": 0.7339, - "num_input_tokens_seen": 148795420, - "step": 6980 - }, - { - "epoch": 0.839415619551494, - "flos": 13461921131760.0, - "grad_norm": 3.676330719923856, - "learning_rate": 2.643254436582669e-07, - "loss": 0.8206, - "num_input_tokens_seen": 148814630, - "step": 6981 - }, - { - "epoch": 0.8395358624421331, - "flos": 16998985034040.0, - "grad_norm": 6.105565905636936, - "learning_rate": 2.6393854440286743e-07, - "loss": 0.7985, - "num_input_tokens_seen": 148833520, - "step": 6982 - }, - { - "epoch": 0.8396561053327722, - "flos": 17856129707400.0, - "grad_norm": 4.404844235665833, - "learning_rate": 2.6355190850661045e-07, - "loss": 0.693, - "num_input_tokens_seen": 148850075, - "step": 6983 - }, - { - 
"epoch": 0.8397763482234113, - "flos": 16270737261000.0, - "grad_norm": 3.4425271467968273, - "learning_rate": 2.631655360281486e-07, - "loss": 0.8485, - "num_input_tokens_seen": 148869470, - "step": 6984 - }, - { - "epoch": 0.8398965911140504, - "flos": 16215816213120.0, - "grad_norm": 3.827944879770799, - "learning_rate": 2.6277942702609323e-07, - "loss": 0.6468, - "num_input_tokens_seen": 148888670, - "step": 6985 - }, - { - "epoch": 0.8400168340046895, - "flos": 15756158287080.0, - "grad_norm": 5.056646471546929, - "learning_rate": 2.623935815590186e-07, - "loss": 0.8543, - "num_input_tokens_seen": 148906770, - "step": 6986 - }, - { - "epoch": 0.8401370768953286, - "flos": 16458702663960.0, - "grad_norm": 3.695658435718666, - "learning_rate": 2.6200799968545516e-07, - "loss": 0.7901, - "num_input_tokens_seen": 148926785, - "step": 6987 - }, - { - "epoch": 0.8402573197859676, - "flos": 43659872923440.0, - "grad_norm": 1.339952959331002, - "learning_rate": 2.616226814638969e-07, - "loss": 0.586, - "num_input_tokens_seen": 148991610, - "step": 6988 - }, - { - "epoch": 0.8403775626766068, - "flos": 16593899896320.0, - "grad_norm": 16.397557220894214, - "learning_rate": 2.612376269527954e-07, - "loss": 0.749, - "num_input_tokens_seen": 149011035, - "step": 6989 - }, - { - "epoch": 0.8404978055672458, - "flos": 14325651076800.0, - "grad_norm": 2.864679804246781, - "learning_rate": 2.608528362105635e-07, - "loss": 0.6561, - "num_input_tokens_seen": 149030125, - "step": 6990 - }, - { - "epoch": 0.8406180484578849, - "flos": 20184903387720.0, - "grad_norm": 3.3084320425174303, - "learning_rate": 2.6046830929557374e-07, - "loss": 0.7131, - "num_input_tokens_seen": 149049495, - "step": 6991 - }, - { - "epoch": 0.8407382913485241, - "flos": 16189004718360.0, - "grad_norm": 5.938166209540527, - "learning_rate": 2.6008404626615776e-07, - "loss": 0.8213, - "num_input_tokens_seen": 149067715, - "step": 6992 - }, - { - "epoch": 0.8408585342391631, - "flos": 10113835751160.0, - "grad_norm": 4.758424816599448, - "learning_rate": 2.597000471806092e-07, - "loss": 0.7167, - "num_input_tokens_seen": 149084000, - "step": 6993 - }, - { - "epoch": 0.8409787771298022, - "flos": 14752735395360.0, - "grad_norm": 4.61738330406521, - "learning_rate": 2.593163120971793e-07, - "loss": 0.7098, - "num_input_tokens_seen": 149102585, - "step": 6994 - }, - { - "epoch": 0.8410990200204413, - "flos": 16939283332200.0, - "grad_norm": 3.481559950558234, - "learning_rate": 2.5893284107408165e-07, - "loss": 0.674, - "num_input_tokens_seen": 149119675, - "step": 6995 - }, - { - "epoch": 0.8412192629110804, - "flos": 17595296550600.0, - "grad_norm": 4.030385242535392, - "learning_rate": 2.5854963416948726e-07, - "loss": 0.7703, - "num_input_tokens_seen": 149141660, - "step": 6996 - }, - { - "epoch": 0.8413395058017195, - "flos": 18752496579840.0, - "grad_norm": 3.776605827894231, - "learning_rate": 2.5816669144152816e-07, - "loss": 0.6751, - "num_input_tokens_seen": 149162560, - "step": 6997 - }, - { - "epoch": 0.8414597486923585, - "flos": 46910997320640.0, - "grad_norm": 0.9142953299012502, - "learning_rate": 2.5778401294829777e-07, - "loss": 0.7239, - "num_input_tokens_seen": 149221020, - "step": 6998 - }, - { - "epoch": 0.8415799915829977, - "flos": 13947472413720.0, - "grad_norm": 4.711728967381353, - "learning_rate": 2.574015987478473e-07, - "loss": 0.6416, - "num_input_tokens_seen": 149238870, - "step": 6999 - }, - { - "epoch": 0.8417002344736367, - "flos": 14481486111720.0, - "grad_norm": 4.512335295062134, - 
"learning_rate": 2.570194488981887e-07, - "loss": 0.8402, - "num_input_tokens_seen": 149255135, - "step": 7000 - }, - { - "epoch": 0.8418204773642758, - "flos": 45823345240800.0, - "grad_norm": 0.8579633041706421, - "learning_rate": 2.566375634572939e-07, - "loss": 0.628, - "num_input_tokens_seen": 149315495, - "step": 7001 - }, - { - "epoch": 0.841940720254915, - "flos": 12463152254160.0, - "grad_norm": 8.159616294515967, - "learning_rate": 2.562559424830943e-07, - "loss": 0.7291, - "num_input_tokens_seen": 149333175, - "step": 7002 - }, - { - "epoch": 0.842060963145554, - "flos": 11863263162120.0, - "grad_norm": 3.906131101706114, - "learning_rate": 2.5587458603348256e-07, - "loss": 0.6743, - "num_input_tokens_seen": 149350185, - "step": 7003 - }, - { - "epoch": 0.8421812060361931, - "flos": 16026711051600.0, - "grad_norm": 5.313299679874944, - "learning_rate": 2.554934941663085e-07, - "loss": 0.83, - "num_input_tokens_seen": 149367440, - "step": 7004 - }, - { - "epoch": 0.8423014489268322, - "flos": 20369829434520.0, - "grad_norm": 5.661670090108441, - "learning_rate": 2.5511266693938484e-07, - "loss": 0.707, - "num_input_tokens_seen": 149385620, - "step": 7005 - }, - { - "epoch": 0.8424216918174713, - "flos": 18401667630840.0, - "grad_norm": 3.1612635913344027, - "learning_rate": 2.547321044104822e-07, - "loss": 0.7575, - "num_input_tokens_seen": 149406835, - "step": 7006 - }, - { - "epoch": 0.8425419347081103, - "flos": 18127853890440.0, - "grad_norm": 3.55998087374232, - "learning_rate": 2.5435180663733113e-07, - "loss": 0.7515, - "num_input_tokens_seen": 149426855, - "step": 7007 - }, - { - "epoch": 0.8426621775987495, - "flos": 18181413560040.0, - "grad_norm": 5.735313899954026, - "learning_rate": 2.539717736776241e-07, - "loss": 0.6926, - "num_input_tokens_seen": 149442800, - "step": 7008 - }, - { - "epoch": 0.8427824204893886, - "flos": 17241776505000.0, - "grad_norm": 2.929274222152459, - "learning_rate": 2.535920055890097e-07, - "loss": 0.7485, - "num_input_tokens_seen": 149463815, - "step": 7009 - }, - { - "epoch": 0.8429026633800276, - "flos": 11760734517120.0, - "grad_norm": 4.997450560057571, - "learning_rate": 2.5321250242910006e-07, - "loss": 0.6325, - "num_input_tokens_seen": 149481450, - "step": 7010 - }, - { - "epoch": 0.8430229062706668, - "flos": 16241329649520.0, - "grad_norm": 4.315256536997834, - "learning_rate": 2.5283326425546493e-07, - "loss": 0.8468, - "num_input_tokens_seen": 149500280, - "step": 7011 - }, - { - "epoch": 0.8431431491613058, - "flos": 25959288819960.0, - "grad_norm": 5.306442217720812, - "learning_rate": 2.5245429112563443e-07, - "loss": 0.6738, - "num_input_tokens_seen": 149520675, - "step": 7012 - }, - { - "epoch": 0.8432633920519449, - "flos": 18916056645000.0, - "grad_norm": 4.599335473200878, - "learning_rate": 2.5207558309709865e-07, - "loss": 0.8109, - "num_input_tokens_seen": 149540130, - "step": 7013 - }, - { - "epoch": 0.8433836349425841, - "flos": 47892452691480.0, - "grad_norm": 0.68413404638637, - "learning_rate": 2.516971402273065e-07, - "loss": 0.5737, - "num_input_tokens_seen": 149605915, - "step": 7014 - }, - { - "epoch": 0.8435038778332231, - "flos": 14784707463600.0, - "grad_norm": 4.601236069504538, - "learning_rate": 2.513189625736687e-07, - "loss": 0.6547, - "num_input_tokens_seen": 149622530, - "step": 7015 - }, - { - "epoch": 0.8436241207238622, - "flos": 15348255412920.0, - "grad_norm": 7.12534853434131, - "learning_rate": 2.509410501935534e-07, - "loss": 0.6996, - "num_input_tokens_seen": 149637885, - "step": 7016 - 
}, - { - "epoch": 0.8437443636145013, - "flos": 10678745078760.0, - "grad_norm": 4.86349812832178, - "learning_rate": 2.5056340314429116e-07, - "loss": 0.7309, - "num_input_tokens_seen": 149655070, - "step": 7017 - }, - { - "epoch": 0.8438646065051404, - "flos": 15804050823840.0, - "grad_norm": 5.076651491445969, - "learning_rate": 2.5018602148316904e-07, - "loss": 0.7772, - "num_input_tokens_seen": 149670825, - "step": 7018 - }, - { - "epoch": 0.8439848493957794, - "flos": 17048587208640.0, - "grad_norm": 3.2218768292781697, - "learning_rate": 2.498089052674359e-07, - "loss": 0.7808, - "num_input_tokens_seen": 149688520, - "step": 7019 - }, - { - "epoch": 0.8441050922864186, - "flos": 14406813740160.0, - "grad_norm": 5.0820507662000365, - "learning_rate": 2.494320545543007e-07, - "loss": 0.7368, - "num_input_tokens_seen": 149707810, - "step": 7020 - }, - { - "epoch": 0.8442253351770577, - "flos": 15972708142560.0, - "grad_norm": 3.8662869108000164, - "learning_rate": 2.490554694009308e-07, - "loss": 0.6517, - "num_input_tokens_seen": 149728395, - "step": 7021 - }, - { - "epoch": 0.8443455780676967, - "flos": 25231262666640.0, - "grad_norm": 3.3666877110141433, - "learning_rate": 2.4867914986445426e-07, - "loss": 0.7838, - "num_input_tokens_seen": 149750505, - "step": 7022 - }, - { - "epoch": 0.8444658209583359, - "flos": 35495202999960.0, - "grad_norm": 3.856731420603642, - "learning_rate": 2.483030960019581e-07, - "loss": 0.6783, - "num_input_tokens_seen": 149774155, - "step": 7023 - }, - { - "epoch": 0.8445860638489749, - "flos": 50499404695320.0, - "grad_norm": 0.7591323370626859, - "learning_rate": 2.479273078704891e-07, - "loss": 0.5598, - "num_input_tokens_seen": 149827240, - "step": 7024 - }, - { - "epoch": 0.844706306739614, - "flos": 46317503540520.0, - "grad_norm": 0.7921048800330278, - "learning_rate": 2.475517855270552e-07, - "loss": 0.658, - "num_input_tokens_seen": 149887040, - "step": 7025 - }, - { - "epoch": 0.8448265496302532, - "flos": 10896498012720.0, - "grad_norm": 4.416163121914809, - "learning_rate": 2.4717652902862143e-07, - "loss": 0.7164, - "num_input_tokens_seen": 149905735, - "step": 7026 - }, - { - "epoch": 0.8449467925208922, - "flos": 17161342020720.0, - "grad_norm": 2.756963755948077, - "learning_rate": 2.4680153843211495e-07, - "loss": 0.8073, - "num_input_tokens_seen": 149925385, - "step": 7027 - }, - { - "epoch": 0.8450670354115313, - "flos": 16648314384840.0, - "grad_norm": 6.283343673144399, - "learning_rate": 2.464268137944212e-07, - "loss": 0.7071, - "num_input_tokens_seen": 149946400, - "step": 7028 - }, - { - "epoch": 0.8451872783021703, - "flos": 21886058342400.0, - "grad_norm": 4.310315670835603, - "learning_rate": 2.46052355172385e-07, - "loss": 0.7698, - "num_input_tokens_seen": 149964160, - "step": 7029 - }, - { - "epoch": 0.8453075211928095, - "flos": 15998189919000.0, - "grad_norm": 9.96757361666875, - "learning_rate": 2.456781626228128e-07, - "loss": 0.7207, - "num_input_tokens_seen": 149983385, - "step": 7030 - }, - { - "epoch": 0.8454277640834486, - "flos": 43297646388840.0, - "grad_norm": 1.0557208050379618, - "learning_rate": 2.453042362024675e-07, - "loss": 0.7369, - "num_input_tokens_seen": 150036350, - "step": 7031 - }, - { - "epoch": 0.8455480069740876, - "flos": 19861740752400.0, - "grad_norm": 3.206809404438699, - "learning_rate": 2.449305759680751e-07, - "loss": 0.7222, - "num_input_tokens_seen": 150057395, - "step": 7032 - }, - { - "epoch": 0.8456682498647268, - "flos": 19941985276920.0, - "grad_norm": 3.938611675946097, - 
"learning_rate": 2.445571819763188e-07, - "loss": 0.7317, - "num_input_tokens_seen": 150079415, - "step": 7033 - }, - { - "epoch": 0.8457884927553658, - "flos": 15082230022680.0, - "grad_norm": 3.6941785947775054, - "learning_rate": 2.4418405428384227e-07, - "loss": 0.575, - "num_input_tokens_seen": 150099345, - "step": 7034 - }, - { - "epoch": 0.8459087356460049, - "flos": 11134698789480.0, - "grad_norm": 12.34206129667088, - "learning_rate": 2.4381119294724864e-07, - "loss": 0.7085, - "num_input_tokens_seen": 150116510, - "step": 7035 - }, - { - "epoch": 0.846028978536644, - "flos": 13758303932280.0, - "grad_norm": 10.573296621226026, - "learning_rate": 2.434385980231004e-07, - "loss": 0.5243, - "num_input_tokens_seen": 150135070, - "step": 7036 - }, - { - "epoch": 0.8461492214272831, - "flos": 38785802875920.0, - "grad_norm": 4.014792270630992, - "learning_rate": 2.4306626956792043e-07, - "loss": 0.6395, - "num_input_tokens_seen": 150159735, - "step": 7037 - }, - { - "epoch": 0.8462694643179222, - "flos": 13569863629920.0, - "grad_norm": 3.871725113671548, - "learning_rate": 2.4269420763819017e-07, - "loss": 0.739, - "num_input_tokens_seen": 150177500, - "step": 7038 - }, - { - "epoch": 0.8463897072085613, - "flos": 17860910361360.0, - "grad_norm": 4.789132595327154, - "learning_rate": 2.4232241229035223e-07, - "loss": 0.8133, - "num_input_tokens_seen": 150194975, - "step": 7039 - }, - { - "epoch": 0.8465099500992004, - "flos": 50660052044160.0, - "grad_norm": 0.8229581041808127, - "learning_rate": 2.419508835808064e-07, - "loss": 0.5858, - "num_input_tokens_seen": 150251250, - "step": 7040 - }, - { - "epoch": 0.8466301929898394, - "flos": 9978543538920.0, - "grad_norm": 4.106600328710553, - "learning_rate": 2.415796215659134e-07, - "loss": 0.6033, - "num_input_tokens_seen": 150267675, - "step": 7041 - }, - { - "epoch": 0.8467504358804786, - "flos": 14052470535600.0, - "grad_norm": 7.378318686489782, - "learning_rate": 2.412086263019939e-07, - "loss": 0.7585, - "num_input_tokens_seen": 150285420, - "step": 7042 - }, - { - "epoch": 0.8468706787711177, - "flos": 15594371179680.0, - "grad_norm": 3.0928728806143404, - "learning_rate": 2.408378978453276e-07, - "loss": 0.7934, - "num_input_tokens_seen": 150305260, - "step": 7043 - }, - { - "epoch": 0.8469909216617567, - "flos": 47830408152600.0, - "grad_norm": 0.8453536864451839, - "learning_rate": 2.404674362521533e-07, - "loss": 0.6533, - "num_input_tokens_seen": 150363475, - "step": 7044 - }, - { - "epoch": 0.8471111645523959, - "flos": 14081973126960.0, - "grad_norm": 4.789776349032894, - "learning_rate": 2.4009724157866997e-07, - "loss": 0.7281, - "num_input_tokens_seen": 150380255, - "step": 7045 - }, - { - "epoch": 0.8472314074430349, - "flos": 16106069097240.0, - "grad_norm": 5.4607095957339125, - "learning_rate": 2.3972731388103564e-07, - "loss": 0.7529, - "num_input_tokens_seen": 150398455, - "step": 7046 - }, - { - "epoch": 0.847351650333674, - "flos": 42655215293280.0, - "grad_norm": 0.8076143460759984, - "learning_rate": 2.393576532153687e-07, - "loss": 0.6406, - "num_input_tokens_seen": 150461960, - "step": 7047 - }, - { - "epoch": 0.8474718932243132, - "flos": 30371706363960.0, - "grad_norm": 0.9587492178624033, - "learning_rate": 2.389882596377453e-07, - "loss": 0.6167, - "num_input_tokens_seen": 150515945, - "step": 7048 - }, - { - "epoch": 0.8475921361149522, - "flos": 28069581387240.0, - "grad_norm": 3.939417296564888, - "learning_rate": 2.386191332042031e-07, - "loss": 0.7506, - "num_input_tokens_seen": 150537560, - 
"step": 7049 - }, - { - "epoch": 0.8477123790055913, - "flos": 18592482430200.0, - "grad_norm": 3.1964667364400916, - "learning_rate": 2.3825027397073794e-07, - "loss": 0.7161, - "num_input_tokens_seen": 150557755, - "step": 7050 - }, - { - "epoch": 0.8478326218962304, - "flos": 22181554664040.0, - "grad_norm": 5.919179499187663, - "learning_rate": 2.3788168199330515e-07, - "loss": 0.6619, - "num_input_tokens_seen": 150579035, - "step": 7051 - }, - { - "epoch": 0.8479528647868695, - "flos": 28093416845760.0, - "grad_norm": 5.100583974278054, - "learning_rate": 2.3751335732782074e-07, - "loss": 0.7268, - "num_input_tokens_seen": 150600015, - "step": 7052 - }, - { - "epoch": 0.8480731076775085, - "flos": 15323058576120.0, - "grad_norm": 3.924333143987651, - "learning_rate": 2.371453000301582e-07, - "loss": 0.7867, - "num_input_tokens_seen": 150618420, - "step": 7053 - }, - { - "epoch": 0.8481933505681477, - "flos": 23936459248080.0, - "grad_norm": 3.0113270228991693, - "learning_rate": 2.3677751015615222e-07, - "loss": 0.7297, - "num_input_tokens_seen": 150640215, - "step": 7054 - }, - { - "epoch": 0.8483135934587868, - "flos": 15163076086440.0, - "grad_norm": 7.024342581508434, - "learning_rate": 2.3640998776159593e-07, - "loss": 0.8396, - "num_input_tokens_seen": 150657440, - "step": 7055 - }, - { - "epoch": 0.8484338363494258, - "flos": 15837890829720.0, - "grad_norm": 3.409896159003313, - "learning_rate": 2.3604273290224253e-07, - "loss": 0.798, - "num_input_tokens_seen": 150677875, - "step": 7056 - }, - { - "epoch": 0.848554079240065, - "flos": 10924702545720.0, - "grad_norm": 3.3620211802762303, - "learning_rate": 2.356757456338039e-07, - "loss": 0.7268, - "num_input_tokens_seen": 150695080, - "step": 7057 - }, - { - "epoch": 0.848674322130704, - "flos": 50187291386040.0, - "grad_norm": 0.828473359738088, - "learning_rate": 2.3530902601195147e-07, - "loss": 0.6428, - "num_input_tokens_seen": 150763290, - "step": 7058 - }, - { - "epoch": 0.8487945650213431, - "flos": 13486073189880.0, - "grad_norm": 3.4483132648083785, - "learning_rate": 2.34942574092317e-07, - "loss": 0.7708, - "num_input_tokens_seen": 150778260, - "step": 7059 - }, - { - "epoch": 0.8489148079119821, - "flos": 17184322660320.0, - "grad_norm": 6.422398009895507, - "learning_rate": 2.3457638993049045e-07, - "loss": 0.7416, - "num_input_tokens_seen": 150795970, - "step": 7060 - }, - { - "epoch": 0.8490350508026213, - "flos": 14568252588000.0, - "grad_norm": 4.349587449428349, - "learning_rate": 2.3421047358202252e-07, - "loss": 0.6213, - "num_input_tokens_seen": 150814540, - "step": 7061 - }, - { - "epoch": 0.8491552936932604, - "flos": 17781963895200.0, - "grad_norm": 19.265503820330572, - "learning_rate": 2.3384482510242144e-07, - "loss": 0.8006, - "num_input_tokens_seen": 150832120, - "step": 7062 - }, - { - "epoch": 0.8492755365838994, - "flos": 16480480225080.0, - "grad_norm": 3.068247326238265, - "learning_rate": 2.3347944454715575e-07, - "loss": 0.7578, - "num_input_tokens_seen": 150848230, - "step": 7063 - }, - { - "epoch": 0.8493957794745386, - "flos": 19780578089040.0, - "grad_norm": 3.5560706497314345, - "learning_rate": 2.331143319716542e-07, - "loss": 0.6463, - "num_input_tokens_seen": 150867480, - "step": 7064 - }, - { - "epoch": 0.8495160223651776, - "flos": 21912996477000.0, - "grad_norm": 7.110420435239336, - "learning_rate": 2.3274948743130363e-07, - "loss": 0.6369, - "num_input_tokens_seen": 150887035, - "step": 7065 - }, - { - "epoch": 0.8496362652558167, - "flos": 16890472656600.0, - "grad_norm": 
4.342716011643875, - "learning_rate": 2.3238491098145085e-07, - "loss": 0.7866, - "num_input_tokens_seen": 150906285, - "step": 7066 - }, - { - "epoch": 0.8497565081464559, - "flos": 10624583869920.0, - "grad_norm": 6.515430035864562, - "learning_rate": 2.3202060267740141e-07, - "loss": 0.7018, - "num_input_tokens_seen": 150923530, - "step": 7067 - }, - { - "epoch": 0.8498767510370949, - "flos": 15454741552920.0, - "grad_norm": 3.4478611936841492, - "learning_rate": 2.3165656257442044e-07, - "loss": 0.7548, - "num_input_tokens_seen": 150941770, - "step": 7068 - }, - { - "epoch": 0.849996993927734, - "flos": 17318380134120.0, - "grad_norm": 8.655931818103031, - "learning_rate": 2.31292790727734e-07, - "loss": 0.8897, - "num_input_tokens_seen": 150959055, - "step": 7069 - }, - { - "epoch": 0.8501172368183731, - "flos": 15027467274600.0, - "grad_norm": 3.983842402745996, - "learning_rate": 2.3092928719252392e-07, - "loss": 0.7841, - "num_input_tokens_seen": 150977175, - "step": 7070 - }, - { - "epoch": 0.8502374797090122, - "flos": 16296314017320.0, - "grad_norm": 5.836246344425693, - "learning_rate": 2.3056605202393475e-07, - "loss": 0.7636, - "num_input_tokens_seen": 150994455, - "step": 7071 - }, - { - "epoch": 0.8503577225996513, - "flos": 17323034148240.0, - "grad_norm": 6.828968266632224, - "learning_rate": 2.3020308527706888e-07, - "loss": 0.6598, - "num_input_tokens_seen": 151013590, - "step": 7072 - }, - { - "epoch": 0.8504779654902904, - "flos": 19615909925280.0, - "grad_norm": 3.1445638524610358, - "learning_rate": 2.2984038700698715e-07, - "loss": 0.8684, - "num_input_tokens_seen": 151032620, - "step": 7073 - }, - { - "epoch": 0.8505982083809295, - "flos": 19401322987320.0, - "grad_norm": 5.746379873697181, - "learning_rate": 2.2947795726871222e-07, - "loss": 0.7805, - "num_input_tokens_seen": 151053365, - "step": 7074 - }, - { - "epoch": 0.8507184512715685, - "flos": 14839090292160.0, - "grad_norm": 4.441919194832662, - "learning_rate": 2.2911579611722253e-07, - "loss": 0.8429, - "num_input_tokens_seen": 151072230, - "step": 7075 - }, - { - "epoch": 0.8508386941622077, - "flos": 13892962945320.0, - "grad_norm": 5.8338460787717725, - "learning_rate": 2.2875390360745905e-07, - "loss": 0.8586, - "num_input_tokens_seen": 151091355, - "step": 7076 - }, - { - "epoch": 0.8509589370528468, - "flos": 11974688255880.0, - "grad_norm": 3.6740330624242645, - "learning_rate": 2.2839227979432008e-07, - "loss": 0.7638, - "num_input_tokens_seen": 151108725, - "step": 7077 - }, - { - "epoch": 0.8510791799434858, - "flos": 13326565599600.0, - "grad_norm": 6.596552104266792, - "learning_rate": 2.2803092473266373e-07, - "loss": 0.833, - "num_input_tokens_seen": 151125970, - "step": 7078 - }, - { - "epoch": 0.851199422834125, - "flos": 17161437000600.0, - "grad_norm": 5.536206282449296, - "learning_rate": 2.2766983847730724e-07, - "loss": 0.8399, - "num_input_tokens_seen": 151145360, - "step": 7079 - }, - { - "epoch": 0.851319665724764, - "flos": 11868392075640.0, - "grad_norm": 4.315301178889646, - "learning_rate": 2.2730902108302663e-07, - "loss": 0.6518, - "num_input_tokens_seen": 151161995, - "step": 7080 - }, - { - "epoch": 0.8514399086154031, - "flos": 13866531370080.0, - "grad_norm": 4.494462314649299, - "learning_rate": 2.269484726045583e-07, - "loss": 0.6778, - "num_input_tokens_seen": 151180630, - "step": 7081 - }, - { - "epoch": 0.8515601515060423, - "flos": 17997405652080.0, - "grad_norm": 4.388701428193775, - "learning_rate": 2.2658819309659672e-07, - "loss": 0.7767, - 
"num_input_tokens_seen": 151200550, - "step": 7082 - }, - { - "epoch": 0.8516803943966813, - "flos": 14265221195880.0, - "grad_norm": 3.2905107875033295, - "learning_rate": 2.2622818261379706e-07, - "loss": 0.8336, - "num_input_tokens_seen": 151217290, - "step": 7083 - }, - { - "epoch": 0.8518006372873204, - "flos": 14811107378880.0, - "grad_norm": 4.390823305477773, - "learning_rate": 2.2586844121077142e-07, - "loss": 0.7451, - "num_input_tokens_seen": 151235520, - "step": 7084 - }, - { - "epoch": 0.8519208801779595, - "flos": 17673261558000.0, - "grad_norm": 3.6798912040550387, - "learning_rate": 2.2550896894209215e-07, - "loss": 0.7023, - "num_input_tokens_seen": 151254755, - "step": 7085 - }, - { - "epoch": 0.8520411230685986, - "flos": 46469670511440.0, - "grad_norm": 0.6922990885530054, - "learning_rate": 2.2514976586229184e-07, - "loss": 0.5818, - "num_input_tokens_seen": 151322420, - "step": 7086 - }, - { - "epoch": 0.8521613659592376, - "flos": 48541975618080.0, - "grad_norm": 0.9550055727870979, - "learning_rate": 2.247908320258609e-07, - "loss": 0.6181, - "num_input_tokens_seen": 151382230, - "step": 7087 - }, - { - "epoch": 0.8522816088498768, - "flos": 16917822370680.0, - "grad_norm": 5.053277551616562, - "learning_rate": 2.2443216748724914e-07, - "loss": 0.7675, - "num_input_tokens_seen": 151402660, - "step": 7088 - }, - { - "epoch": 0.8524018517405159, - "flos": 23316122313240.0, - "grad_norm": 4.512987046097858, - "learning_rate": 2.2407377230086588e-07, - "loss": 0.7309, - "num_input_tokens_seen": 151424735, - "step": 7089 - }, - { - "epoch": 0.8525220946311549, - "flos": 13644219401880.0, - "grad_norm": 5.654775859461801, - "learning_rate": 2.23715646521079e-07, - "loss": 0.8233, - "num_input_tokens_seen": 151441975, - "step": 7090 - }, - { - "epoch": 0.852642337521794, - "flos": 15940799394240.0, - "grad_norm": 6.969903003778466, - "learning_rate": 2.2335779020221724e-07, - "loss": 0.8229, - "num_input_tokens_seen": 151458315, - "step": 7091 - }, - { - "epoch": 0.8527625804124331, - "flos": 50911550004120.0, - "grad_norm": 2.8251164066333714, - "learning_rate": 2.2300020339856497e-07, - "loss": 0.6154, - "num_input_tokens_seen": 151520720, - "step": 7092 - }, - { - "epoch": 0.8528828233030722, - "flos": 19779659950200.0, - "grad_norm": 5.751117100215455, - "learning_rate": 2.2264288616436966e-07, - "loss": 0.7627, - "num_input_tokens_seen": 151540695, - "step": 7093 - }, - { - "epoch": 0.8530030661937112, - "flos": 12754311161280.0, - "grad_norm": 8.319848046749177, - "learning_rate": 2.222858385538351e-07, - "loss": 0.7247, - "num_input_tokens_seen": 151557215, - "step": 7094 - }, - { - "epoch": 0.8531233090843504, - "flos": 16213220096400.0, - "grad_norm": 3.1236325132968186, - "learning_rate": 2.2192906062112527e-07, - "loss": 0.6605, - "num_input_tokens_seen": 151576810, - "step": 7095 - }, - { - "epoch": 0.8532435519749895, - "flos": 27666110907480.0, - "grad_norm": 2.854440068053946, - "learning_rate": 2.2157255242036377e-07, - "loss": 0.69, - "num_input_tokens_seen": 151600195, - "step": 7096 - }, - { - "epoch": 0.8533637948656285, - "flos": 15649323887520.0, - "grad_norm": 4.50705420398471, - "learning_rate": 2.2121631400563135e-07, - "loss": 0.7236, - "num_input_tokens_seen": 151619745, - "step": 7097 - }, - { - "epoch": 0.8534840377562677, - "flos": 39295320747600.0, - "grad_norm": 0.8772111950518727, - "learning_rate": 2.208603454309701e-07, - "loss": 0.5972, - "num_input_tokens_seen": 151677555, - "step": 7098 - }, - { - "epoch": 0.8536042806469067, - "flos": 
15217078995480.0, - "grad_norm": 4.405735066944282, - "learning_rate": 2.2050464675037994e-07, - "loss": 0.6885, - "num_input_tokens_seen": 151695900, - "step": 7099 - }, - { - "epoch": 0.8537245235375458, - "flos": 18079043214840.0, - "grad_norm": 4.650320284077951, - "learning_rate": 2.2014921801782016e-07, - "loss": 0.7109, - "num_input_tokens_seen": 151715110, - "step": 7100 - }, - { - "epoch": 0.853844766428185, - "flos": 17858915783880.0, - "grad_norm": 6.355449242728975, - "learning_rate": 2.1979405928720872e-07, - "loss": 0.7334, - "num_input_tokens_seen": 151734485, - "step": 7101 - }, - { - "epoch": 0.853965009318824, - "flos": 15321222298440.0, - "grad_norm": 2.8810217446298085, - "learning_rate": 2.1943917061242257e-07, - "loss": 0.7794, - "num_input_tokens_seen": 151754060, - "step": 7102 - }, - { - "epoch": 0.8540852522094631, - "flos": 17723401951920.0, - "grad_norm": 3.723501423724453, - "learning_rate": 2.1908455204729903e-07, - "loss": 0.6455, - "num_input_tokens_seen": 151772930, - "step": 7103 - }, - { - "epoch": 0.8542054951001022, - "flos": 18376249174320.0, - "grad_norm": 4.294702931123591, - "learning_rate": 2.1873020364563265e-07, - "loss": 0.763, - "num_input_tokens_seen": 151791715, - "step": 7104 - }, - { - "epoch": 0.8543257379907413, - "flos": 17809060329600.0, - "grad_norm": 6.014638844696239, - "learning_rate": 2.183761254611789e-07, - "loss": 0.7389, - "num_input_tokens_seen": 151811760, - "step": 7105 - }, - { - "epoch": 0.8544459808813804, - "flos": 40728102983640.0, - "grad_norm": 7.151205492099523, - "learning_rate": 2.1802231754764987e-07, - "loss": 0.6781, - "num_input_tokens_seen": 151836920, - "step": 7106 - }, - { - "epoch": 0.8545662237720195, - "flos": 18889371790080.0, - "grad_norm": 5.87906262769594, - "learning_rate": 2.17668779958718e-07, - "loss": 0.7494, - "num_input_tokens_seen": 151859220, - "step": 7107 - }, - { - "epoch": 0.8546864666626586, - "flos": 8034027234000.0, - "grad_norm": 9.459866930164809, - "learning_rate": 2.1731551274801553e-07, - "loss": 0.7732, - "num_input_tokens_seen": 151875380, - "step": 7108 - }, - { - "epoch": 0.8548067095532976, - "flos": 18699760069200.0, - "grad_norm": 5.149049482311183, - "learning_rate": 2.169625159691324e-07, - "loss": 0.5859, - "num_input_tokens_seen": 151894975, - "step": 7109 - }, - { - "epoch": 0.8549269524439368, - "flos": 18106044669360.0, - "grad_norm": 4.713682776037262, - "learning_rate": 2.1660978967561784e-07, - "loss": 0.7238, - "num_input_tokens_seen": 151914030, - "step": 7110 - }, - { - "epoch": 0.8550471953345758, - "flos": 14485000367280.0, - "grad_norm": 5.213176560804936, - "learning_rate": 2.1625733392098035e-07, - "loss": 0.777, - "num_input_tokens_seen": 151929360, - "step": 7111 - }, - { - "epoch": 0.8551674382252149, - "flos": 16701810734520.0, - "grad_norm": 3.1883038273941784, - "learning_rate": 2.159051487586867e-07, - "loss": 0.7848, - "num_input_tokens_seen": 151949210, - "step": 7112 - }, - { - "epoch": 0.8552876811158541, - "flos": 15081660143400.0, - "grad_norm": 5.75855514312625, - "learning_rate": 2.155532342421642e-07, - "loss": 0.716, - "num_input_tokens_seen": 151966930, - "step": 7113 - }, - { - "epoch": 0.8554079240064931, - "flos": 16917854030640.0, - "grad_norm": 3.405840507881206, - "learning_rate": 2.1520159042479636e-07, - "loss": 0.7733, - "num_input_tokens_seen": 151984940, - "step": 7114 - }, - { - "epoch": 0.8555281668971322, - "flos": 16189511277720.0, - "grad_norm": 4.573793829671151, - "learning_rate": 2.148502173599287e-07, - "loss": 
0.6965, - "num_input_tokens_seen": 152002800, - "step": 7115 - }, - { - "epoch": 0.8556484097877713, - "flos": 22858110705120.0, - "grad_norm": 4.394853286811479, - "learning_rate": 2.1449911510086372e-07, - "loss": 0.6402, - "num_input_tokens_seen": 152021990, - "step": 7116 - }, - { - "epoch": 0.8557686526784104, - "flos": 17809060329600.0, - "grad_norm": 4.6927013785268015, - "learning_rate": 2.141482837008628e-07, - "loss": 0.7626, - "num_input_tokens_seen": 152042250, - "step": 7117 - }, - { - "epoch": 0.8558888955690495, - "flos": 12921797061480.0, - "grad_norm": 5.137416777667638, - "learning_rate": 2.1379772321314826e-07, - "loss": 0.7013, - "num_input_tokens_seen": 152060015, - "step": 7118 - }, - { - "epoch": 0.8560091384596886, - "flos": 14001475322760.0, - "grad_norm": 4.5953098684937, - "learning_rate": 2.1344743369089802e-07, - "loss": 0.8058, - "num_input_tokens_seen": 152075515, - "step": 7119 - }, - { - "epoch": 0.8561293813503277, - "flos": 17511727730280.0, - "grad_norm": 3.3782457356575293, - "learning_rate": 2.130974151872522e-07, - "loss": 0.8114, - "num_input_tokens_seen": 152095570, - "step": 7120 - }, - { - "epoch": 0.8562496242409667, - "flos": 16486368977640.0, - "grad_norm": 3.290318047823791, - "learning_rate": 2.1274766775530773e-07, - "loss": 0.7688, - "num_input_tokens_seen": 152115155, - "step": 7121 - }, - { - "epoch": 0.8563698671316058, - "flos": 10700459319960.0, - "grad_norm": 4.521708999887442, - "learning_rate": 2.1239819144812077e-07, - "loss": 0.7841, - "num_input_tokens_seen": 152129335, - "step": 7122 - }, - { - "epoch": 0.856490110022245, - "flos": 28800171997320.0, - "grad_norm": 3.256657750232351, - "learning_rate": 2.1204898631870716e-07, - "loss": 0.6875, - "num_input_tokens_seen": 152153945, - "step": 7123 - }, - { - "epoch": 0.856610352912884, - "flos": 21318996137520.0, - "grad_norm": 4.022941129120477, - "learning_rate": 2.1170005242004006e-07, - "loss": 0.7406, - "num_input_tokens_seen": 152175015, - "step": 7124 - }, - { - "epoch": 0.8567305958035231, - "flos": 17484694615800.0, - "grad_norm": 9.99546253258119, - "learning_rate": 2.1135138980505384e-07, - "loss": 0.7547, - "num_input_tokens_seen": 152195405, - "step": 7125 - }, - { - "epoch": 0.8568508386941622, - "flos": 16242722687760.0, - "grad_norm": 3.5042619641590935, - "learning_rate": 2.110029985266395e-07, - "loss": 0.7106, - "num_input_tokens_seen": 152214830, - "step": 7126 - }, - { - "epoch": 0.8569710815848013, - "flos": 12621361786080.0, - "grad_norm": 3.1312952339118305, - "learning_rate": 2.1065487863764787e-07, - "loss": 0.7285, - "num_input_tokens_seen": 152232895, - "step": 7127 - }, - { - "epoch": 0.8570913244754403, - "flos": 17376593817840.0, - "grad_norm": 2.976271887982731, - "learning_rate": 2.1030703019088846e-07, - "loss": 0.8461, - "num_input_tokens_seen": 152253245, - "step": 7128 - }, - { - "epoch": 0.8572115673660795, - "flos": 14650744969680.0, - "grad_norm": 3.7695770419012664, - "learning_rate": 2.099594532391291e-07, - "loss": 0.6865, - "num_input_tokens_seen": 152271650, - "step": 7129 - }, - { - "epoch": 0.8573318102567186, - "flos": 19913400824400.0, - "grad_norm": 4.015262572561268, - "learning_rate": 2.0961214783509806e-07, - "loss": 0.7767, - "num_input_tokens_seen": 152294250, - "step": 7130 - }, - { - "epoch": 0.8574520531473576, - "flos": 18267895096680.0, - "grad_norm": 6.4622906117855665, - "learning_rate": 2.0926511403148051e-07, - "loss": 0.7403, - "num_input_tokens_seen": 152312935, - "step": 7131 - }, - { - "epoch": 0.8575722960379968, 
- "flos": 13705155842160.0, - "grad_norm": 4.189150356920915, - "learning_rate": 2.0891835188092143e-07, - "loss": 0.7355, - "num_input_tokens_seen": 152329655, - "step": 7132 - }, - { - "epoch": 0.8576925389286358, - "flos": 16242880987560.0, - "grad_norm": 4.879362257575206, - "learning_rate": 2.0857186143602434e-07, - "loss": 0.796, - "num_input_tokens_seen": 152348020, - "step": 7133 - }, - { - "epoch": 0.8578127818192749, - "flos": 16756636802520.0, - "grad_norm": 3.4913003976737484, - "learning_rate": 2.0822564274935094e-07, - "loss": 0.6629, - "num_input_tokens_seen": 152367165, - "step": 7134 - }, - { - "epoch": 0.8579330247099141, - "flos": 25585954130760.0, - "grad_norm": 5.461580994420769, - "learning_rate": 2.078796958734239e-07, - "loss": 0.6573, - "num_input_tokens_seen": 152389605, - "step": 7135 - }, - { - "epoch": 0.8580532676005531, - "flos": 14433688554840.0, - "grad_norm": 13.475000475598613, - "learning_rate": 2.0753402086072124e-07, - "loss": 0.7337, - "num_input_tokens_seen": 152407955, - "step": 7136 - }, - { - "epoch": 0.8581735104911922, - "flos": 16640621014560.0, - "grad_norm": 5.2800405347597525, - "learning_rate": 2.071886177636828e-07, - "loss": 0.7447, - "num_input_tokens_seen": 152424460, - "step": 7137 - }, - { - "epoch": 0.8582937533818313, - "flos": 16945108764840.0, - "grad_norm": 3.563301531653048, - "learning_rate": 2.0684348663470575e-07, - "loss": 0.8089, - "num_input_tokens_seen": 152444360, - "step": 7138 - }, - { - "epoch": 0.8584139962724704, - "flos": 14243253675000.0, - "grad_norm": 3.467807126130011, - "learning_rate": 2.0649862752614555e-07, - "loss": 0.6027, - "num_input_tokens_seen": 152462790, - "step": 7139 - }, - { - "epoch": 0.8585342391631094, - "flos": 52568200037760.0, - "grad_norm": 0.7731090965886502, - "learning_rate": 2.0615404049031838e-07, - "loss": 0.5857, - "num_input_tokens_seen": 152519480, - "step": 7140 - }, - { - "epoch": 0.8586544820537486, - "flos": 7817129118960.0, - "grad_norm": 5.438598171141041, - "learning_rate": 2.0580972557949616e-07, - "loss": 0.763, - "num_input_tokens_seen": 152534290, - "step": 7141 - }, - { - "epoch": 0.8587747249443877, - "flos": 47782325656080.0, - "grad_norm": 1.1023303122346875, - "learning_rate": 2.054656828459125e-07, - "loss": 0.541, - "num_input_tokens_seen": 152598120, - "step": 7142 - }, - { - "epoch": 0.8588949678350267, - "flos": 19672667250840.0, - "grad_norm": 4.676895964791019, - "learning_rate": 2.051219123417578e-07, - "loss": 0.7478, - "num_input_tokens_seen": 152617900, - "step": 7143 - }, - { - "epoch": 0.8590152107256659, - "flos": 19132068281160.0, - "grad_norm": 4.920231426657705, - "learning_rate": 2.0477841411918196e-07, - "loss": 0.582, - "num_input_tokens_seen": 152637145, - "step": 7144 - }, - { - "epoch": 0.859135453616305, - "flos": 19159196375520.0, - "grad_norm": 4.842617763964169, - "learning_rate": 2.0443518823029326e-07, - "loss": 0.7105, - "num_input_tokens_seen": 152657405, - "step": 7145 - }, - { - "epoch": 0.859255696506944, - "flos": 9411323034240.0, - "grad_norm": 4.155422042179689, - "learning_rate": 2.0409223472715854e-07, - "loss": 0.7386, - "num_input_tokens_seen": 152674270, - "step": 7146 - }, - { - "epoch": 0.8593759393975832, - "flos": 13486041529920.0, - "grad_norm": 4.526616535911076, - "learning_rate": 2.0374955366180434e-07, - "loss": 0.7252, - "num_input_tokens_seen": 152691630, - "step": 7147 - }, - { - "epoch": 0.8594961822882222, - "flos": 16242754347720.0, - "grad_norm": 3.416399751487359, - "learning_rate": 2.034071450862147e-07, - 
"loss": 0.7071, - "num_input_tokens_seen": 152708820, - "step": 7148 - }, - { - "epoch": 0.8596164251788613, - "flos": 17052829643280.0, - "grad_norm": 6.656267479528839, - "learning_rate": 2.030650090523327e-07, - "loss": 0.7549, - "num_input_tokens_seen": 152727730, - "step": 7149 - }, - { - "epoch": 0.8597366680695004, - "flos": 23236194388320.0, - "grad_norm": 4.9275427745895986, - "learning_rate": 2.0272314561205995e-07, - "loss": 0.5928, - "num_input_tokens_seen": 152747845, - "step": 7150 - }, - { - "epoch": 0.8598569109601395, - "flos": 15567464705040.0, - "grad_norm": 4.765749269960179, - "learning_rate": 2.023815548172567e-07, - "loss": 0.7179, - "num_input_tokens_seen": 152767635, - "step": 7151 - }, - { - "epoch": 0.8599771538507786, - "flos": 18646295379480.0, - "grad_norm": 4.436443579205052, - "learning_rate": 2.0204023671974267e-07, - "loss": 0.6553, - "num_input_tokens_seen": 152786740, - "step": 7152 - }, - { - "epoch": 0.8600973967414177, - "flos": 12189528473520.0, - "grad_norm": 3.9655555424420514, - "learning_rate": 2.0169919137129532e-07, - "loss": 0.7966, - "num_input_tokens_seen": 152804900, - "step": 7153 - }, - { - "epoch": 0.8602176396320568, - "flos": 18483431833440.0, - "grad_norm": 9.634012337836502, - "learning_rate": 2.013584188236508e-07, - "loss": 0.6771, - "num_input_tokens_seen": 152822525, - "step": 7154 - }, - { - "epoch": 0.8603378825226958, - "flos": 14919556436400.0, - "grad_norm": 2.911139855796998, - "learning_rate": 2.0101791912850396e-07, - "loss": 0.7772, - "num_input_tokens_seen": 152841785, - "step": 7155 - }, - { - "epoch": 0.8604581254133349, - "flos": 25663349258880.0, - "grad_norm": 9.317243967839953, - "learning_rate": 2.006776923375082e-07, - "loss": 0.6244, - "num_input_tokens_seen": 152863160, - "step": 7156 - }, - { - "epoch": 0.860578368303974, - "flos": 16535401272960.0, - "grad_norm": 3.586898774598172, - "learning_rate": 2.003377385022764e-07, - "loss": 0.6962, - "num_input_tokens_seen": 152881705, - "step": 7157 - }, - { - "epoch": 0.8606986111946131, - "flos": 15594561139440.0, - "grad_norm": 8.289107600313216, - "learning_rate": 1.9999805767437826e-07, - "loss": 0.7561, - "num_input_tokens_seen": 152900315, - "step": 7158 - }, - { - "epoch": 0.8608188540852522, - "flos": 21184242144600.0, - "grad_norm": 3.8478643215868864, - "learning_rate": 1.9965864990534386e-07, - "loss": 0.7016, - "num_input_tokens_seen": 152920560, - "step": 7159 - }, - { - "epoch": 0.8609390969758913, - "flos": 21805117298760.0, - "grad_norm": 3.5306175813412968, - "learning_rate": 1.9931951524666092e-07, - "loss": 0.7611, - "num_input_tokens_seen": 152941370, - "step": 7160 - }, - { - "epoch": 0.8610593398665304, - "flos": 15538785272640.0, - "grad_norm": 6.370513534087207, - "learning_rate": 1.9898065374977534e-07, - "loss": 0.7935, - "num_input_tokens_seen": 152961295, - "step": 7161 - }, - { - "epoch": 0.8611795827571694, - "flos": 10788428874720.0, - "grad_norm": 3.615947337771408, - "learning_rate": 1.9864206546609342e-07, - "loss": 0.7163, - "num_input_tokens_seen": 152979855, - "step": 7162 - }, - { - "epoch": 0.8612998256478086, - "flos": 17754772480920.0, - "grad_norm": 3.404751369232635, - "learning_rate": 1.983037504469771e-07, - "loss": 0.8293, - "num_input_tokens_seen": 152998285, - "step": 7163 - }, - { - "epoch": 0.8614200685384477, - "flos": 15541697988960.0, - "grad_norm": 3.250895450184048, - "learning_rate": 1.9796570874374984e-07, - "loss": 0.6539, - "num_input_tokens_seen": 153018110, - "step": 7164 - }, - { - "epoch": 
0.8615403114290867, - "flos": 14619754360200.0, - "grad_norm": 3.526705222565061, - "learning_rate": 1.976279404076917e-07, - "loss": 0.7653, - "num_input_tokens_seen": 153037230, - "step": 7165 - }, - { - "epoch": 0.8616605543197259, - "flos": 21859310167560.0, - "grad_norm": 3.5411116370825897, - "learning_rate": 1.9729044549004193e-07, - "loss": 0.7451, - "num_input_tokens_seen": 153058335, - "step": 7166 - }, - { - "epoch": 0.8617807972103649, - "flos": 21209407321440.0, - "grad_norm": 3.630061726321173, - "learning_rate": 1.9695322404199822e-07, - "loss": 0.6894, - "num_input_tokens_seen": 153080100, - "step": 7167 - }, - { - "epoch": 0.861901040101004, - "flos": 20293732364760.0, - "grad_norm": 4.491173246573473, - "learning_rate": 1.9661627611471654e-07, - "loss": 0.813, - "num_input_tokens_seen": 153099615, - "step": 7168 - }, - { - "epoch": 0.8620212829916432, - "flos": 36630213888600.0, - "grad_norm": 3.7110169091010334, - "learning_rate": 1.9627960175931246e-07, - "loss": 0.6864, - "num_input_tokens_seen": 153124035, - "step": 7169 - }, - { - "epoch": 0.8621415258822822, - "flos": 15456704470440.0, - "grad_norm": 3.5945673800839204, - "learning_rate": 1.9594320102685847e-07, - "loss": 0.728, - "num_input_tokens_seen": 153143025, - "step": 7170 - }, - { - "epoch": 0.8622617687729213, - "flos": 15864449044800.0, - "grad_norm": 4.32419493133684, - "learning_rate": 1.956070739683864e-07, - "loss": 0.6143, - "num_input_tokens_seen": 153162080, - "step": 7171 - }, - { - "epoch": 0.8623820116635604, - "flos": 19240422358800.0, - "grad_norm": 4.206951682525775, - "learning_rate": 1.9527122063488678e-07, - "loss": 0.7248, - "num_input_tokens_seen": 153182915, - "step": 7172 - }, - { - "epoch": 0.8625022545541995, - "flos": 14353855609800.0, - "grad_norm": 4.741667214624321, - "learning_rate": 1.9493564107730755e-07, - "loss": 0.7876, - "num_input_tokens_seen": 153202635, - "step": 7173 - }, - { - "epoch": 0.8626224974448385, - "flos": 16026837691440.0, - "grad_norm": 4.277244977812205, - "learning_rate": 1.9460033534655684e-07, - "loss": 0.6029, - "num_input_tokens_seen": 153221715, - "step": 7174 - }, - { - "epoch": 0.8627427403354777, - "flos": 17079799437840.0, - "grad_norm": 2.226230743856112, - "learning_rate": 1.9426530349349978e-07, - "loss": 0.8232, - "num_input_tokens_seen": 153241885, - "step": 7175 - }, - { - "epoch": 0.8628629832261168, - "flos": 11922838224120.0, - "grad_norm": 3.9013156090144947, - "learning_rate": 1.9393054556896038e-07, - "loss": 0.6428, - "num_input_tokens_seen": 153259305, - "step": 7176 - }, - { - "epoch": 0.8629832261167558, - "flos": 20610974587560.0, - "grad_norm": 9.572669113550281, - "learning_rate": 1.9359606162372133e-07, - "loss": 0.6693, - "num_input_tokens_seen": 153280630, - "step": 7177 - }, - { - "epoch": 0.863103469007395, - "flos": 14784770783520.0, - "grad_norm": 4.310437580031482, - "learning_rate": 1.9326185170852293e-07, - "loss": 0.6978, - "num_input_tokens_seen": 153299315, - "step": 7178 - }, - { - "epoch": 0.863223711898034, - "flos": 17943687682680.0, - "grad_norm": 4.718772029193715, - "learning_rate": 1.9292791587406598e-07, - "loss": 0.711, - "num_input_tokens_seen": 153317895, - "step": 7179 - }, - { - "epoch": 0.8633439547886731, - "flos": 12894035767920.0, - "grad_norm": 4.23708467951974, - "learning_rate": 1.9259425417100661e-07, - "loss": 0.8497, - "num_input_tokens_seen": 153333730, - "step": 7180 - }, - { - "epoch": 0.8634641976793123, - "flos": 9356180366640.0, - "grad_norm": 4.5734366408784854, - "learning_rate": 
1.9226086664996234e-07, - "loss": 0.7253, - "num_input_tokens_seen": 153351695, - "step": 7181 - }, - { - "epoch": 0.8635844405699513, - "flos": 17485264495080.0, - "grad_norm": 5.088088557810206, - "learning_rate": 1.9192775336150712e-07, - "loss": 0.7294, - "num_input_tokens_seen": 153371715, - "step": 7182 - }, - { - "epoch": 0.8637046834605904, - "flos": 44555000566080.0, - "grad_norm": 0.7862492635023006, - "learning_rate": 1.915949143561739e-07, - "loss": 0.5665, - "num_input_tokens_seen": 153426110, - "step": 7183 - }, - { - "epoch": 0.8638249263512295, - "flos": 15028068813840.0, - "grad_norm": 6.2525626460143515, - "learning_rate": 1.9126234968445498e-07, - "loss": 0.751, - "num_input_tokens_seen": 153445520, - "step": 7184 - }, - { - "epoch": 0.8639451692418686, - "flos": 19510721843640.0, - "grad_norm": 2.19763408368998, - "learning_rate": 1.9093005939679884e-07, - "loss": 0.6603, - "num_input_tokens_seen": 153467195, - "step": 7185 - }, - { - "epoch": 0.8640654121325076, - "flos": 11004852090360.0, - "grad_norm": 16.53494815528754, - "learning_rate": 1.9059804354361452e-07, - "loss": 0.746, - "num_input_tokens_seen": 153484690, - "step": 7186 - }, - { - "epoch": 0.8641856550231467, - "flos": 23398424735160.0, - "grad_norm": 3.4845275845497037, - "learning_rate": 1.902663021752684e-07, - "loss": 0.6938, - "num_input_tokens_seen": 153505840, - "step": 7187 - }, - { - "epoch": 0.8643058979137859, - "flos": 10896877932240.0, - "grad_norm": 4.08526477417884, - "learning_rate": 1.8993483534208556e-07, - "loss": 0.8128, - "num_input_tokens_seen": 153524470, - "step": 7188 - }, - { - "epoch": 0.8644261408044249, - "flos": 9519740431800.0, - "grad_norm": 8.507988709865982, - "learning_rate": 1.8960364309434884e-07, - "loss": 0.7274, - "num_input_tokens_seen": 153541685, - "step": 7189 - }, - { - "epoch": 0.864546383695064, - "flos": 15244333729680.0, - "grad_norm": 8.478676021792525, - "learning_rate": 1.8927272548229967e-07, - "loss": 0.7638, - "num_input_tokens_seen": 153561095, - "step": 7190 - }, - { - "epoch": 0.8646666265857031, - "flos": 15594782759160.0, - "grad_norm": 6.970393376243559, - "learning_rate": 1.8894208255613876e-07, - "loss": 0.8152, - "num_input_tokens_seen": 153580130, - "step": 7191 - }, - { - "epoch": 0.8647868694763422, - "flos": 14429414460240.0, - "grad_norm": 3.613941557064926, - "learning_rate": 1.8861171436602397e-07, - "loss": 0.7527, - "num_input_tokens_seen": 153596965, - "step": 7192 - }, - { - "epoch": 0.8649071123669813, - "flos": 19185659610720.0, - "grad_norm": 6.0037874347791265, - "learning_rate": 1.882816209620719e-07, - "loss": 0.7881, - "num_input_tokens_seen": 153613395, - "step": 7193 - }, - { - "epoch": 0.8650273552576204, - "flos": 14838647052720.0, - "grad_norm": 3.977316434545762, - "learning_rate": 1.8795180239435738e-07, - "loss": 0.7468, - "num_input_tokens_seen": 153631970, - "step": 7194 - }, - { - "epoch": 0.8651475981482595, - "flos": 17538507565080.0, - "grad_norm": 7.590940858528393, - "learning_rate": 1.8762225871291348e-07, - "loss": 0.736, - "num_input_tokens_seen": 153647565, - "step": 7195 - }, - { - "epoch": 0.8652678410388985, - "flos": 15860554869720.0, - "grad_norm": 3.74149209332475, - "learning_rate": 1.8729298996773201e-07, - "loss": 0.7993, - "num_input_tokens_seen": 153666035, - "step": 7196 - }, - { - "epoch": 0.8653880839295377, - "flos": 48090042722280.0, - "grad_norm": 0.8499340270189072, - "learning_rate": 1.8696399620876301e-07, - "loss": 0.6251, - "num_input_tokens_seen": 153722785, - "step": 7197 - }, - { 
- "epoch": 0.8655083268201768, - "flos": 12948766856040.0, - "grad_norm": 4.811446830150493, - "learning_rate": 1.866352774859141e-07, - "loss": 0.7771, - "num_input_tokens_seen": 153737730, - "step": 7198 - }, - { - "epoch": 0.8656285697108158, - "flos": 15135441432720.0, - "grad_norm": 11.05704928668687, - "learning_rate": 1.8630683384905188e-07, - "loss": 0.6685, - "num_input_tokens_seen": 153756780, - "step": 7199 - }, - { - "epoch": 0.865748812601455, - "flos": 13617217947360.0, - "grad_norm": 16.512267205375487, - "learning_rate": 1.8597866534800045e-07, - "loss": 0.8803, - "num_input_tokens_seen": 153771615, - "step": 7200 - }, - { - "epoch": 0.865869055492094, - "flos": 52100685950280.0, - "grad_norm": 4.676354343664492, - "learning_rate": 1.8565077203254398e-07, - "loss": 0.7325, - "num_input_tokens_seen": 153796795, - "step": 7201 - }, - { - "epoch": 0.8659892983827331, - "flos": 12678435711240.0, - "grad_norm": 51.159495162799445, - "learning_rate": 1.8532315395242203e-07, - "loss": 0.7022, - "num_input_tokens_seen": 153812965, - "step": 7202 - }, - { - "epoch": 0.8661095412733723, - "flos": 13056867654000.0, - "grad_norm": 5.041795755210226, - "learning_rate": 1.849958111573353e-07, - "loss": 0.7036, - "num_input_tokens_seen": 153831290, - "step": 7203 - }, - { - "epoch": 0.8662297841640113, - "flos": 13300387304040.0, - "grad_norm": 3.724457664180511, - "learning_rate": 1.8466874369694074e-07, - "loss": 0.6269, - "num_input_tokens_seen": 153848705, - "step": 7204 - }, - { - "epoch": 0.8663500270546504, - "flos": 11922205024920.0, - "grad_norm": 5.262845264987669, - "learning_rate": 1.843419516208542e-07, - "loss": 0.6941, - "num_input_tokens_seen": 153865350, - "step": 7205 - }, - { - "epoch": 0.8664702699452895, - "flos": 13056171134880.0, - "grad_norm": 4.1487740417868855, - "learning_rate": 1.8401543497865047e-07, - "loss": 0.7694, - "num_input_tokens_seen": 153883070, - "step": 7206 - }, - { - "epoch": 0.8665905128359286, - "flos": 22559923286880.0, - "grad_norm": 11.154371258342861, - "learning_rate": 1.836891938198608e-07, - "loss": 0.6181, - "num_input_tokens_seen": 153903215, - "step": 7207 - }, - { - "epoch": 0.8667107557265676, - "flos": 13619497464480.0, - "grad_norm": 6.472346830828456, - "learning_rate": 1.8336322819397677e-07, - "loss": 0.6949, - "num_input_tokens_seen": 153920470, - "step": 7208 - }, - { - "epoch": 0.8668309986172068, - "flos": 14676416705880.0, - "grad_norm": 5.95906855530921, - "learning_rate": 1.8303753815044654e-07, - "loss": 0.6033, - "num_input_tokens_seen": 153939495, - "step": 7209 - }, - { - "epoch": 0.8669512415078459, - "flos": 15809591316840.0, - "grad_norm": 4.4484354590628, - "learning_rate": 1.827121237386773e-07, - "loss": 0.6815, - "num_input_tokens_seen": 153956660, - "step": 7210 - }, - { - "epoch": 0.8670714843984849, - "flos": 12913755431640.0, - "grad_norm": 7.195850596566432, - "learning_rate": 1.8238698500803374e-07, - "loss": 0.7365, - "num_input_tokens_seen": 153969145, - "step": 7211 - }, - { - "epoch": 0.8671917272891241, - "flos": 44746263096240.0, - "grad_norm": 0.8149739528074365, - "learning_rate": 1.820621220078391e-07, - "loss": 0.6127, - "num_input_tokens_seen": 154032775, - "step": 7212 - }, - { - "epoch": 0.8673119701797631, - "flos": 14949343967400.0, - "grad_norm": 3.2544194030955995, - "learning_rate": 1.8173753478737553e-07, - "loss": 0.6588, - "num_input_tokens_seen": 154052930, - "step": 7213 - }, - { - "epoch": 0.8674322130704022, - "flos": 14353728969960.0, - "grad_norm": 3.880649509140573, - 
"learning_rate": 1.8141322339588205e-07, - "loss": 0.7734, - "num_input_tokens_seen": 154069990, - "step": 7214 - }, - { - "epoch": 0.8675524559610414, - "flos": 19072524879120.0, - "grad_norm": 5.38494255099029, - "learning_rate": 1.810891878825569e-07, - "loss": 0.6845, - "num_input_tokens_seen": 154089685, - "step": 7215 - }, - { - "epoch": 0.8676726988516804, - "flos": 10950817521360.0, - "grad_norm": 4.54733179688345, - "learning_rate": 1.8076542829655561e-07, - "loss": 0.6921, - "num_input_tokens_seen": 154108210, - "step": 7216 - }, - { - "epoch": 0.8677929417423195, - "flos": 11867252317080.0, - "grad_norm": 6.929441978810077, - "learning_rate": 1.8044194468699203e-07, - "loss": 0.7888, - "num_input_tokens_seen": 154125240, - "step": 7217 - }, - { - "epoch": 0.8679131846329585, - "flos": 13759190411160.0, - "grad_norm": 5.372786665023241, - "learning_rate": 1.8011873710293912e-07, - "loss": 0.7207, - "num_input_tokens_seen": 154143465, - "step": 7218 - }, - { - "epoch": 0.8680334275235977, - "flos": 24694811151720.0, - "grad_norm": 3.2813450390812196, - "learning_rate": 1.7979580559342677e-07, - "loss": 0.671, - "num_input_tokens_seen": 154163915, - "step": 7219 - }, - { - "epoch": 0.8681536704142367, - "flos": 18078853255080.0, - "grad_norm": 3.9014865976328545, - "learning_rate": 1.7947315020744358e-07, - "loss": 0.6555, - "num_input_tokens_seen": 154184730, - "step": 7220 - }, - { - "epoch": 0.8682739133048758, - "flos": 14622667076520.0, - "grad_norm": 4.95464371463673, - "learning_rate": 1.7915077099393594e-07, - "loss": 0.781, - "num_input_tokens_seen": 154201050, - "step": 7221 - }, - { - "epoch": 0.868394156195515, - "flos": 12138849860280.0, - "grad_norm": 17.707604726125968, - "learning_rate": 1.788286680018083e-07, - "loss": 0.722, - "num_input_tokens_seen": 154219480, - "step": 7222 - }, - { - "epoch": 0.868514399086154, - "flos": 20536048936320.0, - "grad_norm": 4.151419909135053, - "learning_rate": 1.7850684127992443e-07, - "loss": 0.7086, - "num_input_tokens_seen": 154238945, - "step": 7223 - }, - { - "epoch": 0.8686346419767931, - "flos": 14677176544920.0, - "grad_norm": 3.250481936594848, - "learning_rate": 1.7818529087710378e-07, - "loss": 0.6899, - "num_input_tokens_seen": 154259020, - "step": 7224 - }, - { - "epoch": 0.8687548848674322, - "flos": 13299817424760.0, - "grad_norm": 3.8409872903809723, - "learning_rate": 1.7786401684212637e-07, - "loss": 0.8315, - "num_input_tokens_seen": 154277570, - "step": 7225 - }, - { - "epoch": 0.8688751277580713, - "flos": 51960047696160.0, - "grad_norm": 0.7516812712469269, - "learning_rate": 1.7754301922372883e-07, - "loss": 0.5642, - "num_input_tokens_seen": 154326935, - "step": 7226 - }, - { - "epoch": 0.8689953706487104, - "flos": 19725973640760.0, - "grad_norm": 4.291929193757002, - "learning_rate": 1.7722229807060617e-07, - "loss": 0.795, - "num_input_tokens_seen": 154345235, - "step": 7227 - }, - { - "epoch": 0.8691156135393495, - "flos": 25446514463760.0, - "grad_norm": 4.281550041878338, - "learning_rate": 1.7690185343141172e-07, - "loss": 0.806, - "num_input_tokens_seen": 154364870, - "step": 7228 - }, - { - "epoch": 0.8692358564299886, - "flos": 13866942949560.0, - "grad_norm": 6.061438398715752, - "learning_rate": 1.7658168535475615e-07, - "loss": 0.6886, - "num_input_tokens_seen": 154382375, - "step": 7229 - }, - { - "epoch": 0.8693560993206276, - "flos": 22289307202440.0, - "grad_norm": 3.341715826749493, - "learning_rate": 1.7626179388920948e-07, - "loss": 0.6317, - "num_input_tokens_seen": 154403375, - 
"step": 7230 - }, - { - "epoch": 0.8694763422112668, - "flos": 19943536614960.0, - "grad_norm": 3.047115531621556, - "learning_rate": 1.7594217908329866e-07, - "loss": 0.7918, - "num_input_tokens_seen": 154425280, - "step": 7231 - }, - { - "epoch": 0.8695965851019059, - "flos": 19157676697440.0, - "grad_norm": 2.8955239512619375, - "learning_rate": 1.7562284098550895e-07, - "loss": 0.7198, - "num_input_tokens_seen": 154444710, - "step": 7232 - }, - { - "epoch": 0.8697168279925449, - "flos": 49648908613560.0, - "grad_norm": 0.9025305022160012, - "learning_rate": 1.753037796442838e-07, - "loss": 0.6533, - "num_input_tokens_seen": 154503870, - "step": 7233 - }, - { - "epoch": 0.8698370708831841, - "flos": 14406243860880.0, - "grad_norm": 3.5224407672436784, - "learning_rate": 1.74984995108024e-07, - "loss": 0.7374, - "num_input_tokens_seen": 154521520, - "step": 7234 - }, - { - "epoch": 0.8699573137738231, - "flos": 9330413650560.0, - "grad_norm": 6.942929729147267, - "learning_rate": 1.7466648742508981e-07, - "loss": 0.8141, - "num_input_tokens_seen": 154537425, - "step": 7235 - }, - { - "epoch": 0.8700775566644622, - "flos": 12705690445440.0, - "grad_norm": 5.381478951398556, - "learning_rate": 1.7434825664379837e-07, - "loss": 0.828, - "num_input_tokens_seen": 154555650, - "step": 7236 - }, - { - "epoch": 0.8701977995551013, - "flos": 9978100299480.0, - "grad_norm": 7.804235863433997, - "learning_rate": 1.740303028124246e-07, - "loss": 0.8333, - "num_input_tokens_seen": 154571430, - "step": 7237 - }, - { - "epoch": 0.8703180424457404, - "flos": 22426467352320.0, - "grad_norm": 5.05594691251094, - "learning_rate": 1.7371262597920212e-07, - "loss": 0.7444, - "num_input_tokens_seen": 154593210, - "step": 7238 - }, - { - "epoch": 0.8704382853363795, - "flos": 14325017877600.0, - "grad_norm": 5.290402579321723, - "learning_rate": 1.7339522619232195e-07, - "loss": 0.7426, - "num_input_tokens_seen": 154611310, - "step": 7239 - }, - { - "epoch": 0.8705585282270186, - "flos": 19509075525720.0, - "grad_norm": 4.124407407792555, - "learning_rate": 1.730781034999338e-07, - "loss": 0.7193, - "num_input_tokens_seen": 154632610, - "step": 7240 - }, - { - "epoch": 0.8706787711176577, - "flos": 25041809245560.0, - "grad_norm": 3.796213100179907, - "learning_rate": 1.7276125795014497e-07, - "loss": 0.7206, - "num_input_tokens_seen": 154650780, - "step": 7241 - }, - { - "epoch": 0.8707990140082967, - "flos": 10626705087240.0, - "grad_norm": 3.2811803901191476, - "learning_rate": 1.7244468959102054e-07, - "loss": 0.6524, - "num_input_tokens_seen": 154667555, - "step": 7242 - }, - { - "epoch": 0.8709192568989359, - "flos": 14676796625400.0, - "grad_norm": 5.87280232332966, - "learning_rate": 1.7212839847058348e-07, - "loss": 0.8425, - "num_input_tokens_seen": 154682405, - "step": 7243 - }, - { - "epoch": 0.871039499789575, - "flos": 12192282890040.0, - "grad_norm": 4.184690073585246, - "learning_rate": 1.718123846368147e-07, - "loss": 0.723, - "num_input_tokens_seen": 154701170, - "step": 7244 - }, - { - "epoch": 0.871159742680214, - "flos": 15405424317960.0, - "grad_norm": 3.489324227957336, - "learning_rate": 1.714966481376543e-07, - "loss": 0.7088, - "num_input_tokens_seen": 154717570, - "step": 7245 - }, - { - "epoch": 0.8712799855708532, - "flos": 20724837498240.0, - "grad_norm": 3.429156077582201, - "learning_rate": 1.7118118902099797e-07, - "loss": 0.7902, - "num_input_tokens_seen": 154735375, - "step": 7246 - }, - { - "epoch": 0.8714002284614922, - "flos": 16269787462200.0, - "grad_norm": 
-    [... trainer_state.json "log_history" continues: one JSON object per
-     optimizer step (steps 7247 to 7592, epoch 0.8715 to 0.9129), each
-     recording "epoch", "flos", "grad_norm", "learning_rate", "loss",
-     "num_input_tokens_seen", and "step" ...]
0.9130042686226177, - "flos": 18209523113160.0, - "grad_norm": 5.323712711817183, - "learning_rate": 7.87749490341918e-08, - "loss": 0.6433, - "num_input_tokens_seen": 162084580, - "step": 7593 - }, - { - "epoch": 0.9131245115132568, - "flos": 17264535524880.0, - "grad_norm": 6.027204510705372, - "learning_rate": 7.855862531830836e-08, - "loss": 0.8326, - "num_input_tokens_seen": 162100410, - "step": 7594 - }, - { - "epoch": 0.9132447544038959, - "flos": 14562965374680.0, - "grad_norm": 12.81173361370258, - "learning_rate": 7.834259308355373e-08, - "loss": 0.715, - "num_input_tokens_seen": 162118895, - "step": 7595 - }, - { - "epoch": 0.9133649972945349, - "flos": 16081188860040.0, - "grad_norm": 5.632771866184923, - "learning_rate": 7.812685236269989e-08, - "loss": 0.7334, - "num_input_tokens_seen": 162137275, - "step": 7596 - }, - { - "epoch": 0.9134852401851741, - "flos": 52541578502760.0, - "grad_norm": 0.8413529785634006, - "learning_rate": 7.791140318847445e-08, - "loss": 0.6091, - "num_input_tokens_seen": 162195130, - "step": 7597 - }, - { - "epoch": 0.9136054830758131, - "flos": 17299198689720.0, - "grad_norm": 4.3232268563018765, - "learning_rate": 7.769624559356081e-08, - "loss": 0.7847, - "num_input_tokens_seen": 162218245, - "step": 7598 - }, - { - "epoch": 0.9137257259664522, - "flos": 17159220803400.0, - "grad_norm": 7.785576165450271, - "learning_rate": 7.748137961059842e-08, - "loss": 0.7009, - "num_input_tokens_seen": 162231945, - "step": 7599 - }, - { - "epoch": 0.9138459688570914, - "flos": 13967983576440.0, - "grad_norm": 4.479750957378849, - "learning_rate": 7.726680527218211e-08, - "loss": 0.646, - "num_input_tokens_seen": 162248705, - "step": 7600 - }, - { - "epoch": 0.9139662117477304, - "flos": 34064442510000.0, - "grad_norm": 2.9313367753513657, - "learning_rate": 7.70525226108627e-08, - "loss": 0.7386, - "num_input_tokens_seen": 162272095, - "step": 7601 - }, - { - "epoch": 0.9140864546383695, - "flos": 16296820576680.0, - "grad_norm": 9.868480993256146, - "learning_rate": 7.683853165914666e-08, - "loss": 0.786, - "num_input_tokens_seen": 162289585, - "step": 7602 - }, - { - "epoch": 0.9142066975290086, - "flos": 12786536509200.0, - "grad_norm": 5.318403666210632, - "learning_rate": 7.662483244949602e-08, - "loss": 0.7548, - "num_input_tokens_seen": 162306565, - "step": 7603 - }, - { - "epoch": 0.9143269404196477, - "flos": 12921290502120.0, - "grad_norm": 8.293708383761924, - "learning_rate": 7.641142501432951e-08, - "loss": 0.799, - "num_input_tokens_seen": 162322480, - "step": 7604 - }, - { - "epoch": 0.9144471833102867, - "flos": 24474018861600.0, - "grad_norm": 3.604524598328614, - "learning_rate": 7.619830938602013e-08, - "loss": 0.7295, - "num_input_tokens_seen": 162343425, - "step": 7605 - }, - { - "epoch": 0.9145674262009259, - "flos": 15405582617760.0, - "grad_norm": 5.084743470929594, - "learning_rate": 7.598548559689777e-08, - "loss": 0.7951, - "num_input_tokens_seen": 162361545, - "step": 7606 - }, - { - "epoch": 0.914687669091565, - "flos": 12246475758840.0, - "grad_norm": 6.037949078209, - "learning_rate": 7.577295367924751e-08, - "loss": 0.793, - "num_input_tokens_seen": 162377665, - "step": 7607 - }, - { - "epoch": 0.914807911982204, - "flos": 18887060613000.0, - "grad_norm": 5.798027150285622, - "learning_rate": 7.556071366531002e-08, - "loss": 0.8202, - "num_input_tokens_seen": 162398355, - "step": 7608 - }, - { - "epoch": 0.9149281548728432, - "flos": 14028350137440.0, - "grad_norm": 3.2192263945087123, - "learning_rate": 7.53487655872822e-08, 
- "loss": 0.7626, - "num_input_tokens_seen": 162417245, - "step": 7609 - }, - { - "epoch": 0.9150483977634822, - "flos": 19699193805960.0, - "grad_norm": 4.084864634181877, - "learning_rate": 7.513710947731656e-08, - "loss": 0.7335, - "num_input_tokens_seen": 162438175, - "step": 7610 - }, - { - "epoch": 0.9151686406541213, - "flos": 16027344250800.0, - "grad_norm": 4.420879777717689, - "learning_rate": 7.492574536752095e-08, - "loss": 0.8307, - "num_input_tokens_seen": 162457885, - "step": 7611 - }, - { - "epoch": 0.9152888835447605, - "flos": 20023369560000.0, - "grad_norm": 4.260770054021582, - "learning_rate": 7.471467328995907e-08, - "loss": 0.7708, - "num_input_tokens_seen": 162476415, - "step": 7612 - }, - { - "epoch": 0.9154091264353995, - "flos": 9708402353880.0, - "grad_norm": 5.506099266734285, - "learning_rate": 7.450389327665018e-08, - "loss": 0.5826, - "num_input_tokens_seen": 162493970, - "step": 7613 - }, - { - "epoch": 0.9155293693260386, - "flos": 14757294429600.0, - "grad_norm": 5.083473933208889, - "learning_rate": 7.429340535957029e-08, - "loss": 0.6465, - "num_input_tokens_seen": 162508885, - "step": 7614 - }, - { - "epoch": 0.9156496122166777, - "flos": 14137242434400.0, - "grad_norm": 11.458721880249396, - "learning_rate": 7.40832095706494e-08, - "loss": 0.6983, - "num_input_tokens_seen": 162525300, - "step": 7615 - }, - { - "epoch": 0.9157698551073168, - "flos": 15434198730240.0, - "grad_norm": 3.6856762530285634, - "learning_rate": 7.387330594177443e-08, - "loss": 0.7879, - "num_input_tokens_seen": 162547095, - "step": 7616 - }, - { - "epoch": 0.9158900979979558, - "flos": 18457950057000.0, - "grad_norm": 3.3991417867565645, - "learning_rate": 7.366369450478749e-08, - "loss": 0.7838, - "num_input_tokens_seen": 162567925, - "step": 7617 - }, - { - "epoch": 0.916010340888595, - "flos": 22122644461200.0, - "grad_norm": 4.452470697888057, - "learning_rate": 7.345437529148646e-08, - "loss": 0.6472, - "num_input_tokens_seen": 162586655, - "step": 7618 - }, - { - "epoch": 0.9161305837792341, - "flos": 12462202455360.0, - "grad_norm": 13.964503383089859, - "learning_rate": 7.324534833362483e-08, - "loss": 0.7244, - "num_input_tokens_seen": 162603950, - "step": 7619 - }, - { - "epoch": 0.9162508266698731, - "flos": 16756066923240.0, - "grad_norm": 4.1862324176828025, - "learning_rate": 7.303661366291192e-08, - "loss": 0.6674, - "num_input_tokens_seen": 162624340, - "step": 7620 - }, - { - "epoch": 0.9163710695605123, - "flos": 14595887241720.0, - "grad_norm": 5.061442135145551, - "learning_rate": 7.28281713110126e-08, - "loss": 0.8091, - "num_input_tokens_seen": 162642135, - "step": 7621 - }, - { - "epoch": 0.9164913124511513, - "flos": 16674682640160.0, - "grad_norm": 5.58161966396136, - "learning_rate": 7.262002130954759e-08, - "loss": 0.7586, - "num_input_tokens_seen": 162660310, - "step": 7622 - }, - { - "epoch": 0.9166115553417904, - "flos": 18241558501320.0, - "grad_norm": 3.9125895822037777, - "learning_rate": 7.241216369009296e-08, - "loss": 0.7731, - "num_input_tokens_seen": 162680215, - "step": 7623 - }, - { - "epoch": 0.9167317982324296, - "flos": 18835052281440.0, - "grad_norm": 4.104217193344134, - "learning_rate": 7.220459848418037e-08, - "loss": 0.6559, - "num_input_tokens_seen": 162700010, - "step": 7624 - }, - { - "epoch": 0.9168520411230686, - "flos": 11382492534120.0, - "grad_norm": 4.80648784867424, - "learning_rate": 7.199732572329708e-08, - "loss": 0.7761, - "num_input_tokens_seen": 162717630, - "step": 7625 - }, - { - "epoch": 0.9169722840137077, - 
"flos": 22205453442480.0, - "grad_norm": 4.1611308644166405, - "learning_rate": 7.179034543888684e-08, - "loss": 0.7354, - "num_input_tokens_seen": 162736855, - "step": 7626 - }, - { - "epoch": 0.9170925269043467, - "flos": 16458417724320.0, - "grad_norm": 4.420109804265353, - "learning_rate": 7.158365766234808e-08, - "loss": 0.7625, - "num_input_tokens_seen": 162755425, - "step": 7627 - }, - { - "epoch": 0.9172127697949859, - "flos": 16756256883000.0, - "grad_norm": 4.677235873424539, - "learning_rate": 7.137726242503527e-08, - "loss": 0.7077, - "num_input_tokens_seen": 162774065, - "step": 7628 - }, - { - "epoch": 0.917333012685625, - "flos": 12727974565920.0, - "grad_norm": 11.767485249892829, - "learning_rate": 7.11711597582585e-08, - "loss": 0.7661, - "num_input_tokens_seen": 162791145, - "step": 7629 - }, - { - "epoch": 0.917453255576264, - "flos": 10410091911840.0, - "grad_norm": 3.6832033114261806, - "learning_rate": 7.096534969328271e-08, - "loss": 0.7891, - "num_input_tokens_seen": 162808310, - "step": 7630 - }, - { - "epoch": 0.9175734984669032, - "flos": 15163076086440.0, - "grad_norm": 4.42466275928994, - "learning_rate": 7.075983226132987e-08, - "loss": 0.8242, - "num_input_tokens_seen": 162826960, - "step": 7631 - }, - { - "epoch": 0.9176937413575422, - "flos": 10788777134280.0, - "grad_norm": 6.85431096660374, - "learning_rate": 7.055460749357656e-08, - "loss": 0.7695, - "num_input_tokens_seen": 162842960, - "step": 7632 - }, - { - "epoch": 0.9178139842481813, - "flos": 13485028411200.0, - "grad_norm": 5.672027669906379, - "learning_rate": 7.034967542115521e-08, - "loss": 0.694, - "num_input_tokens_seen": 162860945, - "step": 7633 - }, - { - "epoch": 0.9179342271388204, - "flos": 14649605211120.0, - "grad_norm": 5.561203194659411, - "learning_rate": 7.014503607515388e-08, - "loss": 0.7392, - "num_input_tokens_seen": 162879970, - "step": 7634 - }, - { - "epoch": 0.9180544700294595, - "flos": 18074515840560.0, - "grad_norm": 25.70062321601183, - "learning_rate": 6.994068948661592e-08, - "loss": 0.662, - "num_input_tokens_seen": 162897845, - "step": 7635 - }, - { - "epoch": 0.9181747129200986, - "flos": 12192631149600.0, - "grad_norm": 3.994932421708296, - "learning_rate": 6.973663568654142e-08, - "loss": 0.745, - "num_input_tokens_seen": 162915270, - "step": 7636 - }, - { - "epoch": 0.9182949558107377, - "flos": 17774618784480.0, - "grad_norm": 16.55574928756341, - "learning_rate": 6.953287470588386e-08, - "loss": 0.6393, - "num_input_tokens_seen": 162932945, - "step": 7637 - }, - { - "epoch": 0.9184151987013768, - "flos": 16154183253720.0, - "grad_norm": 5.288344995241545, - "learning_rate": 6.932940657555452e-08, - "loss": 0.8367, - "num_input_tokens_seen": 162948795, - "step": 7638 - }, - { - "epoch": 0.9185354415920158, - "flos": 23619533624880.0, - "grad_norm": 4.372485719006415, - "learning_rate": 6.912623132641938e-08, - "loss": 0.7492, - "num_input_tokens_seen": 162973605, - "step": 7639 - }, - { - "epoch": 0.918655684482655, - "flos": 15353194366680.0, - "grad_norm": 4.3076139773069535, - "learning_rate": 6.892334898929952e-08, - "loss": 0.7456, - "num_input_tokens_seen": 162993570, - "step": 7640 - }, - { - "epoch": 0.918775927373294, - "flos": 11329249464120.0, - "grad_norm": 4.989153732674683, - "learning_rate": 6.872075959497236e-08, - "loss": 0.8276, - "num_input_tokens_seen": 163012065, - "step": 7641 - }, - { - "epoch": 0.9188961702639331, - "flos": 21966207887040.0, - "grad_norm": 5.083699426861397, - "learning_rate": 6.85184631741702e-08, - "loss": 0.8087, - 
"num_input_tokens_seen": 163032350, - "step": 7642 - }, - { - "epoch": 0.9190164131545723, - "flos": 15133035275760.0, - "grad_norm": 5.072617769411164, - "learning_rate": 6.831645975758161e-08, - "loss": 0.7617, - "num_input_tokens_seen": 163050010, - "step": 7643 - }, - { - "epoch": 0.9191366560452113, - "flos": 18781017712440.0, - "grad_norm": 4.956134062634077, - "learning_rate": 6.811474937585026e-08, - "loss": 0.6549, - "num_input_tokens_seen": 163069520, - "step": 7644 - }, - { - "epoch": 0.9192568989358504, - "flos": 15676040402400.0, - "grad_norm": 3.673795417019473, - "learning_rate": 6.79133320595755e-08, - "loss": 0.7703, - "num_input_tokens_seen": 163089160, - "step": 7645 - }, - { - "epoch": 0.9193771418264896, - "flos": 16971635319960.0, - "grad_norm": 3.511265064504594, - "learning_rate": 6.771220783931198e-08, - "loss": 0.722, - "num_input_tokens_seen": 163109040, - "step": 7646 - }, - { - "epoch": 0.9194973847171286, - "flos": 52054824142320.0, - "grad_norm": 0.863323469553633, - "learning_rate": 6.751137674556994e-08, - "loss": 0.6667, - "num_input_tokens_seen": 163169145, - "step": 7647 - }, - { - "epoch": 0.9196176276077677, - "flos": 10707456171120.0, - "grad_norm": 5.74736659243194, - "learning_rate": 6.731083880881572e-08, - "loss": 0.7696, - "num_input_tokens_seen": 163185085, - "step": 7648 - }, - { - "epoch": 0.9197378704984068, - "flos": 17052956283120.0, - "grad_norm": 3.5600486486304135, - "learning_rate": 6.711059405947072e-08, - "loss": 0.7922, - "num_input_tokens_seen": 163202995, - "step": 7649 - }, - { - "epoch": 0.9198581133890459, - "flos": 14838362113080.0, - "grad_norm": 4.602218607060918, - "learning_rate": 6.691064252791156e-08, - "loss": 0.7561, - "num_input_tokens_seen": 163222190, - "step": 7650 - }, - { - "epoch": 0.9199783562796849, - "flos": 12894289047600.0, - "grad_norm": 3.5211879544111993, - "learning_rate": 6.67109842444713e-08, - "loss": 0.7657, - "num_input_tokens_seen": 163240840, - "step": 7651 - }, - { - "epoch": 0.9200985991703241, - "flos": 12894605647200.0, - "grad_norm": 4.864864866726391, - "learning_rate": 6.651161923943704e-08, - "loss": 0.7543, - "num_input_tokens_seen": 163258465, - "step": 7652 - }, - { - "epoch": 0.9202188420609632, - "flos": 15352086268080.0, - "grad_norm": 3.5178237481914643, - "learning_rate": 6.631254754305326e-08, - "loss": 0.7533, - "num_input_tokens_seen": 163277645, - "step": 7653 - }, - { - "epoch": 0.9203390849516022, - "flos": 10113614131440.0, - "grad_norm": 4.17293440836758, - "learning_rate": 6.611376918551848e-08, - "loss": 0.7725, - "num_input_tokens_seen": 163296150, - "step": 7654 - }, - { - "epoch": 0.9204593278422414, - "flos": 15484370784120.0, - "grad_norm": 5.51857827633319, - "learning_rate": 6.591528419698744e-08, - "loss": 0.7807, - "num_input_tokens_seen": 163315655, - "step": 7655 - }, - { - "epoch": 0.9205795707328804, - "flos": 10545099184440.0, - "grad_norm": 4.308023091600563, - "learning_rate": 6.571709260756986e-08, - "loss": 0.8024, - "num_input_tokens_seen": 163332020, - "step": 7656 - }, - { - "epoch": 0.9206998136235195, - "flos": 16403433356520.0, - "grad_norm": 5.383512985648905, - "learning_rate": 6.551919444733122e-08, - "loss": 0.7266, - "num_input_tokens_seen": 163349555, - "step": 7657 - }, - { - "epoch": 0.9208200565141585, - "flos": 39301869867960.0, - "grad_norm": 6.280035859995708, - "learning_rate": 6.53215897462931e-08, - "loss": 0.6448, - "num_input_tokens_seen": 163373030, - "step": 7658 - }, - { - "epoch": 0.9209402994047977, - "flos": 22450777710240.0, 
- "grad_norm": 4.394422307677414, - "learning_rate": 6.512427853443103e-08, - "loss": 0.7379, - "num_input_tokens_seen": 163394830, - "step": 7659 - }, - { - "epoch": 0.9210605422954368, - "flos": 21373347306120.0, - "grad_norm": 4.7526265963369125, - "learning_rate": 6.492726084167799e-08, - "loss": 0.7456, - "num_input_tokens_seen": 163416665, - "step": 7660 - }, - { - "epoch": 0.9211807851860758, - "flos": 40413682988640.0, - "grad_norm": 0.7857959475747985, - "learning_rate": 6.473053669792072e-08, - "loss": 0.5734, - "num_input_tokens_seen": 163471075, - "step": 7661 - }, - { - "epoch": 0.921301028076715, - "flos": 14022999604200.0, - "grad_norm": 5.127252781421522, - "learning_rate": 6.453410613300248e-08, - "loss": 0.7103, - "num_input_tokens_seen": 163488725, - "step": 7662 - }, - { - "epoch": 0.921421270967354, - "flos": 20184871727760.0, - "grad_norm": 3.3824789750601014, - "learning_rate": 6.43379691767214e-08, - "loss": 0.5648, - "num_input_tokens_seen": 163507650, - "step": 7663 - }, - { - "epoch": 0.9215415138579931, - "flos": 51777052906920.0, - "grad_norm": 0.9315504434311481, - "learning_rate": 6.414212585883105e-08, - "loss": 0.6015, - "num_input_tokens_seen": 163570000, - "step": 7664 - }, - { - "epoch": 0.9216617567486323, - "flos": 26124716822760.0, - "grad_norm": 4.727221633728384, - "learning_rate": 6.394657620904143e-08, - "loss": 0.6791, - "num_input_tokens_seen": 163592830, - "step": 7665 - }, - { - "epoch": 0.9217819996392713, - "flos": 21669730106640.0, - "grad_norm": 3.999157220747557, - "learning_rate": 6.375132025701657e-08, - "loss": 0.7053, - "num_input_tokens_seen": 163614850, - "step": 7666 - }, - { - "epoch": 0.9219022425299104, - "flos": 10599387033120.0, - "grad_norm": 9.447574316269007, - "learning_rate": 6.355635803237724e-08, - "loss": 0.6729, - "num_input_tokens_seen": 163630270, - "step": 7667 - }, - { - "epoch": 0.9220224854205495, - "flos": 13191748286760.0, - "grad_norm": 33.118104382506935, - "learning_rate": 6.336168956469867e-08, - "loss": 0.7806, - "num_input_tokens_seen": 163648465, - "step": 7668 - }, - { - "epoch": 0.9221427283111886, - "flos": 18159730978800.0, - "grad_norm": 3.7290409571310605, - "learning_rate": 6.316731488351168e-08, - "loss": 0.7045, - "num_input_tokens_seen": 163669375, - "step": 7669 - }, - { - "epoch": 0.9222629712018277, - "flos": 10059421262640.0, - "grad_norm": 4.925625005266325, - "learning_rate": 6.297323401830334e-08, - "loss": 0.6287, - "num_input_tokens_seen": 163687880, - "step": 7670 - }, - { - "epoch": 0.9223832140924668, - "flos": 15810541115640.0, - "grad_norm": 4.709777385528985, - "learning_rate": 6.277944699851523e-08, - "loss": 0.668, - "num_input_tokens_seen": 163707120, - "step": 7671 - }, - { - "epoch": 0.9225034569831059, - "flos": 15459712166640.0, - "grad_norm": 4.724807186330258, - "learning_rate": 6.25859538535447e-08, - "loss": 0.7264, - "num_input_tokens_seen": 163727635, - "step": 7672 - }, - { - "epoch": 0.9226236998737449, - "flos": 9060335785440.0, - "grad_norm": 6.589628983792863, - "learning_rate": 6.239275461274474e-08, - "loss": 0.7574, - "num_input_tokens_seen": 163743730, - "step": 7673 - }, - { - "epoch": 0.9227439427643841, - "flos": 19213389244320.0, - "grad_norm": 6.877470370263158, - "learning_rate": 6.219984930542299e-08, - "loss": 0.8522, - "num_input_tokens_seen": 163764190, - "step": 7674 - }, - { - "epoch": 0.9228641856550232, - "flos": 13110332343720.0, - "grad_norm": 4.799467597681164, - "learning_rate": 6.200723796084383e-08, - "loss": 0.7275, - 
"num_input_tokens_seen": 163782005, - "step": 7675 - }, - { - "epoch": 0.9229844285456622, - "flos": 51932349722520.0, - "grad_norm": 0.7361092887312045, - "learning_rate": 6.181492060822546e-08, - "loss": 0.6357, - "num_input_tokens_seen": 163841900, - "step": 7676 - }, - { - "epoch": 0.9231046714363014, - "flos": 13110553963440.0, - "grad_norm": 7.371100995293206, - "learning_rate": 6.162289727674274e-08, - "loss": 0.7983, - "num_input_tokens_seen": 163859300, - "step": 7677 - }, - { - "epoch": 0.9232249143269404, - "flos": 13029422960040.0, - "grad_norm": 6.66633465258048, - "learning_rate": 6.143116799552527e-08, - "loss": 0.8713, - "num_input_tokens_seen": 163875265, - "step": 7678 - }, - { - "epoch": 0.9233451572175795, - "flos": 17133992306640.0, - "grad_norm": 16.076930113823522, - "learning_rate": 6.123973279365802e-08, - "loss": 0.5398, - "num_input_tokens_seen": 163893960, - "step": 7679 - }, - { - "epoch": 0.9234654001082186, - "flos": 13133154683520.0, - "grad_norm": 3.7941178723225177, - "learning_rate": 6.10485917001824e-08, - "loss": 0.7641, - "num_input_tokens_seen": 163911535, - "step": 7680 - }, - { - "epoch": 0.9235856429988577, - "flos": 18129880127880.0, - "grad_norm": 3.0859874093570845, - "learning_rate": 6.085774474409322e-08, - "loss": 0.7999, - "num_input_tokens_seen": 163931815, - "step": 7681 - }, - { - "epoch": 0.9237058858894968, - "flos": 10247956544880.0, - "grad_norm": 6.5055579851759315, - "learning_rate": 6.066719195434267e-08, - "loss": 0.6948, - "num_input_tokens_seen": 163949335, - "step": 7682 - }, - { - "epoch": 0.9238261287801359, - "flos": 21049013252280.0, - "grad_norm": 4.163701311947377, - "learning_rate": 6.047693335983717e-08, - "loss": 0.6424, - "num_input_tokens_seen": 163971400, - "step": 7683 - }, - { - "epoch": 0.923946371670775, - "flos": 16917410791200.0, - "grad_norm": 4.881286089540281, - "learning_rate": 6.028696898943853e-08, - "loss": 0.792, - "num_input_tokens_seen": 163990180, - "step": 7684 - }, - { - "epoch": 0.924066614561414, - "flos": 15995625462240.0, - "grad_norm": 3.7257703662984514, - "learning_rate": 6.00972988719648e-08, - "loss": 0.6824, - "num_input_tokens_seen": 164008135, - "step": 7685 - }, - { - "epoch": 0.9241868574520532, - "flos": 20913816019920.0, - "grad_norm": 4.323927182892768, - "learning_rate": 5.990792303618807e-08, - "loss": 0.6869, - "num_input_tokens_seen": 164027435, - "step": 7686 - }, - { - "epoch": 0.9243071003426923, - "flos": 22398674398800.0, - "grad_norm": 3.2091980466560694, - "learning_rate": 5.971884151083695e-08, - "loss": 0.6776, - "num_input_tokens_seen": 164049565, - "step": 7687 - }, - { - "epoch": 0.9244273432333313, - "flos": 21022265077440.0, - "grad_norm": 4.3236629470041805, - "learning_rate": 5.9530054324595124e-08, - "loss": 0.7334, - "num_input_tokens_seen": 164069400, - "step": 7688 - }, - { - "epoch": 0.9245475861239704, - "flos": 48832279706280.0, - "grad_norm": 0.7554223820560781, - "learning_rate": 5.934156150610103e-08, - "loss": 0.5889, - "num_input_tokens_seen": 164126485, - "step": 7689 - }, - { - "epoch": 0.9246678290146095, - "flos": 17751828104640.0, - "grad_norm": 5.166292894851261, - "learning_rate": 5.915336308394914e-08, - "loss": 0.7601, - "num_input_tokens_seen": 164145040, - "step": 7690 - }, - { - "epoch": 0.9247880719052486, - "flos": 13866056470680.0, - "grad_norm": 2.9190469202790363, - "learning_rate": 5.89654590866886e-08, - "loss": 0.7629, - "num_input_tokens_seen": 164164260, - "step": 7691 - }, - { - "epoch": 0.9249083147958876, - "flos": 
17592542134080.0, - "grad_norm": 5.689031373829126, - "learning_rate": 5.877784954282483e-08, - "loss": 0.8668, - "num_input_tokens_seen": 164183320, - "step": 7692 - }, - { - "epoch": 0.9250285576865268, - "flos": 22587906200160.0, - "grad_norm": 4.091248191882047, - "learning_rate": 5.8590534480817963e-08, - "loss": 0.7103, - "num_input_tokens_seen": 164204765, - "step": 7693 - }, - { - "epoch": 0.9251488005771659, - "flos": 7682628405720.0, - "grad_norm": 5.7160771695287504, - "learning_rate": 5.840351392908349e-08, - "loss": 0.7084, - "num_input_tokens_seen": 164220205, - "step": 7694 - }, - { - "epoch": 0.9252690434678049, - "flos": 17268651319680.0, - "grad_norm": 5.010953349703067, - "learning_rate": 5.821678791599205e-08, - "loss": 0.696, - "num_input_tokens_seen": 164239370, - "step": 7695 - }, - { - "epoch": 0.9253892863584441, - "flos": 15701585498760.0, - "grad_norm": 5.211821382472996, - "learning_rate": 5.803035646986965e-08, - "loss": 0.7876, - "num_input_tokens_seen": 164258425, - "step": 7696 - }, - { - "epoch": 0.9255095292490831, - "flos": 12732217000560.0, - "grad_norm": 5.382228515128515, - "learning_rate": 5.7844219618998766e-08, - "loss": 0.6555, - "num_input_tokens_seen": 164272470, - "step": 7697 - }, - { - "epoch": 0.9256297721397222, - "flos": 18129816807960.0, - "grad_norm": 3.578874667731339, - "learning_rate": 5.765837739161505e-08, - "loss": 0.6858, - "num_input_tokens_seen": 164291310, - "step": 7698 - }, - { - "epoch": 0.9257500150303614, - "flos": 16917727390800.0, - "grad_norm": 96.96901184302385, - "learning_rate": 5.7472829815911504e-08, - "loss": 0.7374, - "num_input_tokens_seen": 164309855, - "step": 7699 - }, - { - "epoch": 0.9258702579210004, - "flos": 16512483953280.0, - "grad_norm": 6.8282657844468, - "learning_rate": 5.7287576920035164e-08, - "loss": 0.799, - "num_input_tokens_seen": 164328590, - "step": 7700 - }, - { - "epoch": 0.9259905008116395, - "flos": 22017804639120.0, - "grad_norm": 3.8994604079780926, - "learning_rate": 5.7102618732088435e-08, - "loss": 0.759, - "num_input_tokens_seen": 164347640, - "step": 7701 - }, - { - "epoch": 0.9261107437022786, - "flos": 17998228811040.0, - "grad_norm": 3.9077520305556765, - "learning_rate": 5.6917955280130216e-08, - "loss": 0.7325, - "num_input_tokens_seen": 164368840, - "step": 7702 - }, - { - "epoch": 0.9262309865929177, - "flos": 16108380274320.0, - "grad_norm": 6.143960682544472, - "learning_rate": 5.6733586592172755e-08, - "loss": 0.6983, - "num_input_tokens_seen": 164388055, - "step": 7703 - }, - { - "epoch": 0.9263512294835567, - "flos": 14865743487120.0, - "grad_norm": 3.9176260131745044, - "learning_rate": 5.6549512696185244e-08, - "loss": 0.7965, - "num_input_tokens_seen": 164406275, - "step": 7704 - }, - { - "epoch": 0.9264714723741959, - "flos": 15514000015320.0, - "grad_norm": 3.348658728756522, - "learning_rate": 5.636573362009156e-08, - "loss": 0.6594, - "num_input_tokens_seen": 164426055, - "step": 7705 - }, - { - "epoch": 0.926591715264835, - "flos": 13138125297240.0, - "grad_norm": 4.442980109157165, - "learning_rate": 5.618224939177074e-08, - "loss": 0.7456, - "num_input_tokens_seen": 164443680, - "step": 7706 - }, - { - "epoch": 0.926711958155474, - "flos": 26578992555600.0, - "grad_norm": 5.263034081257551, - "learning_rate": 5.599906003905719e-08, - "loss": 0.6898, - "num_input_tokens_seen": 164465945, - "step": 7707 - }, - { - "epoch": 0.9268322010461132, - "flos": 15378802782960.0, - "grad_norm": 3.9786213339693703, - "learning_rate": 5.581616558974023e-08, - "loss": 
0.8001, - "num_input_tokens_seen": 164484230, - "step": 7708 - }, - { - "epoch": 0.9269524439367522, - "flos": 16808455174320.0, - "grad_norm": 28.01863241143349, - "learning_rate": 5.5633566071565444e-08, - "loss": 0.7736, - "num_input_tokens_seen": 164503190, - "step": 7709 - }, - { - "epoch": 0.9270726868273913, - "flos": 30877890957120.0, - "grad_norm": 4.604062707800405, - "learning_rate": 5.5451261512232896e-08, - "loss": 0.6888, - "num_input_tokens_seen": 164525590, - "step": 7710 - }, - { - "epoch": 0.9271929297180305, - "flos": 14460341749800.0, - "grad_norm": 4.991491708040348, - "learning_rate": 5.5269251939397576e-08, - "loss": 0.611, - "num_input_tokens_seen": 164544825, - "step": 7711 - }, - { - "epoch": 0.9273131726086695, - "flos": 14590188448920.0, - "grad_norm": 5.460451118082083, - "learning_rate": 5.508753738067073e-08, - "loss": 0.7542, - "num_input_tokens_seen": 164564085, - "step": 7712 - }, - { - "epoch": 0.9274334154993086, - "flos": 17026113128400.0, - "grad_norm": 8.433467551307993, - "learning_rate": 5.4906117863617875e-08, - "loss": 0.7733, - "num_input_tokens_seen": 164583190, - "step": 7713 - }, - { - "epoch": 0.9275536583899477, - "flos": 23343756966960.0, - "grad_norm": 5.025120573352782, - "learning_rate": 5.4724993415760533e-08, - "loss": 0.7651, - "num_input_tokens_seen": 164601265, - "step": 7714 - }, - { - "epoch": 0.9276739012805868, - "flos": 13650108154440.0, - "grad_norm": 5.332444395051707, - "learning_rate": 5.454416406457496e-08, - "loss": 0.7251, - "num_input_tokens_seen": 164620080, - "step": 7715 - }, - { - "epoch": 0.9277941441712259, - "flos": 10084269839880.0, - "grad_norm": 5.407416598187846, - "learning_rate": 5.436362983749299e-08, - "loss": 0.7214, - "num_input_tokens_seen": 164634970, - "step": 7716 - }, - { - "epoch": 0.927914387061865, - "flos": 17026176448320.0, - "grad_norm": 6.430787966844477, - "learning_rate": 5.418339076190137e-08, - "loss": 0.6226, - "num_input_tokens_seen": 164654200, - "step": 7717 - }, - { - "epoch": 0.9280346299525041, - "flos": 13188423990960.0, - "grad_norm": 4.361162591672199, - "learning_rate": 5.400344686514202e-08, - "loss": 0.8764, - "num_input_tokens_seen": 164671505, - "step": 7718 - }, - { - "epoch": 0.9281548728431431, - "flos": 16347942429360.0, - "grad_norm": 4.016173091547416, - "learning_rate": 5.38237981745131e-08, - "loss": 0.6547, - "num_input_tokens_seen": 164689340, - "step": 7719 - }, - { - "epoch": 0.9282751157337822, - "flos": 13758842151600.0, - "grad_norm": 5.565251216513525, - "learning_rate": 5.364444471726592e-08, - "loss": 0.7888, - "num_input_tokens_seen": 164708265, - "step": 7720 - }, - { - "epoch": 0.9283953586244214, - "flos": 18726919823520.0, - "grad_norm": 3.15761967013147, - "learning_rate": 5.346538652060939e-08, - "loss": 0.778, - "num_input_tokens_seen": 164729340, - "step": 7721 - }, - { - "epoch": 0.9285156015150604, - "flos": 13299785764800.0, - "grad_norm": 11.79143668625569, - "learning_rate": 5.3286623611705994e-08, - "loss": 0.6949, - "num_input_tokens_seen": 164747105, - "step": 7722 - }, - { - "epoch": 0.9286358444056995, - "flos": 48960991138200.0, - "grad_norm": 0.8942814440681868, - "learning_rate": 5.3108156017673824e-08, - "loss": 0.6432, - "num_input_tokens_seen": 164808585, - "step": 7723 - }, - { - "epoch": 0.9287560872963386, - "flos": 16350348586320.0, - "grad_norm": 2.7947546737635913, - "learning_rate": 5.2929983765586775e-08, - "loss": 0.7074, - "num_input_tokens_seen": 164827085, - "step": 7724 - }, - { - "epoch": 0.9288763301869777, - 
"flos": 18832677784440.0, - "grad_norm": 3.236333871213592, - "learning_rate": 5.275210688247278e-08, - "loss": 0.6142, - "num_input_tokens_seen": 164847130, - "step": 7725 - }, - { - "epoch": 0.9289965730776167, - "flos": 8924821953480.0, - "grad_norm": 7.5432241387694985, - "learning_rate": 5.257452539531604e-08, - "loss": 0.8268, - "num_input_tokens_seen": 164863920, - "step": 7726 - }, - { - "epoch": 0.9291168159682559, - "flos": 19562286935760.0, - "grad_norm": 3.6042126485430863, - "learning_rate": 5.2397239331055445e-08, - "loss": 0.6775, - "num_input_tokens_seen": 164885640, - "step": 7727 - }, - { - "epoch": 0.929237058858895, - "flos": 10572448898520.0, - "grad_norm": 7.226560916384962, - "learning_rate": 5.2220248716585036e-08, - "loss": 0.7906, - "num_input_tokens_seen": 164903040, - "step": 7728 - }, - { - "epoch": 0.929357301749534, - "flos": 17452057688400.0, - "grad_norm": 7.525718842196862, - "learning_rate": 5.204355357875445e-08, - "loss": 0.7384, - "num_input_tokens_seen": 164921105, - "step": 7729 - }, - { - "epoch": 0.9294775446401732, - "flos": 9411323034240.0, - "grad_norm": 4.254743061130172, - "learning_rate": 5.1867153944367584e-08, - "loss": 0.6935, - "num_input_tokens_seen": 164937215, - "step": 7730 - }, - { - "epoch": 0.9295977875308122, - "flos": 19211394666840.0, - "grad_norm": 3.046616763395575, - "learning_rate": 5.16910498401848e-08, - "loss": 0.7269, - "num_input_tokens_seen": 164956385, - "step": 7731 - }, - { - "epoch": 0.9297180304214513, - "flos": 12003810927720.0, - "grad_norm": 9.338128583756884, - "learning_rate": 5.151524129292073e-08, - "loss": 0.7999, - "num_input_tokens_seen": 164974000, - "step": 7732 - }, - { - "epoch": 0.9298382733120905, - "flos": 17619416948760.0, - "grad_norm": 5.05673247158524, - "learning_rate": 5.1339728329245155e-08, - "loss": 0.6643, - "num_input_tokens_seen": 164994285, - "step": 7733 - }, - { - "epoch": 0.9299585162027295, - "flos": 16189163018160.0, - "grad_norm": 8.161700652679276, - "learning_rate": 5.116451097578367e-08, - "loss": 0.7658, - "num_input_tokens_seen": 165013045, - "step": 7734 - }, - { - "epoch": 0.9300787590933686, - "flos": 15703896675840.0, - "grad_norm": 4.635013154143892, - "learning_rate": 5.0989589259115895e-08, - "loss": 0.7327, - "num_input_tokens_seen": 165033650, - "step": 7735 - }, - { - "epoch": 0.9301990019840077, - "flos": 12970417777320.0, - "grad_norm": 4.889476117114093, - "learning_rate": 5.081496320577816e-08, - "loss": 0.6858, - "num_input_tokens_seen": 165050490, - "step": 7736 - }, - { - "epoch": 0.9303192448746468, - "flos": 43404164188800.0, - "grad_norm": 0.941181900073617, - "learning_rate": 5.0640632842260835e-08, - "loss": 0.6518, - "num_input_tokens_seen": 165110470, - "step": 7737 - }, - { - "epoch": 0.9304394877652858, - "flos": 42487914861480.0, - "grad_norm": 2.7819902099393237, - "learning_rate": 5.0466598195009426e-08, - "loss": 0.7137, - "num_input_tokens_seen": 165137060, - "step": 7738 - }, - { - "epoch": 0.930559730655925, - "flos": 15351706348560.0, - "grad_norm": 14.39658016286306, - "learning_rate": 5.0292859290425036e-08, - "loss": 0.6807, - "num_input_tokens_seen": 165154650, - "step": 7739 - }, - { - "epoch": 0.9306799735465641, - "flos": 17025828188760.0, - "grad_norm": 5.546015971114352, - "learning_rate": 5.011941615486348e-08, - "loss": 0.7591, - "num_input_tokens_seen": 165173485, - "step": 7740 - }, - { - "epoch": 0.9308002164372031, - "flos": 11085539854320.0, - "grad_norm": 12.890854638106088, - "learning_rate": 4.994626881463659e-08, - 
"loss": 0.8425, - "num_input_tokens_seen": 165189460, - "step": 7741 - }, - { - "epoch": 0.9309204593278423, - "flos": 22642384008600.0, - "grad_norm": 3.5151332935221715, - "learning_rate": 4.9773417296009814e-08, - "loss": 0.6977, - "num_input_tokens_seen": 165210700, - "step": 7742 - }, - { - "epoch": 0.9310407022184813, - "flos": 16861983183960.0, - "grad_norm": 6.902190935896816, - "learning_rate": 4.960086162520527e-08, - "loss": 0.638, - "num_input_tokens_seen": 165230510, - "step": 7743 - }, - { - "epoch": 0.9311609451091204, - "flos": 16189447957800.0, - "grad_norm": 4.015454715428288, - "learning_rate": 4.942860182839936e-08, - "loss": 0.8185, - "num_input_tokens_seen": 165248575, - "step": 7744 - }, - { - "epoch": 0.9312811879997596, - "flos": 15427075239240.0, - "grad_norm": 4.431463514599391, - "learning_rate": 4.925663793172341e-08, - "loss": 0.7748, - "num_input_tokens_seen": 165266255, - "step": 7745 - }, - { - "epoch": 0.9314014308903986, - "flos": 49512128383200.0, - "grad_norm": 0.8454894397173109, - "learning_rate": 4.908496996126477e-08, - "loss": 0.6045, - "num_input_tokens_seen": 165329435, - "step": 7746 - }, - { - "epoch": 0.9315216737810377, - "flos": 16513275452280.0, - "grad_norm": 3.3952707554041166, - "learning_rate": 4.89135979430646e-08, - "loss": 0.7467, - "num_input_tokens_seen": 165349200, - "step": 7747 - }, - { - "epoch": 0.9316419166716768, - "flos": 17562406343520.0, - "grad_norm": 2.9303929814647507, - "learning_rate": 4.874252190312078e-08, - "loss": 0.8282, - "num_input_tokens_seen": 165369305, - "step": 7748 - }, - { - "epoch": 0.9317621595623159, - "flos": 22186208678160.0, - "grad_norm": 9.079299404322992, - "learning_rate": 4.857174186738477e-08, - "loss": 0.6332, - "num_input_tokens_seen": 165392375, - "step": 7749 - }, - { - "epoch": 0.931882402452955, - "flos": 11463908477160.0, - "grad_norm": 5.7191949056359626, - "learning_rate": 4.840125786176408e-08, - "loss": 0.7232, - "num_input_tokens_seen": 165408300, - "step": 7750 - }, - { - "epoch": 0.932002645343594, - "flos": 20806316761200.0, - "grad_norm": 5.231760584845094, - "learning_rate": 4.823106991212067e-08, - "loss": 0.7615, - "num_input_tokens_seen": 165427260, - "step": 7751 - }, - { - "epoch": 0.9321228882342332, - "flos": 11166670857720.0, - "grad_norm": 7.564180297816867, - "learning_rate": 4.806117804427212e-08, - "loss": 0.8267, - "num_input_tokens_seen": 165444915, - "step": 7752 - }, - { - "epoch": 0.9322431311248722, - "flos": 13056994293840.0, - "grad_norm": 3.648130481553122, - "learning_rate": 4.7891582283990926e-08, - "loss": 0.6358, - "num_input_tokens_seen": 165463360, - "step": 7753 - }, - { - "epoch": 0.9323633740155113, - "flos": 17700263012520.0, - "grad_norm": 3.209973861255167, - "learning_rate": 4.772228265700473e-08, - "loss": 0.7107, - "num_input_tokens_seen": 165483940, - "step": 7754 - }, - { - "epoch": 0.9324836169061504, - "flos": 10945561968000.0, - "grad_norm": 4.037027875855525, - "learning_rate": 4.75532791889961e-08, - "loss": 0.7368, - "num_input_tokens_seen": 165500360, - "step": 7755 - }, - { - "epoch": 0.9326038597967895, - "flos": 13596865084440.0, - "grad_norm": 3.5630650657774785, - "learning_rate": 4.738457190560252e-08, - "loss": 0.6376, - "num_input_tokens_seen": 165519190, - "step": 7756 - }, - { - "epoch": 0.9327241026874286, - "flos": 13839339955800.0, - "grad_norm": 6.1161494245120425, - "learning_rate": 4.721616083241664e-08, - "loss": 0.7731, - "num_input_tokens_seen": 165537165, - "step": 7757 - }, - { - "epoch": 0.9328443455780677, - 
"flos": 21697143140640.0, - "grad_norm": 5.46587727980296, - "learning_rate": 4.7048045994986684e-08, - "loss": 0.7594, - "num_input_tokens_seen": 165557745, - "step": 7758 - }, - { - "epoch": 0.9329645884687068, - "flos": 22075733383200.0, - "grad_norm": 3.4520792348526954, - "learning_rate": 4.688022741881559e-08, - "loss": 0.8856, - "num_input_tokens_seen": 165577990, - "step": 7759 - }, - { - "epoch": 0.9330848313593458, - "flos": 15996385301280.0, - "grad_norm": 2.5867729566142916, - "learning_rate": 4.671270512936076e-08, - "loss": 0.7439, - "num_input_tokens_seen": 165596870, - "step": 7760 - }, - { - "epoch": 0.933205074249985, - "flos": 16188783098640.0, - "grad_norm": 5.302449311801446, - "learning_rate": 4.6545479152035884e-08, - "loss": 0.8121, - "num_input_tokens_seen": 165615760, - "step": 7761 - }, - { - "epoch": 0.9333253171406241, - "flos": 11166765837600.0, - "grad_norm": 4.360171884420881, - "learning_rate": 4.637854951220821e-08, - "loss": 0.7602, - "num_input_tokens_seen": 165632265, - "step": 7762 - }, - { - "epoch": 0.9334455600312631, - "flos": 11436812042760.0, - "grad_norm": 3.6167535047756076, - "learning_rate": 4.621191623520171e-08, - "loss": 0.7312, - "num_input_tokens_seen": 165650415, - "step": 7763 - }, - { - "epoch": 0.9335658029219023, - "flos": 16215657913320.0, - "grad_norm": 15.856687541694258, - "learning_rate": 4.604557934629372e-08, - "loss": 0.8326, - "num_input_tokens_seen": 165669210, - "step": 7764 - }, - { - "epoch": 0.9336860458125413, - "flos": 14812025517720.0, - "grad_norm": 4.567874199332523, - "learning_rate": 4.587953887071805e-08, - "loss": 0.8007, - "num_input_tokens_seen": 165688750, - "step": 7765 - }, - { - "epoch": 0.9338062887031804, - "flos": 15294632423400.0, - "grad_norm": 3.0184012959954467, - "learning_rate": 4.5713794833662554e-08, - "loss": 0.8474, - "num_input_tokens_seen": 165707685, - "step": 7766 - }, - { - "epoch": 0.9339265315938196, - "flos": 16998700094400.0, - "grad_norm": 5.905680634035395, - "learning_rate": 4.5548347260270236e-08, - "loss": 0.6154, - "num_input_tokens_seen": 165726695, - "step": 7767 - }, - { - "epoch": 0.9340467744844586, - "flos": 16701652434720.0, - "grad_norm": 6.473013979062963, - "learning_rate": 4.538319617564012e-08, - "loss": 0.678, - "num_input_tokens_seen": 165745435, - "step": 7768 - }, - { - "epoch": 0.9341670173750977, - "flos": 17323572367560.0, - "grad_norm": 4.227296212020179, - "learning_rate": 4.521834160482485e-08, - "loss": 0.736, - "num_input_tokens_seen": 165763895, - "step": 7769 - }, - { - "epoch": 0.9342872602657368, - "flos": 18184357936320.0, - "grad_norm": 3.2060938033042543, - "learning_rate": 4.5053783572832846e-08, - "loss": 0.8062, - "num_input_tokens_seen": 165783795, - "step": 7770 - }, - { - "epoch": 0.9344075031563759, - "flos": 18885477615000.0, - "grad_norm": 3.996459944158844, - "learning_rate": 4.488952210462771e-08, - "loss": 0.7536, - "num_input_tokens_seen": 165803720, - "step": 7771 - }, - { - "epoch": 0.9345277460470149, - "flos": 18722044189680.0, - "grad_norm": 3.139173181613298, - "learning_rate": 4.4725557225127495e-08, - "loss": 0.8427, - "num_input_tokens_seen": 165821780, - "step": 7772 - }, - { - "epoch": 0.9346479889376541, - "flos": 25207110608520.0, - "grad_norm": 4.386932014587227, - "learning_rate": 4.456188895920565e-08, - "loss": 0.7864, - "num_input_tokens_seen": 165843255, - "step": 7773 - }, - { - "epoch": 0.9347682318282932, - "flos": 13943071679280.0, - "grad_norm": 5.909716360963613, - "learning_rate": 4.439851733169031e-08, - 
"loss": 0.8267, - "num_input_tokens_seen": 165860765, - "step": 7774 - }, - { - "epoch": 0.9348884747189322, - "flos": 19239535879920.0, - "grad_norm": 4.2440208562110175, - "learning_rate": 4.4235442367365204e-08, - "loss": 0.6669, - "num_input_tokens_seen": 165880795, - "step": 7775 - }, - { - "epoch": 0.9350087176095714, - "flos": 13380916768200.0, - "grad_norm": 6.658775459414177, - "learning_rate": 4.4072664090968545e-08, - "loss": 0.7634, - "num_input_tokens_seen": 165898900, - "step": 7776 - }, - { - "epoch": 0.9351289605002104, - "flos": 14110019360160.0, - "grad_norm": 4.987028844468081, - "learning_rate": 4.391018252719347e-08, - "loss": 0.8198, - "num_input_tokens_seen": 165918415, - "step": 7777 - }, - { - "epoch": 0.9352492033908495, - "flos": 13725413725200.0, - "grad_norm": 4.261696148470659, - "learning_rate": 4.374799770068849e-08, - "loss": 0.6693, - "num_input_tokens_seen": 165934810, - "step": 7778 - }, - { - "epoch": 0.9353694462814887, - "flos": 21666912370200.0, - "grad_norm": 14.252274181194295, - "learning_rate": 4.358610963605658e-08, - "loss": 0.7375, - "num_input_tokens_seen": 165954980, - "step": 7779 - }, - { - "epoch": 0.9354896891721277, - "flos": 22507724995560.0, - "grad_norm": 5.189676035212975, - "learning_rate": 4.342451835785677e-08, - "loss": 0.6638, - "num_input_tokens_seen": 165975610, - "step": 7780 - }, - { - "epoch": 0.9356099320627668, - "flos": 14217771898560.0, - "grad_norm": 3.2207737458619614, - "learning_rate": 4.3263223890601665e-08, - "loss": 0.7423, - "num_input_tokens_seen": 165994040, - "step": 7781 - }, - { - "epoch": 0.9357301749534058, - "flos": 13946965854360.0, - "grad_norm": 4.036749821464509, - "learning_rate": 4.31022262587597e-08, - "loss": 0.7764, - "num_input_tokens_seen": 166012435, - "step": 7782 - }, - { - "epoch": 0.935850417844045, - "flos": 17242029784680.0, - "grad_norm": 3.591449830722501, - "learning_rate": 4.2941525486754225e-08, - "loss": 0.6452, - "num_input_tokens_seen": 166032475, - "step": 7783 - }, - { - "epoch": 0.935970660734684, - "flos": 13569800310000.0, - "grad_norm": 4.279269686272596, - "learning_rate": 4.278112159896286e-08, - "loss": 0.7776, - "num_input_tokens_seen": 166050035, - "step": 7784 - }, - { - "epoch": 0.9360909036253231, - "flos": 15081660143400.0, - "grad_norm": 3.06934914265971, - "learning_rate": 4.2621014619719896e-08, - "loss": 0.6637, - "num_input_tokens_seen": 166067520, - "step": 7785 - }, - { - "epoch": 0.9362111465159623, - "flos": 45548708341440.0, - "grad_norm": 0.8205345943069108, - "learning_rate": 4.246120457331215e-08, - "loss": 0.6206, - "num_input_tokens_seen": 166129415, - "step": 7786 - }, - { - "epoch": 0.9363313894066013, - "flos": 17700864551760.0, - "grad_norm": 4.167496229878295, - "learning_rate": 4.2301691483983325e-08, - "loss": 0.7084, - "num_input_tokens_seen": 166149255, - "step": 7787 - }, - { - "epoch": 0.9364516322972404, - "flos": 14703544800240.0, - "grad_norm": 4.669165396435168, - "learning_rate": 4.214247537593163e-08, - "loss": 0.7432, - "num_input_tokens_seen": 166168225, - "step": 7788 - }, - { - "epoch": 0.9365718751878795, - "flos": 15135251472960.0, - "grad_norm": 4.552566584142593, - "learning_rate": 4.1983556273309293e-08, - "loss": 0.7984, - "num_input_tokens_seen": 166186695, - "step": 7789 - }, - { - "epoch": 0.9366921180785186, - "flos": 13618484345760.0, - "grad_norm": 7.275591440181572, - "learning_rate": 4.182493420022526e-08, - "loss": 0.6836, - "num_input_tokens_seen": 166202085, - "step": 7790 - }, - { - "epoch": 
0.9368123609691577, - "flos": 18888232031520.0, - "grad_norm": 14.27372239470057, - "learning_rate": 4.166660918074139e-08, - "loss": 0.7706, - "num_input_tokens_seen": 166221710, - "step": 7791 - }, - { - "epoch": 0.9369326038597968, - "flos": 18723880467360.0, - "grad_norm": 2.9125671640927977, - "learning_rate": 4.15085812388758e-08, - "loss": 0.7208, - "num_input_tokens_seen": 166243650, - "step": 7792 - }, - { - "epoch": 0.9370528467504359, - "flos": 16997275396200.0, - "grad_norm": 3.992399146634129, - "learning_rate": 4.135085039860153e-08, - "loss": 0.7815, - "num_input_tokens_seen": 166262770, - "step": 7793 - }, - { - "epoch": 0.9371730896410749, - "flos": 18289957597440.0, - "grad_norm": 25.053373201807826, - "learning_rate": 4.1193416683845906e-08, - "loss": 0.7719, - "num_input_tokens_seen": 166281420, - "step": 7794 - }, - { - "epoch": 0.9372933325317141, - "flos": 11322537552600.0, - "grad_norm": 5.985837434008458, - "learning_rate": 4.103628011849136e-08, - "loss": 0.816, - "num_input_tokens_seen": 166296500, - "step": 7795 - }, - { - "epoch": 0.9374135754223532, - "flos": 15999804576960.0, - "grad_norm": 3.96228124935303, - "learning_rate": 4.0879440726375506e-08, - "loss": 0.7345, - "num_input_tokens_seen": 166314005, - "step": 7796 - }, - { - "epoch": 0.9375338183129922, - "flos": 16560661429680.0, - "grad_norm": 19.59165658673355, - "learning_rate": 4.0722898531291074e-08, - "loss": 0.5389, - "num_input_tokens_seen": 166330965, - "step": 7797 - }, - { - "epoch": 0.9376540612036314, - "flos": 19132099941120.0, - "grad_norm": 3.4563102013309956, - "learning_rate": 4.0566653556985295e-08, - "loss": 0.7483, - "num_input_tokens_seen": 166351230, - "step": 7798 - }, - { - "epoch": 0.9377743040942704, - "flos": 14405262402120.0, - "grad_norm": 5.7613545965431925, - "learning_rate": 4.0410705827159886e-08, - "loss": 0.7745, - "num_input_tokens_seen": 166368245, - "step": 7799 - }, - { - "epoch": 0.9378945469849095, - "flos": 11112098069400.0, - "grad_norm": 6.542395030816911, - "learning_rate": 4.0255055365472356e-08, - "loss": 0.6917, - "num_input_tokens_seen": 166386060, - "step": 7800 - }, - { - "epoch": 0.9380147898755486, - "flos": 15051682652640.0, - "grad_norm": 7.548166863421004, - "learning_rate": 4.009970219553471e-08, - "loss": 0.7288, - "num_input_tokens_seen": 166402730, - "step": 7801 - }, - { - "epoch": 0.9381350327661877, - "flos": 19262168259960.0, - "grad_norm": 5.3143077691670895, - "learning_rate": 3.99446463409141e-08, - "loss": 0.7336, - "num_input_tokens_seen": 166420305, - "step": 7802 - }, - { - "epoch": 0.9382552756568268, - "flos": 17268587999760.0, - "grad_norm": 10.002607005757104, - "learning_rate": 3.978988782513215e-08, - "loss": 0.6648, - "num_input_tokens_seen": 166437520, - "step": 7803 - }, - { - "epoch": 0.9383755185474659, - "flos": 20859718131000.0, - "grad_norm": 4.440772985238144, - "learning_rate": 3.963542667166586e-08, - "loss": 0.7475, - "num_input_tokens_seen": 166457345, - "step": 7804 - }, - { - "epoch": 0.938495761438105, - "flos": 14947317729960.0, - "grad_norm": 3.163323511551247, - "learning_rate": 3.9481262903946486e-08, - "loss": 0.6651, - "num_input_tokens_seen": 166476510, - "step": 7805 - }, - { - "epoch": 0.938616004328744, - "flos": 51106892177760.0, - "grad_norm": 0.7843668751086836, - "learning_rate": 3.932739654536066e-08, - "loss": 0.5636, - "num_input_tokens_seen": 166538930, - "step": 7806 - }, - { - "epoch": 0.9387362472193832, - "flos": 13808349346320.0, - "grad_norm": 4.233862814243511, - "learning_rate": 
3.917382761925014e-08, - "loss": 0.7301, - "num_input_tokens_seen": 166554485, - "step": 7807 - }, - { - "epoch": 0.9388564901100223, - "flos": 19425886624920.0, - "grad_norm": 4.126141939178304, - "learning_rate": 3.9020556148910754e-08, - "loss": 0.7718, - "num_input_tokens_seen": 166573560, - "step": 7808 - }, - { - "epoch": 0.9389767330006613, - "flos": 43438637393880.0, - "grad_norm": 0.7213836599625697, - "learning_rate": 3.8867582157593895e-08, - "loss": 0.5797, - "num_input_tokens_seen": 166627485, - "step": 7809 - }, - { - "epoch": 0.9390969758913005, - "flos": 22831109250600.0, - "grad_norm": 2.772260905242037, - "learning_rate": 3.871490566850544e-08, - "loss": 0.7398, - "num_input_tokens_seen": 166651415, - "step": 7810 - }, - { - "epoch": 0.9392172187819395, - "flos": 16405048014480.0, - "grad_norm": 11.958833017221583, - "learning_rate": 3.856252670480642e-08, - "loss": 0.6923, - "num_input_tokens_seen": 166669795, - "step": 7811 - }, - { - "epoch": 0.9393374616725786, - "flos": 14406940380000.0, - "grad_norm": 5.747425332200389, - "learning_rate": 3.841044528961279e-08, - "loss": 0.7984, - "num_input_tokens_seen": 166687310, - "step": 7812 - }, - { - "epoch": 0.9394577045632178, - "flos": 17700706251960.0, - "grad_norm": 3.6948388987769194, - "learning_rate": 3.825866144599477e-08, - "loss": 0.7766, - "num_input_tokens_seen": 166706085, - "step": 7813 - }, - { - "epoch": 0.9395779474538568, - "flos": 13891854846720.0, - "grad_norm": 4.438476240202133, - "learning_rate": 3.8107175196978145e-08, - "loss": 0.7456, - "num_input_tokens_seen": 166722110, - "step": 7814 - }, - { - "epoch": 0.9396981903444959, - "flos": 10410250211640.0, - "grad_norm": 5.091351277790829, - "learning_rate": 3.7955986565542996e-08, - "loss": 0.731, - "num_input_tokens_seen": 166739910, - "step": 7815 - }, - { - "epoch": 0.9398184332351349, - "flos": 25558256157120.0, - "grad_norm": 5.67337966170859, - "learning_rate": 3.780509557462497e-08, - "loss": 0.6789, - "num_input_tokens_seen": 166759830, - "step": 7816 - }, - { - "epoch": 0.9399386761257741, - "flos": 18590012953320.0, - "grad_norm": 4.138019706609088, - "learning_rate": 3.765450224711375e-08, - "loss": 0.7393, - "num_input_tokens_seen": 166780055, - "step": 7817 - }, - { - "epoch": 0.9400589190164131, - "flos": 20020203564000.0, - "grad_norm": 6.963187559411913, - "learning_rate": 3.750420660585396e-08, - "loss": 0.7788, - "num_input_tokens_seen": 166801715, - "step": 7818 - }, - { - "epoch": 0.9401791619070522, - "flos": 17130003151680.0, - "grad_norm": 2.9008400719737972, - "learning_rate": 3.735420867364603e-08, - "loss": 0.7862, - "num_input_tokens_seen": 166822415, - "step": 7819 - }, - { - "epoch": 0.9402994047976914, - "flos": 26367666593520.0, - "grad_norm": 4.920576620659936, - "learning_rate": 3.7204508473244186e-08, - "loss": 0.6084, - "num_input_tokens_seen": 166845760, - "step": 7820 - }, - { - "epoch": 0.9404196476883304, - "flos": 16269914102040.0, - "grad_norm": 3.5310775579697977, - "learning_rate": 3.7055106027357395e-08, - "loss": 0.6843, - "num_input_tokens_seen": 166865345, - "step": 7821 - }, - { - "epoch": 0.9405398905789695, - "flos": 13812021901680.0, - "grad_norm": 3.862485057221677, - "learning_rate": 3.690600135865063e-08, - "loss": 0.705, - "num_input_tokens_seen": 166881990, - "step": 7822 - }, - { - "epoch": 0.9406601334696086, - "flos": 51823869005040.0, - "grad_norm": 0.7703328097066691, - "learning_rate": 3.675719448974246e-08, - "loss": 0.5925, - "num_input_tokens_seen": 166946800, - "step": 7823 - }, - { - 
"epoch": 0.9407803763602477, - "flos": 16216987631640.0, - "grad_norm": 5.54964788196238, - "learning_rate": 3.6608685443207054e-08, - "loss": 0.5837, - "num_input_tokens_seen": 166965670, - "step": 7824 - }, - { - "epoch": 0.9409006192508867, - "flos": 13785178746960.0, - "grad_norm": 8.510297035923507, - "learning_rate": 3.646047424157306e-08, - "loss": 0.6542, - "num_input_tokens_seen": 166982365, - "step": 7825 - }, - { - "epoch": 0.9410208621415259, - "flos": 17107307451720.0, - "grad_norm": 5.196046889177977, - "learning_rate": 3.631256090732382e-08, - "loss": 0.6548, - "num_input_tokens_seen": 167002545, - "step": 7826 - }, - { - "epoch": 0.941141105032165, - "flos": 16643185471320.0, - "grad_norm": 7.099330236517309, - "learning_rate": 3.6164945462897833e-08, - "loss": 0.814, - "num_input_tokens_seen": 167021555, - "step": 7827 - }, - { - "epoch": 0.941261347922804, - "flos": 15136264591680.0, - "grad_norm": 3.882172007857997, - "learning_rate": 3.6017627930687856e-08, - "loss": 0.743, - "num_input_tokens_seen": 167041100, - "step": 7828 - }, - { - "epoch": 0.9413815908134432, - "flos": 14185989790080.0, - "grad_norm": 9.388236099011424, - "learning_rate": 3.587060833304267e-08, - "loss": 0.7528, - "num_input_tokens_seen": 167059010, - "step": 7829 - }, - { - "epoch": 0.9415018337040822, - "flos": 12759471734760.0, - "grad_norm": 12.042343455020367, - "learning_rate": 3.5723886692264225e-08, - "loss": 0.6244, - "num_input_tokens_seen": 167076270, - "step": 7830 - }, - { - "epoch": 0.9416220765947213, - "flos": 23370568461720.0, - "grad_norm": 8.342026207410115, - "learning_rate": 3.557746303061071e-08, - "loss": 0.6041, - "num_input_tokens_seen": 167097745, - "step": 7831 - }, - { - "epoch": 0.9417423194853605, - "flos": 17213350352280.0, - "grad_norm": 7.714743373555871, - "learning_rate": 3.543133737029391e-08, - "loss": 0.7145, - "num_input_tokens_seen": 167117975, - "step": 7832 - }, - { - "epoch": 0.9418625623759995, - "flos": 17511949350000.0, - "grad_norm": 3.580729906486666, - "learning_rate": 3.5285509733481214e-08, - "loss": 0.666, - "num_input_tokens_seen": 167137420, - "step": 7833 - }, - { - "epoch": 0.9419828052666386, - "flos": 13191273387360.0, - "grad_norm": 3.4818524864891818, - "learning_rate": 3.513998014229469e-08, - "loss": 0.7459, - "num_input_tokens_seen": 167156090, - "step": 7834 - }, - { - "epoch": 0.9421030481572777, - "flos": 12921448801920.0, - "grad_norm": 5.38931656590011, - "learning_rate": 3.499474861881069e-08, - "loss": 0.8483, - "num_input_tokens_seen": 167173035, - "step": 7835 - }, - { - "epoch": 0.9422232910479168, - "flos": 14758244228400.0, - "grad_norm": 3.9255491273493077, - "learning_rate": 3.4849815185061136e-08, - "loss": 0.6572, - "num_input_tokens_seen": 167192645, - "step": 7836 - }, - { - "epoch": 0.9423435339385559, - "flos": 13461952791720.0, - "grad_norm": 5.191018255817115, - "learning_rate": 3.470517986303223e-08, - "loss": 0.7552, - "num_input_tokens_seen": 167211350, - "step": 7837 - }, - { - "epoch": 0.942463776829195, - "flos": 14673693949320.0, - "grad_norm": 9.405692353304968, - "learning_rate": 3.4560842674664856e-08, - "loss": 0.7731, - "num_input_tokens_seen": 167229585, - "step": 7838 - }, - { - "epoch": 0.9425840197198341, - "flos": 16565252123880.0, - "grad_norm": 5.461510274861879, - "learning_rate": 3.441680364185506e-08, - "loss": 0.7413, - "num_input_tokens_seen": 167249175, - "step": 7839 - }, - { - "epoch": 0.9427042626104731, - "flos": 14568505867680.0, - "grad_norm": 7.4773158029758005, - "learning_rate": 
3.427306278645314e-08, - "loss": 0.7364, - "num_input_tokens_seen": 167267350, - "step": 7840 - }, - { - "epoch": 0.9428245055011123, - "flos": 16728875508960.0, - "grad_norm": 4.207945827860663, - "learning_rate": 3.4129620130264767e-08, - "loss": 0.7029, - "num_input_tokens_seen": 167285430, - "step": 7841 - }, - { - "epoch": 0.9429447483917514, - "flos": 15317739702840.0, - "grad_norm": 4.969117157044556, - "learning_rate": 3.398647569505009e-08, - "loss": 0.7748, - "num_input_tokens_seen": 167302575, - "step": 7842 - }, - { - "epoch": 0.9430649912823904, - "flos": 13758747171720.0, - "grad_norm": 6.077787649800251, - "learning_rate": 3.384362950252373e-08, - "loss": 0.7224, - "num_input_tokens_seen": 167319265, - "step": 7843 - }, - { - "epoch": 0.9431852341730296, - "flos": 23905943538000.0, - "grad_norm": 4.888269021493115, - "learning_rate": 3.3701081574355473e-08, - "loss": 0.5521, - "num_input_tokens_seen": 167340945, - "step": 7844 - }, - { - "epoch": 0.9433054770636686, - "flos": 49333439348520.0, - "grad_norm": 0.6557257692846429, - "learning_rate": 3.3558831932169796e-08, - "loss": 0.5276, - "num_input_tokens_seen": 167409335, - "step": 7845 - }, - { - "epoch": 0.9434257199543077, - "flos": 19158721476120.0, - "grad_norm": 5.04573088630398, - "learning_rate": 3.341688059754588e-08, - "loss": 0.8694, - "num_input_tokens_seen": 167424710, - "step": 7846 - }, - { - "epoch": 0.9435459628449467, - "flos": 18316547472480.0, - "grad_norm": 4.28587859610794, - "learning_rate": 3.327522759201762e-08, - "loss": 0.7554, - "num_input_tokens_seen": 167444300, - "step": 7847 - }, - { - "epoch": 0.9436662057355859, - "flos": 16215372973680.0, - "grad_norm": 11.418491264130452, - "learning_rate": 3.313387293707359e-08, - "loss": 0.65, - "num_input_tokens_seen": 167462725, - "step": 7848 - }, - { - "epoch": 0.943786448626225, - "flos": 14701930142280.0, - "grad_norm": 5.521225460689733, - "learning_rate": 3.29928166541571e-08, - "loss": 0.6641, - "num_input_tokens_seen": 167481400, - "step": 7849 - }, - { - "epoch": 0.943906691516864, - "flos": 16161844964040.0, - "grad_norm": 4.5716022986371305, - "learning_rate": 3.2852058764666346e-08, - "loss": 0.7786, - "num_input_tokens_seen": 167500220, - "step": 7850 - }, - { - "epoch": 0.9440269344075032, - "flos": 25936403160240.0, - "grad_norm": 4.326929912072676, - "learning_rate": 3.2711599289954264e-08, - "loss": 0.6747, - "num_input_tokens_seen": 167523975, - "step": 7851 - }, - { - "epoch": 0.9441471772981422, - "flos": 14049716119080.0, - "grad_norm": 3.600762466643214, - "learning_rate": 3.257143825132847e-08, - "loss": 0.7509, - "num_input_tokens_seen": 167541865, - "step": 7852 - }, - { - "epoch": 0.9442674201887813, - "flos": 18862401995520.0, - "grad_norm": 3.1980820570599757, - "learning_rate": 3.243157567005106e-08, - "loss": 0.7375, - "num_input_tokens_seen": 167559765, - "step": 7853 - }, - { - "epoch": 0.9443876630794205, - "flos": 11301456510600.0, - "grad_norm": 4.038576550645032, - "learning_rate": 3.2292011567339296e-08, - "loss": 0.6171, - "num_input_tokens_seen": 167577290, - "step": 7854 - }, - { - "epoch": 0.9445079059700595, - "flos": 9729768335520.0, - "grad_norm": 4.678822942231931, - "learning_rate": 3.21527459643649e-08, - "loss": 0.5291, - "num_input_tokens_seen": 167593895, - "step": 7855 - }, - { - "epoch": 0.9446281488606986, - "flos": 17322527588880.0, - "grad_norm": 3.8406097486300235, - "learning_rate": 3.2013778882254536e-08, - "loss": 0.7257, - "num_input_tokens_seen": 167612410, - "step": 7856 - }, - { - 
"epoch": 0.9447483917513377, - "flos": 18727363062960.0, - "grad_norm": 3.7119226693944545, - "learning_rate": 3.1875110342088676e-08, - "loss": 0.7483, - "num_input_tokens_seen": 167633580, - "step": 7857 - }, - { - "epoch": 0.9448686346419768, - "flos": 17971069056720.0, - "grad_norm": 3.8061559455663683, - "learning_rate": 3.1736740364904035e-08, - "loss": 0.6366, - "num_input_tokens_seen": 167653830, - "step": 7858 - }, - { - "epoch": 0.9449888775326158, - "flos": 10707962730480.0, - "grad_norm": 4.621898951800471, - "learning_rate": 3.159866897169094e-08, - "loss": 0.7387, - "num_input_tokens_seen": 167671750, - "step": 7859 - }, - { - "epoch": 0.945109120423255, - "flos": 11245269064320.0, - "grad_norm": 5.118445880877674, - "learning_rate": 3.146089618339487e-08, - "loss": 0.7432, - "num_input_tokens_seen": 167688325, - "step": 7860 - }, - { - "epoch": 0.9452293633138941, - "flos": 18646548659160.0, - "grad_norm": 5.0113897297371945, - "learning_rate": 3.132342202091554e-08, - "loss": 0.6656, - "num_input_tokens_seen": 167708270, - "step": 7861 - }, - { - "epoch": 0.9453496062045331, - "flos": 15513968355360.0, - "grad_norm": 6.640955586899955, - "learning_rate": 3.1186246505107595e-08, - "loss": 0.6558, - "num_input_tokens_seen": 167727130, - "step": 7862 - }, - { - "epoch": 0.9454698490951723, - "flos": 14622540436680.0, - "grad_norm": 4.755471140467815, - "learning_rate": 3.104936965678084e-08, - "loss": 0.822, - "num_input_tokens_seen": 167745180, - "step": 7863 - }, - { - "epoch": 0.9455900919858113, - "flos": 15406184157000.0, - "grad_norm": 3.611731350443762, - "learning_rate": 3.091279149669956e-08, - "loss": 0.7865, - "num_input_tokens_seen": 167763690, - "step": 7864 - }, - { - "epoch": 0.9457103348764504, - "flos": 15162791146800.0, - "grad_norm": 4.463343410770206, - "learning_rate": 3.0776512045581624e-08, - "loss": 0.7212, - "num_input_tokens_seen": 167782200, - "step": 7865 - }, - { - "epoch": 0.9458305777670896, - "flos": 15670784849040.0, - "grad_norm": 3.3485524993102924, - "learning_rate": 3.0640531324101384e-08, - "loss": 0.7549, - "num_input_tokens_seen": 167799685, - "step": 7866 - }, - { - "epoch": 0.9459508206577286, - "flos": 14622793716360.0, - "grad_norm": 3.746374157543278, - "learning_rate": 3.0504849352886554e-08, - "loss": 0.7382, - "num_input_tokens_seen": 167817550, - "step": 7867 - }, - { - "epoch": 0.9460710635483677, - "flos": 8817101075040.0, - "grad_norm": 6.930241266980154, - "learning_rate": 3.036946615252023e-08, - "loss": 0.6994, - "num_input_tokens_seen": 167832800, - "step": 7868 - }, - { - "epoch": 0.9461913064390068, - "flos": 25179729234480.0, - "grad_norm": 30.975658088940882, - "learning_rate": 3.0234381743539984e-08, - "loss": 0.6559, - "num_input_tokens_seen": 167850135, - "step": 7869 - }, - { - "epoch": 0.9463115493296459, - "flos": 14217645258720.0, - "grad_norm": 4.708349627351277, - "learning_rate": 3.0099596146437863e-08, - "loss": 0.79, - "num_input_tokens_seen": 167866960, - "step": 7870 - }, - { - "epoch": 0.946431792220285, - "flos": 52044123075840.0, - "grad_norm": 0.8402369089057344, - "learning_rate": 2.996510938166086e-08, - "loss": 0.6201, - "num_input_tokens_seen": 167929655, - "step": 7871 - }, - { - "epoch": 0.9465520351109241, - "flos": 13835160841080.0, - "grad_norm": 5.0976727130960215, - "learning_rate": 2.983092146960997e-08, - "loss": 0.7267, - "num_input_tokens_seen": 167946720, - "step": 7872 - }, - { - "epoch": 0.9466722780015632, - "flos": 13974853787760.0, - "grad_norm": 11.108933409164798, - 
"learning_rate": 2.9697032430642256e-08, - "loss": 0.7748, - "num_input_tokens_seen": 167964655, - "step": 7873 - }, - { - "epoch": 0.9467925208922022, - "flos": 12570334913280.0, - "grad_norm": 4.580813298872371, - "learning_rate": 2.9563442285067906e-08, - "loss": 0.7055, - "num_input_tokens_seen": 167981420, - "step": 7874 - }, - { - "epoch": 0.9469127637828414, - "flos": 21400190460840.0, - "grad_norm": 4.330928183926186, - "learning_rate": 2.943015105315294e-08, - "loss": 0.785, - "num_input_tokens_seen": 168001335, - "step": 7875 - }, - { - "epoch": 0.9470330066734804, - "flos": 19077337193040.0, - "grad_norm": 9.020380045460989, - "learning_rate": 2.929715875511718e-08, - "loss": 0.6597, - "num_input_tokens_seen": 168020090, - "step": 7876 - }, - { - "epoch": 0.9471532495641195, - "flos": 17161025421120.0, - "grad_norm": 6.96324645172501, - "learning_rate": 2.9164465411135375e-08, - "loss": 0.6856, - "num_input_tokens_seen": 168039580, - "step": 7877 - }, - { - "epoch": 0.9472734924547586, - "flos": 11518038026040.0, - "grad_norm": 6.211929645107991, - "learning_rate": 2.9032071041337426e-08, - "loss": 0.7886, - "num_input_tokens_seen": 168057535, - "step": 7878 - }, - { - "epoch": 0.9473937353453977, - "flos": 8088473382480.0, - "grad_norm": 4.504213268075419, - "learning_rate": 2.889997566580704e-08, - "loss": 0.7141, - "num_input_tokens_seen": 168075410, - "step": 7879 - }, - { - "epoch": 0.9475139782360368, - "flos": 18888738590880.0, - "grad_norm": 5.099725506445309, - "learning_rate": 2.8768179304583086e-08, - "loss": 0.6924, - "num_input_tokens_seen": 168097185, - "step": 7880 - }, - { - "epoch": 0.9476342211266758, - "flos": 16701462474960.0, - "grad_norm": 5.39719828226606, - "learning_rate": 2.8636681977659117e-08, - "loss": 0.7252, - "num_input_tokens_seen": 168116555, - "step": 7881 - }, - { - "epoch": 0.947754464017315, - "flos": 14757800988960.0, - "grad_norm": 11.999858219580725, - "learning_rate": 2.850548370498318e-08, - "loss": 0.7661, - "num_input_tokens_seen": 168134115, - "step": 7882 - }, - { - "epoch": 0.9478747069079541, - "flos": 18106107989280.0, - "grad_norm": 3.5606899013031432, - "learning_rate": 2.8374584506457798e-08, - "loss": 0.694, - "num_input_tokens_seen": 168155110, - "step": 7883 - }, - { - "epoch": 0.9479949497985931, - "flos": 15622037493360.0, - "grad_norm": 4.360834786528881, - "learning_rate": 2.824398440193998e-08, - "loss": 0.6501, - "num_input_tokens_seen": 168173630, - "step": 7884 - }, - { - "epoch": 0.9481151926892323, - "flos": 13244104877880.0, - "grad_norm": 4.917998613019936, - "learning_rate": 2.811368341124232e-08, - "loss": 0.6879, - "num_input_tokens_seen": 168192420, - "step": 7885 - }, - { - "epoch": 0.9482354355798713, - "flos": 16189479617760.0, - "grad_norm": 9.502457150365439, - "learning_rate": 2.7983681554131222e-08, - "loss": 0.6639, - "num_input_tokens_seen": 168212400, - "step": 7886 - }, - { - "epoch": 0.9483556784705104, - "flos": 13921199138280.0, - "grad_norm": 6.386309894692311, - "learning_rate": 2.7853978850327365e-08, - "loss": 0.6629, - "num_input_tokens_seen": 168231290, - "step": 7887 - }, - { - "epoch": 0.9484759213611496, - "flos": 18890131629120.0, - "grad_norm": 3.6569625290621786, - "learning_rate": 2.7724575319507225e-08, - "loss": 0.8577, - "num_input_tokens_seen": 168250720, - "step": 7888 - }, - { - "epoch": 0.9485961642517886, - "flos": 15108661597920.0, - "grad_norm": 5.658266161303163, - "learning_rate": 2.759547098130044e-08, - "loss": 0.7684, - "num_input_tokens_seen": 168269170, - "step": 
7889 - }, - { - "epoch": 0.9487164071424277, - "flos": 16593424996920.0, - "grad_norm": 3.911045396377968, - "learning_rate": 2.746666585529267e-08, - "loss": 0.7516, - "num_input_tokens_seen": 168289165, - "step": 7890 - }, - { - "epoch": 0.9488366500330668, - "flos": 28473083526960.0, - "grad_norm": 5.159108057631414, - "learning_rate": 2.73381599610234e-08, - "loss": 0.7213, - "num_input_tokens_seen": 168309285, - "step": 7891 - }, - { - "epoch": 0.9489568929237059, - "flos": 20454158093880.0, - "grad_norm": 3.341900750869497, - "learning_rate": 2.7209953317987033e-08, - "loss": 0.6998, - "num_input_tokens_seen": 168330045, - "step": 7892 - }, - { - "epoch": 0.9490771358143449, - "flos": 24664643701200.0, - "grad_norm": 16.003646435070664, - "learning_rate": 2.7082045945631793e-08, - "loss": 0.7566, - "num_input_tokens_seen": 168351980, - "step": 7893 - }, - { - "epoch": 0.9491973787049841, - "flos": 10761585720000.0, - "grad_norm": 7.626211101468419, - "learning_rate": 2.6954437863361712e-08, - "loss": 0.675, - "num_input_tokens_seen": 168369615, - "step": 7894 - }, - { - "epoch": 0.9493176215956232, - "flos": 18560162102400.0, - "grad_norm": 4.283699170432452, - "learning_rate": 2.6827129090534862e-08, - "loss": 0.6968, - "num_input_tokens_seen": 168389635, - "step": 7895 - }, - { - "epoch": 0.9494378644862622, - "flos": 15376364966040.0, - "grad_norm": 3.4117576098996976, - "learning_rate": 2.670011964646335e-08, - "loss": 0.7692, - "num_input_tokens_seen": 168408035, - "step": 7896 - }, - { - "epoch": 0.9495581073769014, - "flos": 11058601719720.0, - "grad_norm": 7.343347936134635, - "learning_rate": 2.657340955041487e-08, - "loss": 0.6617, - "num_input_tokens_seen": 168426530, - "step": 7897 - }, - { - "epoch": 0.9496783502675404, - "flos": 20990609608800.0, - "grad_norm": 6.410131850768616, - "learning_rate": 2.6446998821611167e-08, - "loss": 0.6962, - "num_input_tokens_seen": 168446445, - "step": 7898 - }, - { - "epoch": 0.9497985931581795, - "flos": 10815683608920.0, - "grad_norm": 6.653148232881288, - "learning_rate": 2.6320887479228228e-08, - "loss": 0.6939, - "num_input_tokens_seen": 168462765, - "step": 7899 - }, - { - "epoch": 0.9499188360488187, - "flos": 19937267942880.0, - "grad_norm": 7.293291212491455, - "learning_rate": 2.619507554239786e-08, - "loss": 0.7006, - "num_input_tokens_seen": 168481045, - "step": 7900 - }, - { - "epoch": 0.9500390789394577, - "flos": 17808997009680.0, - "grad_norm": 4.890989052382193, - "learning_rate": 2.606956303020502e-08, - "loss": 0.6982, - "num_input_tokens_seen": 168501570, - "step": 7901 - }, - { - "epoch": 0.9501593218300968, - "flos": 10436491827120.0, - "grad_norm": 3.4079448438130098, - "learning_rate": 2.5944349961690036e-08, - "loss": 0.8234, - "num_input_tokens_seen": 168518310, - "step": 7902 - }, - { - "epoch": 0.9502795647207359, - "flos": 28474539885120.0, - "grad_norm": 12.677990586544068, - "learning_rate": 2.581943635584749e-08, - "loss": 0.7118, - "num_input_tokens_seen": 168540860, - "step": 7903 - }, - { - "epoch": 0.950399807611375, - "flos": 30013844412480.0, - "grad_norm": 4.615088280921421, - "learning_rate": 2.569482223162689e-08, - "loss": 0.6426, - "num_input_tokens_seen": 168564555, - "step": 7904 - }, - { - "epoch": 0.950520050502014, - "flos": 17160582181680.0, - "grad_norm": 3.3747349387740693, - "learning_rate": 2.5570507607932e-08, - "loss": 0.7171, - "num_input_tokens_seen": 168584190, - "step": 7905 - }, - { - "epoch": 0.9506402933926532, - "flos": 12974058672720.0, - "grad_norm": 6.833409409758569, - 
"learning_rate": 2.54464925036213e-08, - "loss": 0.5929, - "num_input_tokens_seen": 168601200, - "step": 7906 - }, - { - "epoch": 0.9507605362832923, - "flos": 23911135771440.0, - "grad_norm": 3.5895229524185996, - "learning_rate": 2.532277693750773e-08, - "loss": 0.5854, - "num_input_tokens_seen": 168621845, - "step": 7907 - }, - { - "epoch": 0.9508807791739313, - "flos": 14318907505320.0, - "grad_norm": 5.284463841319189, - "learning_rate": 2.5199360928358948e-08, - "loss": 0.7392, - "num_input_tokens_seen": 168638800, - "step": 7908 - }, - { - "epoch": 0.9510010220645704, - "flos": 15703959995760.0, - "grad_norm": 3.4821521186006787, - "learning_rate": 2.507624449489665e-08, - "loss": 0.8578, - "num_input_tokens_seen": 168657150, - "step": 7909 - }, - { - "epoch": 0.9511212649552095, - "flos": 13785621986400.0, - "grad_norm": 5.452337082392667, - "learning_rate": 2.495342765579811e-08, - "loss": 0.6384, - "num_input_tokens_seen": 168675530, - "step": 7910 - }, - { - "epoch": 0.9512415078458486, - "flos": 15213881339520.0, - "grad_norm": 4.852905677216784, - "learning_rate": 2.4830910429693984e-08, - "loss": 0.6935, - "num_input_tokens_seen": 168695210, - "step": 7911 - }, - { - "epoch": 0.9513617507364877, - "flos": 13407918222720.0, - "grad_norm": 3.339794064871658, - "learning_rate": 2.470869283517052e-08, - "loss": 0.7792, - "num_input_tokens_seen": 168712965, - "step": 7912 - }, - { - "epoch": 0.9514819936271268, - "flos": 18322436225040.0, - "grad_norm": 3.379261999200524, - "learning_rate": 2.458677489076777e-08, - "loss": 0.7582, - "num_input_tokens_seen": 168733695, - "step": 7913 - }, - { - "epoch": 0.9516022365177659, - "flos": 13624531398120.0, - "grad_norm": 3.557733526010914, - "learning_rate": 2.446515661498072e-08, - "loss": 0.8185, - "num_input_tokens_seen": 168752745, - "step": 7914 - }, - { - "epoch": 0.9517224794084049, - "flos": 18589886313480.0, - "grad_norm": 4.703505592535081, - "learning_rate": 2.434383802625861e-08, - "loss": 0.7214, - "num_input_tokens_seen": 168771420, - "step": 7915 - }, - { - "epoch": 0.9518427222990441, - "flos": 15703136836800.0, - "grad_norm": 5.328446454058587, - "learning_rate": 2.4222819143005168e-08, - "loss": 0.7298, - "num_input_tokens_seen": 168790735, - "step": 7916 - }, - { - "epoch": 0.9519629651896832, - "flos": 15379594281960.0, - "grad_norm": 4.258409594744419, - "learning_rate": 2.4102099983579706e-08, - "loss": 0.801, - "num_input_tokens_seen": 168809605, - "step": 7917 - }, - { - "epoch": 0.9520832080803222, - "flos": 15864607344600.0, - "grad_norm": 3.674275623995019, - "learning_rate": 2.3981680566294236e-08, - "loss": 0.7498, - "num_input_tokens_seen": 168828925, - "step": 7918 - }, - { - "epoch": 0.9522034509709614, - "flos": 16941721149120.0, - "grad_norm": 3.416822771057334, - "learning_rate": 2.3861560909416822e-08, - "loss": 0.7308, - "num_input_tokens_seen": 168848195, - "step": 7919 - }, - { - "epoch": 0.9523236938616004, - "flos": 18077333577000.0, - "grad_norm": 3.4088322362153063, - "learning_rate": 2.3741741031169325e-08, - "loss": 0.8176, - "num_input_tokens_seen": 168867485, - "step": 7920 - }, - { - "epoch": 0.9524439367522395, - "flos": 16591873658880.0, - "grad_norm": 3.6412127761169124, - "learning_rate": 2.3622220949728544e-08, - "loss": 0.683, - "num_input_tokens_seen": 168886090, - "step": 7921 - }, - { - "epoch": 0.9525641796428787, - "flos": 25016169169320.0, - "grad_norm": 5.803161995666605, - "learning_rate": 2.3503000683225526e-08, - "loss": 0.5929, - "num_input_tokens_seen": 168903525, - 
"step": 7922 - }, - { - "epoch": 0.9526844225335177, - "flos": 12192282890040.0, - "grad_norm": 4.374949645706316, - "learning_rate": 2.3384080249745585e-08, - "loss": 0.8188, - "num_input_tokens_seen": 168921135, - "step": 7923 - }, - { - "epoch": 0.9528046654241568, - "flos": 27150613794720.0, - "grad_norm": 10.312196299386947, - "learning_rate": 2.3265459667329178e-08, - "loss": 0.8114, - "num_input_tokens_seen": 168940345, - "step": 7924 - }, - { - "epoch": 0.9529249083147959, - "flos": 13322418144840.0, - "grad_norm": 5.038292167984178, - "learning_rate": 2.31471389539708e-08, - "loss": 0.8342, - "num_input_tokens_seen": 168957190, - "step": 7925 - }, - { - "epoch": 0.953045151205435, - "flos": 21022360057320.0, - "grad_norm": 7.062490007511598, - "learning_rate": 2.3029118127619872e-08, - "loss": 0.7047, - "num_input_tokens_seen": 168976625, - "step": 7926 - }, - { - "epoch": 0.953165394096074, - "flos": 15972676482600.0, - "grad_norm": 7.800633810652849, - "learning_rate": 2.2911397206179628e-08, - "loss": 0.8529, - "num_input_tokens_seen": 168993095, - "step": 7927 - }, - { - "epoch": 0.9532856369867132, - "flos": 14596108861440.0, - "grad_norm": 8.828010490822619, - "learning_rate": 2.279397620750845e-08, - "loss": 0.6079, - "num_input_tokens_seen": 169011860, - "step": 7928 - }, - { - "epoch": 0.9534058798773523, - "flos": 10950944161200.0, - "grad_norm": 3.3965672049093656, - "learning_rate": 2.2676855149419195e-08, - "loss": 0.7611, - "num_input_tokens_seen": 169028750, - "step": 7929 - }, - { - "epoch": 0.9535261227679913, - "flos": 12840381118440.0, - "grad_norm": 14.335647455342475, - "learning_rate": 2.2560034049678988e-08, - "loss": 0.7234, - "num_input_tokens_seen": 169042820, - "step": 7930 - }, - { - "epoch": 0.9536463656586305, - "flos": 16939663251720.0, - "grad_norm": 3.2681559560815767, - "learning_rate": 2.2443512926008988e-08, - "loss": 0.737, - "num_input_tokens_seen": 169061870, - "step": 7931 - }, - { - "epoch": 0.9537666085492695, - "flos": 13596991724280.0, - "grad_norm": 5.5092492792993095, - "learning_rate": 2.2327291796085946e-08, - "loss": 0.6858, - "num_input_tokens_seen": 169079950, - "step": 7932 - }, - { - "epoch": 0.9538868514399086, - "flos": 13866531370080.0, - "grad_norm": 4.852480594177592, - "learning_rate": 2.2211370677540197e-08, - "loss": 0.7554, - "num_input_tokens_seen": 169096195, - "step": 7933 - }, - { - "epoch": 0.9540070943305478, - "flos": 12246729038520.0, - "grad_norm": 4.6209383476485115, - "learning_rate": 2.2095749587957012e-08, - "loss": 0.7556, - "num_input_tokens_seen": 169113820, - "step": 7934 - }, - { - "epoch": 0.9541273372211868, - "flos": 14730609574680.0, - "grad_norm": 4.029821808238219, - "learning_rate": 2.1980428544876138e-08, - "loss": 0.6704, - "num_input_tokens_seen": 169132180, - "step": 7935 - }, - { - "epoch": 0.9542475801118259, - "flos": 19591979486880.0, - "grad_norm": 3.918200348144628, - "learning_rate": 2.1865407565791584e-08, - "loss": 0.7219, - "num_input_tokens_seen": 169153470, - "step": 7936 - }, - { - "epoch": 0.954367823002465, - "flos": 17079546158160.0, - "grad_norm": 11.208941664134954, - "learning_rate": 2.175068666815183e-08, - "loss": 0.7566, - "num_input_tokens_seen": 169174030, - "step": 7937 - }, - { - "epoch": 0.9544880658931041, - "flos": 10842115184160.0, - "grad_norm": 5.153453831680621, - "learning_rate": 2.163626586935985e-08, - "loss": 0.7744, - "num_input_tokens_seen": 169190290, - "step": 7938 - }, - { - "epoch": 0.9546083087837431, - "flos": 21346060911960.0, - "grad_norm": 
5.773694849537527, - "learning_rate": 2.1522145186773755e-08, - "loss": 0.6053, - "num_input_tokens_seen": 169208930, - "step": 7939 - }, - { - "epoch": 0.9547285516743822, - "flos": 15459933786360.0, - "grad_norm": 3.2332508142172824, - "learning_rate": 2.140832463770481e-08, - "loss": 0.8388, - "num_input_tokens_seen": 169227845, - "step": 7940 - }, - { - "epoch": 0.9548487945650214, - "flos": 20157648653520.0, - "grad_norm": 4.423354693210048, - "learning_rate": 2.129480423941987e-08, - "loss": 0.7418, - "num_input_tokens_seen": 169244235, - "step": 7941 - }, - { - "epoch": 0.9549690374556604, - "flos": 16296820576680.0, - "grad_norm": 2.9223043360936014, - "learning_rate": 2.1181584009140052e-08, - "loss": 0.7928, - "num_input_tokens_seen": 169263495, - "step": 7942 - }, - { - "epoch": 0.9550892803462995, - "flos": 12833764186800.0, - "grad_norm": 6.38899609388255, - "learning_rate": 2.10686639640405e-08, - "loss": 0.8176, - "num_input_tokens_seen": 169277305, - "step": 7943 - }, - { - "epoch": 0.9552095232369386, - "flos": 17836156764000.0, - "grad_norm": 2.9550376228738777, - "learning_rate": 2.0956044121251294e-08, - "loss": 0.8018, - "num_input_tokens_seen": 169295810, - "step": 7944 - }, - { - "epoch": 0.9553297661275777, - "flos": 16647269606160.0, - "grad_norm": 5.721896280596009, - "learning_rate": 2.084372449785654e-08, - "loss": 0.7953, - "num_input_tokens_seen": 169315365, - "step": 7945 - }, - { - "epoch": 0.9554500090182168, - "flos": 11220452147040.0, - "grad_norm": 5.069606173416239, - "learning_rate": 2.0731705110895282e-08, - "loss": 0.6744, - "num_input_tokens_seen": 169332575, - "step": 7946 - }, - { - "epoch": 0.9555702519088559, - "flos": 17214711730560.0, - "grad_norm": 3.728132193239472, - "learning_rate": 2.0619985977360587e-08, - "loss": 0.8458, - "num_input_tokens_seen": 169350615, - "step": 7947 - }, - { - "epoch": 0.955690494799495, - "flos": 16806017357400.0, - "grad_norm": 3.9216218539908003, - "learning_rate": 2.0508567114200237e-08, - "loss": 0.768, - "num_input_tokens_seen": 169370250, - "step": 7948 - }, - { - "epoch": 0.955810737690134, - "flos": 19078033712160.0, - "grad_norm": 4.010607958438354, - "learning_rate": 2.0397448538316485e-08, - "loss": 0.7677, - "num_input_tokens_seen": 169391010, - "step": 7949 - }, - { - "epoch": 0.9559309805807732, - "flos": 15243478910760.0, - "grad_norm": 4.28567788244863, - "learning_rate": 2.028663026656563e-08, - "loss": 0.6474, - "num_input_tokens_seen": 169409585, - "step": 7950 - }, - { - "epoch": 0.9560512234714122, - "flos": 15782241602760.0, - "grad_norm": 5.821147761480233, - "learning_rate": 2.0176112315758885e-08, - "loss": 0.7089, - "num_input_tokens_seen": 169427095, - "step": 7951 - }, - { - "epoch": 0.9561714663620513, - "flos": 12726454887840.0, - "grad_norm": 6.236664239396004, - "learning_rate": 2.0065894702661957e-08, - "loss": 0.6807, - "num_input_tokens_seen": 169443490, - "step": 7952 - }, - { - "epoch": 0.9562917092526905, - "flos": 19127002687560.0, - "grad_norm": 21.554870140128717, - "learning_rate": 1.9955977443994577e-08, - "loss": 0.7658, - "num_input_tokens_seen": 169463200, - "step": 7953 - }, - { - "epoch": 0.9564119521433295, - "flos": 17646260103480.0, - "grad_norm": 9.144824506057075, - "learning_rate": 1.9846360556430965e-08, - "loss": 0.6141, - "num_input_tokens_seen": 169481220, - "step": 7954 - }, - { - "epoch": 0.9565321950339686, - "flos": 23501048360040.0, - "grad_norm": 4.442909274936568, - "learning_rate": 1.973704405660004e-08, - "loss": 0.5879, - 
"num_input_tokens_seen": 169502055, - "step": 7955 - }, - { - "epoch": 0.9566524379246077, - "flos": 17270614237200.0, - "grad_norm": 2.2780583668868353, - "learning_rate": 1.9628027961085203e-08, - "loss": 0.7581, - "num_input_tokens_seen": 169525005, - "step": 7956 - }, - { - "epoch": 0.9567726808152468, - "flos": 27985284387840.0, - "grad_norm": 3.4670086863111593, - "learning_rate": 1.9519312286423894e-08, - "loss": 0.8208, - "num_input_tokens_seen": 169547920, - "step": 7957 - }, - { - "epoch": 0.9568929237058859, - "flos": 16644800129280.0, - "grad_norm": 3.5677530476165042, - "learning_rate": 1.9410897049108255e-08, - "loss": 0.7595, - "num_input_tokens_seen": 169566920, - "step": 7958 - }, - { - "epoch": 0.957013166596525, - "flos": 17457503201520.0, - "grad_norm": 3.537503267428234, - "learning_rate": 1.9302782265584905e-08, - "loss": 0.8874, - "num_input_tokens_seen": 169587305, - "step": 7959 - }, - { - "epoch": 0.9571334094871641, - "flos": 12867382572960.0, - "grad_norm": 5.489522490866976, - "learning_rate": 1.9194967952254282e-08, - "loss": 0.8416, - "num_input_tokens_seen": 169605600, - "step": 7960 - }, - { - "epoch": 0.9572536523778031, - "flos": 11186612141160.0, - "grad_norm": 5.901403628698715, - "learning_rate": 1.9087454125472635e-08, - "loss": 0.7795, - "num_input_tokens_seen": 169619795, - "step": 7961 - }, - { - "epoch": 0.9573738952684423, - "flos": 18292047154800.0, - "grad_norm": 6.8110075549491365, - "learning_rate": 1.8980240801548696e-08, - "loss": 0.7648, - "num_input_tokens_seen": 169638705, - "step": 7962 - }, - { - "epoch": 0.9574941381590814, - "flos": 18883388057640.0, - "grad_norm": 3.4946085412398262, - "learning_rate": 1.8873327996747458e-08, - "loss": 0.7218, - "num_input_tokens_seen": 169656925, - "step": 7963 - }, - { - "epoch": 0.9576143810497204, - "flos": 23722695469080.0, - "grad_norm": 4.600082703551818, - "learning_rate": 1.8766715727287053e-08, - "loss": 0.6473, - "num_input_tokens_seen": 169678350, - "step": 7964 - }, - { - "epoch": 0.9577346239403596, - "flos": 20346817134960.0, - "grad_norm": 6.9760870520558536, - "learning_rate": 1.8660404009340546e-08, - "loss": 0.7776, - "num_input_tokens_seen": 169698520, - "step": 7965 - }, - { - "epoch": 0.9578548668309986, - "flos": 42346801707960.0, - "grad_norm": 0.9191769055729743, - "learning_rate": 1.8554392859035485e-08, - "loss": 0.6586, - "num_input_tokens_seen": 169755990, - "step": 7966 - }, - { - "epoch": 0.9579751097216377, - "flos": 14434005154440.0, - "grad_norm": 5.500159694780764, - "learning_rate": 1.8448682292453444e-08, - "loss": 0.7767, - "num_input_tokens_seen": 169774785, - "step": 7967 - }, - { - "epoch": 0.9580953526122769, - "flos": 13164968451960.0, - "grad_norm": 3.5747456813599157, - "learning_rate": 1.8343272325631154e-08, - "loss": 0.6483, - "num_input_tokens_seen": 169793450, - "step": 7968 - }, - { - "epoch": 0.9582155955029159, - "flos": 17773320726120.0, - "grad_norm": 6.176604262543008, - "learning_rate": 1.8238162974558492e-08, - "loss": 0.7741, - "num_input_tokens_seen": 169807100, - "step": 7969 - }, - { - "epoch": 0.958335838393555, - "flos": 16674239400720.0, - "grad_norm": 5.714643828205915, - "learning_rate": 1.8133354255181144e-08, - "loss": 0.7339, - "num_input_tokens_seen": 169827135, - "step": 7970 - }, - { - "epoch": 0.958456081284194, - "flos": 12329031460440.0, - "grad_norm": 3.681727659780258, - "learning_rate": 1.802884618339795e-08, - "loss": 0.7313, - "num_input_tokens_seen": 169845660, - "step": 7971 - }, - { - "epoch": 0.9585763241748332, - 
"flos": 14596013881560.0, - "grad_norm": 5.808603929358094, - "learning_rate": 1.7924638775062894e-08, - "loss": 0.7874, - "num_input_tokens_seen": 169864500, - "step": 7972 - }, - { - "epoch": 0.9586965670654722, - "flos": 15647107690320.0, - "grad_norm": 3.219116634487341, - "learning_rate": 1.7820732045984444e-08, - "loss": 0.8045, - "num_input_tokens_seen": 169884365, - "step": 7973 - }, - { - "epoch": 0.9588168099561113, - "flos": 15676736921520.0, - "grad_norm": 6.465415205577874, - "learning_rate": 1.7717126011924655e-08, - "loss": 0.7248, - "num_input_tokens_seen": 169905670, - "step": 7974 - }, - { - "epoch": 0.9589370528467505, - "flos": 8518913656800.0, - "grad_norm": 5.806918838461958, - "learning_rate": 1.7613820688600957e-08, - "loss": 0.7467, - "num_input_tokens_seen": 169921295, - "step": 7975 - }, - { - "epoch": 0.9590572957373895, - "flos": 17161563640440.0, - "grad_norm": 5.854817563544792, - "learning_rate": 1.7510816091684588e-08, - "loss": 0.7806, - "num_input_tokens_seen": 169940940, - "step": 7976 - }, - { - "epoch": 0.9591775386280286, - "flos": 16485545818680.0, - "grad_norm": 24.178041644308298, - "learning_rate": 1.740811223680083e-08, - "loss": 0.7623, - "num_input_tokens_seen": 169957515, - "step": 7977 - }, - { - "epoch": 0.9592977815186677, - "flos": 13272752650320.0, - "grad_norm": 6.187619294837215, - "learning_rate": 1.7305709139530334e-08, - "loss": 0.7144, - "num_input_tokens_seen": 169976015, - "step": 7978 - }, - { - "epoch": 0.9594180244093068, - "flos": 12051228565080.0, - "grad_norm": 4.739463765519045, - "learning_rate": 1.7203606815407334e-08, - "loss": 0.7274, - "num_input_tokens_seen": 169990330, - "step": 7979 - }, - { - "epoch": 0.9595382672999458, - "flos": 15024174638760.0, - "grad_norm": 3.895341947576593, - "learning_rate": 1.7101805279920557e-08, - "loss": 0.7788, - "num_input_tokens_seen": 170008210, - "step": 7980 - }, - { - "epoch": 0.959658510190585, - "flos": 16567025081640.0, - "grad_norm": 6.5504845476375175, - "learning_rate": 1.7000304548513643e-08, - "loss": 0.7989, - "num_input_tokens_seen": 170028035, - "step": 7981 - }, - { - "epoch": 0.9597787530812241, - "flos": 13974505528200.0, - "grad_norm": 4.886567306306391, - "learning_rate": 1.6899104636583394e-08, - "loss": 0.8085, - "num_input_tokens_seen": 170045805, - "step": 7982 - }, - { - "epoch": 0.9598989959718631, - "flos": 44293439230200.0, - "grad_norm": 0.7305343516874215, - "learning_rate": 1.6798205559482638e-08, - "loss": 0.6434, - "num_input_tokens_seen": 170107905, - "step": 7983 - }, - { - "epoch": 0.9600192388625023, - "flos": 15270796964880.0, - "grad_norm": 4.612754074583631, - "learning_rate": 1.669760733251713e-08, - "loss": 0.7464, - "num_input_tokens_seen": 170126500, - "step": 7984 - }, - { - "epoch": 0.9601394817531413, - "flos": 14943930114240.0, - "grad_norm": 8.671663408876954, - "learning_rate": 1.659730997094755e-08, - "loss": 0.8213, - "num_input_tokens_seen": 170144710, - "step": 7985 - }, - { - "epoch": 0.9602597246437804, - "flos": 15729916671600.0, - "grad_norm": 3.237692490676743, - "learning_rate": 1.6497313489989283e-08, - "loss": 0.6154, - "num_input_tokens_seen": 170164255, - "step": 7986 - }, - { - "epoch": 0.9603799675344196, - "flos": 21967126025880.0, - "grad_norm": 4.295758022433839, - "learning_rate": 1.639761790481131e-08, - "loss": 0.6647, - "num_input_tokens_seen": 170184855, - "step": 7987 - }, - { - "epoch": 0.9605002104250586, - "flos": 20536175576160.0, - "grad_norm": 4.933550063518972, - "learning_rate": 
1.6298223230537754e-08, - "loss": 0.7768, - "num_input_tokens_seen": 170202375, - "step": 7988 - }, - { - "epoch": 0.9606204533156977, - "flos": 26152509776280.0, - "grad_norm": 6.154552244979454, - "learning_rate": 1.619912948224611e-08, - "loss": 0.6766, - "num_input_tokens_seen": 170223300, - "step": 7989 - }, - { - "epoch": 0.9607406962063368, - "flos": 19479604594320.0, - "grad_norm": 5.402736514385286, - "learning_rate": 1.6100336674969682e-08, - "loss": 0.5796, - "num_input_tokens_seen": 170241860, - "step": 7990 - }, - { - "epoch": 0.9608609390969759, - "flos": 18559497243240.0, - "grad_norm": 3.791633913227066, - "learning_rate": 1.600184482369449e-08, - "loss": 0.7508, - "num_input_tokens_seen": 170261495, - "step": 7991 - }, - { - "epoch": 0.960981181987615, - "flos": 15405709257600.0, - "grad_norm": 8.981372205397934, - "learning_rate": 1.5903653943362126e-08, - "loss": 0.878, - "num_input_tokens_seen": 170280210, - "step": 7992 - }, - { - "epoch": 0.9611014248782541, - "flos": 13003751223840.0, - "grad_norm": 3.9691958439825292, - "learning_rate": 1.580576404886802e-08, - "loss": 0.7455, - "num_input_tokens_seen": 170298460, - "step": 7993 - }, - { - "epoch": 0.9612216677688932, - "flos": 14136925834800.0, - "grad_norm": 8.868839328522997, - "learning_rate": 1.570817515506162e-08, - "loss": 0.7838, - "num_input_tokens_seen": 170316870, - "step": 7994 - }, - { - "epoch": 0.9613419106595322, - "flos": 11511769353960.0, - "grad_norm": 6.295674477000961, - "learning_rate": 1.561088727674753e-08, - "loss": 0.7997, - "num_input_tokens_seen": 170330800, - "step": 7995 - }, - { - "epoch": 0.9614621535501714, - "flos": 18834672361920.0, - "grad_norm": 4.932418139513612, - "learning_rate": 1.551390042868417e-08, - "loss": 0.6838, - "num_input_tokens_seen": 170352290, - "step": 7996 - }, - { - "epoch": 0.9615823964408104, - "flos": 13000047008520.0, - "grad_norm": 6.90022919868498, - "learning_rate": 1.5417214625584207e-08, - "loss": 0.7067, - "num_input_tokens_seen": 170369665, - "step": 7997 - }, - { - "epoch": 0.9617026393314495, - "flos": 14754634992960.0, - "grad_norm": 3.3298508731568415, - "learning_rate": 1.5320829882114806e-08, - "loss": 0.8389, - "num_input_tokens_seen": 170387460, - "step": 7998 - }, - { - "epoch": 0.9618228822220887, - "flos": 14812183817520.0, - "grad_norm": 3.97779907905184, - "learning_rate": 1.5224746212897378e-08, - "loss": 0.765, - "num_input_tokens_seen": 170406475, - "step": 7999 - }, - { - "epoch": 0.9619431251127277, - "flos": 15377536384560.0, - "grad_norm": 3.6952752077184736, - "learning_rate": 1.512896363250804e-08, - "loss": 0.7576, - "num_input_tokens_seen": 170426305, - "step": 8000 - }, - { - "epoch": 0.9620633680033668, - "flos": 16377824940240.0, - "grad_norm": 3.3328922138794677, - "learning_rate": 1.503348215547673e-08, - "loss": 0.7455, - "num_input_tokens_seen": 170447115, - "step": 8001 - }, - { - "epoch": 0.962183610894006, - "flos": 13482495614400.0, - "grad_norm": 4.696361253246613, - "learning_rate": 1.4938301796288078e-08, - "loss": 0.788, - "num_input_tokens_seen": 170463405, - "step": 8002 - }, - { - "epoch": 0.962303853784645, - "flos": 13455494159880.0, - "grad_norm": 11.440707397997798, - "learning_rate": 1.4843422569380537e-08, - "loss": 0.8042, - "num_input_tokens_seen": 170479880, - "step": 8003 - }, - { - "epoch": 0.9624240966752841, - "flos": 19343615862960.0, - "grad_norm": 5.447209601047507, - "learning_rate": 1.4748844489147483e-08, - "loss": 0.8119, - "num_input_tokens_seen": 170496590, - "step": 8004 - }, - { - 
"epoch": 0.9625443395659231, - "flos": 10653421602120.0, - "grad_norm": 4.343570842753186, - "learning_rate": 1.4654567569936326e-08, - "loss": 0.6914, - "num_input_tokens_seen": 170513885, - "step": 8005 - }, - { - "epoch": 0.9626645824565623, - "flos": 13406208584880.0, - "grad_norm": 3.069454005146432, - "learning_rate": 1.456059182604874e-08, - "loss": 0.8086, - "num_input_tokens_seen": 170532410, - "step": 8006 - }, - { - "epoch": 0.9627848253472013, - "flos": 12083643872760.0, - "grad_norm": 6.0304552256253, - "learning_rate": 1.4466917271740653e-08, - "loss": 0.7543, - "num_input_tokens_seen": 170550330, - "step": 8007 - }, - { - "epoch": 0.9629050682378404, - "flos": 15270860284800.0, - "grad_norm": 5.54704297308388, - "learning_rate": 1.4373543921222697e-08, - "loss": 0.6539, - "num_input_tokens_seen": 170569635, - "step": 8008 - }, - { - "epoch": 0.9630253111284796, - "flos": 12408547805880.0, - "grad_norm": 6.610345719569179, - "learning_rate": 1.428047178865932e-08, - "loss": 0.7562, - "num_input_tokens_seen": 170586145, - "step": 8009 - }, - { - "epoch": 0.9631455540191186, - "flos": 14865268587720.0, - "grad_norm": 4.271564526497091, - "learning_rate": 1.4187700888169451e-08, - "loss": 0.7276, - "num_input_tokens_seen": 170605040, - "step": 8010 - }, - { - "epoch": 0.9632657969097577, - "flos": 48632129710080.0, - "grad_norm": 0.8239066088838626, - "learning_rate": 1.40952312338265e-08, - "loss": 0.6385, - "num_input_tokens_seen": 170669405, - "step": 8011 - }, - { - "epoch": 0.9633860398003968, - "flos": 32686513510560.0, - "grad_norm": 4.474735333165514, - "learning_rate": 1.4003062839657909e-08, - "loss": 0.6699, - "num_input_tokens_seen": 170691605, - "step": 8012 - }, - { - "epoch": 0.9635062826910359, - "flos": 18187397292480.0, - "grad_norm": 2.8408297121922526, - "learning_rate": 1.391119571964583e-08, - "loss": 0.7874, - "num_input_tokens_seen": 170712265, - "step": 8013 - }, - { - "epoch": 0.9636265255816749, - "flos": 11625663924600.0, - "grad_norm": 3.5734827836890815, - "learning_rate": 1.3819629887726225e-08, - "loss": 0.7186, - "num_input_tokens_seen": 170730075, - "step": 8014 - }, - { - "epoch": 0.9637467684723141, - "flos": 16540023627120.0, - "grad_norm": 7.80618910143442, - "learning_rate": 1.3728365357789317e-08, - "loss": 0.7522, - "num_input_tokens_seen": 170749160, - "step": 8015 - }, - { - "epoch": 0.9638670113629532, - "flos": 12812936424480.0, - "grad_norm": 5.45012785735335, - "learning_rate": 1.3637402143680254e-08, - "loss": 0.7472, - "num_input_tokens_seen": 170763780, - "step": 8016 - }, - { - "epoch": 0.9639872542535922, - "flos": 40575153496440.0, - "grad_norm": 0.7540308399723413, - "learning_rate": 1.3546740259197998e-08, - "loss": 0.5655, - "num_input_tokens_seen": 170816310, - "step": 8017 - }, - { - "epoch": 0.9641074971442314, - "flos": 17674369656600.0, - "grad_norm": 6.57877359341452, - "learning_rate": 1.3456379718095989e-08, - "loss": 0.6706, - "num_input_tokens_seen": 170836445, - "step": 8018 - }, - { - "epoch": 0.9642277400348704, - "flos": 49215618942840.0, - "grad_norm": 0.8804907185911771, - "learning_rate": 1.3366320534081487e-08, - "loss": 0.6419, - "num_input_tokens_seen": 170898845, - "step": 8019 - }, - { - "epoch": 0.9643479829255095, - "flos": 22696228617840.0, - "grad_norm": 4.329158287786772, - "learning_rate": 1.3276562720816675e-08, - "loss": 0.7308, - "num_input_tokens_seen": 170920075, - "step": 8020 - }, - { - "epoch": 0.9644682258161487, - "flos": 14650301730240.0, - "grad_norm": 3.883202270348085, - 
"learning_rate": 1.3187106291917549e-08, - "loss": 0.8114, - "num_input_tokens_seen": 170936785, - "step": 8021 - }, - { - "epoch": 0.9645884687067877, - "flos": 15486840261000.0, - "grad_norm": 4.013631956918285, - "learning_rate": 1.309795126095503e-08, - "loss": 0.6934, - "num_input_tokens_seen": 170954805, - "step": 8022 - }, - { - "epoch": 0.9647087115974268, - "flos": 13833451203240.0, - "grad_norm": 5.290042577907499, - "learning_rate": 1.3009097641453192e-08, - "loss": 0.7938, - "num_input_tokens_seen": 170972375, - "step": 8023 - }, - { - "epoch": 0.9648289544880659, - "flos": 12057655536960.0, - "grad_norm": 3.7605593470224647, - "learning_rate": 1.2920545446891474e-08, - "loss": 0.7464, - "num_input_tokens_seen": 170988815, - "step": 8024 - }, - { - "epoch": 0.964949197378705, - "flos": 17616852492000.0, - "grad_norm": 3.375721183300726, - "learning_rate": 1.2832294690703127e-08, - "loss": 0.6887, - "num_input_tokens_seen": 171007510, - "step": 8025 - }, - { - "epoch": 0.965069440269344, - "flos": 17104932954720.0, - "grad_norm": 9.443711415684746, - "learning_rate": 1.2744345386275668e-08, - "loss": 0.7599, - "num_input_tokens_seen": 171026770, - "step": 8026 - }, - { - "epoch": 0.9651896831599832, - "flos": 18592324130400.0, - "grad_norm": 3.5825923035171354, - "learning_rate": 1.265669754695109e-08, - "loss": 0.7609, - "num_input_tokens_seen": 171046060, - "step": 8027 - }, - { - "epoch": 0.9653099260506223, - "flos": 16296535637040.0, - "grad_norm": 5.7923414902943655, - "learning_rate": 1.2569351186025201e-08, - "loss": 0.8115, - "num_input_tokens_seen": 171064235, - "step": 8028 - }, - { - "epoch": 0.9654301689412613, - "flos": 19617967822680.0, - "grad_norm": 2.81365527691355, - "learning_rate": 1.2482306316748737e-08, - "loss": 0.7458, - "num_input_tokens_seen": 171084400, - "step": 8029 - }, - { - "epoch": 0.9655504118319005, - "flos": 12698598614400.0, - "grad_norm": 3.72814600667607, - "learning_rate": 1.2395562952326021e-08, - "loss": 0.7657, - "num_input_tokens_seen": 171101280, - "step": 8030 - }, - { - "epoch": 0.9656706547225395, - "flos": 16188561478920.0, - "grad_norm": 4.019839201790534, - "learning_rate": 1.2309121105916309e-08, - "loss": 0.7934, - "num_input_tokens_seen": 171119290, - "step": 8031 - }, - { - "epoch": 0.9657908976131786, - "flos": 27232029737760.0, - "grad_norm": 3.5228949084626096, - "learning_rate": 1.222298079063222e-08, - "loss": 0.6766, - "num_input_tokens_seen": 171140150, - "step": 8032 - }, - { - "epoch": 0.9659111405038178, - "flos": 17863316518320.0, - "grad_norm": 3.342741912327609, - "learning_rate": 1.2137142019541524e-08, - "loss": 0.7125, - "num_input_tokens_seen": 171158425, - "step": 8033 - }, - { - "epoch": 0.9660313833944568, - "flos": 18321771365880.0, - "grad_norm": 9.390206986198566, - "learning_rate": 1.2051604805666027e-08, - "loss": 0.7279, - "num_input_tokens_seen": 171175270, - "step": 8034 - }, - { - "epoch": 0.9661516262850959, - "flos": 8547086529840.0, - "grad_norm": 5.661184950623501, - "learning_rate": 1.196636916198135e-08, - "loss": 0.764, - "num_input_tokens_seen": 171192530, - "step": 8035 - }, - { - "epoch": 0.9662718691757349, - "flos": 14649890150760.0, - "grad_norm": 3.5055492550859966, - "learning_rate": 1.1881435101418036e-08, - "loss": 0.7626, - "num_input_tokens_seen": 171211665, - "step": 8036 - }, - { - "epoch": 0.9663921120663741, - "flos": 50163582567360.0, - "grad_norm": 0.7796068459422093, - "learning_rate": 1.1796802636860003e-08, - "loss": 0.6946, - "num_input_tokens_seen": 171279915, - 
"step": 8037 - }, - { - "epoch": 0.9665123549570132, - "flos": 19293855388560.0, - "grad_norm": 4.7220928149474695, - "learning_rate": 1.1712471781146316e-08, - "loss": 0.7036, - "num_input_tokens_seen": 171298970, - "step": 8038 - }, - { - "epoch": 0.9666325978476522, - "flos": 32331885366360.0, - "grad_norm": 5.954651484184083, - "learning_rate": 1.1628442547069628e-08, - "loss": 0.6625, - "num_input_tokens_seen": 171320890, - "step": 8039 - }, - { - "epoch": 0.9667528407382914, - "flos": 15756728166360.0, - "grad_norm": 3.753090030402435, - "learning_rate": 1.1544714947377521e-08, - "loss": 0.754, - "num_input_tokens_seen": 171338295, - "step": 8040 - }, - { - "epoch": 0.9668730836289304, - "flos": 17484916235520.0, - "grad_norm": 5.532811693322361, - "learning_rate": 1.1461288994770945e-08, - "loss": 0.6868, - "num_input_tokens_seen": 171357090, - "step": 8041 - }, - { - "epoch": 0.9669933265195695, - "flos": 20751838952760.0, - "grad_norm": 2.8669539935994814, - "learning_rate": 1.1378164701906002e-08, - "loss": 0.7524, - "num_input_tokens_seen": 171378575, - "step": 8042 - }, - { - "epoch": 0.9671135694102087, - "flos": 16431068010240.0, - "grad_norm": 3.447648759250728, - "learning_rate": 1.1295342081392156e-08, - "loss": 0.6503, - "num_input_tokens_seen": 171397655, - "step": 8043 - }, - { - "epoch": 0.9672338123008477, - "flos": 14729754755760.0, - "grad_norm": 5.898904097335415, - "learning_rate": 1.1212821145793804e-08, - "loss": 0.6753, - "num_input_tokens_seen": 171416990, - "step": 8044 - }, - { - "epoch": 0.9673540551914868, - "flos": 12376892337240.0, - "grad_norm": 3.7083435629942882, - "learning_rate": 1.1130601907629156e-08, - "loss": 0.7677, - "num_input_tokens_seen": 171434440, - "step": 8045 - }, - { - "epoch": 0.9674742980821259, - "flos": 45622684193880.0, - "grad_norm": 0.8558596062148464, - "learning_rate": 1.1048684379370899e-08, - "loss": 0.6636, - "num_input_tokens_seen": 171494845, - "step": 8046 - }, - { - "epoch": 0.967594540972765, - "flos": 13650836333520.0, - "grad_norm": 6.045859554697492, - "learning_rate": 1.0967068573445759e-08, - "loss": 0.7243, - "num_input_tokens_seen": 171512050, - "step": 8047 - }, - { - "epoch": 0.967714783863404, - "flos": 15189855921240.0, - "grad_norm": 9.127478694008017, - "learning_rate": 1.0885754502234945e-08, - "loss": 0.6288, - "num_input_tokens_seen": 171531430, - "step": 8048 - }, - { - "epoch": 0.9678350267540432, - "flos": 16971983579520.0, - "grad_norm": 3.7628018888601575, - "learning_rate": 1.08047421780737e-08, - "loss": 0.7711, - "num_input_tokens_seen": 171550340, - "step": 8049 - }, - { - "epoch": 0.9679552696446823, - "flos": 15891925398720.0, - "grad_norm": 6.538135750020288, - "learning_rate": 1.0724031613251305e-08, - "loss": 0.7192, - "num_input_tokens_seen": 171567960, - "step": 8050 - }, - { - "epoch": 0.9680755125353213, - "flos": 19697864087640.0, - "grad_norm": 3.8508132917758653, - "learning_rate": 1.0643622820011744e-08, - "loss": 0.6398, - "num_input_tokens_seen": 171588735, - "step": 8051 - }, - { - "epoch": 0.9681957554259605, - "flos": 20774882912280.0, - "grad_norm": 29.217887270017275, - "learning_rate": 1.0563515810552814e-08, - "loss": 0.6711, - "num_input_tokens_seen": 171605425, - "step": 8052 - }, - { - "epoch": 0.9683159983165995, - "flos": 15027119015040.0, - "grad_norm": 4.98384000484041, - "learning_rate": 1.0483710597026795e-08, - "loss": 0.7251, - "num_input_tokens_seen": 171625005, - "step": 8053 - }, - { - "epoch": 0.9684362412072386, - "flos": 17728815805080.0, - "grad_norm": 
5.292124911113597, - "learning_rate": 1.0404207191540227e-08, - "loss": 0.7203, - "num_input_tokens_seen": 171645180, - "step": 8054 - }, - { - "epoch": 0.9685564840978778, - "flos": 16350886805640.0, - "grad_norm": 3.935359797042417, - "learning_rate": 1.0325005606153236e-08, - "loss": 0.7255, - "num_input_tokens_seen": 171664360, - "step": 8055 - }, - { - "epoch": 0.9686767269885168, - "flos": 10459219187040.0, - "grad_norm": 6.856109467477113, - "learning_rate": 1.0246105852881104e-08, - "loss": 0.7658, - "num_input_tokens_seen": 171679180, - "step": 8056 - }, - { - "epoch": 0.9687969698791559, - "flos": 15567369725160.0, - "grad_norm": 7.28220187665539, - "learning_rate": 1.0167507943692476e-08, - "loss": 0.7671, - "num_input_tokens_seen": 171697985, - "step": 8057 - }, - { - "epoch": 0.968917212769795, - "flos": 14487406524240.0, - "grad_norm": 6.471337432123316, - "learning_rate": 1.008921189051093e-08, - "loss": 0.7006, - "num_input_tokens_seen": 171715050, - "step": 8058 - }, - { - "epoch": 0.9690374556604341, - "flos": 15858022072920.0, - "grad_norm": 8.485936367180232, - "learning_rate": 1.0011217705213848e-08, - "loss": 0.7554, - "num_input_tokens_seen": 171732645, - "step": 8059 - }, - { - "epoch": 0.9691576985510731, - "flos": 23965613579880.0, - "grad_norm": 3.528626944666158, - "learning_rate": 9.933525399632658e-09, - "loss": 0.7427, - "num_input_tokens_seen": 171750600, - "step": 8060 - }, - { - "epoch": 0.9692779414417123, - "flos": 26206259405640.0, - "grad_norm": 10.79966088221748, - "learning_rate": 9.856134985553488e-09, - "loss": 0.6402, - "num_input_tokens_seen": 171770045, - "step": 8061 - }, - { - "epoch": 0.9693981843323514, - "flos": 20806696680720.0, - "grad_norm": 3.370820742765536, - "learning_rate": 9.77904647471628e-09, - "loss": 0.7136, - "num_input_tokens_seen": 171792945, - "step": 8062 - }, - { - "epoch": 0.9695184272229904, - "flos": 17295621114240.0, - "grad_norm": 2.919988829186379, - "learning_rate": 9.702259878815454e-09, - "loss": 0.7341, - "num_input_tokens_seen": 171812990, - "step": 8063 - }, - { - "epoch": 0.9696386701136296, - "flos": 17052766323360.0, - "grad_norm": 4.708206625164633, - "learning_rate": 9.625775209499254e-09, - "loss": 0.7235, - "num_input_tokens_seen": 171832715, - "step": 8064 - }, - { - "epoch": 0.9697589130042686, - "flos": 11004345531000.0, - "grad_norm": 4.08306912456208, - "learning_rate": 9.549592478370172e-09, - "loss": 0.7224, - "num_input_tokens_seen": 171850615, - "step": 8065 - }, - { - "epoch": 0.9698791558949077, - "flos": 13785210406920.0, - "grad_norm": 5.121409577408383, - "learning_rate": 9.473711696985632e-09, - "loss": 0.779, - "num_input_tokens_seen": 171869665, - "step": 8066 - }, - { - "epoch": 0.9699993987855468, - "flos": 13083647488800.0, - "grad_norm": 6.119264992851215, - "learning_rate": 9.398132876856201e-09, - "loss": 0.7338, - "num_input_tokens_seen": 171888350, - "step": 8067 - }, - { - "epoch": 0.9701196416761859, - "flos": 49536945300480.0, - "grad_norm": 0.7756502115730701, - "learning_rate": 9.322856029447379e-09, - "loss": 0.636, - "num_input_tokens_seen": 171949255, - "step": 8068 - }, - { - "epoch": 0.970239884566825, - "flos": 17780064297600.0, - "grad_norm": 3.6134673199556815, - "learning_rate": 9.247881166178695e-09, - "loss": 0.7857, - "num_input_tokens_seen": 171967685, - "step": 8069 - }, - { - "epoch": 0.970360127457464, - "flos": 18538131261600.0, - "grad_norm": 6.618342959645877, - "learning_rate": 9.173208298423274e-09, - "loss": 0.7553, - "num_input_tokens_seen": 
171988610, - "step": 8070 - }, - { - "epoch": 0.9704803703481032, - "flos": 21699074398200.0, - "grad_norm": 3.443853021593456, - "learning_rate": 9.09883743750961e-09, - "loss": 0.7553, - "num_input_tokens_seen": 172011220, - "step": 8071 - }, - { - "epoch": 0.9706006132387422, - "flos": 12674984775600.0, - "grad_norm": 3.1279897502428793, - "learning_rate": 9.024768594719124e-09, - "loss": 0.8173, - "num_input_tokens_seen": 172029320, - "step": 8072 - }, - { - "epoch": 0.9707208561293813, - "flos": 13272277750920.0, - "grad_norm": 4.61219210126854, - "learning_rate": 8.95100178128816e-09, - "loss": 0.7067, - "num_input_tokens_seen": 172048180, - "step": 8073 - }, - { - "epoch": 0.9708410990200205, - "flos": 22939020088800.0, - "grad_norm": 4.115836166027634, - "learning_rate": 8.877537008407321e-09, - "loss": 0.6943, - "num_input_tokens_seen": 172067950, - "step": 8074 - }, - { - "epoch": 0.9709613419106595, - "flos": 22425359253720.0, - "grad_norm": 3.9306294883960797, - "learning_rate": 8.804374287221028e-09, - "loss": 0.6664, - "num_input_tokens_seen": 172088905, - "step": 8075 - }, - { - "epoch": 0.9710815848012986, - "flos": 17374725880200.0, - "grad_norm": 4.422245724507493, - "learning_rate": 8.731513628827958e-09, - "loss": 0.8323, - "num_input_tokens_seen": 172107990, - "step": 8076 - }, - { - "epoch": 0.9712018276919377, - "flos": 17397516560040.0, - "grad_norm": 3.6760718203430565, - "learning_rate": 8.658955044280825e-09, - "loss": 0.8115, - "num_input_tokens_seen": 172126635, - "step": 8077 - }, - { - "epoch": 0.9713220705825768, - "flos": 17079609478080.0, - "grad_norm": 3.4611307955597646, - "learning_rate": 8.586698544587268e-09, - "loss": 0.7507, - "num_input_tokens_seen": 172147965, - "step": 8078 - }, - { - "epoch": 0.9714423134732159, - "flos": 16243450866840.0, - "grad_norm": 3.9995561591906696, - "learning_rate": 8.514744140707853e-09, - "loss": 0.733, - "num_input_tokens_seen": 172166825, - "step": 8079 - }, - { - "epoch": 0.971562556363855, - "flos": 14784644143680.0, - "grad_norm": 6.459882620108215, - "learning_rate": 8.443091843558515e-09, - "loss": 0.7469, - "num_input_tokens_seen": 172185630, - "step": 8080 - }, - { - "epoch": 0.9716827992544941, - "flos": 18292363754400.0, - "grad_norm": 4.783282477521647, - "learning_rate": 8.37174166400878e-09, - "loss": 0.6338, - "num_input_tokens_seen": 172200925, - "step": 8081 - }, - { - "epoch": 0.9718030421451331, - "flos": 18079138194720.0, - "grad_norm": 6.261346512056569, - "learning_rate": 8.300693612881992e-09, - "loss": 0.8283, - "num_input_tokens_seen": 172220710, - "step": 8082 - }, - { - "epoch": 0.9719232850357723, - "flos": 16161749984160.0, - "grad_norm": 7.853137717064041, - "learning_rate": 8.22994770095664e-09, - "loss": 0.7843, - "num_input_tokens_seen": 172239005, - "step": 8083 - }, - { - "epoch": 0.9720435279264114, - "flos": 17320469691480.0, - "grad_norm": 5.264555780086816, - "learning_rate": 8.159503938964585e-09, - "loss": 0.7352, - "num_input_tokens_seen": 172256045, - "step": 8084 - }, - { - "epoch": 0.9721637708170504, - "flos": 20805778541880.0, - "grad_norm": 4.023883565230634, - "learning_rate": 8.089362337592164e-09, - "loss": 0.6967, - "num_input_tokens_seen": 172279390, - "step": 8085 - }, - { - "epoch": 0.9722840137076896, - "flos": 21371669328240.0, - "grad_norm": 3.1721905330567925, - "learning_rate": 8.019522907479536e-09, - "loss": 0.7025, - "num_input_tokens_seen": 172299470, - "step": 8086 - }, - { - "epoch": 0.9724042565983286, - "flos": 14054845032600.0, - "grad_norm": 
4.380081277728955, - "learning_rate": 7.949985659221558e-09, - "loss": 0.7478, - "num_input_tokens_seen": 172316455, - "step": 8087 - }, - { - "epoch": 0.9725244994889677, - "flos": 16864104401280.0, - "grad_norm": 3.5868228658771537, - "learning_rate": 7.880750603366904e-09, - "loss": 0.7684, - "num_input_tokens_seen": 172335045, - "step": 8088 - }, - { - "epoch": 0.9726447423796069, - "flos": 17106610932600.0, - "grad_norm": 5.470057042467799, - "learning_rate": 7.811817750418282e-09, - "loss": 0.7729, - "num_input_tokens_seen": 172353525, - "step": 8089 - }, - { - "epoch": 0.9727649852702459, - "flos": 19455705815880.0, - "grad_norm": 2.635445837128324, - "learning_rate": 7.743187110833105e-09, - "loss": 0.7825, - "num_input_tokens_seen": 172376005, - "step": 8090 - }, - { - "epoch": 0.972885228160885, - "flos": 15001130679240.0, - "grad_norm": 2.9852520517742, - "learning_rate": 7.674858695022602e-09, - "loss": 0.7978, - "num_input_tokens_seen": 172394080, - "step": 8091 - }, - { - "epoch": 0.9730054710515241, - "flos": 12813158044200.0, - "grad_norm": 5.843240481108779, - "learning_rate": 7.606832513351591e-09, - "loss": 0.7389, - "num_input_tokens_seen": 172411750, - "step": 8092 - }, - { - "epoch": 0.9731257139421632, - "flos": 50861821189680.0, - "grad_norm": 0.7951050343494802, - "learning_rate": 7.539108576140264e-09, - "loss": 0.6674, - "num_input_tokens_seen": 172475580, - "step": 8093 - }, - { - "epoch": 0.9732459568328022, - "flos": 13488764286480.0, - "grad_norm": 7.200463587353438, - "learning_rate": 7.471686893661732e-09, - "loss": 0.6767, - "num_input_tokens_seen": 172493595, - "step": 8094 - }, - { - "epoch": 0.9733661997234414, - "flos": 15268232508120.0, - "grad_norm": 4.2386180831734395, - "learning_rate": 7.4045674761442636e-09, - "loss": 0.6289, - "num_input_tokens_seen": 172510645, - "step": 8095 - }, - { - "epoch": 0.9734864426140805, - "flos": 17401853974560.0, - "grad_norm": 4.429217493597579, - "learning_rate": 7.337750333769488e-09, - "loss": 0.7183, - "num_input_tokens_seen": 172530170, - "step": 8096 - }, - { - "epoch": 0.9736066855047195, - "flos": 25746791439360.0, - "grad_norm": 9.289267009155395, - "learning_rate": 7.2712354766737425e-09, - "loss": 0.7156, - "num_input_tokens_seen": 172550220, - "step": 8097 - }, - { - "epoch": 0.9737269283953586, - "flos": 14918131738200.0, - "grad_norm": 3.339058981265694, - "learning_rate": 7.2050229149469565e-09, - "loss": 0.7928, - "num_input_tokens_seen": 172569950, - "step": 8098 - }, - { - "epoch": 0.9738471712859977, - "flos": 21209470641360.0, - "grad_norm": 3.8143278649268244, - "learning_rate": 7.139112658633984e-09, - "loss": 0.6137, - "num_input_tokens_seen": 172589820, - "step": 8099 - }, - { - "epoch": 0.9739674141766368, - "flos": 20373913569360.0, - "grad_norm": 4.115387090412112, - "learning_rate": 7.073504717733048e-09, - "loss": 0.6807, - "num_input_tokens_seen": 172609105, - "step": 8100 - }, - { - "epoch": 0.9740876570672758, - "flos": 50781260065560.0, - "grad_norm": 0.8851467768405467, - "learning_rate": 7.008199102196855e-09, - "loss": 0.5835, - "num_input_tokens_seen": 172670250, - "step": 8101 - }, - { - "epoch": 0.974207899957915, - "flos": 42915700190520.0, - "grad_norm": 0.8126562408464868, - "learning_rate": 6.9431958219321464e-09, - "loss": 0.6112, - "num_input_tokens_seen": 172726135, - "step": 8102 - }, - { - "epoch": 0.9743281428485541, - "flos": 16538503949040.0, - "grad_norm": 2.9059434954721595, - "learning_rate": 6.878494886800146e-09, - "loss": 0.7712, - "num_input_tokens_seen": 
172746630, - "step": 8103 - }, - { - "epoch": 0.9744483857391931, - "flos": 14620704159000.0, - "grad_norm": 4.963898023186489, - "learning_rate": 6.814096306615669e-09, - "loss": 0.7415, - "num_input_tokens_seen": 172764490, - "step": 8104 - }, - { - "epoch": 0.9745686286298323, - "flos": 12893940788040.0, - "grad_norm": 4.4472536163100935, - "learning_rate": 6.750000091148011e-09, - "loss": 0.6248, - "num_input_tokens_seen": 172781505, - "step": 8105 - }, - { - "epoch": 0.9746888715204713, - "flos": 21615632217720.0, - "grad_norm": 7.701398489643882, - "learning_rate": 6.686206250120729e-09, - "loss": 0.7101, - "num_input_tokens_seen": 172802720, - "step": 8106 - }, - { - "epoch": 0.9748091144111104, - "flos": 13489175865960.0, - "grad_norm": 6.971299051649505, - "learning_rate": 6.622714793210749e-09, - "loss": 0.725, - "num_input_tokens_seen": 172821360, - "step": 8107 - }, - { - "epoch": 0.9749293573017496, - "flos": 15106382080800.0, - "grad_norm": 4.2229534542148155, - "learning_rate": 6.559525730050364e-09, - "loss": 0.7711, - "num_input_tokens_seen": 172841180, - "step": 8108 - }, - { - "epoch": 0.9750496001923886, - "flos": 13488542666760.0, - "grad_norm": 4.380891375482211, - "learning_rate": 6.496639070224574e-09, - "loss": 0.7462, - "num_input_tokens_seen": 172859385, - "step": 8109 - }, - { - "epoch": 0.9751698430830277, - "flos": 14298428002560.0, - "grad_norm": 7.1680725256452416, - "learning_rate": 6.4340548232739714e-09, - "loss": 0.8268, - "num_input_tokens_seen": 172875305, - "step": 8110 - }, - { - "epoch": 0.9752900859736668, - "flos": 17241998124720.0, - "grad_norm": 3.3502839323963785, - "learning_rate": 6.371772998692071e-09, - "loss": 0.7732, - "num_input_tokens_seen": 172894280, - "step": 8111 - }, - { - "epoch": 0.9754103288643059, - "flos": 14838931992360.0, - "grad_norm": 7.677887900716368, - "learning_rate": 6.309793605927094e-09, - "loss": 0.6305, - "num_input_tokens_seen": 172912320, - "step": 8112 - }, - { - "epoch": 0.975530571754945, - "flos": 14133443239200.0, - "grad_norm": 3.958926199968789, - "learning_rate": 6.248116654381297e-09, - "loss": 0.7813, - "num_input_tokens_seen": 172930510, - "step": 8113 - }, - { - "epoch": 0.9756508146455841, - "flos": 17265991883040.0, - "grad_norm": 4.636116483632037, - "learning_rate": 6.186742153410751e-09, - "loss": 0.7075, - "num_input_tokens_seen": 172949725, - "step": 8114 - }, - { - "epoch": 0.9757710575362232, - "flos": 16810291452000.0, - "grad_norm": 9.11286683985976, - "learning_rate": 6.125670112326453e-09, - "loss": 0.8483, - "num_input_tokens_seen": 172968705, - "step": 8115 - }, - { - "epoch": 0.9758913004268622, - "flos": 20510028940560.0, - "grad_norm": 3.5626901943901346, - "learning_rate": 6.064900540392548e-09, - "loss": 0.682, - "num_input_tokens_seen": 172990520, - "step": 8116 - }, - { - "epoch": 0.9760115433175014, - "flos": 16242754347720.0, - "grad_norm": 3.2087836176645936, - "learning_rate": 6.0044334468278835e-09, - "loss": 0.775, - "num_input_tokens_seen": 173009585, - "step": 8117 - }, - { - "epoch": 0.9761317862081405, - "flos": 19240548998640.0, - "grad_norm": 3.9564130173976273, - "learning_rate": 5.944268840805345e-09, - "loss": 0.7057, - "num_input_tokens_seen": 173030050, - "step": 8118 - }, - { - "epoch": 0.9762520290987795, - "flos": 19480332773400.0, - "grad_norm": 9.71839045506556, - "learning_rate": 5.88440673145163e-09, - "loss": 0.6309, - "num_input_tokens_seen": 173050820, - "step": 8119 - }, - { - "epoch": 0.9763722719894187, - "flos": 13138188617160.0, - "grad_norm": 
3.868227547170597, - "learning_rate": 5.824847127848142e-09, - "loss": 0.8122, - "num_input_tokens_seen": 173069065, - "step": 8120 - }, - { - "epoch": 0.9764925148800577, - "flos": 16431859509240.0, - "grad_norm": 3.951715512101855, - "learning_rate": 5.765590039029433e-09, - "loss": 0.7655, - "num_input_tokens_seen": 173088105, - "step": 8121 - }, - { - "epoch": 0.9766127577706968, - "flos": 27015733161960.0, - "grad_norm": 8.209010630385684, - "learning_rate": 5.706635473985422e-09, - "loss": 0.7023, - "num_input_tokens_seen": 173111695, - "step": 8122 - }, - { - "epoch": 0.976733000661336, - "flos": 16324075310880.0, - "grad_norm": 5.45078120101091, - "learning_rate": 5.6479834416591764e-09, - "loss": 0.84, - "num_input_tokens_seen": 173130775, - "step": 8123 - }, - { - "epoch": 0.976853243551975, - "flos": 18914790246600.0, - "grad_norm": 4.314451135623693, - "learning_rate": 5.589633950947803e-09, - "loss": 0.6677, - "num_input_tokens_seen": 173147995, - "step": 8124 - }, - { - "epoch": 0.9769734864426141, - "flos": 15648690688320.0, - "grad_norm": 5.109523946008472, - "learning_rate": 5.5315870107035535e-09, - "loss": 0.6728, - "num_input_tokens_seen": 173165765, - "step": 8125 - }, - { - "epoch": 0.9770937293332532, - "flos": 10167427080720.0, - "grad_norm": 3.916722899229424, - "learning_rate": 5.473842629731607e-09, - "loss": 0.7658, - "num_input_tokens_seen": 173183985, - "step": 8126 - }, - { - "epoch": 0.9772139722238923, - "flos": 13084122388200.0, - "grad_norm": 5.125392619655126, - "learning_rate": 5.416400816792066e-09, - "loss": 0.7661, - "num_input_tokens_seen": 173201220, - "step": 8127 - }, - { - "epoch": 0.9773342151145313, - "flos": 14944310033760.0, - "grad_norm": 4.894914105200421, - "learning_rate": 5.359261580598407e-09, - "loss": 0.7487, - "num_input_tokens_seen": 173216780, - "step": 8128 - }, - { - "epoch": 0.9774544580051704, - "flos": 8573423125200.0, - "grad_norm": 4.554708228873821, - "learning_rate": 5.302424929819027e-09, - "loss": 0.7607, - "num_input_tokens_seen": 173230510, - "step": 8129 - }, - { - "epoch": 0.9775747008958096, - "flos": 9789185097720.0, - "grad_norm": 4.409974167936274, - "learning_rate": 5.24589087307592e-09, - "loss": 0.7054, - "num_input_tokens_seen": 173247850, - "step": 8130 - }, - { - "epoch": 0.9776949437864486, - "flos": 43872080873040.0, - "grad_norm": 3.5307381858247178, - "learning_rate": 5.189659418944891e-09, - "loss": 0.6396, - "num_input_tokens_seen": 173277745, - "step": 8131 - }, - { - "epoch": 0.9778151866770877, - "flos": 15486745281120.0, - "grad_norm": 4.475332680382282, - "learning_rate": 5.133730575956674e-09, - "loss": 0.7601, - "num_input_tokens_seen": 173297135, - "step": 8132 - }, - { - "epoch": 0.9779354295677268, - "flos": 15270796964880.0, - "grad_norm": 4.136469530013426, - "learning_rate": 5.0781043525953696e-09, - "loss": 0.7054, - "num_input_tokens_seen": 173314920, - "step": 8133 - }, - { - "epoch": 0.9780556724583659, - "flos": 17160772141440.0, - "grad_norm": 3.974689514701903, - "learning_rate": 5.0227807572995605e-09, - "loss": 0.7158, - "num_input_tokens_seen": 173336615, - "step": 8134 - }, - { - "epoch": 0.9781759153490049, - "flos": 15297766759440.0, - "grad_norm": 5.492893656538955, - "learning_rate": 4.967759798461646e-09, - "loss": 0.6591, - "num_input_tokens_seen": 173354680, - "step": 8135 - }, - { - "epoch": 0.9782961582396441, - "flos": 20751997252560.0, - "grad_norm": 5.364146430239281, - "learning_rate": 4.913041484428282e-09, - "loss": 0.7306, - "num_input_tokens_seen": 173374875, 
- "step": 8136 - }, - { - "epoch": 0.9784164011302832, - "flos": 18723500547840.0, - "grad_norm": 12.030191587982076, - "learning_rate": 4.858625823500384e-09, - "loss": 0.739, - "num_input_tokens_seen": 173392295, - "step": 8137 - }, - { - "epoch": 0.9785366440209222, - "flos": 21989156866680.0, - "grad_norm": 4.1355280761060875, - "learning_rate": 4.80451282393246e-09, - "loss": 0.7282, - "num_input_tokens_seen": 173412000, - "step": 8138 - }, - { - "epoch": 0.9786568869115614, - "flos": 23749633603680.0, - "grad_norm": 3.6391348347204557, - "learning_rate": 4.750702493933722e-09, - "loss": 0.6637, - "num_input_tokens_seen": 173431605, - "step": 8139 - }, - { - "epoch": 0.9787771298022004, - "flos": 17079989397600.0, - "grad_norm": 3.9712935579604958, - "learning_rate": 4.697194841666974e-09, - "loss": 0.8339, - "num_input_tokens_seen": 173450250, - "step": 8140 - }, - { - "epoch": 0.9788973726928395, - "flos": 15701047279440.0, - "grad_norm": 4.396634696244581, - "learning_rate": 4.6439898752492764e-09, - "loss": 0.8023, - "num_input_tokens_seen": 173470110, - "step": 8141 - }, - { - "epoch": 0.9790176155834787, - "flos": 50806425242400.0, - "grad_norm": 0.7593198905382317, - "learning_rate": 4.591087602751731e-09, - "loss": 0.6308, - "num_input_tokens_seen": 173531690, - "step": 8142 - }, - { - "epoch": 0.9791378584741177, - "flos": 15673001046240.0, - "grad_norm": 14.339405873013554, - "learning_rate": 4.538488032199916e-09, - "loss": 0.7073, - "num_input_tokens_seen": 173549510, - "step": 8143 - }, - { - "epoch": 0.9792581013647568, - "flos": 14727190299000.0, - "grad_norm": 10.101463936946644, - "learning_rate": 4.486191171572784e-09, - "loss": 0.6652, - "num_input_tokens_seen": 173566500, - "step": 8144 - }, - { - "epoch": 0.9793783442553959, - "flos": 17372984582400.0, - "grad_norm": 4.480535159163468, - "learning_rate": 4.434197028803766e-09, - "loss": 0.768, - "num_input_tokens_seen": 173585445, - "step": 8145 - }, - { - "epoch": 0.979498587146035, - "flos": 16863281242320.0, - "grad_norm": 4.481873987825742, - "learning_rate": 4.3825056117805514e-09, - "loss": 0.7934, - "num_input_tokens_seen": 173601050, - "step": 8146 - }, - { - "epoch": 0.979618830036674, - "flos": 10409996931960.0, - "grad_norm": 7.427055654879906, - "learning_rate": 4.331116928344425e-09, - "loss": 0.7784, - "num_input_tokens_seen": 173617085, - "step": 8147 - }, - { - "epoch": 0.9797390729273132, - "flos": 12192662809560.0, - "grad_norm": 5.910120104813464, - "learning_rate": 4.28003098629115e-09, - "loss": 0.6093, - "num_input_tokens_seen": 173632940, - "step": 8148 - }, - { - "epoch": 0.9798593158179523, - "flos": 17968599579840.0, - "grad_norm": 4.617012343335853, - "learning_rate": 4.229247793370305e-09, - "loss": 0.7801, - "num_input_tokens_seen": 173651785, - "step": 8149 - }, - { - "epoch": 0.9799795587085913, - "flos": 20023591179720.0, - "grad_norm": 4.225857095854057, - "learning_rate": 4.178767357285951e-09, - "loss": 0.6846, - "num_input_tokens_seen": 173673135, - "step": 8150 - }, - { - "epoch": 0.9800998015992305, - "flos": 19266980573880.0, - "grad_norm": 4.803519849859897, - "learning_rate": 4.128589685695516e-09, - "loss": 0.681, - "num_input_tokens_seen": 173693280, - "step": 8151 - }, - { - "epoch": 0.9802200444898695, - "flos": 12189243533880.0, - "grad_norm": 5.1039975300486935, - "learning_rate": 4.078714786211135e-09, - "loss": 0.8354, - "num_input_tokens_seen": 173708850, - "step": 8152 - }, - { - "epoch": 0.9803402873805086, - "flos": 18241210241760.0, - "grad_norm": 
28.11818819419592, - "learning_rate": 4.029142666398977e-09, - "loss": 0.756, - "num_input_tokens_seen": 173728735, - "step": 8153 - }, - { - "epoch": 0.9804605302711478, - "flos": 16512705573000.0, - "grad_norm": 3.3552290061417245, - "learning_rate": 3.979873333778805e-09, - "loss": 0.7905, - "num_input_tokens_seen": 173746630, - "step": 8154 - }, - { - "epoch": 0.9805807731617868, - "flos": 28604988123480.0, - "grad_norm": 3.4367602852897554, - "learning_rate": 3.930906795824862e-09, - "loss": 0.7237, - "num_input_tokens_seen": 173767025, - "step": 8155 - }, - { - "epoch": 0.9807010160524259, - "flos": 13002643125240.0, - "grad_norm": 7.337539969080343, - "learning_rate": 3.882243059965207e-09, - "loss": 0.7647, - "num_input_tokens_seen": 173784460, - "step": 8156 - }, - { - "epoch": 0.980821258943065, - "flos": 9843251326680.0, - "grad_norm": 5.785558796026136, - "learning_rate": 3.833882133582156e-09, - "loss": 0.6509, - "num_input_tokens_seen": 173799840, - "step": 8157 - }, - { - "epoch": 0.9809415018337041, - "flos": 15865113903960.0, - "grad_norm": 3.5173408898519924, - "learning_rate": 3.785824024012285e-09, - "loss": 0.7624, - "num_input_tokens_seen": 173818560, - "step": 8158 - }, - { - "epoch": 0.9810617447243432, - "flos": 17052924623160.0, - "grad_norm": 3.9243819898265757, - "learning_rate": 3.738068738545541e-09, - "loss": 0.7675, - "num_input_tokens_seen": 173837365, - "step": 8159 - }, - { - "epoch": 0.9811819876149822, - "flos": 13379998629360.0, - "grad_norm": 4.965815031185737, - "learning_rate": 3.6906162844265733e-09, - "loss": 0.7635, - "num_input_tokens_seen": 173854170, - "step": 8160 - }, - { - "epoch": 0.9813022305056214, - "flos": 16540181926920.0, - "grad_norm": 3.771677305639492, - "learning_rate": 3.643466668853845e-09, - "loss": 0.7034, - "num_input_tokens_seen": 173871915, - "step": 8161 - }, - { - "epoch": 0.9814224733962604, - "flos": 18621066882720.0, - "grad_norm": 3.6413982045073103, - "learning_rate": 3.59661989898008e-09, - "loss": 0.7276, - "num_input_tokens_seen": 173892690, - "step": 8162 - }, - { - "epoch": 0.9815427162868995, - "flos": 18319808448360.0, - "grad_norm": 5.098007762964383, - "learning_rate": 3.5500759819115934e-09, - "loss": 0.7572, - "num_input_tokens_seen": 173912775, - "step": 8163 - }, - { - "epoch": 0.9816629591775387, - "flos": 15104134223640.0, - "grad_norm": 4.796878578607175, - "learning_rate": 3.5038349247094034e-09, - "loss": 0.8026, - "num_input_tokens_seen": 173929755, - "step": 8164 - }, - { - "epoch": 0.9817832020681777, - "flos": 12921353822040.0, - "grad_norm": 6.675774107204341, - "learning_rate": 3.4578967343878994e-09, - "loss": 0.7616, - "num_input_tokens_seen": 173945680, - "step": 8165 - }, - { - "epoch": 0.9819034449588168, - "flos": 16487097156720.0, - "grad_norm": 2.64453125, - "learning_rate": 3.4122614179161733e-09, - "loss": 0.7909, - "num_input_tokens_seen": 173965360, - "step": 8166 - }, - { - "epoch": 0.9820236878494559, - "flos": 14622825376320.0, - "grad_norm": 3.131706975001095, - "learning_rate": 3.36692898221691e-09, - "loss": 0.7548, - "num_input_tokens_seen": 173983445, - "step": 8167 - }, - { - "epoch": 0.982143930740095, - "flos": 13731872357040.0, - "grad_norm": 5.49812284860919, - "learning_rate": 3.3218994341668305e-09, - "loss": 0.7179, - "num_input_tokens_seen": 174002095, - "step": 8168 - }, - { - "epoch": 0.982264173630734, - "flos": 19482612290520.0, - "grad_norm": 3.797624553272694, - "learning_rate": 3.2771727805971373e-09, - "loss": 0.7386, - "num_input_tokens_seen": 174023200, 
- "step": 8169 - }, - { - "epoch": 0.9823844165213732, - "flos": 16104866018760.0, - "grad_norm": 6.933317831217724, - "learning_rate": 3.232749028292847e-09, - "loss": 0.7525, - "num_input_tokens_seen": 174039885, - "step": 8170 - }, - { - "epoch": 0.9825046594120123, - "flos": 16026647731680.0, - "grad_norm": 4.243604447982576, - "learning_rate": 3.188628183992792e-09, - "loss": 0.8696, - "num_input_tokens_seen": 174059870, - "step": 8171 - }, - { - "epoch": 0.9826249023026513, - "flos": 43849168044720.0, - "grad_norm": 0.7823916676518372, - "learning_rate": 3.1448102543902844e-09, - "loss": 0.6484, - "num_input_tokens_seen": 174123505, - "step": 8172 - }, - { - "epoch": 0.9827451451932905, - "flos": 11706858247920.0, - "grad_norm": 5.1341391232360625, - "learning_rate": 3.1012952461324515e-09, - "loss": 0.654, - "num_input_tokens_seen": 174142200, - "step": 8173 - }, - { - "epoch": 0.9828653880839295, - "flos": 14808447942240.0, - "grad_norm": 5.256905691501496, - "learning_rate": 3.0580831658204575e-09, - "loss": 0.7311, - "num_input_tokens_seen": 174159500, - "step": 8174 - }, - { - "epoch": 0.9829856309745686, - "flos": 15810509455680.0, - "grad_norm": 3.8120944948763023, - "learning_rate": 3.015174020009281e-09, - "loss": 0.7677, - "num_input_tokens_seen": 174178545, - "step": 8175 - }, - { - "epoch": 0.9831058738652078, - "flos": 17399574457440.0, - "grad_norm": 5.864016716135719, - "learning_rate": 2.9725678152086043e-09, - "loss": 0.7341, - "num_input_tokens_seen": 174196835, - "step": 8176 - }, - { - "epoch": 0.9832261167558468, - "flos": 8190463808160.0, - "grad_norm": 7.063168654395114, - "learning_rate": 2.930264557881257e-09, - "loss": 0.8015, - "num_input_tokens_seen": 174211740, - "step": 8177 - }, - { - "epoch": 0.9833463596464859, - "flos": 44222692693680.0, - "grad_norm": 0.8218002633464555, - "learning_rate": 2.8882642544452163e-09, - "loss": 0.6208, - "num_input_tokens_seen": 174276185, - "step": 8178 - }, - { - "epoch": 0.983466602537125, - "flos": 9897919094880.0, - "grad_norm": 5.324408792978895, - "learning_rate": 2.8465669112716083e-09, - "loss": 0.7293, - "num_input_tokens_seen": 174293430, - "step": 8179 - }, - { - "epoch": 0.9835868454277641, - "flos": 16780124001480.0, - "grad_norm": 4.177781627756965, - "learning_rate": 2.8051725346858177e-09, - "loss": 0.7321, - "num_input_tokens_seen": 174313410, - "step": 8180 - }, - { - "epoch": 0.9837070883184031, - "flos": 20293384105200.0, - "grad_norm": 4.032262630583116, - "learning_rate": 2.7640811309674883e-09, - "loss": 0.6859, - "num_input_tokens_seen": 174332630, - "step": 8181 - }, - { - "epoch": 0.9838273312090423, - "flos": 21454161709920.0, - "grad_norm": 4.95876859576328, - "learning_rate": 2.7232927063498557e-09, - "loss": 0.7982, - "num_input_tokens_seen": 174352725, - "step": 8182 - }, - { - "epoch": 0.9839475740996814, - "flos": 29496764301720.0, - "grad_norm": 3.618932579059713, - "learning_rate": 2.682807267020859e-09, - "loss": 0.667, - "num_input_tokens_seen": 174375205, - "step": 8183 - }, - { - "epoch": 0.9840678169903204, - "flos": 17699978072880.0, - "grad_norm": 3.5891928264254167, - "learning_rate": 2.642624819121808e-09, - "loss": 0.6179, - "num_input_tokens_seen": 174395075, - "step": 8184 - }, - { - "epoch": 0.9841880598809596, - "flos": 10680707996280.0, - "grad_norm": 9.174662021991516, - "learning_rate": 2.6027453687487154e-09, - "loss": 0.6014, - "num_input_tokens_seen": 174411885, - "step": 8185 - }, - { - "epoch": 0.9843083027715986, - "flos": 16349588747280.0, - "grad_norm": 
7.093298809474213, - "learning_rate": 2.5631689219509643e-09, - "loss": 0.4902, - "num_input_tokens_seen": 174430285, - "step": 8186 - }, - { - "epoch": 0.9844285456622377, - "flos": 15946878106560.0, - "grad_norm": 9.506898082183248, - "learning_rate": 2.523895484732197e-09, - "loss": 0.8251, - "num_input_tokens_seen": 174449460, - "step": 8187 - }, - { - "epoch": 0.9845487885528769, - "flos": 13159776218520.0, - "grad_norm": 4.697543155457723, - "learning_rate": 2.4849250630505357e-09, - "loss": 0.7404, - "num_input_tokens_seen": 174467425, - "step": 8188 - }, - { - "epoch": 0.9846690314435159, - "flos": 18484033372680.0, - "grad_norm": 4.2570179609153245, - "learning_rate": 2.4462576628172528e-09, - "loss": 0.7182, - "num_input_tokens_seen": 174485775, - "step": 8189 - }, - { - "epoch": 0.984789274334155, - "flos": 13543083795120.0, - "grad_norm": 13.714636253236677, - "learning_rate": 2.407893289898766e-09, - "loss": 0.7267, - "num_input_tokens_seen": 174504525, - "step": 8190 - }, - { - "epoch": 0.984909517224794, - "flos": 20050719274080.0, - "grad_norm": 4.488390038933956, - "learning_rate": 2.3698319501144202e-09, - "loss": 0.8263, - "num_input_tokens_seen": 174525230, - "step": 8191 - }, - { - "epoch": 0.9850297601154332, - "flos": 13677616168320.0, - "grad_norm": 3.322190128569738, - "learning_rate": 2.3320736492382644e-09, - "loss": 0.7142, - "num_input_tokens_seen": 174543785, - "step": 8192 - }, - { - "epoch": 0.9851500030060723, - "flos": 16322555632800.0, - "grad_norm": 3.893129454221457, - "learning_rate": 2.29461839299816e-09, - "loss": 0.6736, - "num_input_tokens_seen": 174563220, - "step": 8193 - }, - { - "epoch": 0.9852702458967113, - "flos": 19315822909440.0, - "grad_norm": 3.5388085145737715, - "learning_rate": 2.257466187076229e-09, - "loss": 0.7876, - "num_input_tokens_seen": 174582145, - "step": 8194 - }, - { - "epoch": 0.9853904887873505, - "flos": 15267979228440.0, - "grad_norm": 6.524683521532086, - "learning_rate": 2.2206170371081854e-09, - "loss": 0.6829, - "num_input_tokens_seen": 174600450, - "step": 8195 - }, - { - "epoch": 0.9855107316779895, - "flos": 18509578469040.0, - "grad_norm": 4.225716497398025, - "learning_rate": 2.1840709486842247e-09, - "loss": 0.8332, - "num_input_tokens_seen": 174619790, - "step": 8196 - }, - { - "epoch": 0.9856309745686286, - "flos": 13920344319360.0, - "grad_norm": 3.9876123777237407, - "learning_rate": 2.1478279273481335e-09, - "loss": 0.7686, - "num_input_tokens_seen": 174637995, - "step": 8197 - }, - { - "epoch": 0.9857512174592677, - "flos": 25072166655840.0, - "grad_norm": 5.622938668527297, - "learning_rate": 2.1118879785981815e-09, - "loss": 0.7756, - "num_input_tokens_seen": 174657855, - "step": 8198 - }, - { - "epoch": 0.9858714603499068, - "flos": 19050968937720.0, - "grad_norm": 4.3688743213433545, - "learning_rate": 2.0762511078862288e-09, - "loss": 0.7829, - "num_input_tokens_seen": 174677920, - "step": 8199 - }, - { - "epoch": 0.9859917032405459, - "flos": 17349972282840.0, - "grad_norm": 5.154105925555441, - "learning_rate": 2.0409173206186183e-09, - "loss": 0.6441, - "num_input_tokens_seen": 174696880, - "step": 8200 - }, - { - "epoch": 0.986111946131185, - "flos": 14568854127240.0, - "grad_norm": 3.4661521462550904, - "learning_rate": 2.0058866221550617e-09, - "loss": 0.8517, - "num_input_tokens_seen": 174714840, - "step": 8201 - }, - { - "epoch": 0.9862321890218241, - "flos": 14487849763680.0, - "grad_norm": 3.3809523192289923, - "learning_rate": 1.971159017809976e-09, - "loss": 0.7405, - 
"num_input_tokens_seen": 174732850, - "step": 8202 - }, - { - "epoch": 0.9863524319124631, - "flos": 15837004350840.0, - "grad_norm": 4.707433001203037, - "learning_rate": 1.93673451285159e-09, - "loss": 0.7562, - "num_input_tokens_seen": 174751620, - "step": 8203 - }, - { - "epoch": 0.9864726748031023, - "flos": 38870895865680.0, - "grad_norm": 0.7497482672070624, - "learning_rate": 1.9026131125019495e-09, - "loss": 0.59, - "num_input_tokens_seen": 174808710, - "step": 8204 - }, - { - "epoch": 0.9865929176937414, - "flos": 17104109795760.0, - "grad_norm": 2.1741175428898765, - "learning_rate": 1.8687948219371363e-09, - "loss": 0.846, - "num_input_tokens_seen": 174827655, - "step": 8205 - }, - { - "epoch": 0.9867131605843804, - "flos": 15810984355080.0, - "grad_norm": 5.372920144020702, - "learning_rate": 1.835279646287491e-09, - "loss": 0.8682, - "num_input_tokens_seen": 174845385, - "step": 8206 - }, - { - "epoch": 0.9868334034750196, - "flos": 16295237578680.0, - "grad_norm": 3.222933522636328, - "learning_rate": 1.8020675906371685e-09, - "loss": 0.7559, - "num_input_tokens_seen": 174864500, - "step": 8207 - }, - { - "epoch": 0.9869536463656586, - "flos": 18913587168120.0, - "grad_norm": 3.722215208043817, - "learning_rate": 1.7691586600243612e-09, - "loss": 0.7277, - "num_input_tokens_seen": 174883120, - "step": 8208 - }, - { - "epoch": 0.9870738892562977, - "flos": 11949079839600.0, - "grad_norm": 6.932241560489545, - "learning_rate": 1.7365528594415202e-09, - "loss": 0.8525, - "num_input_tokens_seen": 174896910, - "step": 8209 - }, - { - "epoch": 0.9871941321469369, - "flos": 26071790352360.0, - "grad_norm": 4.278621562843265, - "learning_rate": 1.7042501938346888e-09, - "loss": 0.6646, - "num_input_tokens_seen": 174919360, - "step": 8210 - }, - { - "epoch": 0.9873143750375759, - "flos": 15675945422520.0, - "grad_norm": 3.922064666417242, - "learning_rate": 1.6722506681043913e-09, - "loss": 0.757, - "num_input_tokens_seen": 174938040, - "step": 8211 - }, - { - "epoch": 0.987434617928215, - "flos": 11895931749480.0, - "grad_norm": 5.900069297771498, - "learning_rate": 1.640554287104745e-09, - "loss": 0.6696, - "num_input_tokens_seen": 174956035, - "step": 8212 - }, - { - "epoch": 0.9875548608188541, - "flos": 13023914127000.0, - "grad_norm": 3.8937089945438776, - "learning_rate": 1.609161055644348e-09, - "loss": 0.764, - "num_input_tokens_seen": 174971680, - "step": 8213 - }, - { - "epoch": 0.9876751037094932, - "flos": 19152136204440.0, - "grad_norm": 5.208713222636561, - "learning_rate": 1.5780709784849467e-09, - "loss": 0.6617, - "num_input_tokens_seen": 174988420, - "step": 8214 - }, - { - "epoch": 0.9877953466001322, - "flos": 11646934926360.0, - "grad_norm": 3.5818941345928503, - "learning_rate": 1.5472840603436565e-09, - "loss": 0.806, - "num_input_tokens_seen": 175005370, - "step": 8215 - }, - { - "epoch": 0.9879155894907714, - "flos": 13731397457640.0, - "grad_norm": 4.950880245972522, - "learning_rate": 1.5168003058900757e-09, - "loss": 0.7806, - "num_input_tokens_seen": 175023090, - "step": 8216 - }, - { - "epoch": 0.9880358323814105, - "flos": 16377286720920.0, - "grad_norm": 3.0183556403759897, - "learning_rate": 1.4866197197491715e-09, - "loss": 0.9063, - "num_input_tokens_seen": 175042170, - "step": 8217 - }, - { - "epoch": 0.9881560752720495, - "flos": 11409082409160.0, - "grad_norm": 5.321963150808586, - "learning_rate": 1.4567423064988371e-09, - "loss": 0.7413, - "num_input_tokens_seen": 175059240, - "step": 8218 - }, - { - "epoch": 0.9882763181626887, - "flos": 
15723711319440.0, - "grad_norm": 6.6465495171102456, - "learning_rate": 1.4271680706718913e-09, - "loss": 0.7649, - "num_input_tokens_seen": 175076635, - "step": 8219 - }, - { - "epoch": 0.9883965610533277, - "flos": 20560169334480.0, - "grad_norm": 4.0949332587424045, - "learning_rate": 1.3978970167543013e-09, - "loss": 0.8076, - "num_input_tokens_seen": 175096535, - "step": 8220 - }, - { - "epoch": 0.9885168039439668, - "flos": 10246816786320.0, - "grad_norm": 3.443124089070392, - "learning_rate": 1.3689291491867372e-09, - "loss": 0.7508, - "num_input_tokens_seen": 175114570, - "step": 8221 - }, - { - "epoch": 0.988637046834606, - "flos": 19374543152520.0, - "grad_norm": 4.782083476105004, - "learning_rate": 1.3402644723636836e-09, - "loss": 0.7227, - "num_input_tokens_seen": 175136320, - "step": 8222 - }, - { - "epoch": 0.988757289725245, - "flos": 18484476612120.0, - "grad_norm": 6.185367101014815, - "learning_rate": 1.311902990633218e-09, - "loss": 0.8114, - "num_input_tokens_seen": 175155005, - "step": 8223 - }, - { - "epoch": 0.9888775326158841, - "flos": 19320540243480.0, - "grad_norm": 3.2793825194016524, - "learning_rate": 1.2838447082978987e-09, - "loss": 0.6977, - "num_input_tokens_seen": 175175880, - "step": 8224 - }, - { - "epoch": 0.9889977755065231, - "flos": 17808680410080.0, - "grad_norm": 11.800322214673512, - "learning_rate": 1.2560896296143208e-09, - "loss": 0.8063, - "num_input_tokens_seen": 175194065, - "step": 8225 - }, - { - "epoch": 0.9891180183971623, - "flos": 13837946917560.0, - "grad_norm": 5.011939289952243, - "learning_rate": 1.2286377587926722e-09, - "loss": 0.7884, - "num_input_tokens_seen": 175210575, - "step": 8226 - }, - { - "epoch": 0.9892382612878013, - "flos": 19185501310920.0, - "grad_norm": 4.199754235479468, - "learning_rate": 1.2014890999973992e-09, - "loss": 0.7413, - "num_input_tokens_seen": 175227215, - "step": 8227 - }, - { - "epoch": 0.9893585041784404, - "flos": 18647878377480.0, - "grad_norm": 11.612380530786824, - "learning_rate": 1.1746436573472073e-09, - "loss": 0.7644, - "num_input_tokens_seen": 175248670, - "step": 8228 - }, - { - "epoch": 0.9894787470690796, - "flos": 14754001793760.0, - "grad_norm": 6.620365177005291, - "learning_rate": 1.1481014349141726e-09, - "loss": 0.6735, - "num_input_tokens_seen": 175265610, - "step": 8229 - }, - { - "epoch": 0.9895989899597186, - "flos": 18051535200960.0, - "grad_norm": 3.8451451196555557, - "learning_rate": 1.121862436724852e-09, - "loss": 0.8235, - "num_input_tokens_seen": 175284170, - "step": 8230 - }, - { - "epoch": 0.9897192328503577, - "flos": 15945928307760.0, - "grad_norm": 6.715972468216919, - "learning_rate": 1.0959266667598388e-09, - "loss": 0.7011, - "num_input_tokens_seen": 175302705, - "step": 8231 - }, - { - "epoch": 0.9898394757409968, - "flos": 15595352638440.0, - "grad_norm": 6.9865653680538795, - "learning_rate": 1.0702941289533196e-09, - "loss": 0.7243, - "num_input_tokens_seen": 175321100, - "step": 8232 - }, - { - "epoch": 0.9899597186316359, - "flos": 13536783463080.0, - "grad_norm": 5.3453141984934085, - "learning_rate": 1.0449648271939615e-09, - "loss": 0.8695, - "num_input_tokens_seen": 175337165, - "step": 8233 - }, - { - "epoch": 0.990079961522275, - "flos": 17401790654640.0, - "grad_norm": 3.40597267071819, - "learning_rate": 1.0199387653240243e-09, - "loss": 0.7209, - "num_input_tokens_seen": 175356575, - "step": 8234 - }, - { - "epoch": 0.9902002044129141, - "flos": 11949681378840.0, - "grad_norm": 5.610932625343799, - "learning_rate": 9.952159471400267e-10, - 
"loss": 0.6876, - "num_input_tokens_seen": 175373335, - "step": 8235 - }, - { - "epoch": 0.9903204473035532, - "flos": 16508051558880.0, - "grad_norm": 4.0202856662653845, - "learning_rate": 9.707963763923022e-10, - "loss": 0.8251, - "num_input_tokens_seen": 175392105, - "step": 8236 - }, - { - "epoch": 0.9904406901941922, - "flos": 11760861156960.0, - "grad_norm": 3.809909878763769, - "learning_rate": 9.466800567854427e-10, - "loss": 0.7718, - "num_input_tokens_seen": 175410425, - "step": 8237 - }, - { - "epoch": 0.9905609330848314, - "flos": 19537058439000.0, - "grad_norm": 7.3315325462224035, - "learning_rate": 9.228669919778553e-10, - "loss": 0.6778, - "num_input_tokens_seen": 175429070, - "step": 8238 - }, - { - "epoch": 0.9906811759754705, - "flos": 16917379131240.0, - "grad_norm": 8.34375960103029, - "learning_rate": 8.993571855817617e-10, - "loss": 0.7796, - "num_input_tokens_seen": 175447620, - "step": 8239 - }, - { - "epoch": 0.9908014188661095, - "flos": 16161655004280.0, - "grad_norm": 3.0660994260807644, - "learning_rate": 8.761506411638642e-10, - "loss": 0.7219, - "num_input_tokens_seen": 175466805, - "step": 8240 - }, - { - "epoch": 0.9909216617567487, - "flos": 14053958553720.0, - "grad_norm": 4.203634578630057, - "learning_rate": 8.53247362244236e-10, - "loss": 0.7198, - "num_input_tokens_seen": 175485335, - "step": 8241 - }, - { - "epoch": 0.9910419046473877, - "flos": 17294164756080.0, - "grad_norm": 3.5606466458268486, - "learning_rate": 8.306473522976532e-10, - "loss": 0.6728, - "num_input_tokens_seen": 175504460, - "step": 8242 - }, - { - "epoch": 0.9911621475380268, - "flos": 16621154630520.0, - "grad_norm": 9.477767226210943, - "learning_rate": 8.083506147522623e-10, - "loss": 0.7075, - "num_input_tokens_seen": 175523575, - "step": 8243 - }, - { - "epoch": 0.991282390428666, - "flos": 9816661451640.0, - "grad_norm": 4.561731143078088, - "learning_rate": 7.863571529906909e-10, - "loss": 0.8402, - "num_input_tokens_seen": 175538880, - "step": 8244 - }, - { - "epoch": 0.991402633319305, - "flos": 45577989313080.0, - "grad_norm": 0.8172791482506535, - "learning_rate": 7.646669703489372e-10, - "loss": 0.6425, - "num_input_tokens_seen": 175602910, - "step": 8245 - }, - { - "epoch": 0.9915228762099441, - "flos": 13704269363280.0, - "grad_norm": 2.8962578405176163, - "learning_rate": 7.432800701177023e-10, - "loss": 0.5621, - "num_input_tokens_seen": 175620630, - "step": 8246 - }, - { - "epoch": 0.9916431191005832, - "flos": 48614463452400.0, - "grad_norm": 0.8085887374353993, - "learning_rate": 7.221964555415017e-10, - "loss": 0.5933, - "num_input_tokens_seen": 175680010, - "step": 8247 - }, - { - "epoch": 0.9917633619912223, - "flos": 12084656991480.0, - "grad_norm": 3.782719728450199, - "learning_rate": 7.01416129818222e-10, - "loss": 0.7356, - "num_input_tokens_seen": 175697350, - "step": 8248 - }, - { - "epoch": 0.9918836048818613, - "flos": 18619103965200.0, - "grad_norm": 4.663037364766656, - "learning_rate": 6.809390961006745e-10, - "loss": 0.5688, - "num_input_tokens_seen": 175717200, - "step": 8249 - }, - { - "epoch": 0.9920038477725005, - "flos": 18349627639320.0, - "grad_norm": 4.709607089879158, - "learning_rate": 6.607653574948191e-10, - "loss": 0.6783, - "num_input_tokens_seen": 175737700, - "step": 8250 - }, - { - "epoch": 0.9921240906631396, - "flos": 15967864168680.0, - "grad_norm": 3.9943547944566746, - "learning_rate": 6.408949170613187e-10, - "loss": 0.8057, - "num_input_tokens_seen": 175756685, - "step": 8251 - }, - { - "epoch": 0.9922443335537786, - 
"flos": 18214430406960.0, - "grad_norm": 2.757084909912999, - "learning_rate": 6.213277778144288e-10, - "loss": 0.802, - "num_input_tokens_seen": 175778050, - "step": 8252 - }, - { - "epoch": 0.9923645764444178, - "flos": 15807818359080.0, - "grad_norm": 4.143766773126639, - "learning_rate": 6.020639427224416e-10, - "loss": 0.6607, - "num_input_tokens_seen": 175795415, - "step": 8253 - }, - { - "epoch": 0.9924848193350568, - "flos": 18315059454360.0, - "grad_norm": 5.00826362565584, - "learning_rate": 5.831034147076864e-10, - "loss": 0.7069, - "num_input_tokens_seen": 175812385, - "step": 8254 - }, - { - "epoch": 0.9926050622256959, - "flos": 50819722425600.0, - "grad_norm": 0.7112540494002396, - "learning_rate": 5.644461966463065e-10, - "loss": 0.5669, - "num_input_tokens_seen": 175879715, - "step": 8255 - }, - { - "epoch": 0.9927253051163349, - "flos": 15298146678960.0, - "grad_norm": 8.921529946945103, - "learning_rate": 5.460922913687049e-10, - "loss": 0.7382, - "num_input_tokens_seen": 175898525, - "step": 8256 - }, - { - "epoch": 0.9928455480069741, - "flos": 16322523972840.0, - "grad_norm": 3.678591943755065, - "learning_rate": 5.280417016593208e-10, - "loss": 0.7281, - "num_input_tokens_seen": 175918035, - "step": 8257 - }, - { - "epoch": 0.9929657908976132, - "flos": 12678372391320.0, - "grad_norm": 4.276548602604612, - "learning_rate": 5.102944302559642e-10, - "loss": 0.7392, - "num_input_tokens_seen": 175935250, - "step": 8258 - }, - { - "epoch": 0.9930860337882522, - "flos": 16593963216240.0, - "grad_norm": 5.060716479693542, - "learning_rate": 4.9285047985137e-10, - "loss": 0.7664, - "num_input_tokens_seen": 175954390, - "step": 8259 - }, - { - "epoch": 0.9932062766788914, - "flos": 20644466333880.0, - "grad_norm": 4.36566342969731, - "learning_rate": 4.757098530916436e-10, - "loss": 0.7386, - "num_input_tokens_seen": 175974555, - "step": 8260 - }, - { - "epoch": 0.9933265195695304, - "flos": 14730387954960.0, - "grad_norm": 5.771111583772533, - "learning_rate": 4.5887255257670563e-10, - "loss": 0.77, - "num_input_tokens_seen": 175991315, - "step": 8261 - }, - { - "epoch": 0.9934467624601695, - "flos": 15622639032600.0, - "grad_norm": 4.057890639289049, - "learning_rate": 4.4233858086117906e-10, - "loss": 0.7499, - "num_input_tokens_seen": 176009560, - "step": 8262 - }, - { - "epoch": 0.9935670053508087, - "flos": 14434005154440.0, - "grad_norm": 4.419587423133822, - "learning_rate": 4.261079404528356e-10, - "loss": 0.6548, - "num_input_tokens_seen": 176028760, - "step": 8263 - }, - { - "epoch": 0.9936872482414477, - "flos": 16078624403280.0, - "grad_norm": 4.08067224686425, - "learning_rate": 4.1018063381437205e-10, - "loss": 0.6687, - "num_input_tokens_seen": 176048865, - "step": 8264 - }, - { - "epoch": 0.9938074911320868, - "flos": 51482411404200.0, - "grad_norm": 0.9282482450711618, - "learning_rate": 3.9455666336141167e-10, - "loss": 0.6529, - "num_input_tokens_seen": 176112365, - "step": 8265 - }, - { - "epoch": 0.9939277340227259, - "flos": 10976267637840.0, - "grad_norm": 5.768813494728607, - "learning_rate": 3.7923603146450267e-10, - "loss": 0.8033, - "num_input_tokens_seen": 176128145, - "step": 8266 - }, - { - "epoch": 0.994047976913365, - "flos": 12921480461880.0, - "grad_norm": 4.069601102959236, - "learning_rate": 3.642187404473418e-10, - "loss": 0.7805, - "num_input_tokens_seen": 176146025, - "step": 8267 - }, - { - "epoch": 0.994168219804004, - "flos": 14001348682920.0, - "grad_norm": 6.377623971700908, - "learning_rate": 3.495047925885508e-10, - "loss": 
0.8341, - "num_input_tokens_seen": 176164080, - "step": 8268 - }, - { - "epoch": 0.9942884626946432, - "flos": 13023565867440.0, - "grad_norm": 5.056049145833918, - "learning_rate": 3.350941901199e-10, - "loss": 0.8239, - "num_input_tokens_seen": 176180720, - "step": 8269 - }, - { - "epoch": 0.9944087055852823, - "flos": 13722406029000.0, - "grad_norm": 4.053297682990184, - "learning_rate": 3.2098693522764066e-10, - "loss": 0.821, - "num_input_tokens_seen": 176193640, - "step": 8270 - }, - { - "epoch": 0.9945289484759213, - "flos": 15352434527640.0, - "grad_norm": 4.355616326675833, - "learning_rate": 3.071830300516165e-10, - "loss": 0.8052, - "num_input_tokens_seen": 176211190, - "step": 8271 - }, - { - "epoch": 0.9946491913665605, - "flos": 10734267665880.0, - "grad_norm": 4.246376737062599, - "learning_rate": 2.9368247668615234e-10, - "loss": 0.6841, - "num_input_tokens_seen": 176229500, - "step": 8272 - }, - { - "epoch": 0.9947694342571995, - "flos": 9188694466440.0, - "grad_norm": 4.530914083723287, - "learning_rate": 2.804852771789434e-10, - "loss": 0.5926, - "num_input_tokens_seen": 176242520, - "step": 8273 - }, - { - "epoch": 0.9948896771478386, - "flos": 13758557211960.0, - "grad_norm": 5.737581193469959, - "learning_rate": 2.675914335321661e-10, - "loss": 0.5504, - "num_input_tokens_seen": 176260995, - "step": 8274 - }, - { - "epoch": 0.9950099200384778, - "flos": 18240988622040.0, - "grad_norm": 6.239741030967946, - "learning_rate": 2.550009477018111e-10, - "loss": 0.7625, - "num_input_tokens_seen": 176279485, - "step": 8275 - }, - { - "epoch": 0.9951301629291168, - "flos": 17377765236360.0, - "grad_norm": 3.1289947916330725, - "learning_rate": 2.4271382159790634e-10, - "loss": 0.6179, - "num_input_tokens_seen": 176296635, - "step": 8276 - }, - { - "epoch": 0.9952504058197559, - "flos": 16269977421960.0, - "grad_norm": 2.7584597770851897, - "learning_rate": 2.3073005708429406e-10, - "loss": 0.8426, - "num_input_tokens_seen": 176316000, - "step": 8277 - }, - { - "epoch": 0.995370648710395, - "flos": 15510169160160.0, - "grad_norm": 4.051616703940467, - "learning_rate": 2.190496559788535e-10, - "loss": 0.7099, - "num_input_tokens_seen": 176334005, - "step": 8278 - }, - { - "epoch": 0.9954908916010341, - "flos": 10815556969080.0, - "grad_norm": 5.68828468359325, - "learning_rate": 2.0767262005372265e-10, - "loss": 0.7428, - "num_input_tokens_seen": 176351240, - "step": 8279 - }, - { - "epoch": 0.9956111344916732, - "flos": 14028540097200.0, - "grad_norm": 15.866392670731301, - "learning_rate": 1.965989510346322e-10, - "loss": 0.7347, - "num_input_tokens_seen": 176370080, - "step": 8280 - }, - { - "epoch": 0.9957313773823123, - "flos": 14649256951560.0, - "grad_norm": 5.2245429318205, - "learning_rate": 1.8582865060134955e-10, - "loss": 0.6925, - "num_input_tokens_seen": 176387990, - "step": 8281 - }, - { - "epoch": 0.9958516202729514, - "flos": 42359972251320.0, - "grad_norm": 0.8092250478867352, - "learning_rate": 1.7536172038790098e-10, - "loss": 0.5803, - "num_input_tokens_seen": 176448020, - "step": 8282 - }, - { - "epoch": 0.9959718631635904, - "flos": 20374420128720.0, - "grad_norm": 10.04973403806488, - "learning_rate": 1.651981619819054e-10, - "loss": 0.6708, - "num_input_tokens_seen": 176464890, - "step": 8283 - }, - { - "epoch": 0.9960921060542296, - "flos": 17592542134080.0, - "grad_norm": 3.6291703537675164, - "learning_rate": 1.5533797692546257e-10, - "loss": 0.6748, - "num_input_tokens_seen": 176483345, - "step": 8284 - }, - { - "epoch": 0.9962123489448687, - "flos": 
13650456414000.0, - "grad_norm": 4.219909282631286, - "learning_rate": 1.4578116671404296e-10, - "loss": 0.8172, - "num_input_tokens_seen": 176501345, - "step": 8285 - }, - { - "epoch": 0.9963325918355077, - "flos": 14622413796840.0, - "grad_norm": 5.353985893473176, - "learning_rate": 1.3652773279759777e-10, - "loss": 0.7038, - "num_input_tokens_seen": 176517715, - "step": 8286 - }, - { - "epoch": 0.9964528347261468, - "flos": 24315524390040.0, - "grad_norm": 4.326027923324558, - "learning_rate": 1.2757767657989305e-10, - "loss": 0.6116, - "num_input_tokens_seen": 176541225, - "step": 8287 - }, - { - "epoch": 0.9965730776167859, - "flos": 16915194594000.0, - "grad_norm": 5.303924280917853, - "learning_rate": 1.1893099941850948e-10, - "loss": 0.8528, - "num_input_tokens_seen": 176559840, - "step": 8288 - }, - { - "epoch": 0.996693320507425, - "flos": 16809341653200.0, - "grad_norm": 6.312868013135514, - "learning_rate": 1.105877026252866e-10, - "loss": 0.7523, - "num_input_tokens_seen": 176577890, - "step": 8289 - }, - { - "epoch": 0.996813563398064, - "flos": 9598591918080.0, - "grad_norm": 3.812950232764527, - "learning_rate": 1.0254778746565663e-10, - "loss": 0.7109, - "num_input_tokens_seen": 176592885, - "step": 8290 - }, - { - "epoch": 0.9969338062887032, - "flos": 10653579901920.0, - "grad_norm": 3.8258726383672683, - "learning_rate": 9.481125515953259e-11, - "loss": 0.7196, - "num_input_tokens_seen": 176610665, - "step": 8291 - }, - { - "epoch": 0.9970540491793423, - "flos": 18858824420040.0, - "grad_norm": 3.340333423076106, - "learning_rate": 8.737810688064228e-11, - "loss": 0.7872, - "num_input_tokens_seen": 176630220, - "step": 8292 - }, - { - "epoch": 0.9971742920699813, - "flos": 15702535297560.0, - "grad_norm": 3.50168868607345, - "learning_rate": 8.024834375608414e-11, - "loss": 0.7735, - "num_input_tokens_seen": 176648530, - "step": 8293 - }, - { - "epoch": 0.9972945349606205, - "flos": 53257922130840.0, - "grad_norm": 0.8343543275136708, - "learning_rate": 7.342196686788149e-11, - "loss": 0.6542, - "num_input_tokens_seen": 176701415, - "step": 8294 - }, - { - "epoch": 0.9974147778512595, - "flos": 14375791470720.0, - "grad_norm": 3.630300921112743, - "learning_rate": 6.689897725142834e-11, - "loss": 0.6619, - "num_input_tokens_seen": 176720610, - "step": 8295 - }, - { - "epoch": 0.9975350207418986, - "flos": 11625189025200.0, - "grad_norm": 3.422513579960946, - "learning_rate": 6.067937589615545e-11, - "loss": 0.8573, - "num_input_tokens_seen": 176738405, - "step": 8296 - }, - { - "epoch": 0.9976552636325378, - "flos": 44193981601320.0, - "grad_norm": 0.764050207058674, - "learning_rate": 5.476316374575241e-11, - "loss": 0.5747, - "num_input_tokens_seen": 176801610, - "step": 8297 - }, - { - "epoch": 0.9977755065231768, - "flos": 16455093428520.0, - "grad_norm": 6.43916136404933, - "learning_rate": 4.9150341697723476e-11, - "loss": 0.7206, - "num_input_tokens_seen": 176821220, - "step": 8298 - }, - { - "epoch": 0.9978957494138159, - "flos": 19077178893240.0, - "grad_norm": 3.271379720807409, - "learning_rate": 4.384091060338768e-11, - "loss": 0.6474, - "num_input_tokens_seen": 176841410, - "step": 8299 - }, - { - "epoch": 0.998015992304455, - "flos": 16566645162120.0, - "grad_norm": 5.216474391006006, - "learning_rate": 3.883487126810081e-11, - "loss": 0.7142, - "num_input_tokens_seen": 176860390, - "step": 8300 - }, - { - "epoch": 0.9981362351950941, - "flos": 13299754104840.0, - "grad_norm": 6.744372247054275, - "learning_rate": 3.41322244516995e-11, - "loss": 0.7871, - 
"num_input_tokens_seen": 176878055, - "step": 8301 - }, - { - "epoch": 0.9982564780857331, - "flos": 24586203794400.0, - "grad_norm": 3.0528747955848257, - "learning_rate": 2.9732970866946925e-11, - "loss": 0.6089, - "num_input_tokens_seen": 176897655, - "step": 8302 - }, - { - "epoch": 0.9983767209763723, - "flos": 11301583150440.0, - "grad_norm": 3.9762702639656133, - "learning_rate": 2.563711118175327e-11, - "loss": 0.7646, - "num_input_tokens_seen": 176914260, - "step": 8303 - }, - { - "epoch": 0.9984969638670114, - "flos": 14595507322200.0, - "grad_norm": 3.440611384864637, - "learning_rate": 2.184464601717728e-11, - "loss": 0.8147, - "num_input_tokens_seen": 176932295, - "step": 8304 - }, - { - "epoch": 0.9986172067576504, - "flos": 14892554981880.0, - "grad_norm": 4.033156070019091, - "learning_rate": 1.8355575948758585e-11, - "loss": 0.7533, - "num_input_tokens_seen": 176950000, - "step": 8305 - }, - { - "epoch": 0.9987374496482896, - "flos": 17372731302720.0, - "grad_norm": 8.449315818761646, - "learning_rate": 1.5169901505407424e-11, - "loss": 0.7209, - "num_input_tokens_seen": 176966785, - "step": 8306 - }, - { - "epoch": 0.9988576925389286, - "flos": 18347126502480.0, - "grad_norm": 3.3201519377538293, - "learning_rate": 1.228762317073695e-11, - "loss": 0.7301, - "num_input_tokens_seen": 176985335, - "step": 8307 - }, - { - "epoch": 0.9989779354295677, - "flos": 22966686402480.0, - "grad_norm": 6.265713888474845, - "learning_rate": 9.70874138195299e-12, - "loss": 0.7722, - "num_input_tokens_seen": 177006965, - "step": 8308 - }, - { - "epoch": 0.9990981783202069, - "flos": 14298807922080.0, - "grad_norm": 3.5834148900413307, - "learning_rate": 7.433256530076093e-12, - "loss": 0.7302, - "num_input_tokens_seen": 177026640, - "step": 8309 - }, - { - "epoch": 0.9992184212108459, - "flos": 12541782120720.0, - "grad_norm": 4.276149858352018, - "learning_rate": 5.46116896038562e-12, - "loss": 0.7428, - "num_input_tokens_seen": 177040770, - "step": 8310 - }, - { - "epoch": 0.999338664101485, - "flos": 34224424999680.0, - "grad_norm": 3.8161113707087146, - "learning_rate": 3.792478972197699e-12, - "loss": 0.612, - "num_input_tokens_seen": 177061075, - "step": 8311 - }, - { - "epoch": 0.9994589069921241, - "flos": 11031252005640.0, - "grad_norm": 4.530000080167588, - "learning_rate": 2.4271868181990895e-12, - "loss": 0.6652, - "num_input_tokens_seen": 177077960, - "step": 8312 - }, - { - "epoch": 0.9995791498827632, - "flos": 9087273920040.0, - "grad_norm": 3.7507020928854367, - "learning_rate": 1.3652927060014973e-12, - "loss": 0.7908, - "num_input_tokens_seen": 177093275, - "step": 8313 - }, - { - "epoch": 0.9996993927734023, - "flos": 14055921471240.0, - "grad_norm": 4.346244699399764, - "learning_rate": 6.067967965872612e-13, - "loss": 0.6298, - "num_input_tokens_seen": 177112605, - "step": 8314 - }, - { - "epoch": 0.9998196356640414, - "flos": 46402175968080.0, - "grad_norm": 3.445108628242103, - "learning_rate": 1.5169920497548615e-13, - "loss": 0.7604, - "num_input_tokens_seen": 177136945, - "step": 8315 - }, - { - "epoch": 0.9999398785546805, - "flos": 37505409230520.0, - "grad_norm": 1.7315440399887767, - "learning_rate": 0.0, - "loss": 0.5654, - "num_input_tokens_seen": 177185545, - "step": 8316 - } - ], - "logging_steps": 1.0, - "max_steps": 8316, - "num_input_tokens_seen": 177185545, - "num_train_epochs": 1, - "save_steps": 832, - "stateful_callbacks": { - "TrainerControl": { - "args": { - "should_epoch_stop": false, - "should_evaluate": false, - "should_log": false, - 
"should_save": true, - "should_training_stop": true - }, - "attributes": {} - } - }, - "total_flos": 5.1159586152277606e+17, - "train_batch_size": 5, - "trial_name": null, - "trial_params": null -} diff --git a/sft/hyperrouter/training_args.bin b/sft/hyperrouter/training_args.bin deleted file mode 100644 index 99a3c69d066e49b37c9c8a5d4846721b8292f596..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/training_args.bin +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:91f2748baa877275d8bfdf97bc718c1d2f1df4bf89d2bb2c94a5d9b4a9230cab -size 8120 diff --git a/sft/hyperrouter/zero_to_fp32.py b/sft/hyperrouter/zero_to_fp32.py deleted file mode 100644 index 24cc342e78d1a006c782b3a4cd68d9ce786d8fd8..0000000000000000000000000000000000000000 --- a/sft/hyperrouter/zero_to_fp32.py +++ /dev/null @@ -1,604 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: Apache-2.0 - -# DeepSpeed Team - -# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets -# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in -# the future. Once extracted, the weights don't require DeepSpeed and can be used in any -# application. -# -# example: python zero_to_fp32.py . pytorch_model.bin - -import argparse -import torch -import glob -import math -import os -import re -from collections import OrderedDict -from dataclasses import dataclass - -# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with -# DeepSpeed data structures it has to be available in the current python environment. -from deepspeed.utils import logger -from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS, - FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES, - FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS) - - -@dataclass -class zero_model_state: - buffers: dict() - param_shapes: dict() - shared_params: list - ds_version: int - frozen_param_shapes: dict() - frozen_param_fragments: dict() - - -debug = 0 - -# load to cpu -device = torch.device('cpu') - - -def atoi(text): - return int(text) if text.isdigit() else text - - -def natural_keys(text): - ''' - alist.sort(key=natural_keys) sorts in human order - http://nedbatchelder.com/blog/200712/human_sorting.html - (See Toothy's implementation in the comments) - ''' - return [atoi(c) for c in re.split(r'(\d+)', text)] - - -def get_model_state_file(checkpoint_dir, zero_stage): - if not os.path.isdir(checkpoint_dir): - raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist") - - # there should be only one file - if zero_stage <= 2: - file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt") - elif zero_stage == 3: - file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt") - - if not os.path.exists(file): - raise FileNotFoundError(f"can't find model states file at '{file}'") - - return file - - -def get_checkpoint_files(checkpoint_dir, glob_pattern): - # XXX: need to test that this simple glob rule works for multi-node setup too - ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys) - - if len(ckpt_files) == 0: - raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'") - - return ckpt_files - - -def get_optim_files(checkpoint_dir): - return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt") - - 
-def get_model_state_files(checkpoint_dir): - return get_checkpoint_files(checkpoint_dir, "*_model_states.pt") - - -def parse_model_states(files): - zero_model_states = [] - for file in files: - state_dict = torch.load(file, map_location=device) - - if BUFFER_NAMES not in state_dict: - raise ValueError(f"{file} is not a model state checkpoint") - buffer_names = state_dict[BUFFER_NAMES] - if debug: - print("Found buffers:", buffer_names) - - # recover just the buffers while restoring them to fp32 if they were saved in fp16 - buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names} - param_shapes = state_dict[PARAM_SHAPES] - - # collect parameters that are included in param_shapes - param_names = [] - for s in param_shapes: - for name in s.keys(): - param_names.append(name) - - # update with frozen parameters - frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None) - if frozen_param_shapes is not None: - if debug: - print(f"Found frozen_param_shapes: {frozen_param_shapes}") - param_names += list(frozen_param_shapes.keys()) - - # handle shared params - shared_params = [[k, v] for k, v in state_dict["shared_params"].items()] - - ds_version = state_dict.get(DS_VERSION, None) - - frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None) - - z_model_state = zero_model_state(buffers=buffers, - param_shapes=param_shapes, - shared_params=shared_params, - ds_version=ds_version, - frozen_param_shapes=frozen_param_shapes, - frozen_param_fragments=frozen_param_fragments) - zero_model_states.append(z_model_state) - - return zero_model_states - - -def parse_optim_states(files, ds_checkpoint_dir): - - total_files = len(files) - state_dicts = [] - for f in files: - state_dict = torch.load(f, map_location=device) - # immediately discard the two potentially huge optimizer states, as we only care about the fp32 master weights, - # and also handle the case where they were already removed by another helper script - state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None) - state_dicts.append(state_dict) - - if ZERO_STAGE not in state_dicts[0][OPTIMIZER_STATE_DICT]: - raise ValueError(f"{files[0]} is not a zero checkpoint") - zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE] - world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT] - - # For ZeRO-2 each param group can have a different partition_count, as data parallelism for expert - # parameters can be different from data parallelism for non-expert parameters. So we can just - # use the max of the partition_count to get the dp world_size. - - if isinstance(world_size, list): - world_size = max(world_size) - - if world_size != total_files: - raise ValueError( - f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. " - "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
- ) - - # the groups are named differently in each stage - if zero_stage <= 2: - fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS - elif zero_stage == 3: - fp32_groups_key = FP32_FLAT_GROUPS - else: - raise ValueError(f"unknown zero stage {zero_stage}") - - if zero_stage <= 2: - fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))] - elif zero_stage == 3: - # if there is more than one param group, there will be multiple flattened tensors - one - # flattened tensor per group - for simplicity merge them into a single tensor - # - # XXX: could make the script more memory efficient for when there are multiple groups - it - # will require matching the sub-lists of param_shapes for each param group flattened tensor - - fp32_flat_groups = [ - torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts)) - ] - - return zero_stage, world_size, fp32_flat_groups - - -def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters): - """ - Returns fp32 state_dict reconstructed from ds checkpoint - - Args: - - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are) - - """ - print(f"Processing zero checkpoint '{ds_checkpoint_dir}'") - - optim_files = get_optim_files(ds_checkpoint_dir) - zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir) - print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}") - - model_files = get_model_state_files(ds_checkpoint_dir) - - zero_model_states = parse_model_states(model_files) - print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}') - - if zero_stage <= 2: - return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters) - elif zero_stage == 3: - return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters) - - -def _zero2_merge_frozen_params(state_dict, zero_model_states): - if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: - return - - frozen_param_shapes = zero_model_states[0].frozen_param_shapes - frozen_param_fragments = zero_model_states[0].frozen_param_fragments - - if debug: - num_elem = sum(s.numel() for s in frozen_param_shapes.values()) - print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') - - wanted_params = len(frozen_param_shapes) - wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) - avail_numel = sum([p.numel() for p in frozen_param_fragments.values()]) - print(f'Frozen params: Have {avail_numel} numels to process.') - print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') - - total_params = 0 - total_numel = 0 - for name, shape in frozen_param_shapes.items(): - total_params += 1 - unpartitioned_numel = shape.numel() - total_numel += unpartitioned_numel - - state_dict[name] = frozen_param_fragments[name] - - if debug: - print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ") - - print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") - - -def _has_callable(obj, fn): - attr = getattr(obj, fn, None) - return callable(attr) - - -def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): - param_shapes = zero_model_states[0].param_shapes - - # Reconstruction protocol: - 
# - # XXX: document this - - if debug: - for i in range(world_size): - for j in range(len(fp32_flat_groups[0])): - print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}") - - # XXX: memory usage doubles here (zero2) - num_param_groups = len(fp32_flat_groups[0]) - merged_single_partition_of_fp32_groups = [] - for i in range(num_param_groups): - merged_partitions = [sd[i] for sd in fp32_flat_groups] - full_single_fp32_vector = torch.cat(merged_partitions, 0) - merged_single_partition_of_fp32_groups.append(full_single_fp32_vector) - avail_numel = sum( - [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups]) - - if debug: - wanted_params = sum([len(shapes) for shapes in param_shapes]) - wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes]) - # not asserting if there is a mismatch due to possible padding - print(f"Have {avail_numel} numels to process.") - print(f"Need {wanted_numel} numels in {wanted_params} params.") - - # params - # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support - # an out-of-core computing solution - total_numel = 0 - total_params = 0 - for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups): - offset = 0 - avail_numel = full_single_fp32_vector.numel() - for name, shape in shapes.items(): - - unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape) - total_numel += unpartitioned_numel - total_params += 1 - - if debug: - print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ") - state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape) - offset += unpartitioned_numel - - # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and - # avail_numel can differ by anywhere between 0..2*world_size. 
Due to two unrelated complex - # paddings performed in the code it's almost impossible to predict the exact numbers w/o the - # live optimizer object, so we are checking that the numbers are within the right range - align_to = 2 * world_size - - def zero2_align(x): - return align_to * math.ceil(x / align_to) - - if debug: - print(f"original offset={offset}, avail_numel={avail_numel}") - - offset = zero2_align(offset) - avail_numel = zero2_align(avail_numel) - - if debug: - print(f"aligned offset={offset}, avail_numel={avail_numel}") - - # Sanity check - if offset != avail_numel: - raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong") - - print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements") - - -def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters): - state_dict = OrderedDict() - - # buffers - buffers = zero_model_states[0].buffers - state_dict.update(buffers) - if debug: - print(f"added {len(buffers)} buffers") - - if not exclude_frozen_parameters: - _zero2_merge_frozen_params(state_dict, zero_model_states) - - _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states) - - # recover shared parameters - for pair in zero_model_states[0].shared_params: - if pair[1] in state_dict: - state_dict[pair[0]] = state_dict[pair[1]] - - return state_dict - - -def zero3_partitioned_param_info(unpartitioned_numel, world_size): - remainder = unpartitioned_numel % world_size - padding_numel = (world_size - remainder) if remainder else 0 - partitioned_numel = math.ceil(unpartitioned_numel / world_size) - return partitioned_numel, padding_numel - - -def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states): - if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: - return - - if debug: - for i in range(world_size): - num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values()) - print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') - - frozen_param_shapes = zero_model_states[0].frozen_param_shapes - wanted_params = len(frozen_param_shapes) - wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) - avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size - print(f'Frozen params: Have {avail_numel} numels to process.') - print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') - - total_params = 0 - total_numel = 0 - for name, shape in zero_model_states[0].frozen_param_shapes.items(): - total_params += 1 - unpartitioned_numel = shape.numel() - total_numel += unpartitioned_numel - - param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states) - state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape) - - partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size) - - if debug: - print( - f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}" - ) - - print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") - - -def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): - param_shapes = zero_model_states[0].param_shapes - avail_numel = fp32_flat_groups[0].numel() * 
-
-
-def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
-    param_shapes = zero_model_states[0].param_shapes
-    avail_numel = fp32_flat_groups[0].numel() * world_size
-    # Reconstruction protocol: For zero3 we need to zip the partitions together at the boundary of each
-    # param, re-consolidating each param while dealing with padding, if any
-
-    # merge list of dicts, preserving order
-    param_shapes = {k: v for d in param_shapes for k, v in d.items()}
-
-    if debug:
-        for i in range(world_size):
-            print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
-
-    wanted_params = len(param_shapes)
-    wanted_numel = sum(shape.numel() for shape in param_shapes.values())
-    # not asserting if there is a mismatch due to possible padding
-    avail_numel = fp32_flat_groups[0].numel() * world_size
-    print(f"Trainable params: Have {avail_numel} numels to process.")
-    print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
-
-    # params
-    # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
-    # an out-of-core computing solution
-    offset = 0
-    total_numel = 0
-    total_params = 0
-    for name, shape in param_shapes.items():
-
-        unpartitioned_numel = shape.numel()
-        total_numel += unpartitioned_numel
-        total_params += 1
-
-        partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
-
-        if debug:
-            print(
-                f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
-            )
-
-        # XXX: memory usage doubles here
-        state_dict[name] = torch.cat(
-            tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
-            0).narrow(0, 0, unpartitioned_numel).view(shape)
-        offset += partitioned_numel
-
-    offset *= world_size
-
-    # Sanity check
-    if offset != avail_numel:
-        raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
-
-    print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
-
-
-def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
-                                               exclude_frozen_parameters):
-    state_dict = OrderedDict()
-
-    # buffers
-    buffers = zero_model_states[0].buffers
-    state_dict.update(buffers)
-    if debug:
-        print(f"added {len(buffers)} buffers")
-
-    if not exclude_frozen_parameters:
-        _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
-
-    _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
-
-    # recover shared parameters
-    for pair in zero_model_states[0].shared_params:
-        if pair[1] in state_dict:
-            state_dict[pair[0]] = state_dict[pair[1]]
-
-    return state_dict
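The `torch.cat(... narrow ...)` expression above is the heart of the ZeRO-3 path: every rank holds an equal-sized slice of each flattened parameter, so re-consolidation concatenates the per-rank slices at the same offset and trims the trailing padding. A self-contained toy version (invented values, `world_size=2`; not taken from the script):

```
import torch

world_size = 2
shape = torch.Size([3])                      # 3 elements over 2 ranks -> shards of 2, 1 element of padding
full = torch.tensor([1., 2., 3.])
# Each rank's flat fp32 group: rank 0 holds [1, 2], rank 1 holds [3, pad].
fp32_flat_groups = [full[:2].clone(), torch.tensor([3., 0.])]

partitioned_numel = 2
rebuilt = torch.cat(
    tuple(fp32_flat_groups[i].narrow(0, 0, partitioned_numel) for i in range(world_size)),
    0).narrow(0, 0, shape.numel()).view(shape)

assert torch.equal(rebuilt, full)            # the padding element is trimmed by the second narrow()
```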
-
-
-def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
-    """
-    Convert a ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
-    ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
-    via a model hub.
-
-    Args:
-        - ``checkpoint_dir``: path to the desired checkpoint folder
-        - ``tag``: checkpoint tag used as a unique identifier for the checkpoint. If not provided, will attempt to load the tag from the 'latest' file. e.g., ``global_step14``
-        - ``exclude_frozen_parameters``: exclude frozen parameters
-
-    Returns:
-        - pytorch ``state_dict``
-
-    Note: this approach may not work if your application doesn't have sufficient free CPU memory, and
-    you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
-    the checkpoint.
-
-    A typical usage might be ::
-
-        from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
-        # do the training and checkpoint saving
-        state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
-        model = model.cpu() # move to cpu
-        model.load_state_dict(state_dict)
-        # submit to model hub or save the model to share with others
-
-    In this example the ``model`` will no longer be usable in the deepspeed context of the same
-    application, i.e. you will need to re-initialize the deepspeed engine, since
-    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
-
-    If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
-
-    """
-    if tag is None:
-        latest_path = os.path.join(checkpoint_dir, 'latest')
-        if os.path.isfile(latest_path):
-            with open(latest_path, 'r') as fd:
-                tag = fd.read().strip()
-        else:
-            raise ValueError(f"Unable to find 'latest' file at {latest_path}")
-
-    ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
-
-    if not os.path.isdir(ds_checkpoint_dir):
-        raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
-
-    return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
-
-
-def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
-    """
-    Convert a ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
-    loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
-
-    Args:
-        - ``checkpoint_dir``: path to the desired checkpoint folder (one that contains the tag-folder, like ``global_step14``)
-        - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
-        - ``tag``: checkpoint tag used as a unique identifier for the checkpoint. If not provided, will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
-        - ``exclude_frozen_parameters``: exclude frozen parameters
-    """
-
-    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
-    print(f"Saving fp32 state dict to {output_file}")
-    torch.save(state_dict, output_file)
-
-
-def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
-    """
-    1. Put the provided model on cpu
-    2. Convert a ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
-    3. Load it into the provided model
-
-    Args:
-        - ``model``: the model object to update
-        - ``checkpoint_dir``: path to the desired checkpoint folder (one that contains the tag-folder, like ``global_step14``)
-        - ``tag``: checkpoint tag used as a unique identifier for the checkpoint. If not provided, will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
-
-    Returns:
-        - ``model``: the modified model
-
-    Make sure you have plenty of CPU memory available before you call this function. If you don't
-    have enough, use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
-    conveniently placed for you in the checkpoint folder.
-
-    A typical usage might be ::
-
-        from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
-        model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
-        # submit to model hub or save the model to share with others
-
-    Note that once this has been run, the ``model`` will no longer be usable in the deepspeed context
-    of the same application, i.e. you will need to re-initialize the deepspeed engine, since
-    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
-
-    """
-    logger.info(f"Extracting fp32 weights")
-    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
-
-    logger.info(f"Overwriting model with fp32 weights")
-    model = model.cpu()
-    model.load_state_dict(state_dict, strict=False)
-
-    return model
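For the checkpoints removed in this diff specifically, each deleted `latest` file points at the `global_step8316` tag, so a plausible conversion call would look like the sketch below. This is illustrative only: the paths come from the folders in this diff, it assumes the per-rank optimizer states were present alongside the shards, and the import path is the one the script's own docstrings use.

```
from deepspeed.utils.zero_to_fp32 import convert_zero_checkpoint_to_fp32_state_dict

# The tag defaults to the contents of the 'latest' file (global_step8316 here);
# the checkpoint folder and output path are placeholders from this diff.
convert_zero_checkpoint_to_fp32_state_dict("sft/hyperrouter", "sft/hyperrouter/pytorch_model.bin")
```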
-
-
-if __name__ == "__main__":
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument("checkpoint_dir",
-                        type=str,
-                        help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
-    parser.add_argument(
-        "output_file",
-        type=str,
-        help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
-    parser.add_argument("-t",
-                        "--tag",
-                        type=str,
-                        default=None,
-                        help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
-    parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
-    parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
-    args = parser.parse_args()
-
-    debug = args.debug
-
-    convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
-                                               args.output_file,
-                                               tag=args.tag,
-                                               exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/sft/smoe_cosinegating/added_tokens.json b/sft/smoe_cosinegating/added_tokens.json
deleted file mode 100644
index c9d3d3a1b74d87e381e471f7b33784015d2dc0ea..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/added_tokens.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "<|assistant|>": 32001,
-  "<|endoftext|>": 32000,
-  "<|end|>": 32007,
-  "<|placeholder1|>": 32002,
-  "<|placeholder2|>": 32003,
-  "<|placeholder3|>": 32004,
-  "<|placeholder4|>": 32005,
-  "<|placeholder5|>": 32008,
-  "<|placeholder6|>": 32009,
-  "<|system|>": 32006,
-  "<|user|>": 32010
-}
diff --git a/sft/smoe_cosinegating/config.json b/sft/smoe_cosinegating/config.json
deleted file mode 100644
index b52002bcbdeac8fed3b82d46824cf97b7f31431c..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/config.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
-  "_name_or_path": "/cm/archive/namnv78/checkpoints/phi3mini-clip/pft",
-  "architectures": [
-    "LlavaPhiForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "microsoft/Phi-3-mini-4k-instruct--configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "microsoft/Phi-3-mini-4k-instruct--modeling_phi3.Phi3ForCausalLM"
-  },
-  "balance_loss_coef": 0.1,
-  "bos_token_id": 1,
-  "clip_smoe": true,
-  "dropout": false,
-  "embd_pdrop": 0.0,
-  "eos_token_id": 32000,
-  "freeze_mm_mlp_adapter": false,
-  "hidden_act": "silu",
-  "hidden_size": 3072,
-  "image_aspect_ratio": "pad",
-  "initializer_range": 0.02,
-  "intermediate_size": 8192,
-  "local_rank": 0,
-  "max_position_embeddings": 4096,
-  "mlp_smoe": true,
-  "mm_hidden_size": 1024,
-  "mm_patch_merge_type": "flat",
-  "mm_projector_lr": null,
-  "mm_projector_type": "moe",
-  "mm_use_im_patch_token": false,
-  "mm_use_im_start_end": false,
-  "mm_vision_select_feature": "patch",
-  "mm_vision_select_layer": -2,
-  "mm_vision_tower": "openai/clip-vit-large-patch14-336",
-  "model_type": "llava_phi",
-  "moe_name": "smoe_cosinegating",
-  "num_attention_heads": 32,
-  "num_experts": 4,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "num_layers": 3,
-  "num_selected": 2,
-  "original_max_position_embeddings": 4096,
-  "pad_token_id": 32000,
-  "resid_pdrop": 0.0,
-
"rms_norm_eps": 1e-05, - "rope_scaling": null, - "rope_theta": 10000.0, - "router_z_loss_coef": 0.01, - "scales": [ - 1, - 3 - ], - "sliding_window": 2047, - "tie_word_embeddings": false, - "tokenizer_model_max_length": 2048, - "tokenizer_padding_side": "right", - "torch_dtype": "bfloat16", - "training": true, - "transformers_version": "4.43.0", - "tune_mm_mlp_adapter": false, - "use_cache": false, - "use_mm_proj": true, - "vocab_size": 32064 -} diff --git a/sft/smoe_cosinegating/generation_config.json b/sft/smoe_cosinegating/generation_config.json deleted file mode 100644 index f79d092444f37c54d37a669a57923ca3276d762c..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/generation_config.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "_from_model_config": true, - "bos_token_id": 1, - "do_sample": true, - "eos_token_id": [ - 32000, - 32001, - 32007 - ], - "pad_token_id": 32000, - "transformers_version": "4.43.0" -} diff --git a/sft/smoe_cosinegating/latest b/sft/smoe_cosinegating/latest deleted file mode 100644 index 15b842fabe685a86c9c52effdd8958a64045bed5..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/latest +++ /dev/null @@ -1 +0,0 @@ -global_step8316 \ No newline at end of file diff --git a/sft/smoe_cosinegating/model-00001-of-00002.safetensors b/sft/smoe_cosinegating/model-00001-of-00002.safetensors deleted file mode 100644 index db5c40f8033618d4e508638ada9f876a118348e7..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/model-00001-of-00002.safetensors +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4510d7f2a2b9ebe679c89addd6a84d00cd98eb42593c1166ef3b585f1d73dbcf -size 4972489328 diff --git a/sft/smoe_cosinegating/model-00002-of-00002.safetensors b/sft/smoe_cosinegating/model-00002-of-00002.safetensors deleted file mode 100644 index 838d8b67e5f0806bb93772834625f4defcd2ecf1..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/model-00002-of-00002.safetensors +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f7e51e8a60bee1d2552155ac57db6c6fc4359dbb9507408736f9fac287e1e92e -size 4611602520 diff --git a/sft/smoe_cosinegating/model.safetensors.index.json b/sft/smoe_cosinegating/model.safetensors.index.json deleted file mode 100644 index 6620b4c4a8f524334bc853c81e8131e24c2a88e2..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/model.safetensors.index.json +++ /dev/null @@ -1,945 +0,0 @@ -{ - "metadata": { - "total_size": 9583958216 - }, - "weight_map": { - "lm_head.weight": "model-00002-of-00002.safetensors", - "model.embed_tokens.weight": "model-00001-of-00002.safetensors", - "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.0.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.0.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.1.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.1.self_attn.o_proj.weight": 
"model-00001-of-00002.safetensors", - "model.layers.1.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.10.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.10.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.11.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.11.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.12.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.12.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.13.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.13.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.14.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.14.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.15.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.15.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.16.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.16.self_attn.qkv_proj.weight": 
"model-00001-of-00002.safetensors", - "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.17.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.17.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.18.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.18.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.19.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.19.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.2.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.2.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.20.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.20.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.21.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.21.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.22.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.22.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors", 
- "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.23.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.23.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.24.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.24.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.25.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.25.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.26.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.26.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.27.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.27.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.28.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.28.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.29.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.29.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - 
"model.layers.3.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.3.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.30.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.30.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.31.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.31.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.4.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.4.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.5.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.5.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.6.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.6.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.7.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.7.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.8.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.8.post_attention_layernorm.weight": 
"model-00001-of-00002.safetensors", - "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.8.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.9.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.9.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.mm_projector.moelayer.experts.0.0.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.0.0.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.0.2.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.0.2.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.1.0.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.1.0.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.1.2.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.1.2.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.2.0.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.2.0.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.2.2.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.2.2.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.3.0.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.3.0.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.3.2.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.3.2.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.norm.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.embeddings.class_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc1.bias": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc1.weight": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc2.bias": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc2.weight": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.pre_layrnorm.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.pre_layrnorm.weight": "model-00002-of-00002.safetensors" - } -} diff --git a/sft/smoe_cosinegating/rng_state_0.pth b/sft/smoe_cosinegating/rng_state_0.pth deleted file mode 100644 index 71ea030e2b6ccf2942e534710e59240994fbf63d..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/rng_state_0.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:98d7182be6bef60e0a2c9cafee3f25cd25e5efbb81e449bb83476786049e3afd -size 15024 diff --git a/sft/smoe_cosinegating/rng_state_1.pth b/sft/smoe_cosinegating/rng_state_1.pth deleted file mode 100644 index 9d2963e51043b85c2837399b5ae8212b62ea2cf9..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/rng_state_1.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ca439560b72fdfdeca80538c1d7fd13a79cb40a4113abdd40bed2ee18c276f6e -size 15024 diff --git a/sft/smoe_cosinegating/rng_state_2.pth b/sft/smoe_cosinegating/rng_state_2.pth deleted file mode 100644 index 51d113c2fd99f3ab9ae0a827bc55e4424d99d271..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/rng_state_2.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:37d8a08a60f1e45bbe669ccc291b732178afde092185a2275107087813030b6c -size 15024 diff --git a/sft/smoe_cosinegating/rng_state_3.pth 
diff --git a/sft/smoe_cosinegating/rng_state_0.pth b/sft/smoe_cosinegating/rng_state_0.pth
deleted file mode 100644
index 71ea030e2b6ccf2942e534710e59240994fbf63d..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/rng_state_0.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:98d7182be6bef60e0a2c9cafee3f25cd25e5efbb81e449bb83476786049e3afd
-size 15024
diff --git a/sft/smoe_cosinegating/rng_state_1.pth b/sft/smoe_cosinegating/rng_state_1.pth
deleted file mode 100644
index 9d2963e51043b85c2837399b5ae8212b62ea2cf9..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/rng_state_1.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ca439560b72fdfdeca80538c1d7fd13a79cb40a4113abdd40bed2ee18c276f6e
-size 15024
diff --git a/sft/smoe_cosinegating/rng_state_2.pth b/sft/smoe_cosinegating/rng_state_2.pth
deleted file mode 100644
index 51d113c2fd99f3ab9ae0a827bc55e4424d99d271..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/rng_state_2.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:37d8a08a60f1e45bbe669ccc291b732178afde092185a2275107087813030b6c
-size 15024
diff --git a/sft/smoe_cosinegating/rng_state_3.pth b/sft/smoe_cosinegating/rng_state_3.pth
deleted file mode 100644
index 2e37d90e8d2dbd6c0377326df7ded780972f9ced..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/rng_state_3.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5990ef8a1c2a5a5daffd1a6f0a3bfedabc1eacf2b1a98ac77694877c0faf73e4
-size 15024
diff --git a/sft/smoe_cosinegating/special_tokens_map.json b/sft/smoe_cosinegating/special_tokens_map.json
deleted file mode 100644
index 3e4d5a5bc1cb51753cc9ae0305ece0da60052b10..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/special_tokens_map.json
+++ /dev/null
@@ -1,24 +0,0 @@
-{
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "<unk>",
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
diff --git a/sft/smoe_cosinegating/tokenizer.model b/sft/smoe_cosinegating/tokenizer.model
deleted file mode 100644
index 6c00c742ce03c627d6cd5b795984876fa49fa899..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/tokenizer.model
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
-size 499723
diff --git a/sft/smoe_cosinegating/tokenizer_config.json b/sft/smoe_cosinegating/tokenizer_config.json
deleted file mode 100644
index 3bd56c6314b14d6a33a69cd1802e04dbc1e47840..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/tokenizer_config.json
+++ /dev/null
@@ -1,132 +0,0 @@
-{
-  "add_bos_token": true,
-  "add_eos_token": false,
-  "add_prefix_space": true,
-  "added_tokens_decoder": {
-    "0": { "content": "<unk>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
-    "1": { "content": "<s>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
-    "2": { "content": "</s>", "lstrip": false, "normalized": false, "rstrip": true, "single_word": false, "special": false },
-    "32000": { "content": "<|endoftext|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
-    [... ids 32001-32010 define <|assistant|>, <|placeholder1|> through <|placeholder4|>, <|system|>, <|end|>, <|placeholder5|>, <|placeholder6|>, and <|user|>, all with lstrip false, normalized false, rstrip true, single_word false, special true ...]
-  },
-  "bos_token": "<s>",
-  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') %}{{'<|user|>' + '\n' + message['content'] + '<|end|>' + '\n' + '<|assistant|>' + '\n'}}{% elif (message['role'] == 'assistant') %}{{message['content'] + '<|end|>' + '\n'}}{% endif %}{% endfor %}",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
-  "legacy": false,
-  "model_max_length": 2048,
-  "pad_token": "<unk>",
-  "padding_side": "right",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "unk_token": "<unk>",
-  "use_default_system_prompt": false
-}
diff --git a/sft/smoe_cosinegating/trainer_state.json b/sft/smoe_cosinegating/trainer_state.json
deleted file mode 100644
index 9855152cd3ae8137fcab929751d599ca547ce568..0000000000000000000000000000000000000000
--- a/sft/smoe_cosinegating/trainer_state.json
+++ /dev/null
@@ -1,74877 +0,0 @@
-{
-  "best_metric": null,
-  "best_model_checkpoint": null,
-  "epoch": 0.9999398785546805,
-  "eval_steps": 500,
-  "global_step": 8316,
-  "is_hyper_param_search": false,
-  "is_local_process_zero": true,
-  "is_world_process_zero": true,
-  "log_history": [
-    {
-      "epoch": 0.00012024289063909097,
-      "flos": 17735413654320.0,
-      "grad_norm": 13.741682276457617,
-      "learning_rate": 0.0,
-      "loss": 1.6652,
-      "num_input_tokens_seen": 20095,
-      "step": 1
-    },
-    [... per-step records continue with the same fields; over steps 2-25 the learning rate warms up from 5.0e-07 toward 2.3e-06 while the loss drifts down from about 1.62 into the 0.9-1.2 range ...]
-    {
-      "epoch": 0.003126315156616365,
-      "flos": 17369644535520.0,
-      "grad_norm": 2.8953492196765898,
-      "learning_rate": 2.3603148416618152e-06,
-      "loss": 1.1352,
"num_input_tokens_seen": 648295, - "step": 26 - }, - { - "epoch": 0.003246558047255456, - "flos": 16795323443640.0, - "grad_norm": 4.0735508781899545, - "learning_rate": 2.3876556694204647e-06, - "loss": 1.2132, - "num_input_tokens_seen": 668170, - "step": 27 - }, - { - "epoch": 0.003366800937894547, - "flos": 12625388985720.0, - "grad_norm": 9.565612286494124, - "learning_rate": 2.414002061950908e-06, - "loss": 1.1023, - "num_input_tokens_seen": 686765, - "step": 28 - }, - { - "epoch": 0.003487043828533638, - "flos": 17373139953360.0, - "grad_norm": 7.2308546691816575, - "learning_rate": 2.4394238264681557e-06, - "loss": 1.2163, - "num_input_tokens_seen": 706220, - "step": 29 - }, - { - "epoch": 0.003607286719172729, - "flos": 18605014255200.0, - "grad_norm": 4.533220796497224, - "learning_rate": 2.4639836682781433e-06, - "loss": 1.2044, - "num_input_tokens_seen": 726070, - "step": 30 - }, - { - "epoch": 0.00372752960981182, - "flos": 14618137485480.0, - "grad_norm": 4.692547331435632, - "learning_rate": 2.487738122623307e-06, - "loss": 1.1912, - "num_input_tokens_seen": 744700, - "step": 31 - }, - { - "epoch": 0.003847772500450911, - "flos": 16114339253760.0, - "grad_norm": 4.109189569619219, - "learning_rate": 2.510738338534912e-06, - "loss": 1.1814, - "num_input_tokens_seen": 763145, - "step": 32 - }, - { - "epoch": 0.003968015391090002, - "flos": 12731622821880.0, - "grad_norm": 3.226463039241471, - "learning_rate": 2.5330307420306648e-06, - "loss": 1.2389, - "num_input_tokens_seen": 779715, - "step": 33 - }, - { - "epoch": 0.004088258281729093, - "flos": 19440903273120.0, - "grad_norm": 23.411124482000407, - "learning_rate": 2.554657600279796e-06, - "loss": 1.0803, - "num_input_tokens_seen": 800710, - "step": 34 - }, - { - "epoch": 0.004208501172368184, - "flos": 16534062133200.0, - "grad_norm": 3.381896143748298, - "learning_rate": 2.5756575039679493e-06, - "loss": 1.257, - "num_input_tokens_seen": 819980, - "step": 35 - }, - { - "epoch": 0.0043287440630072746, - "flos": 12260530733280.0, - "grad_norm": 3.3389423068472732, - "learning_rate": 2.5960657816942747e-06, - "loss": 1.166, - "num_input_tokens_seen": 838615, - "step": 36 - }, - { - "epoch": 0.004448986953646365, - "flos": 37978506775200.0, - "grad_norm": 1.073024849236854, - "learning_rate": 2.6159148575788668e-06, - "loss": 0.8707, - "num_input_tokens_seen": 896370, - "step": 37 - }, - { - "epoch": 0.004569229844285457, - "flos": 9480656192160.0, - "grad_norm": 4.873051571891524, - "learning_rate": 2.635234561171e-06, - "loss": 1.2, - "num_input_tokens_seen": 914485, - "step": 38 - }, - { - "epoch": 0.0046894727349245475, - "flos": 11467119072120.0, - "grad_norm": 4.147394138891377, - "learning_rate": 2.6540523970949877e-06, - "loss": 1.1625, - "num_input_tokens_seen": 929050, - "step": 39 - }, - { - "epoch": 0.004809715625563638, - "flos": 17005123560240.0, - "grad_norm": 5.218202482272616, - "learning_rate": 2.6723937805519533e-06, - "loss": 1.1499, - "num_input_tokens_seen": 946270, - "step": 40 - }, - { - "epoch": 0.00492995851620273, - "flos": 14747127747240.0, - "grad_norm": 7.243510268476292, - "learning_rate": 2.690282243737839e-06, - "loss": 1.1509, - "num_input_tokens_seen": 964925, - "step": 41 - }, - { - "epoch": 0.0050502014068418205, - "flos": 14435417074440.0, - "grad_norm": 8.618780132459563, - "learning_rate": 2.7077396173840807e-06, - "loss": 1.2077, - "num_input_tokens_seen": 982930, - "step": 42 - }, - { - "epoch": 0.005170444297480911, - "flos": 18500558789520.0, - "grad_norm": 6.085251736476402, - 
"learning_rate": 2.7247861909342594e-06, - "loss": 1.1352, - "num_input_tokens_seen": 1003575, - "step": 43 - }, - { - "epoch": 0.005290687188120003, - "flos": 14877037855800.0, - "grad_norm": 5.499660654870042, - "learning_rate": 2.7414408543044743e-06, - "loss": 1.0446, - "num_input_tokens_seen": 1018935, - "step": 44 - }, - { - "epoch": 0.005410930078759093, - "flos": 11210579641920.0, - "grad_norm": 8.097652481306406, - "learning_rate": 2.7577212237113157e-06, - "loss": 1.0142, - "num_input_tokens_seen": 1035695, - "step": 45 - }, - { - "epoch": 0.005531172969398184, - "flos": 14986184540160.0, - "grad_norm": 4.858900040846034, - "learning_rate": 2.7736437536690466e-06, - "loss": 1.2599, - "num_input_tokens_seen": 1055045, - "step": 46 - }, - { - "epoch": 0.005651415860037276, - "flos": 14798619602640.0, - "grad_norm": 3.2661098868577256, - "learning_rate": 2.789223836941131e-06, - "loss": 1.2903, - "num_input_tokens_seen": 1074900, - "step": 47 - }, - { - "epoch": 0.005771658750676366, - "flos": 9348231835680.0, - "grad_norm": 4.8102619862733995, - "learning_rate": 2.8044758939680847e-06, - "loss": 1.2779, - "num_input_tokens_seen": 1090690, - "step": 48 - }, - { - "epoch": 0.005891901641315457, - "flos": 17372526722160.0, - "grad_norm": 5.4512841907768195, - "learning_rate": 2.8194134530738863e-06, - "loss": 1.2253, - "num_input_tokens_seen": 1109180, - "step": 49 - }, - { - "epoch": 0.006012144531954548, - "flos": 16402986885360.0, - "grad_norm": 7.85604854741217, - "learning_rate": 2.834049222568994e-06, - "loss": 1.1187, - "num_input_tokens_seen": 1126250, - "step": 50 - }, - { - "epoch": 0.006132387422593639, - "flos": 16009546510920.0, - "grad_norm": 3.8656584828436986, - "learning_rate": 2.848395155712969e-06, - "loss": 1.1476, - "num_input_tokens_seen": 1146190, - "step": 51 - }, - { - "epoch": 0.00625263031323273, - "flos": 19673367830640.0, - "grad_norm": 4.912903568687231, - "learning_rate": 2.8624625093687977e-06, - "loss": 1.2086, - "num_input_tokens_seen": 1163045, - "step": 52 - }, - { - "epoch": 0.006372873203871821, - "flos": 16427491019880.0, - "grad_norm": 9.316522126506833, - "learning_rate": 2.876261897070029e-06, - "loss": 1.1042, - "num_input_tokens_seen": 1182895, - "step": 53 - }, - { - "epoch": 0.006493116094510912, - "flos": 16244586639480.0, - "grad_norm": 4.92872322433207, - "learning_rate": 2.889803337127447e-06, - "loss": 1.1498, - "num_input_tokens_seen": 1201215, - "step": 54 - }, - { - "epoch": 0.006613358985150003, - "flos": 16399154190360.0, - "grad_norm": 5.203594198063969, - "learning_rate": 2.903096296321516e-06, - "loss": 1.0729, - "num_input_tokens_seen": 1219080, - "step": 55 - }, - { - "epoch": 0.006733601875789094, - "flos": 18890043822720.0, - "grad_norm": 4.702455593388213, - "learning_rate": 2.9161497296578907e-06, - "loss": 1.1493, - "num_input_tokens_seen": 1238190, - "step": 56 - }, - { - "epoch": 0.006853844766428185, - "flos": 10974711651240.0, - "grad_norm": 5.115956585017621, - "learning_rate": 2.928972116604173e-06, - "loss": 1.0878, - "num_input_tokens_seen": 1254185, - "step": 57 - }, - { - "epoch": 0.006974087657067276, - "flos": 17242095367080.0, - "grad_norm": 3.7401047168698174, - "learning_rate": 2.9415714941751377e-06, - "loss": 1.2278, - "num_input_tokens_seen": 1275125, - "step": 58 - }, - { - "epoch": 0.007094330547706367, - "flos": 18340288188480.0, - "grad_norm": 6.116271046552186, - "learning_rate": 2.9539554871897396e-06, - "loss": 1.157, - "num_input_tokens_seen": 1295015, - "step": 59 - }, - { - "epoch": 
0.007214573438345458, - "flos": 15139341659280.0, - "grad_norm": 2.9773954092212143, - "learning_rate": 2.9661313359851253e-06, - "loss": 1.1953, - "num_input_tokens_seen": 1312620, - "step": 60 - }, - { - "epoch": 0.007334816328984549, - "flos": 17740389807480.0, - "grad_norm": 2.9745189630465356, - "learning_rate": 2.978105921839922e-06, - "loss": 1.1667, - "num_input_tokens_seen": 1332885, - "step": 61 - }, - { - "epoch": 0.00745505921962364, - "flos": 13121107855080.0, - "grad_norm": 7.477834372127578, - "learning_rate": 2.9898857903302893e-06, - "loss": 0.9593, - "num_input_tokens_seen": 1351555, - "step": 62 - }, - { - "epoch": 0.007575302110262731, - "flos": 13097922167640.0, - "grad_norm": 8.170417962093063, - "learning_rate": 3.001477172817253e-06, - "loss": 1.1033, - "num_input_tokens_seen": 1369165, - "step": 63 - }, - { - "epoch": 0.007695545000901822, - "flos": 17766273712200.0, - "grad_norm": 4.017170293319334, - "learning_rate": 3.012886006241894e-06, - "loss": 1.1932, - "num_input_tokens_seen": 1388270, - "step": 64 - }, - { - "epoch": 0.007815787891540913, - "flos": 15143787585480.0, - "grad_norm": 3.6763761162681114, - "learning_rate": 3.0241179513858383e-06, - "loss": 1.1198, - "num_input_tokens_seen": 1407300, - "step": 65 - }, - { - "epoch": 0.007936030782180003, - "flos": 15324055071720.0, - "grad_norm": 3.535468898035944, - "learning_rate": 3.035178409737647e-06, - "loss": 1.1112, - "num_input_tokens_seen": 1424470, - "step": 66 - }, - { - "epoch": 0.008056273672819095, - "flos": 14278979168400.0, - "grad_norm": 3.73855331531041, - "learning_rate": 3.046072539090907e-06, - "loss": 1.1087, - "num_input_tokens_seen": 1442155, - "step": 67 - }, - { - "epoch": 0.008176516563458186, - "flos": 12994233240960.0, - "grad_norm": 4.146297384134508, - "learning_rate": 3.056805267986779e-06, - "loss": 1.2756, - "num_input_tokens_seen": 1459345, - "step": 68 - }, - { - "epoch": 0.008296759454097276, - "flos": 15537595898640.0, - "grad_norm": 5.554581739059966, - "learning_rate": 3.0673813091022194e-06, - "loss": 1.178, - "num_input_tokens_seen": 1478285, - "step": 69 - }, - { - "epoch": 0.008417002344736368, - "flos": 45395845573800.0, - "grad_norm": 1.092165098383516, - "learning_rate": 3.0778051716749317e-06, - "loss": 0.8933, - "num_input_tokens_seen": 1541550, - "step": 70 - }, - { - "epoch": 0.008537245235375458, - "flos": 16324078047240.0, - "grad_norm": 3.2573263779903088, - "learning_rate": 3.0880811730470094e-06, - "loss": 1.1355, - "num_input_tokens_seen": 1560725, - "step": 71 - }, - { - "epoch": 0.008657488126014549, - "flos": 44372146325880.0, - "grad_norm": 0.9727499123943116, - "learning_rate": 3.098213449401257e-06, - "loss": 0.8463, - "num_input_tokens_seen": 1627375, - "step": 72 - }, - { - "epoch": 0.00877773101665364, - "flos": 21591806726280.0, - "grad_norm": 5.141993192735733, - "learning_rate": 3.1082059657570015e-06, - "loss": 1.207, - "num_input_tokens_seen": 1646330, - "step": 73 - }, - { - "epoch": 0.00889797390729273, - "flos": 16717671729480.0, - "grad_norm": 3.517209115760571, - "learning_rate": 3.1180625252858496e-06, - "loss": 1.1789, - "num_input_tokens_seen": 1664480, - "step": 74 - }, - { - "epoch": 0.009018216797931822, - "flos": 16402680269760.0, - "grad_norm": 3.5163180198016555, - "learning_rate": 3.1277867780021663e-06, - "loss": 1.0238, - "num_input_tokens_seen": 1680835, - "step": 75 - }, - { - "epoch": 0.009138459688570914, - "flos": 11258330787000.0, - "grad_norm": 3.456659915614609, - "learning_rate": 3.1373822288779824e-06, - "loss": 
1.182, - "num_input_tokens_seen": 1697135, - "step": 76 - }, - { - "epoch": 0.009258702579210003, - "flos": 19234813205280.0, - "grad_norm": 2.6475897138198374, - "learning_rate": 3.1468522454274533e-06, - "loss": 1.0114, - "num_input_tokens_seen": 1717210, - "step": 77 - }, - { - "epoch": 0.009378945469849095, - "flos": 19152838211160.0, - "grad_norm": 5.455569181319497, - "learning_rate": 3.15620006480197e-06, - "loss": 1.1379, - "num_input_tokens_seen": 1736200, - "step": 78 - }, - { - "epoch": 0.009499188360488187, - "flos": 25470517981560.0, - "grad_norm": 7.233922297441404, - "learning_rate": 3.1654288004333087e-06, - "loss": 0.9778, - "num_input_tokens_seen": 1754585, - "step": 79 - }, - { - "epoch": 0.009619431251127276, - "flos": 15271949985120.0, - "grad_norm": 3.161164791215295, - "learning_rate": 3.1745414482589353e-06, - "loss": 0.9761, - "num_input_tokens_seen": 1773515, - "step": 80 - }, - { - "epoch": 0.009739674141766368, - "flos": 12338765017080.0, - "grad_norm": 3.498707532662438, - "learning_rate": 3.1835408925606204e-06, - "loss": 1.0795, - "num_input_tokens_seen": 1791375, - "step": 81 - }, - { - "epoch": 0.00985991703240546, - "flos": 19601725782240.0, - "grad_norm": 3.026144545749191, - "learning_rate": 3.1924299114448214e-06, - "loss": 1.1118, - "num_input_tokens_seen": 1811575, - "step": 82 - }, - { - "epoch": 0.00998015992304455, - "flos": 9742070810400.0, - "grad_norm": 5.760905994052885, - "learning_rate": 3.2012111819909055e-06, - "loss": 1.0639, - "num_input_tokens_seen": 1828625, - "step": 83 - }, - { - "epoch": 0.010100402813683641, - "flos": 14329397869200.0, - "grad_norm": 3.5894342132843975, - "learning_rate": 3.2098872850910627e-06, - "loss": 1.1761, - "num_input_tokens_seen": 1845020, - "step": 84 - }, - { - "epoch": 0.010220645704322733, - "flos": 12181008663960.0, - "grad_norm": 10.104198799213297, - "learning_rate": 3.2184607100038194e-06, - "loss": 1.119, - "num_input_tokens_seen": 1863380, - "step": 85 - }, - { - "epoch": 0.010340888594961822, - "flos": 15248886943920.0, - "grad_norm": 3.3034642618243035, - "learning_rate": 3.2269338586412414e-06, - "loss": 1.1496, - "num_input_tokens_seen": 1880685, - "step": 86 - }, - { - "epoch": 0.010461131485600914, - "flos": 16350207244440.0, - "grad_norm": 4.283025443048498, - "learning_rate": 3.2353090496083106e-06, - "loss": 1.1953, - "num_input_tokens_seen": 1898240, - "step": 87 - }, - { - "epoch": 0.010581374376240005, - "flos": 23927944838400.0, - "grad_norm": 3.515243306102528, - "learning_rate": 3.2435885220114572e-06, - "loss": 1.0471, - "num_input_tokens_seen": 1919310, - "step": 88 - }, - { - "epoch": 0.010701617266879095, - "flos": 15459330953280.0, - "grad_norm": 3.4984033212359416, - "learning_rate": 3.2517744390519113e-06, - "loss": 1.1618, - "num_input_tokens_seen": 1937895, - "step": 89 - }, - { - "epoch": 0.010821860157518187, - "flos": 13516418584680.0, - "grad_norm": 3.912029172215194, - "learning_rate": 3.259868891418298e-06, - "loss": 0.9764, - "num_input_tokens_seen": 1955380, - "step": 90 - }, - { - "epoch": 0.010942103048157278, - "flos": 18107057091960.0, - "grad_norm": 3.109120574203272, - "learning_rate": 3.2678739004917757e-06, - "loss": 1.0598, - "num_input_tokens_seen": 1974835, - "step": 91 - }, - { - "epoch": 0.011062345938796368, - "flos": 19578325463880.0, - "grad_norm": 2.670136360086788, - "learning_rate": 3.275791421376029e-06, - "loss": 1.1467, - "num_input_tokens_seen": 1998000, - "step": 92 - }, - { - "epoch": 0.01118258882943546, - "flos": 11368673272200.0, - 
"grad_norm": 3.309197416935183, - "learning_rate": 3.2836233457634622e-06, - "loss": 1.1776, - "num_input_tokens_seen": 2015855, - "step": 93 - }, - { - "epoch": 0.011302831720074551, - "flos": 14672756820000.0, - "grad_norm": 3.2299337809007644, - "learning_rate": 3.2913715046481135e-06, - "loss": 1.0686, - "num_input_tokens_seen": 2035320, - "step": 94 - }, - { - "epoch": 0.011423074610713641, - "flos": 9212404046040.0, - "grad_norm": 8.294579484707166, - "learning_rate": 3.299037670895023e-06, - "loss": 1.1058, - "num_input_tokens_seen": 2051655, - "step": 95 - }, - { - "epoch": 0.011543317501352733, - "flos": 21620051571120.0, - "grad_norm": 3.1028309109368553, - "learning_rate": 3.3066235616750667e-06, - "loss": 1.0366, - "num_input_tokens_seen": 2072610, - "step": 96 - }, - { - "epoch": 0.011663560391991824, - "flos": 10974527681880.0, - "grad_norm": 4.429009064352234, - "learning_rate": 3.3141308407736276e-06, - "loss": 1.1375, - "num_input_tokens_seen": 2088965, - "step": 97 - }, - { - "epoch": 0.011783803282630914, - "flos": 14121866708040.0, - "grad_norm": 3.8289515011800126, - "learning_rate": 3.321561120780869e-06, - "loss": 1.0851, - "num_input_tokens_seen": 2107395, - "step": 98 - }, - { - "epoch": 0.011904046173270006, - "flos": 15873596075040.0, - "grad_norm": 5.302071961676114, - "learning_rate": 3.3289159651708192e-06, - "loss": 1.2375, - "num_input_tokens_seen": 2124690, - "step": 99 - }, - { - "epoch": 0.012024289063909096, - "flos": 13545521953200.0, - "grad_norm": 3.012381593268747, - "learning_rate": 3.3361968902759768e-06, - "loss": 1.2004, - "num_input_tokens_seen": 2144090, - "step": 100 - }, - { - "epoch": 0.012144531954548187, - "flos": 10607461797120.0, - "grad_norm": 4.06174191957892, - "learning_rate": 3.343405367163663e-06, - "loss": 1.1722, - "num_input_tokens_seen": 2160740, - "step": 101 - }, - { - "epoch": 0.012264774845187279, - "flos": 10686431958360.0, - "grad_norm": 4.605503124209082, - "learning_rate": 3.350542823419951e-06, - "loss": 1.04, - "num_input_tokens_seen": 2177060, - "step": 102 - }, - { - "epoch": 0.012385017735826368, - "flos": 9842877550440.0, - "grad_norm": 5.670719417835828, - "learning_rate": 3.3576106448465615e-06, - "loss": 1.0951, - "num_input_tokens_seen": 2190160, - "step": 103 - }, - { - "epoch": 0.01250526062646546, - "flos": 16796120644200.0, - "grad_norm": 6.17891922719193, - "learning_rate": 3.3646101770757797e-06, - "loss": 1.1012, - "num_input_tokens_seen": 2208670, - "step": 104 - }, - { - "epoch": 0.012625503517104552, - "flos": 24713445817080.0, - "grad_norm": 2.3344796521118507, - "learning_rate": 3.371542727108104e-06, - "loss": 1.0738, - "num_input_tokens_seen": 2230565, - "step": 105 - }, - { - "epoch": 0.012745746407743641, - "flos": 12626768755920.0, - "grad_norm": 6.575664866667149, - "learning_rate": 3.3784095647770114e-06, - "loss": 1.1197, - "num_input_tokens_seen": 2248930, - "step": 106 - }, - { - "epoch": 0.012865989298382733, - "flos": 14619823871280.0, - "grad_norm": 10.490374558328384, - "learning_rate": 3.3852119241449547e-06, - "loss": 1.1037, - "num_input_tokens_seen": 2267770, - "step": 107 - }, - { - "epoch": 0.012986232189021825, - "flos": 17029842325680.0, - "grad_norm": 4.13922302695152, - "learning_rate": 3.3919510048344295e-06, - "loss": 1.1836, - "num_input_tokens_seen": 2285500, - "step": 108 - }, - { - "epoch": 0.013106475079660914, - "flos": 16868559893160.0, - "grad_norm": 3.7098842049596352, - "learning_rate": 3.3986279732976907e-06, - "loss": 1.0795, - "num_input_tokens_seen": 2303695, 
- "step": 109 - }, - { - "epoch": 0.013226717970300006, - "flos": 14984007569400.0, - "grad_norm": 4.139760283380841, - "learning_rate": 3.4052439640284983e-06, - "loss": 1.1726, - "num_input_tokens_seen": 2322330, - "step": 110 - }, - { - "epoch": 0.013346960860939098, - "flos": 17687763474360.0, - "grad_norm": 3.6089162824036856, - "learning_rate": 3.4118000807190217e-06, - "loss": 1.0416, - "num_input_tokens_seen": 2342930, - "step": 111 - }, - { - "epoch": 0.013467203751578187, - "flos": 20072081993400.0, - "grad_norm": 2.9402825994326407, - "learning_rate": 3.4182973973648723e-06, - "loss": 0.9926, - "num_input_tokens_seen": 2363860, - "step": 112 - }, - { - "epoch": 0.013587446642217279, - "flos": 13413220242960.0, - "grad_norm": 4.602634894338356, - "learning_rate": 3.424736959321014e-06, - "loss": 1.1555, - "num_input_tokens_seen": 2381385, - "step": 113 - }, - { - "epoch": 0.01370768953285637, - "flos": 17058761724840.0, - "grad_norm": 3.691015733518426, - "learning_rate": 3.431119784311155e-06, - "loss": 1.1063, - "num_input_tokens_seen": 2400780, - "step": 114 - }, - { - "epoch": 0.01382793242349546, - "flos": 27992657291640.0, - "grad_norm": 3.82697711784035, - "learning_rate": 3.43744686339307e-06, - "loss": 1.0016, - "num_input_tokens_seen": 2422820, - "step": 115 - }, - { - "epoch": 0.013948175314134552, - "flos": 29538265929240.0, - "grad_norm": 5.304543314843676, - "learning_rate": 3.44371916188212e-06, - "loss": 1.1264, - "num_input_tokens_seen": 2443295, - "step": 116 - }, - { - "epoch": 0.014068418204773643, - "flos": 15955172468880.0, - "grad_norm": 5.698800064405863, - "learning_rate": 3.449937620235143e-06, - "loss": 1.0923, - "num_input_tokens_seen": 2463610, - "step": 117 - }, - { - "epoch": 0.014188661095412733, - "flos": 16926766630200.0, - "grad_norm": 14.593874945095491, - "learning_rate": 3.456103154896722e-06, - "loss": 1.1038, - "num_input_tokens_seen": 2484605, - "step": 118 - }, - { - "epoch": 0.014308903986051825, - "flos": 16822771087920.0, - "grad_norm": 5.630395632677046, - "learning_rate": 3.462216659109757e-06, - "loss": 1.148, - "num_input_tokens_seen": 2504505, - "step": 119 - }, - { - "epoch": 0.014429146876690916, - "flos": 14672235573480.0, - "grad_norm": 4.485271510450726, - "learning_rate": 3.4682790036921077e-06, - "loss": 1.07, - "num_input_tokens_seen": 2522885, - "step": 120 - }, - { - "epoch": 0.014549389767330006, - "flos": 14356753528800.0, - "grad_norm": 3.064331596887183, - "learning_rate": 3.4742910377810193e-06, - "loss": 1.0594, - "num_input_tokens_seen": 2540065, - "step": 121 - }, - { - "epoch": 0.014669632657969098, - "flos": 12757752019080.0, - "grad_norm": 3.755512128261606, - "learning_rate": 3.4802535895469042e-06, - "loss": 1.1038, - "num_input_tokens_seen": 2558535, - "step": 122 - }, - { - "epoch": 0.01478987554860819, - "flos": 16161845106360.0, - "grad_norm": 3.569443611577024, - "learning_rate": 3.4861674668779934e-06, - "loss": 1.1213, - "num_input_tokens_seen": 2576485, - "step": 123 - }, - { - "epoch": 0.01491011843924728, - "flos": 12178402431360.0, - "grad_norm": 5.304475355892374, - "learning_rate": 3.492033458037272e-06, - "loss": 1.0722, - "num_input_tokens_seen": 2594775, - "step": 124 - }, - { - "epoch": 0.01503036132988637, - "flos": 12520105657920.0, - "grad_norm": 3.9785031602652063, - "learning_rate": 3.497852332293018e-06, - "loss": 1.0898, - "num_input_tokens_seen": 2610070, - "step": 125 - }, - { - "epoch": 0.015150604220525462, - "flos": 13385711275560.0, - "grad_norm": 4.5342584554258245, - 
"learning_rate": 3.5036248405242356e-06, - "loss": 1.2025, - "num_input_tokens_seen": 2628545, - "step": 126 - }, - { - "epoch": 0.015270847111164552, - "flos": 28148880566760.0, - "grad_norm": 3.744613339029531, - "learning_rate": 3.509351715802146e-06, - "loss": 1.0508, - "num_input_tokens_seen": 2649150, - "step": 127 - }, - { - "epoch": 0.015391090001803644, - "flos": 31269661133880.0, - "grad_norm": 3.8817273471446705, - "learning_rate": 3.5150336739488763e-06, - "loss": 1.0092, - "num_input_tokens_seen": 2671155, - "step": 128 - }, - { - "epoch": 0.015511332892442733, - "flos": 13413312227640.0, - "grad_norm": 3.733842628231659, - "learning_rate": 3.5206714140744143e-06, - "loss": 1.0527, - "num_input_tokens_seen": 2690930, - "step": 129 - }, - { - "epoch": 0.015631575783081827, - "flos": 17451558206520.0, - "grad_norm": 4.16567658423202, - "learning_rate": 3.5262656190928208e-06, - "loss": 1.0889, - "num_input_tokens_seen": 2708950, - "step": 130 - }, - { - "epoch": 0.015751818673720917, - "flos": 44616973040160.0, - "grad_norm": 0.9478993920788796, - "learning_rate": 3.5318169562186737e-06, - "loss": 0.9467, - "num_input_tokens_seen": 2777515, - "step": 131 - }, - { - "epoch": 0.015872061564360006, - "flos": 16979576932680.0, - "grad_norm": 2.56726180727702, - "learning_rate": 3.5373260774446292e-06, - "loss": 1.052, - "num_input_tokens_seen": 2797685, - "step": 132 - }, - { - "epoch": 0.0159923044549991, - "flos": 16612603032600.0, - "grad_norm": 4.291477655983048, - "learning_rate": 3.542793620000961e-06, - "loss": 1.1315, - "num_input_tokens_seen": 2816880, - "step": 133 - }, - { - "epoch": 0.01611254734563819, - "flos": 12652928614680.0, - "grad_norm": 3.9593886859178546, - "learning_rate": 3.5482202067978894e-06, - "loss": 1.0861, - "num_input_tokens_seen": 2833810, - "step": 134 - }, - { - "epoch": 0.01623279023627728, - "flos": 14877651087000.0, - "grad_norm": 3.51777888121768, - "learning_rate": 3.553606446851471e-06, - "loss": 0.9783, - "num_input_tokens_seen": 2850270, - "step": 135 - }, - { - "epoch": 0.016353033126916373, - "flos": 11132314696560.0, - "grad_norm": 3.6016449825746806, - "learning_rate": 3.5589529356937613e-06, - "loss": 1.0624, - "num_input_tokens_seen": 2868385, - "step": 136 - }, - { - "epoch": 0.016473276017555463, - "flos": 13334985959160.0, - "grad_norm": 2.884576025474233, - "learning_rate": 3.5642602557679627e-06, - "loss": 0.9942, - "num_input_tokens_seen": 2886555, - "step": 137 - }, - { - "epoch": 0.016593518908194552, - "flos": 17320268327760.0, - "grad_norm": 4.954947440198719, - "learning_rate": 3.569528976809202e-06, - "loss": 1.0689, - "num_input_tokens_seen": 2903490, - "step": 138 - }, - { - "epoch": 0.016713761798833646, - "flos": 15878256632160.0, - "grad_norm": 6.115382838145138, - "learning_rate": 3.5747596562115522e-06, - "loss": 1.1097, - "num_input_tokens_seen": 2923825, - "step": 139 - }, - { - "epoch": 0.016834004689472735, - "flos": 12626707432800.0, - "grad_norm": 5.866379927361374, - "learning_rate": 3.5799528393819138e-06, - "loss": 1.1327, - "num_input_tokens_seen": 2942625, - "step": 140 - }, - { - "epoch": 0.016954247580111825, - "flos": 14565173875200.0, - "grad_norm": 2.6187896834760362, - "learning_rate": 3.585109060081286e-06, - "loss": 1.099, - "num_input_tokens_seen": 2962145, - "step": 141 - }, - { - "epoch": 0.017074490470750915, - "flos": 15693113957880.0, - "grad_norm": 2.930172974099326, - "learning_rate": 3.590228840753992e-06, - "loss": 1.009, - "num_input_tokens_seen": 2982295, - "step": 142 - }, - { - 
"epoch": 0.01719473336139001, - "flos": 11315403046320.0, - "grad_norm": 4.045500179341215, - "learning_rate": 3.5953126928453423e-06, - "loss": 1.0942, - "num_input_tokens_seen": 2999565, - "step": 143 - }, - { - "epoch": 0.017314976252029098, - "flos": 15983110698120.0, - "grad_norm": 3.803874154072015, - "learning_rate": 3.600361117108239e-06, - "loss": 1.0272, - "num_input_tokens_seen": 3019085, - "step": 144 - }, - { - "epoch": 0.017435219142668188, - "flos": 15642511287720.0, - "grad_norm": 3.57380448739143, - "learning_rate": 3.6053746038991616e-06, - "loss": 1.1876, - "num_input_tokens_seen": 3037890, - "step": 145 - }, - { - "epoch": 0.01755546203330728, - "flos": 51738893550480.0, - "grad_norm": 0.9692315781214998, - "learning_rate": 3.6103536334639843e-06, - "loss": 0.8519, - "num_input_tokens_seen": 3090875, - "step": 146 - }, - { - "epoch": 0.01767570492394637, - "flos": 18027657668880.0, - "grad_norm": 4.412172273995547, - "learning_rate": 3.615298676214041e-06, - "loss": 1.0783, - "num_input_tokens_seen": 3110875, - "step": 147 - }, - { - "epoch": 0.01779594781458546, - "flos": 14515000466880.0, - "grad_norm": 3.565525978311497, - "learning_rate": 3.6202101929928317e-06, - "loss": 1.1167, - "num_input_tokens_seen": 3129185, - "step": 148 - }, - { - "epoch": 0.017916190705224554, - "flos": 11499748520040.0, - "grad_norm": 3.4543105631920215, - "learning_rate": 3.6250886353337413e-06, - "loss": 1.11, - "num_input_tokens_seen": 3146435, - "step": 149 - }, - { - "epoch": 0.018036433595863644, - "flos": 16585676634840.0, - "grad_norm": 4.440649499899427, - "learning_rate": 3.6299344457091488e-06, - "loss": 1.0959, - "num_input_tokens_seen": 3167015, - "step": 150 - }, - { - "epoch": 0.018156676486502734, - "flos": 13177352252280.0, - "grad_norm": 3.7526467201984537, - "learning_rate": 3.634748057771256e-06, - "loss": 1.1505, - "num_input_tokens_seen": 3182675, - "step": 151 - }, - { - "epoch": 0.018276919377141827, - "flos": 18108191569680.0, - "grad_norm": 4.696711124040995, - "learning_rate": 3.639529896584965e-06, - "loss": 1.0842, - "num_input_tokens_seen": 3203770, - "step": 152 - }, - { - "epoch": 0.018397162267780917, - "flos": 14226107542800.0, - "grad_norm": 4.573623475156747, - "learning_rate": 3.6442803788531233e-06, - "loss": 1.1005, - "num_input_tokens_seen": 3221450, - "step": 153 - }, - { - "epoch": 0.018517405158420007, - "flos": 19629388057440.0, - "grad_norm": 4.065221725741827, - "learning_rate": 3.6489999131344357e-06, - "loss": 1.175, - "num_input_tokens_seen": 3243945, - "step": 154 - }, - { - "epoch": 0.0186376480490591, - "flos": 13727874425520.0, - "grad_norm": 3.2955491713089544, - "learning_rate": 3.653688900054313e-06, - "loss": 1.1315, - "num_input_tokens_seen": 3262195, - "step": 155 - }, - { - "epoch": 0.01875789093969819, - "flos": 18998546614320.0, - "grad_norm": 5.514379430944415, - "learning_rate": 3.6583477325089526e-06, - "loss": 0.9856, - "num_input_tokens_seen": 3282455, - "step": 156 - }, - { - "epoch": 0.01887813383033728, - "flos": 17320574943360.0, - "grad_norm": 5.345703660200466, - "learning_rate": 3.6629767958628916e-06, - "loss": 1.26, - "num_input_tokens_seen": 3299550, - "step": 157 - }, - { - "epoch": 0.018998376720976373, - "flos": 10345403286120.0, - "grad_norm": 4.674090112276993, - "learning_rate": 3.667576468140291e-06, - "loss": 1.0754, - "num_input_tokens_seen": 3317085, - "step": 158 - }, - { - "epoch": 0.019118619611615463, - "flos": 20878346396280.0, - "grad_norm": 5.3165662574653885, - "learning_rate": 
3.672147120210184e-06, - "loss": 1.1097, - "num_input_tokens_seen": 3333405, - "step": 159 - }, - { - "epoch": 0.019238862502254553, - "flos": 14829409356960.0, - "grad_norm": 15.71477481031361, - "learning_rate": 3.6766891159659177e-06, - "loss": 1.098, - "num_input_tokens_seen": 3351535, - "step": 160 - }, - { - "epoch": 0.019359105392893646, - "flos": 15169978105800.0, - "grad_norm": 4.094896927538954, - "learning_rate": 3.6812028124990075e-06, - "loss": 1.0961, - "num_input_tokens_seen": 3368525, - "step": 161 - }, - { - "epoch": 0.019479348283532736, - "flos": 11520542605800.0, - "grad_norm": 5.080319725548181, - "learning_rate": 3.6856885602676016e-06, - "loss": 1.0447, - "num_input_tokens_seen": 3384280, - "step": 162 - }, - { - "epoch": 0.019599591174171826, - "flos": 15695137620840.0, - "grad_norm": 3.837276683360939, - "learning_rate": 3.6901467032597733e-06, - "loss": 1.1721, - "num_input_tokens_seen": 3402485, - "step": 163 - }, - { - "epoch": 0.01971983406481092, - "flos": 13911606668040.0, - "grad_norm": 3.78142427405406, - "learning_rate": 3.694577579151804e-06, - "loss": 1.0903, - "num_input_tokens_seen": 3420615, - "step": 164 - }, - { - "epoch": 0.01984007695545001, - "flos": 13544908722000.0, - "grad_norm": 4.957054336530163, - "learning_rate": 3.6989815194616703e-06, - "loss": 0.969, - "num_input_tokens_seen": 3437530, - "step": 165 - }, - { - "epoch": 0.0199603198460891, - "flos": 14801624435520.0, - "grad_norm": 10.369240138614115, - "learning_rate": 3.703358849697888e-06, - "loss": 1.026, - "num_input_tokens_seen": 3457160, - "step": 166 - }, - { - "epoch": 0.020080562736728192, - "flos": 15536461420920.0, - "grad_norm": 3.130831351247009, - "learning_rate": 3.7077098895038803e-06, - "loss": 1.0569, - "num_input_tokens_seen": 3476250, - "step": 167 - }, - { - "epoch": 0.020200805627367282, - "flos": 15405754111800.0, - "grad_norm": 4.874849366037143, - "learning_rate": 3.712034952798045e-06, - "loss": 1.195, - "num_input_tokens_seen": 3494085, - "step": 168 - }, - { - "epoch": 0.02032104851800637, - "flos": 23924694713040.0, - "grad_norm": 2.983661346588773, - "learning_rate": 3.7163343479096656e-06, - "loss": 1.0569, - "num_input_tokens_seen": 3515380, - "step": 169 - }, - { - "epoch": 0.020441291408645465, - "flos": 22591400439960.0, - "grad_norm": 5.585641279569278, - "learning_rate": 3.720608377710802e-06, - "loss": 1.0444, - "num_input_tokens_seen": 3535190, - "step": 170 - }, - { - "epoch": 0.020561534299284555, - "flos": 14829378695400.0, - "grad_norm": 4.397867873930259, - "learning_rate": 3.7248573397443277e-06, - "loss": 1.0847, - "num_input_tokens_seen": 3553835, - "step": 171 - }, - { - "epoch": 0.020681777189923645, - "flos": 14908440841320.0, - "grad_norm": 2.600080738281228, - "learning_rate": 3.729081526348224e-06, - "loss": 1.2025, - "num_input_tokens_seen": 3572085, - "step": 172 - }, - { - "epoch": 0.020802020080562738, - "flos": 20126762651040.0, - "grad_norm": 14.80516675510561, - "learning_rate": 3.7332812247762777e-06, - "loss": 1.0612, - "num_input_tokens_seen": 3593105, - "step": 173 - }, - { - "epoch": 0.020922262971201828, - "flos": 13962883892520.0, - "grad_norm": 5.417224835711944, - "learning_rate": 3.737456717315293e-06, - "loss": 1.1738, - "num_input_tokens_seen": 3611790, - "step": 174 - }, - { - "epoch": 0.021042505861840918, - "flos": 11077235438640.0, - "grad_norm": 2.4522469279366934, - "learning_rate": 3.7416082813989552e-06, - "loss": 1.1354, - "num_input_tokens_seen": 3628505, - "step": 175 - }, - { - "epoch": 
0.02116274875248001, - "flos": 15012865645440.0, - "grad_norm": 8.453321754295208, - "learning_rate": 3.745736189718439e-06, - "loss": 1.1285, - "num_input_tokens_seen": 3647480, - "step": 176 - }, - { - "epoch": 0.0212829916431191, - "flos": 17581008391680.0, - "grad_norm": 4.725555766687887, - "learning_rate": 3.749840710329894e-06, - "loss": 0.9335, - "num_input_tokens_seen": 3667905, - "step": 177 - }, - { - "epoch": 0.02140323453375819, - "flos": 11780117530440.0, - "grad_norm": 5.080316346598758, - "learning_rate": 3.7539221067588938e-06, - "loss": 1.1974, - "num_input_tokens_seen": 3681600, - "step": 178 - }, - { - "epoch": 0.021523477424397284, - "flos": 14406773629320.0, - "grad_norm": 7.435322418712913, - "learning_rate": 3.757980638101964e-06, - "loss": 1.1492, - "num_input_tokens_seen": 3694815, - "step": 179 - }, - { - "epoch": 0.021643720315036374, - "flos": 18578946381120.0, - "grad_norm": 6.162781324793746, - "learning_rate": 3.7620165591252806e-06, - "loss": 1.1219, - "num_input_tokens_seen": 3712635, - "step": 180 - }, - { - "epoch": 0.021763963205675464, - "flos": 17632530908640.0, - "grad_norm": 3.706113357984211, - "learning_rate": 3.766030120360636e-06, - "loss": 1.1598, - "num_input_tokens_seen": 3731985, - "step": 181 - }, - { - "epoch": 0.021884206096314557, - "flos": 18370403388480.0, - "grad_norm": 4.537976343462275, - "learning_rate": 3.7700215681987578e-06, - "loss": 1.1072, - "num_input_tokens_seen": 3751475, - "step": 182 - }, - { - "epoch": 0.022004448986953647, - "flos": 14251408877880.0, - "grad_norm": 3.200572862608309, - "learning_rate": 3.7739911449800767e-06, - "loss": 1.0475, - "num_input_tokens_seen": 3771250, - "step": 183 - }, - { - "epoch": 0.022124691877592736, - "flos": 14536898368800.0, - "grad_norm": 7.382662397200953, - "learning_rate": 3.7779390890830114e-06, - "loss": 1.0283, - "num_input_tokens_seen": 3789620, - "step": 184 - }, - { - "epoch": 0.02224493476823183, - "flos": 16769960785440.0, - "grad_norm": 2.8699696526601013, - "learning_rate": 3.7818656350098723e-06, - "loss": 1.0795, - "num_input_tokens_seen": 3810290, - "step": 185 - }, - { - "epoch": 0.02236517765887092, - "flos": 11971392516720.0, - "grad_norm": 5.25894728682003, - "learning_rate": 3.7857710134704447e-06, - "loss": 0.9898, - "num_input_tokens_seen": 3828945, - "step": 186 - }, - { - "epoch": 0.02248542054951001, - "flos": 31087063369080.0, - "grad_norm": 2.611016311232228, - "learning_rate": 3.7896554514633234e-06, - "loss": 1.0094, - "num_input_tokens_seen": 3852950, - "step": 187 - }, - { - "epoch": 0.022605663440149103, - "flos": 16612388401680.0, - "grad_norm": 3.737341177637381, - "learning_rate": 3.7935191723550955e-06, - "loss": 1.077, - "num_input_tokens_seen": 3871625, - "step": 188 - }, - { - "epoch": 0.022725906330788193, - "flos": 20674157345160.0, - "grad_norm": 4.684617236937956, - "learning_rate": 3.797362395957408e-06, - "loss": 1.1143, - "num_input_tokens_seen": 3891910, - "step": 189 - }, - { - "epoch": 0.022846149221427282, - "flos": 17423098730760.0, - "grad_norm": 4.827971915870531, - "learning_rate": 3.8011853386020055e-06, - "loss": 1.0056, - "num_input_tokens_seen": 3912535, - "step": 190 - }, - { - "epoch": 0.022966392112066376, - "flos": 10975018266840.0, - "grad_norm": 6.105311610761727, - "learning_rate": 3.804988213213804e-06, - "loss": 1.1079, - "num_input_tokens_seen": 3930495, - "step": 191 - }, - { - "epoch": 0.023086635002705466, - "flos": 39817362278400.0, - "grad_norm": 1.017636464033649, - "learning_rate": 3.808771229382049e-06, - 
"loss": 0.8834, - "num_input_tokens_seen": 3989680, - "step": 192 - }, - { - "epoch": 0.023206877893344555, - "flos": 13698127164240.0, - "grad_norm": 16.781470043366472, - "learning_rate": 3.8125345934296324e-06, - "loss": 1.0774, - "num_input_tokens_seen": 4007710, - "step": 193 - }, - { - "epoch": 0.02332712078398365, - "flos": 16400227344960.0, - "grad_norm": 5.576203856902593, - "learning_rate": 3.81627850848061e-06, - "loss": 1.0926, - "num_input_tokens_seen": 4028025, - "step": 194 - }, - { - "epoch": 0.02344736367462274, - "flos": 17373017307120.0, - "grad_norm": 3.7458720057057318, - "learning_rate": 3.820003174525994e-06, - "loss": 1.0644, - "num_input_tokens_seen": 4047170, - "step": 195 - }, - { - "epoch": 0.02356760656526183, - "flos": 15327489166440.0, - "grad_norm": 3.5130106381701274, - "learning_rate": 3.823708788487851e-06, - "loss": 1.0616, - "num_input_tokens_seen": 4063890, - "step": 196 - }, - { - "epoch": 0.02368784945590092, - "flos": 17924888589000.0, - "grad_norm": 3.5005998097490574, - "learning_rate": 3.827395544281781e-06, - "loss": 1.0667, - "num_input_tokens_seen": 4085950, - "step": 197 - }, - { - "epoch": 0.02380809234654001, - "flos": 19626996455760.0, - "grad_norm": 7.964677076480703, - "learning_rate": 3.831063632877802e-06, - "loss": 1.0188, - "num_input_tokens_seen": 4105990, - "step": 198 - }, - { - "epoch": 0.0239283352371791, - "flos": 12941484261600.0, - "grad_norm": 4.840756974412901, - "learning_rate": 3.834713242359712e-06, - "loss": 0.9787, - "num_input_tokens_seen": 4123540, - "step": 199 - }, - { - "epoch": 0.02404857812781819, - "flos": 15194451578760.0, - "grad_norm": 2.9241334071586627, - "learning_rate": 3.838344557982959e-06, - "loss": 1.0985, - "num_input_tokens_seen": 4144640, - "step": 200 - }, - { - "epoch": 0.024168821018457284, - "flos": 11787598951080.0, - "grad_norm": 6.626207925331544, - "learning_rate": 3.841957762231063e-06, - "loss": 1.0729, - "num_input_tokens_seen": 4161890, - "step": 201 - }, - { - "epoch": 0.024289063909096374, - "flos": 16219499935320.0, - "grad_norm": 4.7859420193492985, - "learning_rate": 3.8455530348706454e-06, - "loss": 1.1061, - "num_input_tokens_seen": 4181210, - "step": 202 - }, - { - "epoch": 0.024409306799735464, - "flos": 12574050438120.0, - "grad_norm": 3.6291729815691296, - "learning_rate": 3.849130553005099e-06, - "loss": 1.002, - "num_input_tokens_seen": 4199145, - "step": 203 - }, - { - "epoch": 0.024529549690374557, - "flos": 15353649025200.0, - "grad_norm": 3.733182453330991, - "learning_rate": 3.852690491126933e-06, - "loss": 1.0634, - "num_input_tokens_seen": 4218915, - "step": 204 - }, - { - "epoch": 0.024649792581013647, - "flos": 18181305372960.0, - "grad_norm": 5.649076180339948, - "learning_rate": 3.856233021168845e-06, - "loss": 1.1289, - "num_input_tokens_seen": 4238330, - "step": 205 - }, - { - "epoch": 0.024770035471652737, - "flos": 24609787551960.0, - "grad_norm": 3.920422045506399, - "learning_rate": 3.859758312553544e-06, - "loss": 1.1482, - "num_input_tokens_seen": 4260270, - "step": 206 - }, - { - "epoch": 0.02489027836229183, - "flos": 15273851001840.0, - "grad_norm": 3.323515516189527, - "learning_rate": 3.8632665322423735e-06, - "loss": 1.1494, - "num_input_tokens_seen": 4279645, - "step": 207 - }, - { - "epoch": 0.02501052125293092, - "flos": 16505204057160.0, - "grad_norm": 3.3048592020073673, - "learning_rate": 3.866757844782762e-06, - "loss": 1.0801, - "num_input_tokens_seen": 4299870, - "step": 208 - }, - { - "epoch": 0.02513076414357001, - "flos": 
18782767493520.0, - "grad_norm": 5.498243571636935, - "learning_rate": 3.870232412354527e-06, - "loss": 1.1394, - "num_input_tokens_seen": 4316435, - "step": 209 - }, - { - "epoch": 0.025251007034209103, - "flos": 9585816873720.0, - "grad_norm": 3.2714941406104243, - "learning_rate": 3.873690394815086e-06, - "loss": 1.1522, - "num_input_tokens_seen": 4332735, - "step": 210 - }, - { - "epoch": 0.025371249924848193, - "flos": 10634142902400.0, - "grad_norm": 5.2226787101587915, - "learning_rate": 3.877131949743587e-06, - "loss": 1.1346, - "num_input_tokens_seen": 4349320, - "step": 211 - }, - { - "epoch": 0.025491492815487283, - "flos": 18183175728120.0, - "grad_norm": 3.127004667068185, - "learning_rate": 3.880557232483993e-06, - "loss": 1.0106, - "num_input_tokens_seen": 4368480, - "step": 212 - }, - { - "epoch": 0.025611735706126376, - "flos": 14645370498840.0, - "grad_norm": 3.2107325578985257, - "learning_rate": 3.883966396187164e-06, - "loss": 1.1112, - "num_input_tokens_seen": 4387470, - "step": 213 - }, - { - "epoch": 0.025731978596765466, - "flos": 13518196955160.0, - "grad_norm": 4.977684385600534, - "learning_rate": 3.887359591851937e-06, - "loss": 1.1299, - "num_input_tokens_seen": 4404795, - "step": 214 - }, - { - "epoch": 0.025852221487404556, - "flos": 15747242707440.0, - "grad_norm": 1.969775462642882, - "learning_rate": 3.890736968365265e-06, - "loss": 1.1587, - "num_input_tokens_seen": 4424830, - "step": 215 - }, - { - "epoch": 0.02597246437804365, - "flos": 18893723209920.0, - "grad_norm": 2.659160231492128, - "learning_rate": 3.894098672541412e-06, - "loss": 1.0816, - "num_input_tokens_seen": 4445455, - "step": 216 - }, - { - "epoch": 0.02609270726868274, - "flos": 23299035073560.0, - "grad_norm": 4.0601048378022595, - "learning_rate": 3.89744484916025e-06, - "loss": 0.9776, - "num_input_tokens_seen": 4466285, - "step": 217 - }, - { - "epoch": 0.02621295015932183, - "flos": 18678373350960.0, - "grad_norm": 4.925724909533562, - "learning_rate": 3.900775641004673e-06, - "loss": 1.1023, - "num_input_tokens_seen": 4485320, - "step": 218 - }, - { - "epoch": 0.026333193049960922, - "flos": 30664948887960.0, - "grad_norm": 4.862919437783647, - "learning_rate": 3.904091188897156e-06, - "loss": 0.9576, - "num_input_tokens_seen": 4504175, - "step": 219 - }, - { - "epoch": 0.026453435940600012, - "flos": 12728250050280.0, - "grad_norm": 5.290605013021534, - "learning_rate": 3.90739163173548e-06, - "loss": 1.041, - "num_input_tokens_seen": 4521730, - "step": 220 - }, - { - "epoch": 0.026573678831239102, - "flos": 13461492634560.0, - "grad_norm": 9.601379740429074, - "learning_rate": 3.910677106527646e-06, - "loss": 1.109, - "num_input_tokens_seen": 4538640, - "step": 221 - }, - { - "epoch": 0.026693921721878195, - "flos": 20833722730320.0, - "grad_norm": 3.4341825256006158, - "learning_rate": 3.913947748426004e-06, - "loss": 1.0647, - "num_input_tokens_seen": 4555180, - "step": 222 - }, - { - "epoch": 0.026814164612517285, - "flos": 9970917303840.0, - "grad_norm": 4.945998591699334, - "learning_rate": 3.9172036907606136e-06, - "loss": 0.9893, - "num_input_tokens_seen": 4568380, - "step": 223 - }, - { - "epoch": 0.026934407503156375, - "flos": 16715893359000.0, - "grad_norm": 2.2007121407379935, - "learning_rate": 3.920445065071855e-06, - "loss": 1.1716, - "num_input_tokens_seen": 4589265, - "step": 224 - }, - { - "epoch": 0.027054650393795468, - "flos": 20624627829600.0, - "grad_norm": 4.062346235446396, - "learning_rate": 3.923672001142322e-06, - "loss": 1.0012, - 
"num_input_tokens_seen": 4609295, - "step": 225 - }, - { - "epoch": 0.027174893284434558, - "flos": 22407852166800.0, - "grad_norm": 2.607824081660312, - "learning_rate": 3.926884627027996e-06, - "loss": 1.0734, - "num_input_tokens_seen": 4632785, - "step": 226 - }, - { - "epoch": 0.027295136175073648, - "flos": 15668671146480.0, - "grad_norm": 3.45359792103231, - "learning_rate": 3.930083069088744e-06, - "loss": 1.0035, - "num_input_tokens_seen": 4652505, - "step": 227 - }, - { - "epoch": 0.02741537906571274, - "flos": 43519608080880.0, - "grad_norm": 1.032362655575083, - "learning_rate": 3.933267452018137e-06, - "loss": 0.8655, - "num_input_tokens_seen": 4712020, - "step": 228 - }, - { - "epoch": 0.02753562195635183, - "flos": 17502314184480.0, - "grad_norm": 3.8940184474639374, - "learning_rate": 3.936437898872622e-06, - "loss": 1.0685, - "num_input_tokens_seen": 4731715, - "step": 229 - }, - { - "epoch": 0.02765586484699092, - "flos": 24321354551280.0, - "grad_norm": 7.265848398363267, - "learning_rate": 3.9395945311000525e-06, - "loss": 1.0254, - "num_input_tokens_seen": 4753060, - "step": 230 - }, - { - "epoch": 0.027776107737630014, - "flos": 10476478533960.0, - "grad_norm": 4.311650068037074, - "learning_rate": 3.942737468567608e-06, - "loss": 1.1323, - "num_input_tokens_seen": 4770795, - "step": 231 - }, - { - "epoch": 0.027896350628269104, - "flos": 34257649405800.0, - "grad_norm": 3.973372283292335, - "learning_rate": 3.9458668295891026e-06, - "loss": 1.0905, - "num_input_tokens_seen": 4792800, - "step": 232 - }, - { - "epoch": 0.028016593518908194, - "flos": 15402136047720.0, - "grad_norm": 9.951883906383394, - "learning_rate": 3.948982730951712e-06, - "loss": 1.0927, - "num_input_tokens_seen": 4810615, - "step": 233 - }, - { - "epoch": 0.028136836409547287, - "flos": 13070259892440.0, - "grad_norm": 7.264604525530176, - "learning_rate": 3.9520852879421254e-06, - "loss": 1.0477, - "num_input_tokens_seen": 4827680, - "step": 234 - }, - { - "epoch": 0.028257079300186377, - "flos": 22512246309360.0, - "grad_norm": 4.89390887590823, - "learning_rate": 3.955174614372137e-06, - "loss": 1.0385, - "num_input_tokens_seen": 4847165, - "step": 235 - }, - { - "epoch": 0.028377322190825467, - "flos": 16717058498280.0, - "grad_norm": 4.631028653722795, - "learning_rate": 3.9582508226037045e-06, - "loss": 1.075, - "num_input_tokens_seen": 4867025, - "step": 236 - }, - { - "epoch": 0.02849756508146456, - "flos": 14536039845120.0, - "grad_norm": 16.08294255600528, - "learning_rate": 3.9613140235734636e-06, - "loss": 1.1513, - "num_input_tokens_seen": 4883725, - "step": 237 - }, - { - "epoch": 0.02861780797210365, - "flos": 10083774036960.0, - "grad_norm": 3.9636507691828684, - "learning_rate": 3.96436432681674e-06, - "loss": 1.0395, - "num_input_tokens_seen": 4901435, - "step": 238 - }, - { - "epoch": 0.02873805086274274, - "flos": 18366110770080.0, - "grad_norm": 8.762140189931214, - "learning_rate": 3.967401840491044e-06, - "loss": 1.1196, - "num_input_tokens_seen": 4920435, - "step": 239 - }, - { - "epoch": 0.028858293753381833, - "flos": 12253815851640.0, - "grad_norm": 11.816311983095062, - "learning_rate": 3.97042667139909e-06, - "loss": 1.0983, - "num_input_tokens_seen": 4937480, - "step": 240 - }, - { - "epoch": 0.028978536644020923, - "flos": 16375600564200.0, - "grad_norm": 4.303490778738864, - "learning_rate": 3.973438925011327e-06, - "loss": 1.1053, - "num_input_tokens_seen": 4955485, - "step": 241 - }, - { - "epoch": 0.029098779534660012, - "flos": 20178530460480.0, - "grad_norm": 
4.565337252573607, - "learning_rate": 3.976438705488002e-06, - "loss": 1.1468, - "num_input_tokens_seen": 4974865, - "step": 242 - }, - { - "epoch": 0.029219022425299106, - "flos": 9795034420680.0, - "grad_norm": 3.67278872544958, - "learning_rate": 3.9794261157007744e-06, - "loss": 1.1543, - "num_input_tokens_seen": 4992340, - "step": 243 - }, - { - "epoch": 0.029339265315938196, - "flos": 13776453432720.0, - "grad_norm": 2.691959318740698, - "learning_rate": 3.982401257253887e-06, - "loss": 1.0589, - "num_input_tokens_seen": 5010400, - "step": 244 - }, - { - "epoch": 0.029459508206577285, - "flos": 11079964317480.0, - "grad_norm": 23.66381747149472, - "learning_rate": 3.985364230504893e-06, - "loss": 1.1204, - "num_input_tokens_seen": 5028005, - "step": 245 - }, - { - "epoch": 0.02957975109721638, - "flos": 20100480146040.0, - "grad_norm": 3.815549662578132, - "learning_rate": 3.988315134584976e-06, - "loss": 1.065, - "num_input_tokens_seen": 5047405, - "step": 246 - }, - { - "epoch": 0.02969999398785547, - "flos": 17293801853400.0, - "grad_norm": 4.2382936257739825, - "learning_rate": 3.991254067418851e-06, - "loss": 1.031, - "num_input_tokens_seen": 5067665, - "step": 247 - }, - { - "epoch": 0.02982023687849456, - "flos": 25028682569280.0, - "grad_norm": 6.895398482550106, - "learning_rate": 3.994181125744254e-06, - "loss": 1.0555, - "num_input_tokens_seen": 5089190, - "step": 248 - }, - { - "epoch": 0.02994047976913365, - "flos": 18631542052680.0, - "grad_norm": 4.7699855178837085, - "learning_rate": 3.99709640513106e-06, - "loss": 0.9807, - "num_input_tokens_seen": 5109790, - "step": 249 - }, - { - "epoch": 0.03006072265977274, - "flos": 18234085013880.0, - "grad_norm": 4.30738084447845, - "learning_rate": 4e-06, - "loss": 1.0735, - "num_input_tokens_seen": 5129345, - "step": 250 - }, - { - "epoch": 0.03018096555041183, - "flos": 15720806894640.0, - "grad_norm": 7.51100876617428, - "learning_rate": 3.999999848300794e-06, - "loss": 1.117, - "num_input_tokens_seen": 5148050, - "step": 251 - }, - { - "epoch": 0.030301208441050925, - "flos": 21513879058080.0, - "grad_norm": 3.8911641065294162, - "learning_rate": 3.999999393203203e-06, - "loss": 1.1147, - "num_input_tokens_seen": 5170180, - "step": 252 - }, - { - "epoch": 0.030421451331690014, - "flos": 16794526243080.0, - "grad_norm": 3.898056592660625, - "learning_rate": 3.999998634707293e-06, - "loss": 1.0788, - "num_input_tokens_seen": 5189450, - "step": 253 - }, - { - "epoch": 0.030541694222329104, - "flos": 19890158782920.0, - "grad_norm": 4.959536280951104, - "learning_rate": 3.999997572813182e-06, - "loss": 1.2017, - "num_input_tokens_seen": 5206980, - "step": 254 - }, - { - "epoch": 0.030661937112968194, - "flos": 13177720191000.0, - "grad_norm": 3.893957342563676, - "learning_rate": 3.999996207521028e-06, - "loss": 1.0993, - "num_input_tokens_seen": 5225410, - "step": 255 - }, - { - "epoch": 0.030782180003607287, - "flos": 9139167596520.0, - "grad_norm": 5.55918734203198, - "learning_rate": 3.999994538831039e-06, - "loss": 1.0363, - "num_input_tokens_seen": 5241715, - "step": 256 - }, - { - "epoch": 0.030902422894246377, - "flos": 16589754622320.0, - "grad_norm": 4.30407333839075, - "learning_rate": 3.99999256674347e-06, - "loss": 1.0629, - "num_input_tokens_seen": 5261585, - "step": 257 - }, - { - "epoch": 0.031022665784885467, - "flos": 39416379160200.0, - "grad_norm": 0.8918615089684633, - "learning_rate": 3.999990291258618e-06, - "loss": 0.7904, - "num_input_tokens_seen": 5319995, - "step": 258 - }, - { - "epoch": 
-    [... deleted log_history entries for steps 259-573, one per training step, each recording "epoch", "flos", "grad_norm", "learning_rate", "loss", "num_input_tokens_seen", and "step"; loss fluctuates around ~1.0 while the learning rate decays from 3.999988e-06 toward 3.984096e-06 ...]
"loss": 1.0531, - "num_input_tokens_seen": 12055590, - "step": 574 - }, - { - "epoch": 0.06913966211747731, - "flos": 17656789750680.0, - "grad_norm": 10.180258581928156, - "learning_rate": 3.983998155176305e-06, - "loss": 1.1197, - "num_input_tokens_seen": 12075670, - "step": 575 - }, - { - "epoch": 0.06925990500811639, - "flos": 41772355301640.0, - "grad_norm": 0.8150926818688555, - "learning_rate": 3.9838996633250305e-06, - "loss": 0.8232, - "num_input_tokens_seen": 12135905, - "step": 576 - }, - { - "epoch": 0.06938014789875549, - "flos": 8981595212760.0, - "grad_norm": 4.41341882924441, - "learning_rate": 3.983800870517753e-06, - "loss": 1.1173, - "num_input_tokens_seen": 12152415, - "step": 577 - }, - { - "epoch": 0.06950039078939457, - "flos": 16218948027240.0, - "grad_norm": 11.276620860858575, - "learning_rate": 3.983701776769463e-06, - "loss": 1.0188, - "num_input_tokens_seen": 12169545, - "step": 578 - }, - { - "epoch": 0.06962063368003367, - "flos": 15586880121720.0, - "grad_norm": 4.23518617187018, - "learning_rate": 3.9836023820951885e-06, - "loss": 1.0816, - "num_input_tokens_seen": 12188480, - "step": 579 - }, - { - "epoch": 0.06974087657067275, - "flos": 14699775202440.0, - "grad_norm": 10.38456046587124, - "learning_rate": 3.983502686510011e-06, - "loss": 0.9128, - "num_input_tokens_seen": 12209030, - "step": 580 - }, - { - "epoch": 0.06986111946131185, - "flos": 16088210056560.0, - "grad_norm": 4.886758756893089, - "learning_rate": 3.9834026900290525e-06, - "loss": 0.9472, - "num_input_tokens_seen": 12228145, - "step": 581 - }, - { - "epoch": 0.06998136235195095, - "flos": 19182278856840.0, - "grad_norm": 21.60102352966861, - "learning_rate": 3.983302392667482e-06, - "loss": 1.2191, - "num_input_tokens_seen": 12248710, - "step": 582 - }, - { - "epoch": 0.07010160524259003, - "flos": 15983509298400.0, - "grad_norm": 5.13198302373063, - "learning_rate": 3.983201794440517e-06, - "loss": 1.1635, - "num_input_tokens_seen": 12268005, - "step": 583 - }, - { - "epoch": 0.07022184813322913, - "flos": 12993742656000.0, - "grad_norm": 5.794505825349541, - "learning_rate": 3.9831008953634165e-06, - "loss": 0.901, - "num_input_tokens_seen": 12287015, - "step": 584 - }, - { - "epoch": 0.07034209102386821, - "flos": 17551598407560.0, - "grad_norm": 6.680362207791949, - "learning_rate": 3.9829996954514864e-06, - "loss": 1.0413, - "num_input_tokens_seen": 12305875, - "step": 585 - }, - { - "epoch": 0.0704623339145073, - "flos": 18501417313200.0, - "grad_norm": 6.548734924217502, - "learning_rate": 3.982898194720079e-06, - "loss": 1.0661, - "num_input_tokens_seen": 12326325, - "step": 586 - }, - { - "epoch": 0.0705825768051464, - "flos": 18028730823480.0, - "grad_norm": 9.96005663997371, - "learning_rate": 3.982796393184592e-06, - "loss": 1.0308, - "num_input_tokens_seen": 12345125, - "step": 587 - }, - { - "epoch": 0.07070281969578548, - "flos": 47289417944280.0, - "grad_norm": 0.8050139052214844, - "learning_rate": 3.98269429086047e-06, - "loss": 0.8808, - "num_input_tokens_seen": 12402685, - "step": 588 - }, - { - "epoch": 0.07082306258642458, - "flos": 16817068037760.0, - "grad_norm": 8.727273254683508, - "learning_rate": 3.982591887763199e-06, - "loss": 1.0791, - "num_input_tokens_seen": 12419865, - "step": 589 - }, - { - "epoch": 0.07094330547706366, - "flos": 9793654650480.0, - "grad_norm": 5.644791604343753, - "learning_rate": 3.982489183908316e-06, - "loss": 1.0331, - "num_input_tokens_seen": 12436005, - "step": 590 - }, - { - "epoch": 0.07106354836770276, - "flos": 
17530528367760.0, - "grad_norm": 6.02854075879229, - "learning_rate": 3.982386179311399e-06, - "loss": 1.0721, - "num_input_tokens_seen": 12456245, - "step": 591 - }, - { - "epoch": 0.07118379125834184, - "flos": 11473619322840.0, - "grad_norm": 15.208002011944648, - "learning_rate": 3.982282873988075e-06, - "loss": 1.0878, - "num_input_tokens_seen": 12473840, - "step": 592 - }, - { - "epoch": 0.07130403414898094, - "flos": 13991006091120.0, - "grad_norm": 8.018839111784423, - "learning_rate": 3.982179267954016e-06, - "loss": 1.1016, - "num_input_tokens_seen": 12493990, - "step": 593 - }, - { - "epoch": 0.07142427703962004, - "flos": 15537473252400.0, - "grad_norm": 11.687837483638507, - "learning_rate": 3.982075361224937e-06, - "loss": 1.179, - "num_input_tokens_seen": 12512075, - "step": 594 - }, - { - "epoch": 0.07154451993025912, - "flos": 12967981397520.0, - "grad_norm": 13.015747509364918, - "learning_rate": 3.981971153816602e-06, - "loss": 1.1159, - "num_input_tokens_seen": 12529400, - "step": 595 - }, - { - "epoch": 0.07166476282089822, - "flos": 15744575151720.0, - "grad_norm": 8.83942558321033, - "learning_rate": 3.981866645744819e-06, - "loss": 1.1854, - "num_input_tokens_seen": 12549835, - "step": 596 - }, - { - "epoch": 0.0717850057115373, - "flos": 9977969462640.0, - "grad_norm": 11.913388172515132, - "learning_rate": 3.9817618370254416e-06, - "loss": 1.0327, - "num_input_tokens_seen": 12566210, - "step": 597 - }, - { - "epoch": 0.0719052486021764, - "flos": 22038210711000.0, - "grad_norm": 15.173448380859295, - "learning_rate": 3.9816567276743684e-06, - "loss": 1.0992, - "num_input_tokens_seen": 12585795, - "step": 598 - }, - { - "epoch": 0.0720254914928155, - "flos": 15116953172400.0, - "grad_norm": 4.199491270226781, - "learning_rate": 3.9815513177075466e-06, - "loss": 0.9881, - "num_input_tokens_seen": 12604300, - "step": 599 - }, - { - "epoch": 0.07214573438345458, - "flos": 19811648545080.0, - "grad_norm": 13.843587422331199, - "learning_rate": 3.9814456071409646e-06, - "loss": 0.9362, - "num_input_tokens_seen": 12624555, - "step": 600 - }, - { - "epoch": 0.07226597727409367, - "flos": 18133032981360.0, - "grad_norm": 31.690922599307715, - "learning_rate": 3.981339595990659e-06, - "loss": 1.0799, - "num_input_tokens_seen": 12642805, - "step": 601 - }, - { - "epoch": 0.07238622016473276, - "flos": 16717487760120.0, - "grad_norm": 24.479938623687993, - "learning_rate": 3.981233284272713e-06, - "loss": 1.0442, - "num_input_tokens_seen": 12662270, - "step": 602 - }, - { - "epoch": 0.07250646305537185, - "flos": 18112300218720.0, - "grad_norm": 8.593091238955608, - "learning_rate": 3.981126672003253e-06, - "loss": 1.1259, - "num_input_tokens_seen": 12684665, - "step": 603 - }, - { - "epoch": 0.07262670594601094, - "flos": 19333350989880.0, - "grad_norm": 6.839695395664104, - "learning_rate": 3.981019759198451e-06, - "loss": 1.005, - "num_input_tokens_seen": 12703335, - "step": 604 - }, - { - "epoch": 0.07274694883665003, - "flos": 18972540063360.0, - "grad_norm": 14.527629442270134, - "learning_rate": 3.980912545874528e-06, - "loss": 1.0502, - "num_input_tokens_seen": 12723220, - "step": 605 - }, - { - "epoch": 0.07286719172728913, - "flos": 21280034730360.0, - "grad_norm": 11.158147570369755, - "learning_rate": 3.980805032047746e-06, - "loss": 1.0823, - "num_input_tokens_seen": 12744410, - "step": 606 - }, - { - "epoch": 0.07298743461792821, - "flos": 12310397526000.0, - "grad_norm": 6.978023363288042, - "learning_rate": 3.980697217734415e-06, - "loss": 1.0175, - 
"num_input_tokens_seen": 12761870, - "step": 607 - }, - { - "epoch": 0.07310767750856731, - "flos": 13831226075040.0, - "grad_norm": 20.68577766739936, - "learning_rate": 3.980589102950891e-06, - "loss": 1.1343, - "num_input_tokens_seen": 12779755, - "step": 608 - }, - { - "epoch": 0.07322792039920639, - "flos": 20781464335920.0, - "grad_norm": 10.030069917566355, - "learning_rate": 3.9804806877135755e-06, - "loss": 1.0074, - "num_input_tokens_seen": 12797520, - "step": 609 - }, - { - "epoch": 0.07334816328984549, - "flos": 16691665178520.0, - "grad_norm": 10.07746939485786, - "learning_rate": 3.980371972038915e-06, - "loss": 1.0927, - "num_input_tokens_seen": 12817730, - "step": 610 - }, - { - "epoch": 0.07346840618048459, - "flos": 16320674614080.0, - "grad_norm": 17.666451698770754, - "learning_rate": 3.980262955943399e-06, - "loss": 1.0639, - "num_input_tokens_seen": 12837115, - "step": 611 - }, - { - "epoch": 0.07358864907112367, - "flos": 12520136319480.0, - "grad_norm": 7.258624241893898, - "learning_rate": 3.980153639443569e-06, - "loss": 1.1066, - "num_input_tokens_seen": 12852820, - "step": 612 - }, - { - "epoch": 0.07370889196176277, - "flos": 17136536085240.0, - "grad_norm": 5.647808710030613, - "learning_rate": 3.980044022556005e-06, - "loss": 1.0327, - "num_input_tokens_seen": 12872225, - "step": 613 - }, - { - "epoch": 0.07382913485240185, - "flos": 18422324505720.0, - "grad_norm": 7.644949408913296, - "learning_rate": 3.9799341052973375e-06, - "loss": 0.9535, - "num_input_tokens_seen": 12891780, - "step": 614 - }, - { - "epoch": 0.07394937774304094, - "flos": 11945508612000.0, - "grad_norm": 13.130431586302794, - "learning_rate": 3.979823887684241e-06, - "loss": 0.9614, - "num_input_tokens_seen": 12910440, - "step": 615 - }, - { - "epoch": 0.07406962063368003, - "flos": 14697598231680.0, - "grad_norm": 17.377493041411263, - "learning_rate": 3.979713369733434e-06, - "loss": 1.0812, - "num_input_tokens_seen": 12928025, - "step": 616 - }, - { - "epoch": 0.07418986352431912, - "flos": 15220151514120.0, - "grad_norm": 326.644864015187, - "learning_rate": 3.979602551461683e-06, - "loss": 1.0601, - "num_input_tokens_seen": 12948525, - "step": 617 - }, - { - "epoch": 0.07431010641495822, - "flos": 8457938114160.0, - "grad_norm": 7.590274721986327, - "learning_rate": 3.979491432885799e-06, - "loss": 1.1466, - "num_input_tokens_seen": 12964510, - "step": 618 - }, - { - "epoch": 0.0744303493055973, - "flos": 14880042688680.0, - "grad_norm": 10.431766602813083, - "learning_rate": 3.97938001402264e-06, - "loss": 1.0491, - "num_input_tokens_seen": 12983355, - "step": 619 - }, - { - "epoch": 0.0745505921962364, - "flos": 11499349919760.0, - "grad_norm": 7.21915424750825, - "learning_rate": 3.979268294889105e-06, - "loss": 1.0361, - "num_input_tokens_seen": 12998625, - "step": 620 - }, - { - "epoch": 0.07467083508687548, - "flos": 36430604068680.0, - "grad_norm": 14.277539328345796, - "learning_rate": 3.979156275502143e-06, - "loss": 0.9814, - "num_input_tokens_seen": 13022005, - "step": 621 - }, - { - "epoch": 0.07479107797751458, - "flos": 12416723346840.0, - "grad_norm": 8.203524450839854, - "learning_rate": 3.979043955878749e-06, - "loss": 1.1368, - "num_input_tokens_seen": 13039570, - "step": 622 - }, - { - "epoch": 0.07491132086815366, - "flos": 16688353730040.0, - "grad_norm": 12.620770945268347, - "learning_rate": 3.978931336035959e-06, - "loss": 1.0549, - "num_input_tokens_seen": 13058100, - "step": 623 - }, - { - "epoch": 0.07503156375879276, - "flos": 14305445642760.0, - 
"grad_norm": 21.31931909558133, - "learning_rate": 3.9788184159908595e-06, - "loss": 1.0416, - "num_input_tokens_seen": 13074950, - "step": 624 - }, - { - "epoch": 0.07515180664943186, - "flos": 10680882216000.0, - "grad_norm": 11.297522646329856, - "learning_rate": 3.97870519576058e-06, - "loss": 1.0376, - "num_input_tokens_seen": 13091095, - "step": 625 - }, - { - "epoch": 0.07527204954007094, - "flos": 15117535742040.0, - "grad_norm": 52.540878802247875, - "learning_rate": 3.978591675362295e-06, - "loss": 1.0198, - "num_input_tokens_seen": 13109530, - "step": 626 - }, - { - "epoch": 0.07539229243071004, - "flos": 15143756923920.0, - "grad_norm": 46.89955086238833, - "learning_rate": 3.978477854813226e-06, - "loss": 1.1044, - "num_input_tokens_seen": 13128590, - "step": 627 - }, - { - "epoch": 0.07551253532134912, - "flos": 9191977899000.0, - "grad_norm": 7.2982521472964725, - "learning_rate": 3.97836373413064e-06, - "loss": 1.0514, - "num_input_tokens_seen": 13146365, - "step": 628 - }, - { - "epoch": 0.07563277821198822, - "flos": 13623541606080.0, - "grad_norm": 7.173992966218026, - "learning_rate": 3.978249313331848e-06, - "loss": 0.9773, - "num_input_tokens_seen": 13164315, - "step": 629 - }, - { - "epoch": 0.07575302110262731, - "flos": 13859593566120.0, - "grad_norm": 9.726387187802612, - "learning_rate": 3.978134592434208e-06, - "loss": 0.8349, - "num_input_tokens_seen": 13181785, - "step": 630 - }, - { - "epoch": 0.0758732639932664, - "flos": 48667452981480.0, - "grad_norm": 0.9842810207494693, - "learning_rate": 3.978019571455123e-06, - "loss": 0.8754, - "num_input_tokens_seen": 13233450, - "step": 631 - }, - { - "epoch": 0.07599350688390549, - "flos": 13465662606720.0, - "grad_norm": 4.738050186297217, - "learning_rate": 3.977904250412042e-06, - "loss": 1.058, - "num_input_tokens_seen": 13252125, - "step": 632 - }, - { - "epoch": 0.07611374977454458, - "flos": 14960392620120.0, - "grad_norm": 6.3271308777393696, - "learning_rate": 3.97778862932246e-06, - "loss": 1.0812, - "num_input_tokens_seen": 13269010, - "step": 633 - }, - { - "epoch": 0.07623399266518367, - "flos": 13124480626680.0, - "grad_norm": 5.855421517800665, - "learning_rate": 3.9776727082039144e-06, - "loss": 1.1637, - "num_input_tokens_seen": 13285700, - "step": 634 - }, - { - "epoch": 0.07635423555582276, - "flos": 31920248621640.0, - "grad_norm": 0.8169469913435903, - "learning_rate": 3.977556487073991e-06, - "loss": 0.809, - "num_input_tokens_seen": 13339975, - "step": 635 - }, - { - "epoch": 0.07647447844646185, - "flos": 15457675229040.0, - "grad_norm": 3.3383284494253522, - "learning_rate": 3.97743996595032e-06, - "loss": 1.0294, - "num_input_tokens_seen": 13359735, - "step": 636 - }, - { - "epoch": 0.07659472133710095, - "flos": 16928177061960.0, - "grad_norm": 7.16248672777048, - "learning_rate": 3.9773231448505804e-06, - "loss": 1.0393, - "num_input_tokens_seen": 13381245, - "step": 637 - }, - { - "epoch": 0.07671496422774003, - "flos": 15248519005200.0, - "grad_norm": 8.717191823550415, - "learning_rate": 3.977206023792491e-06, - "loss": 0.9977, - "num_input_tokens_seen": 13400855, - "step": 638 - }, - { - "epoch": 0.07683520711837913, - "flos": 12022179156240.0, - "grad_norm": 17.131601216871303, - "learning_rate": 3.97708860279382e-06, - "loss": 1.0343, - "num_input_tokens_seen": 13418685, - "step": 639 - }, - { - "epoch": 0.07695545000901821, - "flos": 16690745331720.0, - "grad_norm": 5.1873270028923875, - "learning_rate": 3.97697088187238e-06, - "loss": 1.012, - "num_input_tokens_seen": 13438920, - 
"step": 640 - }, - { - "epoch": 0.07707569289965731, - "flos": 12491216920320.0, - "grad_norm": 9.183421364954818, - "learning_rate": 3.976852861046029e-06, - "loss": 1.1306, - "num_input_tokens_seen": 13455255, - "step": 641 - }, - { - "epoch": 0.0771959357902964, - "flos": 18342986405760.0, - "grad_norm": 7.00538264817585, - "learning_rate": 3.97673454033267e-06, - "loss": 1.0252, - "num_input_tokens_seen": 13477075, - "step": 642 - }, - { - "epoch": 0.07731617868093549, - "flos": 14068995082440.0, - "grad_norm": 4.700718057850248, - "learning_rate": 3.976615919750254e-06, - "loss": 1.049, - "num_input_tokens_seen": 13494495, - "step": 643 - }, - { - "epoch": 0.07743642157157458, - "flos": 15144155524200.0, - "grad_norm": 19.240126418493443, - "learning_rate": 3.976496999316775e-06, - "loss": 1.0839, - "num_input_tokens_seen": 13512970, - "step": 644 - }, - { - "epoch": 0.07755666446221367, - "flos": 14169219252840.0, - "grad_norm": 36.21855114807362, - "learning_rate": 3.976377779050271e-06, - "loss": 1.0631, - "num_input_tokens_seen": 13530820, - "step": 645 - }, - { - "epoch": 0.07767690735285276, - "flos": 16796089982640.0, - "grad_norm": 70.35073613132097, - "learning_rate": 3.976258258968831e-06, - "loss": 1.0559, - "num_input_tokens_seen": 13549085, - "step": 646 - }, - { - "epoch": 0.07779715024349185, - "flos": 15799225147800.0, - "grad_norm": 9.31754366091444, - "learning_rate": 3.976138439090583e-06, - "loss": 0.9572, - "num_input_tokens_seen": 13566885, - "step": 647 - }, - { - "epoch": 0.07791739313413094, - "flos": 14877927041040.0, - "grad_norm": 5.888880463760075, - "learning_rate": 3.976018319433706e-06, - "loss": 1.0663, - "num_input_tokens_seen": 13584150, - "step": 648 - }, - { - "epoch": 0.07803763602477004, - "flos": 13697176655880.0, - "grad_norm": 9.148651824186738, - "learning_rate": 3.9758979000164205e-06, - "loss": 1.1499, - "num_input_tokens_seen": 13600690, - "step": 649 - }, - { - "epoch": 0.07815787891540912, - "flos": 16140315143160.0, - "grad_norm": 8.108255827305596, - "learning_rate": 3.975777180856995e-06, - "loss": 0.9332, - "num_input_tokens_seen": 13619530, - "step": 650 - }, - { - "epoch": 0.07827812180604822, - "flos": 16140591097200.0, - "grad_norm": 14.289230687740364, - "learning_rate": 3.975656161973742e-06, - "loss": 1.0693, - "num_input_tokens_seen": 13638335, - "step": 651 - }, - { - "epoch": 0.0783983646966873, - "flos": 15432251247720.0, - "grad_norm": 8.12783716990854, - "learning_rate": 3.9755348433850194e-06, - "loss": 1.1081, - "num_input_tokens_seen": 13653395, - "step": 652 - }, - { - "epoch": 0.0785186075873264, - "flos": 43402826668080.0, - "grad_norm": 1.1210788101104663, - "learning_rate": 3.975413225109232e-06, - "loss": 0.9465, - "num_input_tokens_seen": 13713665, - "step": 653 - }, - { - "epoch": 0.0786388504779655, - "flos": 16979025024600.0, - "grad_norm": 14.319934762987257, - "learning_rate": 3.975291307164829e-06, - "loss": 1.1468, - "num_input_tokens_seen": 13732030, - "step": 654 - }, - { - "epoch": 0.07875909336860458, - "flos": 10712806448040.0, - "grad_norm": 4.428518097200399, - "learning_rate": 3.975169089570306e-06, - "loss": 1.0781, - "num_input_tokens_seen": 13750125, - "step": 655 - }, - { - "epoch": 0.07887933625924368, - "flos": 15799746394320.0, - "grad_norm": 3.7338113399303374, - "learning_rate": 3.975046572344202e-06, - "loss": 1.146, - "num_input_tokens_seen": 13766305, - "step": 656 - }, - { - "epoch": 0.07899957914988276, - "flos": 14751144411600.0, - "grad_norm": 4.202366643511028, - "learning_rate": 
3.974923755505103e-06, - "loss": 0.938, - "num_input_tokens_seen": 13785255, - "step": 657 - }, - { - "epoch": 0.07911982204052186, - "flos": 16350391213800.0, - "grad_norm": 4.004185870571443, - "learning_rate": 3.974800639071641e-06, - "loss": 1.1334, - "num_input_tokens_seen": 13805695, - "step": 658 - }, - { - "epoch": 0.07924006493116094, - "flos": 16428104251080.0, - "grad_norm": 19.563664508321047, - "learning_rate": 3.974677223062492e-06, - "loss": 1.2347, - "num_input_tokens_seen": 13822630, - "step": 659 - }, - { - "epoch": 0.07936030782180004, - "flos": 11656768995720.0, - "grad_norm": 6.413862069607455, - "learning_rate": 3.974553507496378e-06, - "loss": 0.9523, - "num_input_tokens_seen": 13840925, - "step": 660 - }, - { - "epoch": 0.07948055071243913, - "flos": 16875274774800.0, - "grad_norm": 6.681930076308621, - "learning_rate": 3.974429492392068e-06, - "loss": 1.119, - "num_input_tokens_seen": 13860670, - "step": 661 - }, - { - "epoch": 0.07960079360307822, - "flos": 13885508132400.0, - "grad_norm": 5.218135957513349, - "learning_rate": 3.974305177768373e-06, - "loss": 1.1295, - "num_input_tokens_seen": 13878600, - "step": 662 - }, - { - "epoch": 0.07972103649371731, - "flos": 16717119821400.0, - "grad_norm": 7.022128958156527, - "learning_rate": 3.974180563644152e-06, - "loss": 1.0783, - "num_input_tokens_seen": 13896885, - "step": 663 - }, - { - "epoch": 0.0798412793843564, - "flos": 11839734699240.0, - "grad_norm": 7.8737295427796985, - "learning_rate": 3.97405565003831e-06, - "loss": 1.1118, - "num_input_tokens_seen": 13912690, - "step": 664 - }, - { - "epoch": 0.07996152227499549, - "flos": 12915293741280.0, - "grad_norm": 30.7543723866776, - "learning_rate": 3.973930436969794e-06, - "loss": 1.0075, - "num_input_tokens_seen": 13930865, - "step": 665 - }, - { - "epoch": 0.08008176516563459, - "flos": 14619670563480.0, - "grad_norm": 19.163930327204216, - "learning_rate": 3.973804924457602e-06, - "loss": 1.0821, - "num_input_tokens_seen": 13948665, - "step": 666 - }, - { - "epoch": 0.08020200805627367, - "flos": 22697480968320.0, - "grad_norm": 4.177084880774664, - "learning_rate": 3.973679112520771e-06, - "loss": 1.0865, - "num_input_tokens_seen": 13970100, - "step": 667 - }, - { - "epoch": 0.08032225094691277, - "flos": 12598922511360.0, - "grad_norm": 4.439900138694242, - "learning_rate": 3.973553001178389e-06, - "loss": 1.2176, - "num_input_tokens_seen": 13987325, - "step": 668 - }, - { - "epoch": 0.08044249383755185, - "flos": 17111296073280.0, - "grad_norm": 4.079258085375281, - "learning_rate": 3.973426590449585e-06, - "loss": 0.9803, - "num_input_tokens_seen": 14005000, - "step": 669 - }, - { - "epoch": 0.08056273672819095, - "flos": 12915539033760.0, - "grad_norm": 6.149211442216309, - "learning_rate": 3.9732998803535364e-06, - "loss": 0.9821, - "num_input_tokens_seen": 14022780, - "step": 670 - }, - { - "epoch": 0.08068297961883003, - "flos": 13959112520640.0, - "grad_norm": 7.292397017459616, - "learning_rate": 3.973172870909465e-06, - "loss": 1.0865, - "num_input_tokens_seen": 14037265, - "step": 671 - }, - { - "epoch": 0.08080322250946913, - "flos": 16455061310400.0, - "grad_norm": 9.029815031841867, - "learning_rate": 3.973045562136638e-06, - "loss": 1.0259, - "num_input_tokens_seen": 14053800, - "step": 672 - }, - { - "epoch": 0.08092346540010822, - "flos": 15459790876680.0, - "grad_norm": 7.386901820923498, - "learning_rate": 3.972917954054368e-06, - "loss": 1.1372, - "num_input_tokens_seen": 14072075, - "step": 673 - }, - { - "epoch": 
0.08104370829074731, - "flos": 14934140776680.0, - "grad_norm": 6.747440276643079, - "learning_rate": 3.972790046682013e-06, - "loss": 1.0441, - "num_input_tokens_seen": 14090470, - "step": 674 - }, - { - "epoch": 0.0811639511813864, - "flos": 14251807478160.0, - "grad_norm": 13.207035871412975, - "learning_rate": 3.972661840038977e-06, - "loss": 1.0125, - "num_input_tokens_seen": 14110480, - "step": 675 - }, - { - "epoch": 0.08128419407202549, - "flos": 11918735522040.0, - "grad_norm": 4.162482615025046, - "learning_rate": 3.972533334144707e-06, - "loss": 1.0467, - "num_input_tokens_seen": 14127125, - "step": 676 - }, - { - "epoch": 0.08140443696266458, - "flos": 16901066694840.0, - "grad_norm": 9.906615434037507, - "learning_rate": 3.972404529018699e-06, - "loss": 1.0044, - "num_input_tokens_seen": 14146705, - "step": 677 - }, - { - "epoch": 0.08152467985330367, - "flos": 17347348033320.0, - "grad_norm": 4.786819106803064, - "learning_rate": 3.972275424680493e-06, - "loss": 1.0798, - "num_input_tokens_seen": 14166535, - "step": 678 - }, - { - "epoch": 0.08164492274394276, - "flos": 13701806551440.0, - "grad_norm": 5.043086942178577, - "learning_rate": 3.972146021149673e-06, - "loss": 1.1315, - "num_input_tokens_seen": 14184530, - "step": 679 - }, - { - "epoch": 0.08176516563458186, - "flos": 10476938457360.0, - "grad_norm": 5.061675958324483, - "learning_rate": 3.972016318445868e-06, - "loss": 1.0186, - "num_input_tokens_seen": 14202250, - "step": 680 - }, - { - "epoch": 0.08188540852522094, - "flos": 16062479459640.0, - "grad_norm": 6.1932866944253195, - "learning_rate": 3.971886316588757e-06, - "loss": 1.1312, - "num_input_tokens_seen": 14222475, - "step": 681 - }, - { - "epoch": 0.08200565141586004, - "flos": 13806721940520.0, - "grad_norm": 6.281187142584275, - "learning_rate": 3.9717560155980595e-06, - "loss": 0.9619, - "num_input_tokens_seen": 14237845, - "step": 682 - }, - { - "epoch": 0.08212589430649912, - "flos": 14619486594120.0, - "grad_norm": 5.2527674692313795, - "learning_rate": 3.971625415493542e-06, - "loss": 1.1384, - "num_input_tokens_seen": 14255885, - "step": 683 - }, - { - "epoch": 0.08224613719713822, - "flos": 18470259620160.0, - "grad_norm": 7.074887644815822, - "learning_rate": 3.971494516295017e-06, - "loss": 1.1067, - "num_input_tokens_seen": 14275055, - "step": 684 - }, - { - "epoch": 0.08236638008777732, - "flos": 16900606771440.0, - "grad_norm": 14.537134960049029, - "learning_rate": 3.971363318022341e-06, - "loss": 1.0666, - "num_input_tokens_seen": 14296115, - "step": 685 - }, - { - "epoch": 0.0824866229784164, - "flos": 27702445920480.0, - "grad_norm": 15.628653381483469, - "learning_rate": 3.971231820695417e-06, - "loss": 0.9148, - "num_input_tokens_seen": 14319450, - "step": 686 - }, - { - "epoch": 0.0826068658690555, - "flos": 16425007433520.0, - "grad_norm": 29.03506874944773, - "learning_rate": 3.971100024334193e-06, - "loss": 1.0407, - "num_input_tokens_seen": 14336690, - "step": 687 - }, - { - "epoch": 0.08272710875969458, - "flos": 15008051780520.0, - "grad_norm": 7.56736258556869, - "learning_rate": 3.970967928958663e-06, - "loss": 1.0903, - "num_input_tokens_seen": 14353525, - "step": 688 - }, - { - "epoch": 0.08284735165033368, - "flos": 13518810186360.0, - "grad_norm": 6.986488107966954, - "learning_rate": 3.970835534588865e-06, - "loss": 1.0598, - "num_input_tokens_seen": 14370740, - "step": 689 - }, - { - "epoch": 0.08296759454097276, - "flos": 11840225284200.0, - "grad_norm": 5.666047268095026, - "learning_rate": 3.970702841244883e-06, - 
"loss": 1.0903, - "num_input_tokens_seen": 14388780, - "step": 690 - }, - { - "epoch": 0.08308783743161186, - "flos": 12758395911840.0, - "grad_norm": 5.831152408351528, - "learning_rate": 3.970569848946847e-06, - "loss": 1.042, - "num_input_tokens_seen": 14408315, - "step": 691 - }, - { - "epoch": 0.08320808032225095, - "flos": 10655304926880.0, - "grad_norm": 6.598817152898148, - "learning_rate": 3.970436557714932e-06, - "loss": 1.0263, - "num_input_tokens_seen": 14424555, - "step": 692 - }, - { - "epoch": 0.08332832321289003, - "flos": 15904876414320.0, - "grad_norm": 9.435836948617672, - "learning_rate": 3.970302967569358e-06, - "loss": 1.0834, - "num_input_tokens_seen": 14442865, - "step": 693 - }, - { - "epoch": 0.08344856610352913, - "flos": 17582388161880.0, - "grad_norm": 21.4518404226661, - "learning_rate": 3.9701690785303896e-06, - "loss": 0.9072, - "num_input_tokens_seen": 14461780, - "step": 694 - }, - { - "epoch": 0.08356880899416821, - "flos": 18051027325680.0, - "grad_norm": 7.037795347585221, - "learning_rate": 3.970034890618339e-06, - "loss": 1.1033, - "num_input_tokens_seen": 14481190, - "step": 695 - }, - { - "epoch": 0.08368905188480731, - "flos": 17320636266480.0, - "grad_norm": 6.722054454016545, - "learning_rate": 3.969900403853562e-06, - "loss": 1.0997, - "num_input_tokens_seen": 14499950, - "step": 696 - }, - { - "epoch": 0.08380929477544641, - "flos": 12781121675880.0, - "grad_norm": 7.030625243511224, - "learning_rate": 3.96976561825646e-06, - "loss": 1.0123, - "num_input_tokens_seen": 14516760, - "step": 697 - }, - { - "epoch": 0.08392953766608549, - "flos": 18709929644280.0, - "grad_norm": 12.954471973654925, - "learning_rate": 3.969630533847479e-06, - "loss": 1.1002, - "num_input_tokens_seen": 14535440, - "step": 698 - }, - { - "epoch": 0.08404978055672459, - "flos": 15983202682800.0, - "grad_norm": 9.015915256105362, - "learning_rate": 3.969495150647113e-06, - "loss": 1.0654, - "num_input_tokens_seen": 14553330, - "step": 699 - }, - { - "epoch": 0.08417002344736367, - "flos": 17661082369080.0, - "grad_norm": 4.325323306661979, - "learning_rate": 3.969359468675899e-06, - "loss": 0.995, - "num_input_tokens_seen": 14573180, - "step": 700 - }, - { - "epoch": 0.08429026633800277, - "flos": 11997184436760.0, - "grad_norm": 12.070650025620642, - "learning_rate": 3.969223487954418e-06, - "loss": 1.1177, - "num_input_tokens_seen": 14590360, - "step": 701 - }, - { - "epoch": 0.08441050922864185, - "flos": 16953233104560.0, - "grad_norm": 11.894953297110069, - "learning_rate": 3.969087208503301e-06, - "loss": 1.0523, - "num_input_tokens_seen": 14610160, - "step": 702 - }, - { - "epoch": 0.08453075211928095, - "flos": 18159376809480.0, - "grad_norm": 12.478501409885862, - "learning_rate": 3.968950630343219e-06, - "loss": 1.0605, - "num_input_tokens_seen": 14626865, - "step": 703 - }, - { - "epoch": 0.08465099500992004, - "flos": 13855576901760.0, - "grad_norm": 6.845480504136984, - "learning_rate": 3.968813753494892e-06, - "loss": 1.1579, - "num_input_tokens_seen": 14644745, - "step": 704 - }, - { - "epoch": 0.08477123790055913, - "flos": 20912723553120.0, - "grad_norm": 9.638042778038942, - "learning_rate": 3.968676577979084e-06, - "loss": 0.9892, - "num_input_tokens_seen": 14664015, - "step": 705 - }, - { - "epoch": 0.08489148079119822, - "flos": 13203573434160.0, - "grad_norm": 9.481556707763714, - "learning_rate": 3.968539103816605e-06, - "loss": 1.0098, - "num_input_tokens_seen": 14681535, - "step": 706 - }, - { - "epoch": 0.0850117236818373, - "flos": 
16686176759280.0, - "grad_norm": 8.070153204351156, - "learning_rate": 3.9684013310283085e-06, - "loss": 1.1208, - "num_input_tokens_seen": 14699940, - "step": 707 - }, - { - "epoch": 0.0851319665724764, - "flos": 29014915446240.0, - "grad_norm": 8.870879170263676, - "learning_rate": 3.9682632596350956e-06, - "loss": 0.8566, - "num_input_tokens_seen": 14720825, - "step": 708 - }, - { - "epoch": 0.0852522094631155, - "flos": 11229687341880.0, - "grad_norm": 4.975167215115425, - "learning_rate": 3.968124889657911e-06, - "loss": 1.0096, - "num_input_tokens_seen": 14735645, - "step": 709 - }, - { - "epoch": 0.08537245235375458, - "flos": 10286583317880.0, - "grad_norm": 6.445055629351052, - "learning_rate": 3.967986221117746e-06, - "loss": 1.1373, - "num_input_tokens_seen": 14751305, - "step": 710 - }, - { - "epoch": 0.08549269524439368, - "flos": 18841219523040.0, - "grad_norm": 5.005672670142869, - "learning_rate": 3.967847254035635e-06, - "loss": 1.0985, - "num_input_tokens_seen": 14770410, - "step": 711 - }, - { - "epoch": 0.08561293813503276, - "flos": 9585632904360.0, - "grad_norm": 10.707738441735541, - "learning_rate": 3.967707988432661e-06, - "loss": 1.0905, - "num_input_tokens_seen": 14787835, - "step": 712 - }, - { - "epoch": 0.08573318102567186, - "flos": 19182340179960.0, - "grad_norm": 23.967492022072836, - "learning_rate": 3.967568424329949e-06, - "loss": 1.1109, - "num_input_tokens_seen": 14807980, - "step": 713 - }, - { - "epoch": 0.08585342391631094, - "flos": 48193478706240.0, - "grad_norm": 0.8124365415000353, - "learning_rate": 3.967428561748671e-06, - "loss": 0.8246, - "num_input_tokens_seen": 14875670, - "step": 714 - }, - { - "epoch": 0.08597366680695004, - "flos": 15957778701480.0, - "grad_norm": 5.636361579391238, - "learning_rate": 3.967288400710045e-06, - "loss": 1.0884, - "num_input_tokens_seen": 14894855, - "step": 715 - }, - { - "epoch": 0.08609390969758914, - "flos": 16743862249800.0, - "grad_norm": 5.4197844506789465, - "learning_rate": 3.9671479412353335e-06, - "loss": 1.1105, - "num_input_tokens_seen": 14913040, - "step": 716 - }, - { - "epoch": 0.08621415258822822, - "flos": 18422263182600.0, - "grad_norm": 11.133165770162499, - "learning_rate": 3.967007183345843e-06, - "loss": 0.9698, - "num_input_tokens_seen": 14932615, - "step": 717 - }, - { - "epoch": 0.08633439547886732, - "flos": 9585080996280.0, - "grad_norm": 10.89943711813539, - "learning_rate": 3.966866127062927e-06, - "loss": 1.1185, - "num_input_tokens_seen": 14949460, - "step": 718 - }, - { - "epoch": 0.0864546383695064, - "flos": 41338865381760.0, - "grad_norm": 0.9188686423997032, - "learning_rate": 3.966724772407982e-06, - "loss": 0.9136, - "num_input_tokens_seen": 15006695, - "step": 719 - }, - { - "epoch": 0.0865748812601455, - "flos": 14225800927200.0, - "grad_norm": 6.242031969178631, - "learning_rate": 3.966583119402454e-06, - "loss": 1.1227, - "num_input_tokens_seen": 15023180, - "step": 720 - }, - { - "epoch": 0.08669512415078459, - "flos": 25161014941080.0, - "grad_norm": 8.90690915529532, - "learning_rate": 3.9664411680678305e-06, - "loss": 1.0436, - "num_input_tokens_seen": 15044655, - "step": 721 - }, - { - "epoch": 0.08681536704142367, - "flos": 47008374379560.0, - "grad_norm": 0.8730805665754214, - "learning_rate": 3.966298918425644e-06, - "loss": 0.8708, - "num_input_tokens_seen": 15101865, - "step": 722 - }, - { - "epoch": 0.08693560993206277, - "flos": 24634199701800.0, - "grad_norm": 12.483949045260015, - "learning_rate": 3.966156370497476e-06, - "loss": 1.0585, - 
"num_input_tokens_seen": 15125195, - "step": 723 - }, - { - "epoch": 0.08705585282270185, - "flos": 16455643880040.0, - "grad_norm": 4.803114818197365, - "learning_rate": 3.96601352430495e-06, - "loss": 1.1104, - "num_input_tokens_seen": 15144685, - "step": 724 - }, - { - "epoch": 0.08717609571334095, - "flos": 21017914896240.0, - "grad_norm": 11.200110121594333, - "learning_rate": 3.965870379869735e-06, - "loss": 1.0518, - "num_input_tokens_seen": 15166450, - "step": 725 - }, - { - "epoch": 0.08729633860398003, - "flos": 14671806311640.0, - "grad_norm": 4.983561195258833, - "learning_rate": 3.965726937213547e-06, - "loss": 1.0851, - "num_input_tokens_seen": 15184805, - "step": 726 - }, - { - "epoch": 0.08741658149461913, - "flos": 13020362438160.0, - "grad_norm": 8.482688328692637, - "learning_rate": 3.965583196358144e-06, - "loss": 1.0307, - "num_input_tokens_seen": 15203560, - "step": 727 - }, - { - "epoch": 0.08753682438525823, - "flos": 13278588254160.0, - "grad_norm": 13.30237004093837, - "learning_rate": 3.965439157325335e-06, - "loss": 0.953, - "num_input_tokens_seen": 15220645, - "step": 728 - }, - { - "epoch": 0.08765706727589731, - "flos": 19779816297720.0, - "grad_norm": 4.722676241621769, - "learning_rate": 3.965294820136968e-06, - "loss": 0.9783, - "num_input_tokens_seen": 15242165, - "step": 729 - }, - { - "epoch": 0.08777731016653641, - "flos": 17346489509640.0, - "grad_norm": 6.28862515463032, - "learning_rate": 3.965150184814938e-06, - "loss": 1.0801, - "num_input_tokens_seen": 15261370, - "step": 730 - }, - { - "epoch": 0.08789755305717549, - "flos": 15616535398320.0, - "grad_norm": 4.851589036951881, - "learning_rate": 3.965005251381189e-06, - "loss": 0.9761, - "num_input_tokens_seen": 15279025, - "step": 731 - }, - { - "epoch": 0.08801779594781459, - "flos": 41766100343400.0, - "grad_norm": 0.894952799655289, - "learning_rate": 3.964860019857705e-06, - "loss": 0.8967, - "num_input_tokens_seen": 15343660, - "step": 732 - }, - { - "epoch": 0.08813803883845367, - "flos": 16560038022600.0, - "grad_norm": 7.498523566834543, - "learning_rate": 3.964714490266518e-06, - "loss": 1.0616, - "num_input_tokens_seen": 15364025, - "step": 733 - }, - { - "epoch": 0.08825828172909277, - "flos": 45405172236120.0, - "grad_norm": 0.8705629176825483, - "learning_rate": 3.964568662629706e-06, - "loss": 0.8897, - "num_input_tokens_seen": 15425050, - "step": 734 - }, - { - "epoch": 0.08837852461973186, - "flos": 19021916271120.0, - "grad_norm": 4.722123108864638, - "learning_rate": 3.9644225369693895e-06, - "loss": 1.0588, - "num_input_tokens_seen": 15445070, - "step": 735 - }, - { - "epoch": 0.08849876751037095, - "flos": 19412811736080.0, - "grad_norm": 8.27361362979425, - "learning_rate": 3.964276113307735e-06, - "loss": 1.0993, - "num_input_tokens_seen": 15464755, - "step": 736 - }, - { - "epoch": 0.08861901040101004, - "flos": 14069087067120.0, - "grad_norm": 11.169745481045132, - "learning_rate": 3.9641293916669574e-06, - "loss": 1.0248, - "num_input_tokens_seen": 15483435, - "step": 737 - }, - { - "epoch": 0.08873925329164913, - "flos": 16821452640840.0, - "grad_norm": 16.020505163488426, - "learning_rate": 3.9639823720693115e-06, - "loss": 1.0476, - "num_input_tokens_seen": 15505010, - "step": 738 - }, - { - "epoch": 0.08885949618228822, - "flos": 51448860600600.0, - "grad_norm": 0.8504341419249271, - "learning_rate": 3.963835054537102e-06, - "loss": 0.8826, - "num_input_tokens_seen": 15573695, - "step": 739 - }, - { - "epoch": 0.08897973907292732, - "flos": 15878287293720.0, - 
"grad_norm": 8.168118198656687, - "learning_rate": 3.963687439092676e-06, - "loss": 0.8433, - "num_input_tokens_seen": 15594100, - "step": 740 - }, - { - "epoch": 0.0890999819635664, - "flos": 15091835806680.0, - "grad_norm": 10.495946919330263, - "learning_rate": 3.963539525758427e-06, - "loss": 1.032, - "num_input_tokens_seen": 15613380, - "step": 741 - }, - { - "epoch": 0.0892202248542055, - "flos": 18051609895320.0, - "grad_norm": 9.110779014468976, - "learning_rate": 3.9633913145567925e-06, - "loss": 0.8952, - "num_input_tokens_seen": 15633590, - "step": 742 - }, - { - "epoch": 0.08934046774484458, - "flos": 17395344470880.0, - "grad_norm": 13.031215457275861, - "learning_rate": 3.9632428055102575e-06, - "loss": 1.0454, - "num_input_tokens_seen": 15653320, - "step": 743 - }, - { - "epoch": 0.08946071063548368, - "flos": 25527712887120.0, - "grad_norm": 51.607823356935185, - "learning_rate": 3.9630939986413495e-06, - "loss": 0.8936, - "num_input_tokens_seen": 15674840, - "step": 744 - }, - { - "epoch": 0.08958095352612276, - "flos": 10136431031640.0, - "grad_norm": 10.208867231667213, - "learning_rate": 3.962944893972643e-06, - "loss": 1.0118, - "num_input_tokens_seen": 15693010, - "step": 745 - }, - { - "epoch": 0.08970119641676186, - "flos": 12648635996280.0, - "grad_norm": 14.772306482684394, - "learning_rate": 3.962795491526756e-06, - "loss": 1.1342, - "num_input_tokens_seen": 15709890, - "step": 746 - }, - { - "epoch": 0.08982143930740095, - "flos": 14775188622720.0, - "grad_norm": 8.700301803089577, - "learning_rate": 3.962645791326354e-06, - "loss": 1.112, - "num_input_tokens_seen": 15728865, - "step": 747 - }, - { - "epoch": 0.08994168219804004, - "flos": 17137118654880.0, - "grad_norm": 8.020291343971708, - "learning_rate": 3.962495793394146e-06, - "loss": 1.062, - "num_input_tokens_seen": 15747775, - "step": 748 - }, - { - "epoch": 0.09006192508867913, - "flos": 40923864382560.0, - "grad_norm": 0.6896438550945404, - "learning_rate": 3.9623454977528864e-06, - "loss": 0.8416, - "num_input_tokens_seen": 15806150, - "step": 749 - }, - { - "epoch": 0.09018216797931822, - "flos": 14542785388320.0, - "grad_norm": 12.97483855571525, - "learning_rate": 3.962194904425375e-06, - "loss": 1.0746, - "num_input_tokens_seen": 15826500, - "step": 750 - }, - { - "epoch": 0.09030241086995731, - "flos": 16088118071880.0, - "grad_norm": 9.215686974587094, - "learning_rate": 3.9620440134344566e-06, - "loss": 0.9048, - "num_input_tokens_seen": 15844375, - "step": 751 - }, - { - "epoch": 0.09042265376059641, - "flos": 15537105313680.0, - "grad_norm": 9.095252892751551, - "learning_rate": 3.9618928248030215e-06, - "loss": 1.0474, - "num_input_tokens_seen": 15863605, - "step": 752 - }, - { - "epoch": 0.0905428966512355, - "flos": 17294077807440.0, - "grad_norm": 6.248270024246587, - "learning_rate": 3.961741338554005e-06, - "loss": 1.0607, - "num_input_tokens_seen": 15881665, - "step": 753 - }, - { - "epoch": 0.09066313954187459, - "flos": 25579572681240.0, - "grad_norm": 12.575896905723999, - "learning_rate": 3.9615895547103865e-06, - "loss": 0.9789, - "num_input_tokens_seen": 15905030, - "step": 754 - }, - { - "epoch": 0.09078338243251367, - "flos": 20781494997480.0, - "grad_norm": 8.306471395304724, - "learning_rate": 3.961437473295193e-06, - "loss": 0.9975, - "num_input_tokens_seen": 15924895, - "step": 755 - }, - { - "epoch": 0.09090362532315277, - "flos": 15563081203080.0, - "grad_norm": 12.112199771337213, - "learning_rate": 3.961285094331495e-06, - "loss": 0.936, - "num_input_tokens_seen": 
15942530, - "step": 756 - }, - { - "epoch": 0.09102386821379185, - "flos": 19470466565040.0, - "grad_norm": 11.358145163179222, - "learning_rate": 3.961132417842406e-06, - "loss": 1.0798, - "num_input_tokens_seen": 15962035, - "step": 757 - }, - { - "epoch": 0.09114411110443095, - "flos": 14776752362280.0, - "grad_norm": 13.028704131557223, - "learning_rate": 3.960979443851089e-06, - "loss": 0.9717, - "num_input_tokens_seen": 15978780, - "step": 758 - }, - { - "epoch": 0.09126435399507005, - "flos": 18609828120120.0, - "grad_norm": 5.3537238664009275, - "learning_rate": 3.96082617238075e-06, - "loss": 1.0253, - "num_input_tokens_seen": 16001125, - "step": 759 - }, - { - "epoch": 0.09138459688570913, - "flos": 17346305540280.0, - "grad_norm": 4.460903935882379, - "learning_rate": 3.960672603454639e-06, - "loss": 1.014, - "num_input_tokens_seen": 16020825, - "step": 760 - }, - { - "epoch": 0.09150483977634823, - "flos": 14934294084480.0, - "grad_norm": 11.201425761344582, - "learning_rate": 3.960518737096054e-06, - "loss": 0.9756, - "num_input_tokens_seen": 16040175, - "step": 761 - }, - { - "epoch": 0.09162508266698731, - "flos": 16245414501600.0, - "grad_norm": 103.5078169225035, - "learning_rate": 3.960364573328334e-06, - "loss": 0.9559, - "num_input_tokens_seen": 16059220, - "step": 762 - }, - { - "epoch": 0.0917453255576264, - "flos": 15431239416240.0, - "grad_norm": 6.062197962836994, - "learning_rate": 3.9602101121748675e-06, - "loss": 1.1106, - "num_input_tokens_seen": 16079435, - "step": 763 - }, - { - "epoch": 0.0918655684482655, - "flos": 10317618364680.0, - "grad_norm": 7.943850882645212, - "learning_rate": 3.960055353659085e-06, - "loss": 0.9483, - "num_input_tokens_seen": 16096265, - "step": 764 - }, - { - "epoch": 0.09198581133890459, - "flos": 16660047562080.0, - "grad_norm": 25.647071525459417, - "learning_rate": 3.959900297804465e-06, - "loss": 1.0688, - "num_input_tokens_seen": 16116155, - "step": 765 - }, - { - "epoch": 0.09210605422954368, - "flos": 11888804291400.0, - "grad_norm": 11.018004161845703, - "learning_rate": 3.9597449446345276e-06, - "loss": 0.9973, - "num_input_tokens_seen": 16133120, - "step": 766 - }, - { - "epoch": 0.09222629712018277, - "flos": 16114032638160.0, - "grad_norm": 7.394065136937027, - "learning_rate": 3.95958929417284e-06, - "loss": 1.0555, - "num_input_tokens_seen": 16150995, - "step": 767 - }, - { - "epoch": 0.09234654001082186, - "flos": 50116670143680.0, - "grad_norm": 0.7402612686304414, - "learning_rate": 3.9594333464430145e-06, - "loss": 0.841, - "num_input_tokens_seen": 16220205, - "step": 768 - }, - { - "epoch": 0.09246678290146094, - "flos": 14200407607440.0, - "grad_norm": 6.585925626008721, - "learning_rate": 3.959277101468709e-06, - "loss": 1.1037, - "num_input_tokens_seen": 16239475, - "step": 769 - }, - { - "epoch": 0.09258702579210004, - "flos": 12573069268200.0, - "grad_norm": 9.527810198324323, - "learning_rate": 3.959120559273624e-06, - "loss": 1.0043, - "num_input_tokens_seen": 16256980, - "step": 770 - }, - { - "epoch": 0.09270726868273914, - "flos": 14829256049160.0, - "grad_norm": 15.485190422998468, - "learning_rate": 3.958963719881509e-06, - "loss": 1.0735, - "num_input_tokens_seen": 16274790, - "step": 771 - }, - { - "epoch": 0.09282751157337822, - "flos": 12046897921680.0, - "grad_norm": 11.368745195032313, - "learning_rate": 3.958806583316154e-06, - "loss": 1.153, - "num_input_tokens_seen": 16292480, - "step": 772 - }, - { - "epoch": 0.09294775446401732, - "flos": 23192371975560.0, - "grad_norm": 
8.020676120026495, - "learning_rate": 3.9586491496013985e-06, - "loss": 1.0128, - "num_input_tokens_seen": 16314595, - "step": 773 - }, - { - "epoch": 0.0930679973546564, - "flos": 12941453600040.0, - "grad_norm": 6.193440985645684, - "learning_rate": 3.958491418761124e-06, - "loss": 1.0432, - "num_input_tokens_seen": 16331885, - "step": 774 - }, - { - "epoch": 0.0931882402452955, - "flos": 14981707952400.0, - "grad_norm": 9.225750040303083, - "learning_rate": 3.958333390819258e-06, - "loss": 0.9585, - "num_input_tokens_seen": 16348535, - "step": 775 - }, - { - "epoch": 0.0933084831359346, - "flos": 17215874185200.0, - "grad_norm": 5.341910597795406, - "learning_rate": 3.9581750657997754e-06, - "loss": 1.0266, - "num_input_tokens_seen": 16367620, - "step": 776 - }, - { - "epoch": 0.09342872602657368, - "flos": 18130212117840.0, - "grad_norm": 5.285163828639245, - "learning_rate": 3.95801644372669e-06, - "loss": 1.1229, - "num_input_tokens_seen": 16387245, - "step": 777 - }, - { - "epoch": 0.09354896891721277, - "flos": 16455337264440.0, - "grad_norm": 18.80761249711927, - "learning_rate": 3.957857524624068e-06, - "loss": 1.0612, - "num_input_tokens_seen": 16405845, - "step": 778 - }, - { - "epoch": 0.09366921180785186, - "flos": 17267733979320.0, - "grad_norm": 4.954487225187969, - "learning_rate": 3.957698308516016e-06, - "loss": 1.1277, - "num_input_tokens_seen": 16426865, - "step": 779 - }, - { - "epoch": 0.09378945469849095, - "flos": 13278894869760.0, - "grad_norm": 7.098654509377524, - "learning_rate": 3.957538795426688e-06, - "loss": 1.0528, - "num_input_tokens_seen": 16444010, - "step": 780 - }, - { - "epoch": 0.09390969758913004, - "flos": 16505050749360.0, - "grad_norm": 10.608485773094538, - "learning_rate": 3.9573789853802804e-06, - "loss": 0.9948, - "num_input_tokens_seen": 16462205, - "step": 781 - }, - { - "epoch": 0.09402994047976913, - "flos": 13938318434880.0, - "grad_norm": 7.4364525834814, - "learning_rate": 3.957218878401037e-06, - "loss": 0.9766, - "num_input_tokens_seen": 16480415, - "step": 782 - }, - { - "epoch": 0.09415018337040823, - "flos": 20961915791520.0, - "grad_norm": 16.31646095798226, - "learning_rate": 3.957058474513246e-06, - "loss": 1.1104, - "num_input_tokens_seen": 16499990, - "step": 783 - }, - { - "epoch": 0.09427042626104731, - "flos": 17478055342440.0, - "grad_norm": 4.798662380761044, - "learning_rate": 3.956897773741241e-06, - "loss": 1.0038, - "num_input_tokens_seen": 16518700, - "step": 784 - }, - { - "epoch": 0.09439066915168641, - "flos": 18762647962080.0, - "grad_norm": 10.684131470700045, - "learning_rate": 3.956736776109398e-06, - "loss": 0.9483, - "num_input_tokens_seen": 16539595, - "step": 785 - }, - { - "epoch": 0.09451091204232549, - "flos": 13780531420200.0, - "grad_norm": 5.548987617603977, - "learning_rate": 3.956575481642143e-06, - "loss": 1.0639, - "num_input_tokens_seen": 16558205, - "step": 786 - }, - { - "epoch": 0.09463115493296459, - "flos": 18049187632080.0, - "grad_norm": 8.437591326183991, - "learning_rate": 3.956413890363943e-06, - "loss": 0.9563, - "num_input_tokens_seen": 16574905, - "step": 787 - }, - { - "epoch": 0.09475139782360369, - "flos": 7093884748320.0, - "grad_norm": 17.345059951938136, - "learning_rate": 3.956252002299312e-06, - "loss": 1.0485, - "num_input_tokens_seen": 16590525, - "step": 788 - }, - { - "epoch": 0.09487164071424277, - "flos": 12201220180080.0, - "grad_norm": 31.417348785145762, - "learning_rate": 3.956089817472807e-06, - "loss": 1.1324, - "num_input_tokens_seen": 16607550, - "step": 789 
- }, - { - "epoch": 0.09499188360488187, - "flos": 21989815673160.0, - "grad_norm": 10.440461064238816, - "learning_rate": 3.955927335909032e-06, - "loss": 1.0838, - "num_input_tokens_seen": 16630480, - "step": 790 - }, - { - "epoch": 0.09511212649552095, - "flos": 20913060830280.0, - "grad_norm": 17.943554006135287, - "learning_rate": 3.955764557632634e-06, - "loss": 0.9902, - "num_input_tokens_seen": 16650010, - "step": 791 - }, - { - "epoch": 0.09523236938616005, - "flos": 7277463683040.0, - "grad_norm": 18.787136888072432, - "learning_rate": 3.955601482668309e-06, - "loss": 1.1699, - "num_input_tokens_seen": 16667590, - "step": 792 - }, - { - "epoch": 0.09535261227679913, - "flos": 13517798354880.0, - "grad_norm": 17.819802162793327, - "learning_rate": 3.955438111040794e-06, - "loss": 1.106, - "num_input_tokens_seen": 16685585, - "step": 793 - }, - { - "epoch": 0.09547285516743823, - "flos": 14855446569480.0, - "grad_norm": 12.301686762081971, - "learning_rate": 3.955274442774873e-06, - "loss": 1.0282, - "num_input_tokens_seen": 16703885, - "step": 794 - }, - { - "epoch": 0.09559309805807732, - "flos": 21489957493200.0, - "grad_norm": 33.02258083079762, - "learning_rate": 3.9551104778953725e-06, - "loss": 0.9457, - "num_input_tokens_seen": 16723900, - "step": 795 - }, - { - "epoch": 0.0957133409487164, - "flos": 14958460941840.0, - "grad_norm": 5.782336653576387, - "learning_rate": 3.954946216427167e-06, - "loss": 1.0795, - "num_input_tokens_seen": 16744080, - "step": 796 - }, - { - "epoch": 0.0958335838393555, - "flos": 51062324625240.0, - "grad_norm": 0.9136493107625846, - "learning_rate": 3.954781658395176e-06, - "loss": 0.8774, - "num_input_tokens_seen": 16800055, - "step": 797 - }, - { - "epoch": 0.09595382672999458, - "flos": 15538362437640.0, - "grad_norm": 10.205803465550442, - "learning_rate": 3.95461680382436e-06, - "loss": 1.1405, - "num_input_tokens_seen": 16818700, - "step": 798 - }, - { - "epoch": 0.09607406962063368, - "flos": 13254390735240.0, - "grad_norm": 12.198423214934955, - "learning_rate": 3.9544516527397295e-06, - "loss": 1.0835, - "num_input_tokens_seen": 16834770, - "step": 799 - }, - { - "epoch": 0.09619431251127276, - "flos": 16038496571640.0, - "grad_norm": 15.884487891222879, - "learning_rate": 3.954286205166338e-06, - "loss": 1.0312, - "num_input_tokens_seen": 16855655, - "step": 800 - }, - { - "epoch": 0.09631455540191186, - "flos": 10057000947000.0, - "grad_norm": 8.803245934145586, - "learning_rate": 3.954120461129282e-06, - "loss": 1.0551, - "num_input_tokens_seen": 16872785, - "step": 801 - }, - { - "epoch": 0.09643479829255096, - "flos": 14724187352280.0, - "grad_norm": 14.12991544553777, - "learning_rate": 3.953954420653706e-06, - "loss": 1.0727, - "num_input_tokens_seen": 16889530, - "step": 802 - }, - { - "epoch": 0.09655504118319004, - "flos": 17368326088440.0, - "grad_norm": 5.716688107523549, - "learning_rate": 3.953788083764798e-06, - "loss": 1.1102, - "num_input_tokens_seen": 16908485, - "step": 803 - }, - { - "epoch": 0.09667528407382914, - "flos": 13072375540080.0, - "grad_norm": 31.276733682151317, - "learning_rate": 3.953621450487792e-06, - "loss": 1.1464, - "num_input_tokens_seen": 16926825, - "step": 804 - }, - { - "epoch": 0.09679552696446822, - "flos": 50720217250320.0, - "grad_norm": 0.8402595377922407, - "learning_rate": 3.953454520847964e-06, - "loss": 0.8793, - "num_input_tokens_seen": 16991390, - "step": 805 - }, - { - "epoch": 0.09691576985510732, - "flos": 15590436862680.0, - "grad_norm": 19.63553798108523, - "learning_rate": 
3.9532872948706395e-06, - "loss": 0.9583, - "num_input_tokens_seen": 17010605, - "step": 806 - }, - { - "epoch": 0.09703601274574641, - "flos": 12729415189560.0, - "grad_norm": 6.313352621680782, - "learning_rate": 3.9531197725811845e-06, - "loss": 1.0595, - "num_input_tokens_seen": 17025710, - "step": 807 - }, - { - "epoch": 0.0971562556363855, - "flos": 15745832275680.0, - "grad_norm": 17.915457656640395, - "learning_rate": 3.952951954005013e-06, - "loss": 1.1038, - "num_input_tokens_seen": 17045115, - "step": 808 - }, - { - "epoch": 0.0972764985270246, - "flos": 18395490092640.0, - "grad_norm": 15.715248157703495, - "learning_rate": 3.952783839167584e-06, - "loss": 1.0738, - "num_input_tokens_seen": 17064880, - "step": 809 - }, - { - "epoch": 0.09739674141766368, - "flos": 14436030305640.0, - "grad_norm": 10.867948153083242, - "learning_rate": 3.952615428094398e-06, - "loss": 0.9532, - "num_input_tokens_seen": 17084120, - "step": 810 - }, - { - "epoch": 0.09751698430830277, - "flos": 11132866604640.0, - "grad_norm": 10.654062533227128, - "learning_rate": 3.952446720811004e-06, - "loss": 0.9753, - "num_input_tokens_seen": 17102165, - "step": 811 - }, - { - "epoch": 0.09763722719894186, - "flos": 45614941691160.0, - "grad_norm": 0.8485607823398544, - "learning_rate": 3.952277717342995e-06, - "loss": 0.9022, - "num_input_tokens_seen": 17168320, - "step": 812 - }, - { - "epoch": 0.09775747008958095, - "flos": 15694861666800.0, - "grad_norm": 18.010723734308783, - "learning_rate": 3.952108417716009e-06, - "loss": 1.0844, - "num_input_tokens_seen": 17187495, - "step": 813 - }, - { - "epoch": 0.09787771298022005, - "flos": 15116125310280.0, - "grad_norm": 5.0461693615880625, - "learning_rate": 3.951938821955727e-06, - "loss": 1.0739, - "num_input_tokens_seen": 17206615, - "step": 814 - }, - { - "epoch": 0.09799795587085913, - "flos": 15668763131160.0, - "grad_norm": 4.339332365024084, - "learning_rate": 3.9517689300878786e-06, - "loss": 0.9944, - "num_input_tokens_seen": 17226070, - "step": 815 - }, - { - "epoch": 0.09811819876149823, - "flos": 15747181384320.0, - "grad_norm": 5.179418864699106, - "learning_rate": 3.951598742138236e-06, - "loss": 1.007, - "num_input_tokens_seen": 17244515, - "step": 816 - }, - { - "epoch": 0.09823844165213731, - "flos": 15794840544720.0, - "grad_norm": 5.869810368920775, - "learning_rate": 3.951428258132615e-06, - "loss": 1.0131, - "num_input_tokens_seen": 17262355, - "step": 817 - }, - { - "epoch": 0.09835868454277641, - "flos": 15979431310920.0, - "grad_norm": 19.34793267455975, - "learning_rate": 3.951257478096879e-06, - "loss": 1.0653, - "num_input_tokens_seen": 17280440, - "step": 818 - }, - { - "epoch": 0.0984789274334155, - "flos": 11577982803840.0, - "grad_norm": 8.960740823092296, - "learning_rate": 3.951086402056936e-06, - "loss": 0.9152, - "num_input_tokens_seen": 17294760, - "step": 819 - }, - { - "epoch": 0.09859917032405459, - "flos": 17241911397720.0, - "grad_norm": 6.126986123197719, - "learning_rate": 3.950915030038735e-06, - "loss": 1.065, - "num_input_tokens_seen": 17314275, - "step": 820 - }, - { - "epoch": 0.09871941321469369, - "flos": 12338029139640.0, - "grad_norm": 7.501220858071855, - "learning_rate": 3.9507433620682765e-06, - "loss": 1.0559, - "num_input_tokens_seen": 17330930, - "step": 821 - }, - { - "epoch": 0.09883965610533277, - "flos": 20284856281320.0, - "grad_norm": 5.899435643505677, - "learning_rate": 3.9505713981716e-06, - "loss": 1.1069, - "num_input_tokens_seen": 17353480, - "step": 822 - }, - { - "epoch": 
0.09895989899597187, - "flos": 16846600668120.0, - "grad_norm": 4.570277443775808, - "learning_rate": 3.950399138374795e-06, - "loss": 1.0384, - "num_input_tokens_seen": 17372280, - "step": 823 - }, - { - "epoch": 0.09908014188661095, - "flos": 17555063163840.0, - "grad_norm": 7.21306224198421, - "learning_rate": 3.95022658270399e-06, - "loss": 0.9563, - "num_input_tokens_seen": 17392365, - "step": 824 - }, - { - "epoch": 0.09920038477725004, - "flos": 9926324299440.0, - "grad_norm": 4.684605429531446, - "learning_rate": 3.9500537311853635e-06, - "loss": 1.0015, - "num_input_tokens_seen": 17410040, - "step": 825 - }, - { - "epoch": 0.09932062766788914, - "flos": 9453913763760.0, - "grad_norm": 6.090702175005698, - "learning_rate": 3.949880583845136e-06, - "loss": 1.0524, - "num_input_tokens_seen": 17427835, - "step": 826 - }, - { - "epoch": 0.09944087055852822, - "flos": 13833403045800.0, - "grad_norm": 11.34351460026072, - "learning_rate": 3.949707140709575e-06, - "loss": 1.0334, - "num_input_tokens_seen": 17447285, - "step": 827 - }, - { - "epoch": 0.09956111344916732, - "flos": 12574479699960.0, - "grad_norm": 4.7546622835682175, - "learning_rate": 3.949533401804991e-06, - "loss": 1.0506, - "num_input_tokens_seen": 17463910, - "step": 828 - }, - { - "epoch": 0.0996813563398064, - "flos": 12731438852520.0, - "grad_norm": 4.931330822571187, - "learning_rate": 3.949359367157739e-06, - "loss": 1.1407, - "num_input_tokens_seen": 17482325, - "step": 829 - }, - { - "epoch": 0.0998015992304455, - "flos": 12364556937120.0, - "grad_norm": 5.375333553876419, - "learning_rate": 3.949185036794222e-06, - "loss": 0.9885, - "num_input_tokens_seen": 17500055, - "step": 830 - }, - { - "epoch": 0.0999218421210846, - "flos": 18424746768960.0, - "grad_norm": 3.2354430863054415, - "learning_rate": 3.949010410740884e-06, - "loss": 1.0004, - "num_input_tokens_seen": 17522600, - "step": 831 - }, - { - "epoch": 0.10004208501172368, - "flos": 15066105209760.0, - "grad_norm": 12.045182046048991, - "learning_rate": 3.948835489024216e-06, - "loss": 1.095, - "num_input_tokens_seen": 17542055, - "step": 832 - }, - { - "epoch": 0.10016232790236278, - "flos": 12286598607360.0, - "grad_norm": 7.079137329735309, - "learning_rate": 3.948660271670755e-06, - "loss": 1.1177, - "num_input_tokens_seen": 17558925, - "step": 833 - }, - { - "epoch": 0.10028257079300186, - "flos": 18264598814160.0, - "grad_norm": 4.678648462358138, - "learning_rate": 3.948484758707079e-06, - "loss": 1.0691, - "num_input_tokens_seen": 17578245, - "step": 834 - }, - { - "epoch": 0.10040281368364096, - "flos": 17897747560320.0, - "grad_norm": 4.697110103324587, - "learning_rate": 3.948308950159815e-06, - "loss": 1.0643, - "num_input_tokens_seen": 17596645, - "step": 835 - }, - { - "epoch": 0.10052305657428004, - "flos": 12469717618680.0, - "grad_norm": 7.616498867109204, - "learning_rate": 3.9481328460556326e-06, - "loss": 0.987, - "num_input_tokens_seen": 17613585, - "step": 836 - }, - { - "epoch": 0.10064329946491914, - "flos": 13229089400160.0, - "grad_norm": 6.319765046013689, - "learning_rate": 3.9479564464212455e-06, - "loss": 1.1039, - "num_input_tokens_seen": 17632465, - "step": 837 - }, - { - "epoch": 0.10076354235555823, - "flos": 12178801031640.0, - "grad_norm": 9.821491762428614, - "learning_rate": 3.947779751283414e-06, - "loss": 0.9884, - "num_input_tokens_seen": 17649355, - "step": 838 - }, - { - "epoch": 0.10088378524619732, - "flos": 16320490644720.0, - "grad_norm": 20.348872183025126, - "learning_rate": 3.947602760668944e-06, - 
"loss": 0.9836, - "num_input_tokens_seen": 17668865, - "step": 839 - }, - { - "epoch": 0.10100402813683641, - "flos": 27045904542000.0, - "grad_norm": 5.8905842018232395, - "learning_rate": 3.947425474604684e-06, - "loss": 0.9411, - "num_input_tokens_seen": 17692520, - "step": 840 - }, - { - "epoch": 0.1011242710274755, - "flos": 15301390630800.0, - "grad_norm": 3.5099051505638426, - "learning_rate": 3.947247893117528e-06, - "loss": 1.1537, - "num_input_tokens_seen": 17710745, - "step": 841 - }, - { - "epoch": 0.10124451391811459, - "flos": 9607377498480.0, - "grad_norm": 7.986993707362321, - "learning_rate": 3.947070016234413e-06, - "loss": 0.9126, - "num_input_tokens_seen": 17726255, - "step": 842 - }, - { - "epoch": 0.10136475680875369, - "flos": 11783306332680.0, - "grad_norm": 8.589615482699152, - "learning_rate": 3.946891843982326e-06, - "loss": 0.9712, - "num_input_tokens_seen": 17743640, - "step": 843 - }, - { - "epoch": 0.10148499969939277, - "flos": 13804422323520.0, - "grad_norm": 5.661931509150092, - "learning_rate": 3.9467133763882935e-06, - "loss": 0.9752, - "num_input_tokens_seen": 17761825, - "step": 844 - }, - { - "epoch": 0.10160524259003187, - "flos": 14986184540160.0, - "grad_norm": 4.828167467640394, - "learning_rate": 3.9465346134793905e-06, - "loss": 1.0859, - "num_input_tokens_seen": 17781355, - "step": 845 - }, - { - "epoch": 0.10172548548067095, - "flos": 12548319841200.0, - "grad_norm": 4.283156589963237, - "learning_rate": 3.9463555552827335e-06, - "loss": 1.027, - "num_input_tokens_seen": 17798245, - "step": 846 - }, - { - "epoch": 0.10184572837131005, - "flos": 14986061893920.0, - "grad_norm": 5.358626866742324, - "learning_rate": 3.946176201825487e-06, - "loss": 1.0796, - "num_input_tokens_seen": 17816000, - "step": 847 - }, - { - "epoch": 0.10196597126194913, - "flos": 18552786522360.0, - "grad_norm": 5.787005884157323, - "learning_rate": 3.9459965531348575e-06, - "loss": 1.0565, - "num_input_tokens_seen": 17835375, - "step": 848 - }, - { - "epoch": 0.10208621415258823, - "flos": 20886349063440.0, - "grad_norm": 10.742906581046027, - "learning_rate": 3.945816609238098e-06, - "loss": 1.0774, - "num_input_tokens_seen": 17854505, - "step": 849 - }, - { - "epoch": 0.10220645704322733, - "flos": 16927318538280.0, - "grad_norm": 9.9540666407247, - "learning_rate": 3.945636370162507e-06, - "loss": 1.0778, - "num_input_tokens_seen": 17874335, - "step": 850 - }, - { - "epoch": 0.10232669993386641, - "flos": 16504805456880.0, - "grad_norm": 3.234821380919676, - "learning_rate": 3.945455835935425e-06, - "loss": 1.0178, - "num_input_tokens_seen": 17893240, - "step": 851 - }, - { - "epoch": 0.1024469428245055, - "flos": 16291847199600.0, - "grad_norm": 12.599659482199622, - "learning_rate": 3.94527500658424e-06, - "loss": 0.9714, - "num_input_tokens_seen": 17910625, - "step": 852 - }, - { - "epoch": 0.10256718571514459, - "flos": 22356176342040.0, - "grad_norm": 3.70974230341727, - "learning_rate": 3.945093882136382e-06, - "loss": 1.0261, - "num_input_tokens_seen": 17934120, - "step": 853 - }, - { - "epoch": 0.10268742860578368, - "flos": 16689150930600.0, - "grad_norm": 2.6458107854728414, - "learning_rate": 3.944912462619329e-06, - "loss": 1.0673, - "num_input_tokens_seen": 17952805, - "step": 854 - }, - { - "epoch": 0.10280767149642277, - "flos": 18159468794160.0, - "grad_norm": 6.3601793535002935, - "learning_rate": 3.9447307480606025e-06, - "loss": 1.0419, - "num_input_tokens_seen": 17972610, - "step": 855 - }, - { - "epoch": 0.10292791438706186, - "flos": 
12285985376160.0, - "grad_norm": 4.178704204941702, - "learning_rate": 3.944548738487767e-06, - "loss": 1.1296, - "num_input_tokens_seen": 17989845, - "step": 856 - }, - { - "epoch": 0.10304815727770096, - "flos": 19260758433120.0, - "grad_norm": 39.85110532598971, - "learning_rate": 3.944366433928434e-06, - "loss": 1.135, - "num_input_tokens_seen": 18009545, - "step": 857 - }, - { - "epoch": 0.10316840016834004, - "flos": 16191745675440.0, - "grad_norm": 4.797849077860389, - "learning_rate": 3.9441838344102594e-06, - "loss": 1.059, - "num_input_tokens_seen": 18028990, - "step": 858 - }, - { - "epoch": 0.10328864305897914, - "flos": 14698548740040.0, - "grad_norm": 6.12677194304951, - "learning_rate": 3.944000939960943e-06, - "loss": 0.9096, - "num_input_tokens_seen": 18047435, - "step": 859 - }, - { - "epoch": 0.10340888594961822, - "flos": 20286113405280.0, - "grad_norm": 5.33006595821257, - "learning_rate": 3.943817750608229e-06, - "loss": 1.0356, - "num_input_tokens_seen": 18069705, - "step": 860 - }, - { - "epoch": 0.10352912884025732, - "flos": 9427477950960.0, - "grad_norm": 8.944398151959765, - "learning_rate": 3.943634266379908e-06, - "loss": 1.0406, - "num_input_tokens_seen": 18086320, - "step": 861 - }, - { - "epoch": 0.10364937173089642, - "flos": 18187192392480.0, - "grad_norm": 3.732280574820785, - "learning_rate": 3.943450487303815e-06, - "loss": 1.0779, - "num_input_tokens_seen": 18106535, - "step": 862 - }, - { - "epoch": 0.1037696146215355, - "flos": 15065737271040.0, - "grad_norm": 3.712627853092944, - "learning_rate": 3.943266413407827e-06, - "loss": 1.0731, - "num_input_tokens_seen": 18125530, - "step": 863 - }, - { - "epoch": 0.1038898575121746, - "flos": 18365129600160.0, - "grad_norm": 6.233692103106162, - "learning_rate": 3.94308204471987e-06, - "loss": 1.0721, - "num_input_tokens_seen": 18144265, - "step": 864 - }, - { - "epoch": 0.10401010040281368, - "flos": 13518319601400.0, - "grad_norm": 6.738007240805581, - "learning_rate": 3.942897381267912e-06, - "loss": 0.9642, - "num_input_tokens_seen": 18160350, - "step": 865 - }, - { - "epoch": 0.10413034329345278, - "flos": 11571635860920.0, - "grad_norm": 24.08300414954886, - "learning_rate": 3.942712423079965e-06, - "loss": 0.9115, - "num_input_tokens_seen": 18176460, - "step": 866 - }, - { - "epoch": 0.10425058618409186, - "flos": 12205850075640.0, - "grad_norm": 6.447083681827643, - "learning_rate": 3.942527170184088e-06, - "loss": 1.1333, - "num_input_tokens_seen": 18192800, - "step": 867 - }, - { - "epoch": 0.10437082907473096, - "flos": 12731254883160.0, - "grad_norm": 14.711645400021595, - "learning_rate": 3.942341622608385e-06, - "loss": 1.0001, - "num_input_tokens_seen": 18209550, - "step": 868 - }, - { - "epoch": 0.10449107196537005, - "flos": 25894594802520.0, - "grad_norm": 4.134935608884862, - "learning_rate": 3.942155780381001e-06, - "loss": 0.9985, - "num_input_tokens_seen": 18233005, - "step": 869 - }, - { - "epoch": 0.10461131485600914, - "flos": 16924160397600.0, - "grad_norm": 3.636035301817689, - "learning_rate": 3.94196964353013e-06, - "loss": 0.9834, - "num_input_tokens_seen": 18252175, - "step": 870 - }, - { - "epoch": 0.10473155774664823, - "flos": 13046154358200.0, - "grad_norm": 6.479294590642872, - "learning_rate": 3.941783212084008e-06, - "loss": 1.0297, - "num_input_tokens_seen": 18269650, - "step": 871 - }, - { - "epoch": 0.10485180063728732, - "flos": 18211359249840.0, - "grad_norm": 8.014688359018553, - "learning_rate": 3.941596486070916e-06, - "loss": 0.9981, - 
"num_input_tokens_seen": 18287415, - "step": 872 - }, - { - "epoch": 0.10497204352792641, - "flos": 19286611676280.0, - "grad_norm": 11.884925308345903, - "learning_rate": 3.941409465519182e-06, - "loss": 0.801, - "num_input_tokens_seen": 18307660, - "step": 873 - }, - { - "epoch": 0.10509228641856551, - "flos": 23433881693280.0, - "grad_norm": 10.930205435815953, - "learning_rate": 3.941222150457176e-06, - "loss": 1.0783, - "num_input_tokens_seen": 18330635, - "step": 874 - }, - { - "epoch": 0.10521252930920459, - "flos": 10109412649200.0, - "grad_norm": 7.571947234246478, - "learning_rate": 3.941034540913311e-06, - "loss": 0.9383, - "num_input_tokens_seen": 18347885, - "step": 875 - }, - { - "epoch": 0.10533277219984369, - "flos": 15404251695360.0, - "grad_norm": 5.607382077640906, - "learning_rate": 3.940846636916051e-06, - "loss": 1.0502, - "num_input_tokens_seen": 18367640, - "step": 876 - }, - { - "epoch": 0.10545301509048277, - "flos": 15822932081760.0, - "grad_norm": 3.5889997849654343, - "learning_rate": 3.940658438493899e-06, - "loss": 1.0866, - "num_input_tokens_seen": 18385205, - "step": 877 - }, - { - "epoch": 0.10557325798112187, - "flos": 15772973304360.0, - "grad_norm": 7.703026699344261, - "learning_rate": 3.940469945675405e-06, - "loss": 0.9879, - "num_input_tokens_seen": 18403310, - "step": 878 - }, - { - "epoch": 0.10569350087176095, - "flos": 18343170375120.0, - "grad_norm": 4.453399114370393, - "learning_rate": 3.940281158489163e-06, - "loss": 1.1385, - "num_input_tokens_seen": 18422260, - "step": 879 - }, - { - "epoch": 0.10581374376240005, - "flos": 12259917502080.0, - "grad_norm": 8.720263278031437, - "learning_rate": 3.940092076963812e-06, - "loss": 1.0595, - "num_input_tokens_seen": 18439475, - "step": 880 - }, - { - "epoch": 0.10593398665303914, - "flos": 24504749516640.0, - "grad_norm": 3.339428685146049, - "learning_rate": 3.9399027011280355e-06, - "loss": 1.0172, - "num_input_tokens_seen": 18461290, - "step": 881 - }, - { - "epoch": 0.10605422954367823, - "flos": 16533540886680.0, - "grad_norm": 8.143144272639379, - "learning_rate": 3.939713031010561e-06, - "loss": 1.0001, - "num_input_tokens_seen": 18479375, - "step": 882 - }, - { - "epoch": 0.10617447243431732, - "flos": 16218856042560.0, - "grad_norm": 3.218431475535828, - "learning_rate": 3.939523066640163e-06, - "loss": 1.0022, - "num_input_tokens_seen": 18497990, - "step": 883 - }, - { - "epoch": 0.10629471532495641, - "flos": 17344312538880.0, - "grad_norm": 3.4439930021810823, - "learning_rate": 3.939332808045657e-06, - "loss": 1.034, - "num_input_tokens_seen": 18517360, - "step": 884 - }, - { - "epoch": 0.1064149582155955, - "flos": 14986613802000.0, - "grad_norm": 4.310726077352265, - "learning_rate": 3.939142255255906e-06, - "loss": 1.0667, - "num_input_tokens_seen": 18537965, - "step": 885 - }, - { - "epoch": 0.1065352011062346, - "flos": 14696371769280.0, - "grad_norm": 4.237874179399951, - "learning_rate": 3.938951408299817e-06, - "loss": 1.0996, - "num_input_tokens_seen": 18556525, - "step": 886 - }, - { - "epoch": 0.10665544399687368, - "flos": 44855293955640.0, - "grad_norm": 0.8169785095572772, - "learning_rate": 3.938760267206342e-06, - "loss": 0.8099, - "num_input_tokens_seen": 18618065, - "step": 887 - }, - { - "epoch": 0.10677568688751278, - "flos": 18605075578320.0, - "grad_norm": 3.2899129678641357, - "learning_rate": 3.938568832004475e-06, - "loss": 1.0047, - "num_input_tokens_seen": 18636490, - "step": 888 - }, - { - "epoch": 0.10689592977815186, - "flos": 9007908379320.0, - 
"grad_norm": 3.320378273705146, - "learning_rate": 3.938377102723257e-06, - "loss": 0.9837, - "num_input_tokens_seen": 18653345, - "step": 889 - }, - { - "epoch": 0.10701617266879096, - "flos": 15720469617480.0, - "grad_norm": 11.16155657396443, - "learning_rate": 3.938185079391774e-06, - "loss": 1.0553, - "num_input_tokens_seen": 18670110, - "step": 890 - }, - { - "epoch": 0.10713641555943004, - "flos": 14007967481880.0, - "grad_norm": 10.993743504386227, - "learning_rate": 3.937992762039157e-06, - "loss": 1.2911, - "num_input_tokens_seen": 18683155, - "step": 891 - }, - { - "epoch": 0.10725665845006914, - "flos": 17033276420400.0, - "grad_norm": 7.170098776675616, - "learning_rate": 3.937800150694577e-06, - "loss": 1.025, - "num_input_tokens_seen": 18704050, - "step": 892 - }, - { - "epoch": 0.10737690134070824, - "flos": 13151284378200.0, - "grad_norm": 3.6230266066650048, - "learning_rate": 3.937607245387255e-06, - "loss": 0.9764, - "num_input_tokens_seen": 18723135, - "step": 893 - }, - { - "epoch": 0.10749714423134732, - "flos": 16137586264320.0, - "grad_norm": 4.406956176442973, - "learning_rate": 3.937414046146455e-06, - "loss": 0.9423, - "num_input_tokens_seen": 18740810, - "step": 894 - }, - { - "epoch": 0.10761738712198642, - "flos": 14987104386960.0, - "grad_norm": 7.3171705670581355, - "learning_rate": 3.9372205530014845e-06, - "loss": 0.9839, - "num_input_tokens_seen": 18759010, - "step": 895 - }, - { - "epoch": 0.1077376300126255, - "flos": 16898889724080.0, - "grad_norm": 9.195510797298983, - "learning_rate": 3.937026765981696e-06, - "loss": 0.9508, - "num_input_tokens_seen": 18778800, - "step": 896 - }, - { - "epoch": 0.1078578729032646, - "flos": 14852533721280.0, - "grad_norm": 4.48176418956358, - "learning_rate": 3.936832685116488e-06, - "loss": 1.0181, - "num_input_tokens_seen": 18796615, - "step": 897 - }, - { - "epoch": 0.10797811579390369, - "flos": 10476447872400.0, - "grad_norm": 5.630614805649523, - "learning_rate": 3.936638310435301e-06, - "loss": 1.1215, - "num_input_tokens_seen": 18814200, - "step": 898 - }, - { - "epoch": 0.10809835868454278, - "flos": 13859409596760.0, - "grad_norm": 5.1958897779019955, - "learning_rate": 3.936443641967623e-06, - "loss": 1.0489, - "num_input_tokens_seen": 18832750, - "step": 899 - }, - { - "epoch": 0.10821860157518187, - "flos": 13072682155680.0, - "grad_norm": 5.097443160357091, - "learning_rate": 3.936248679742983e-06, - "loss": 1.0648, - "num_input_tokens_seen": 18850965, - "step": 900 - }, - { - "epoch": 0.10833884446582095, - "flos": 35294103862680.0, - "grad_norm": 2.4292492241241357, - "learning_rate": 3.936053423790959e-06, - "loss": 0.9845, - "num_input_tokens_seen": 18899005, - "step": 901 - }, - { - "epoch": 0.10845908735646005, - "flos": 14488104730680.0, - "grad_norm": 3.229145484772808, - "learning_rate": 3.935857874141168e-06, - "loss": 0.9949, - "num_input_tokens_seen": 18917560, - "step": 902 - }, - { - "epoch": 0.10857933024709913, - "flos": 9900041794440.0, - "grad_norm": 9.349840507574974, - "learning_rate": 3.935662030823279e-06, - "loss": 1.0667, - "num_input_tokens_seen": 18933465, - "step": 903 - }, - { - "epoch": 0.10869957313773823, - "flos": 9426343473240.0, - "grad_norm": 7.487372385507336, - "learning_rate": 3.935465893866998e-06, - "loss": 0.9394, - "num_input_tokens_seen": 18951410, - "step": 904 - }, - { - "epoch": 0.10881981602837733, - "flos": 18364884307680.0, - "grad_norm": 7.262598863673986, - "learning_rate": 3.935269463302079e-06, - "loss": 1.0343, - "num_input_tokens_seen": 18969335, - 
"step": 905 - }, - { - "epoch": 0.10894005891901641, - "flos": 14750991103800.0, - "grad_norm": 6.8187663804512235, - "learning_rate": 3.935072739158322e-06, - "loss": 0.9862, - "num_input_tokens_seen": 18988765, - "step": 906 - }, - { - "epoch": 0.10906030180965551, - "flos": 18971068308480.0, - "grad_norm": 4.696956606909148, - "learning_rate": 3.934875721465569e-06, - "loss": 1.0227, - "num_input_tokens_seen": 19008905, - "step": 907 - }, - { - "epoch": 0.10918054470029459, - "flos": 26074463688480.0, - "grad_norm": 7.06927867096314, - "learning_rate": 3.9346784102537076e-06, - "loss": 0.9246, - "num_input_tokens_seen": 19030760, - "step": 908 - }, - { - "epoch": 0.10930078759093369, - "flos": 15458871029880.0, - "grad_norm": 3.4986546519303845, - "learning_rate": 3.934480805552669e-06, - "loss": 1.0062, - "num_input_tokens_seen": 19051490, - "step": 909 - }, - { - "epoch": 0.10942103048157277, - "flos": 16062019536240.0, - "grad_norm": 7.194426541038176, - "learning_rate": 3.93428290739243e-06, - "loss": 1.0998, - "num_input_tokens_seen": 19070580, - "step": 910 - }, - { - "epoch": 0.10954127337221187, - "flos": 10631199392640.0, - "grad_norm": 6.729941907067977, - "learning_rate": 3.9340847158030125e-06, - "loss": 1.0223, - "num_input_tokens_seen": 19083880, - "step": 911 - }, - { - "epoch": 0.10966151626285096, - "flos": 15378398452200.0, - "grad_norm": 4.04625753162191, - "learning_rate": 3.9338862308144814e-06, - "loss": 0.9935, - "num_input_tokens_seen": 19102420, - "step": 912 - }, - { - "epoch": 0.10978175915349005, - "flos": 14279469753360.0, - "grad_norm": 3.016874538727435, - "learning_rate": 3.933687452456946e-06, - "loss": 1.0851, - "num_input_tokens_seen": 19122040, - "step": 913 - }, - { - "epoch": 0.10990200204412914, - "flos": 14488748623440.0, - "grad_norm": 5.943938288228401, - "learning_rate": 3.933488380760562e-06, - "loss": 1.077, - "num_input_tokens_seen": 19141120, - "step": 914 - }, - { - "epoch": 0.11002224493476823, - "flos": 12417091285560.0, - "grad_norm": 5.6269873605128256, - "learning_rate": 3.9332890157555286e-06, - "loss": 1.1058, - "num_input_tokens_seen": 19157775, - "step": 915 - }, - { - "epoch": 0.11014248782540732, - "flos": 8588645423280.0, - "grad_norm": 7.016412295746755, - "learning_rate": 3.933089357472088e-06, - "loss": 0.993, - "num_input_tokens_seen": 19175525, - "step": 916 - }, - { - "epoch": 0.11026273071604642, - "flos": 15904784429640.0, - "grad_norm": 13.169550390773301, - "learning_rate": 3.932889405940529e-06, - "loss": 1.087, - "num_input_tokens_seen": 19193340, - "step": 917 - }, - { - "epoch": 0.1103829736066855, - "flos": 14069332359600.0, - "grad_norm": 9.728002136807458, - "learning_rate": 3.932689161191184e-06, - "loss": 1.0341, - "num_input_tokens_seen": 19210765, - "step": 918 - }, - { - "epoch": 0.1105032164973246, - "flos": 16110077296920.0, - "grad_norm": 26.560924815427292, - "learning_rate": 3.93248862325443e-06, - "loss": 1.1054, - "num_input_tokens_seen": 19229390, - "step": 919 - }, - { - "epoch": 0.11062345938796368, - "flos": 47602477725720.0, - "grad_norm": 1.006309630718089, - "learning_rate": 3.932287792160688e-06, - "loss": 0.8851, - "num_input_tokens_seen": 19287570, - "step": 920 - }, - { - "epoch": 0.11074370227860278, - "flos": 15563142526200.0, - "grad_norm": 9.547318118935818, - "learning_rate": 3.932086667940424e-06, - "loss": 1.0293, - "num_input_tokens_seen": 19303995, - "step": 921 - }, - { - "epoch": 0.11086394516924186, - "flos": 20414551758960.0, - "grad_norm": 28.789895679938105, - 
"learning_rate": 3.93188525062415e-06, - "loss": 1.0476, - "num_input_tokens_seen": 19324180, - "step": 922 - }, - { - "epoch": 0.11098418805988096, - "flos": 17451527544960.0, - "grad_norm": 5.422528933794802, - "learning_rate": 3.931683540242418e-06, - "loss": 1.098, - "num_input_tokens_seen": 19344965, - "step": 923 - }, - { - "epoch": 0.11110443095052006, - "flos": 16318896243600.0, - "grad_norm": 7.319074756118052, - "learning_rate": 3.9314815368258295e-06, - "loss": 1.1312, - "num_input_tokens_seen": 19361165, - "step": 924 - }, - { - "epoch": 0.11122467384115914, - "flos": 13437141807840.0, - "grad_norm": 7.3007540796712185, - "learning_rate": 3.9312792404050275e-06, - "loss": 1.0072, - "num_input_tokens_seen": 19378940, - "step": 925 - }, - { - "epoch": 0.11134491673179824, - "flos": 17845213211880.0, - "grad_norm": 7.278753679508129, - "learning_rate": 3.9310766510107e-06, - "loss": 1.0116, - "num_input_tokens_seen": 19397835, - "step": 926 - }, - { - "epoch": 0.11146515962243732, - "flos": 17425612978680.0, - "grad_norm": 5.251726502408852, - "learning_rate": 3.9308737686735806e-06, - "loss": 1.1578, - "num_input_tokens_seen": 19417515, - "step": 927 - }, - { - "epoch": 0.11158540251307641, - "flos": 15876232969200.0, - "grad_norm": 5.765331891474042, - "learning_rate": 3.9306705934244455e-06, - "loss": 1.047, - "num_input_tokens_seen": 19437315, - "step": 928 - }, - { - "epoch": 0.11170564540371551, - "flos": 14122020015840.0, - "grad_norm": 9.866281257868893, - "learning_rate": 3.930467125294116e-06, - "loss": 1.1111, - "num_input_tokens_seen": 19456585, - "step": 929 - }, - { - "epoch": 0.1118258882943546, - "flos": 46238945606400.0, - "grad_norm": 0.9766834947017301, - "learning_rate": 3.930263364313458e-06, - "loss": 0.8664, - "num_input_tokens_seen": 19506875, - "step": 930 - }, - { - "epoch": 0.11194613118499369, - "flos": 12181284618000.0, - "grad_norm": 5.815566699818605, - "learning_rate": 3.930059310513384e-06, - "loss": 1.0661, - "num_input_tokens_seen": 19525635, - "step": 931 - }, - { - "epoch": 0.11206637407563277, - "flos": 22717447191960.0, - "grad_norm": 3.558524205421237, - "learning_rate": 3.929854963924846e-06, - "loss": 1.0622, - "num_input_tokens_seen": 19545620, - "step": 932 - }, - { - "epoch": 0.11218661696627187, - "flos": 15590528847360.0, - "grad_norm": 14.677775828492694, - "learning_rate": 3.929650324578845e-06, - "loss": 1.0013, - "num_input_tokens_seen": 19564805, - "step": 933 - }, - { - "epoch": 0.11230685985691095, - "flos": 18415732270320.0, - "grad_norm": 6.748080510498662, - "learning_rate": 3.929445392506423e-06, - "loss": 1.0467, - "num_input_tokens_seen": 19582465, - "step": 934 - }, - { - "epoch": 0.11242710274755005, - "flos": 15795177821880.0, - "grad_norm": 14.943342399064008, - "learning_rate": 3.92924016773867e-06, - "loss": 0.9911, - "num_input_tokens_seen": 19598680, - "step": 935 - }, - { - "epoch": 0.11254734563818915, - "flos": 12547890579360.0, - "grad_norm": 15.917478424196643, - "learning_rate": 3.9290346503067175e-06, - "loss": 0.9637, - "num_input_tokens_seen": 19615065, - "step": 936 - }, - { - "epoch": 0.11266758852882823, - "flos": 39296286298920.0, - "grad_norm": 4.9747508537743785, - "learning_rate": 3.9288288402417415e-06, - "loss": 1.0088, - "num_input_tokens_seen": 19641045, - "step": 937 - }, - { - "epoch": 0.11278783141946733, - "flos": 13385619290880.0, - "grad_norm": 8.679903729867378, - "learning_rate": 3.928622737574964e-06, - "loss": 0.9308, - "num_input_tokens_seen": 19656100, - "step": 938 - }, - { - 
"epoch": 0.11290807431010641, - "flos": 18841495477080.0, - "grad_norm": 5.99111025600509, - "learning_rate": 3.928416342337652e-06, - "loss": 1.1429, - "num_input_tokens_seen": 19675555, - "step": 939 - }, - { - "epoch": 0.1130283172007455, - "flos": 16140131173800.0, - "grad_norm": 6.723644088702418, - "learning_rate": 3.928209654561113e-06, - "loss": 1.055, - "num_input_tokens_seen": 19696110, - "step": 940 - }, - { - "epoch": 0.1131485600913846, - "flos": 16505970596160.0, - "grad_norm": 12.688656307953462, - "learning_rate": 3.928002674276703e-06, - "loss": 1.0293, - "num_input_tokens_seen": 19715220, - "step": 941 - }, - { - "epoch": 0.11326880298202369, - "flos": 9926293637880.0, - "grad_norm": 5.588005142078478, - "learning_rate": 3.92779540151582e-06, - "loss": 0.9855, - "num_input_tokens_seen": 19732025, - "step": 942 - }, - { - "epoch": 0.11338904587266278, - "flos": 11551179052320.0, - "grad_norm": 4.050508848648945, - "learning_rate": 3.927587836309907e-06, - "loss": 1.0877, - "num_input_tokens_seen": 19749575, - "step": 943 - }, - { - "epoch": 0.11350928876330187, - "flos": 17373262599600.0, - "grad_norm": 20.77821729515898, - "learning_rate": 3.927379978690452e-06, - "loss": 1.0034, - "num_input_tokens_seen": 19768560, - "step": 944 - }, - { - "epoch": 0.11362953165394096, - "flos": 17423865269760.0, - "grad_norm": 4.957726299836137, - "learning_rate": 3.927171828688987e-06, - "loss": 1.1007, - "num_input_tokens_seen": 19787805, - "step": 945 - }, - { - "epoch": 0.11374977454458005, - "flos": 17110560195840.0, - "grad_norm": 14.89810361122977, - "learning_rate": 3.926963386337088e-06, - "loss": 1.043, - "num_input_tokens_seen": 19805755, - "step": 946 - }, - { - "epoch": 0.11387001743521914, - "flos": 28175929610760.0, - "grad_norm": 18.21663175802327, - "learning_rate": 3.926754651666375e-06, - "loss": 0.93, - "num_input_tokens_seen": 19826035, - "step": 947 - }, - { - "epoch": 0.11399026032585824, - "flos": 17844783950040.0, - "grad_norm": 5.7251534387274425, - "learning_rate": 3.926545624708513e-06, - "loss": 1.0025, - "num_input_tokens_seen": 19844995, - "step": 948 - }, - { - "epoch": 0.11411050321649732, - "flos": 12726533002920.0, - "grad_norm": 5.3001115751218135, - "learning_rate": 3.926336305495213e-06, - "loss": 1.0898, - "num_input_tokens_seen": 19863275, - "step": 949 - }, - { - "epoch": 0.11423074610713642, - "flos": 15956766870000.0, - "grad_norm": 5.744265474884258, - "learning_rate": 3.926126694058226e-06, - "loss": 1.1109, - "num_input_tokens_seen": 19882145, - "step": 950 - }, - { - "epoch": 0.1143509889977755, - "flos": 13989043751280.0, - "grad_norm": 6.377793242420642, - "learning_rate": 3.92591679042935e-06, - "loss": 1.0505, - "num_input_tokens_seen": 19901755, - "step": 951 - }, - { - "epoch": 0.1144712318884146, - "flos": 14064058571280.0, - "grad_norm": 3.353257966404883, - "learning_rate": 3.92570659464043e-06, - "loss": 1.0543, - "num_input_tokens_seen": 19919535, - "step": 952 - }, - { - "epoch": 0.1145914747790537, - "flos": 10554988771800.0, - "grad_norm": 4.396974364285135, - "learning_rate": 3.925496106723349e-06, - "loss": 1.0292, - "num_input_tokens_seen": 19936695, - "step": 953 - }, - { - "epoch": 0.11471171766969278, - "flos": 14095124279640.0, - "grad_norm": 39.02222767769272, - "learning_rate": 3.9252853267100405e-06, - "loss": 1.0651, - "num_input_tokens_seen": 19955660, - "step": 954 - }, - { - "epoch": 0.11483196056033187, - "flos": 16007584171080.0, - "grad_norm": 9.849275715335581, - "learning_rate": 3.9250742546324786e-06, - 
"loss": 1.065, - "num_input_tokens_seen": 19975615, - "step": 955 - }, - { - "epoch": 0.11495220345097096, - "flos": 20099897576400.0, - "grad_norm": 4.226681635335736, - "learning_rate": 3.924862890522683e-06, - "loss": 1.0911, - "num_input_tokens_seen": 19995345, - "step": 956 - }, - { - "epoch": 0.11507244634161005, - "flos": 12389704964400.0, - "grad_norm": 5.040428177644163, - "learning_rate": 3.9246512344127174e-06, - "loss": 1.0911, - "num_input_tokens_seen": 20012725, - "step": 957 - }, - { - "epoch": 0.11519268923224914, - "flos": 16035491738760.0, - "grad_norm": 8.53225333214082, - "learning_rate": 3.9244392863346895e-06, - "loss": 1.0506, - "num_input_tokens_seen": 20031850, - "step": 958 - }, - { - "epoch": 0.11531293212288823, - "flos": 12023773557360.0, - "grad_norm": 4.987812064979658, - "learning_rate": 3.9242270463207524e-06, - "loss": 1.1366, - "num_input_tokens_seen": 20049960, - "step": 959 - }, - { - "epoch": 0.11543317501352733, - "flos": 8745972514560.0, - "grad_norm": 31.615885389853865, - "learning_rate": 3.924014514403102e-06, - "loss": 1.065, - "num_input_tokens_seen": 20065835, - "step": 960 - }, - { - "epoch": 0.11555341790416641, - "flos": 14063016078240.0, - "grad_norm": 6.4655025273298135, - "learning_rate": 3.92380169061398e-06, - "loss": 1.1508, - "num_input_tokens_seen": 20083335, - "step": 961 - }, - { - "epoch": 0.11567366079480551, - "flos": 18316489269840.0, - "grad_norm": 4.742611207839579, - "learning_rate": 3.9235885749856705e-06, - "loss": 1.0648, - "num_input_tokens_seen": 20101735, - "step": 962 - }, - { - "epoch": 0.1157939036854446, - "flos": 12915263079720.0, - "grad_norm": 5.029668713499301, - "learning_rate": 3.9233751675505035e-06, - "loss": 1.057, - "num_input_tokens_seen": 20120165, - "step": 963 - }, - { - "epoch": 0.11591414657608369, - "flos": 16400901899280.0, - "grad_norm": 6.115683496163298, - "learning_rate": 3.923161468340853e-06, - "loss": 1.0784, - "num_input_tokens_seen": 20139720, - "step": 964 - }, - { - "epoch": 0.11603438946672277, - "flos": 13805004893160.0, - "grad_norm": 5.691449481326029, - "learning_rate": 3.9229474773891374e-06, - "loss": 1.0407, - "num_input_tokens_seen": 20157980, - "step": 965 - }, - { - "epoch": 0.11615463235736187, - "flos": 19101898263840.0, - "grad_norm": 5.4367452514195485, - "learning_rate": 3.922733194727818e-06, - "loss": 1.0722, - "num_input_tokens_seen": 20177495, - "step": 966 - }, - { - "epoch": 0.11627487524800097, - "flos": 13170790678440.0, - "grad_norm": 4.150527571270072, - "learning_rate": 3.922518620389402e-06, - "loss": 1.0756, - "num_input_tokens_seen": 20194080, - "step": 967 - }, - { - "epoch": 0.11639511813864005, - "flos": 12862943362200.0, - "grad_norm": 3.490952924250904, - "learning_rate": 3.922303754406439e-06, - "loss": 1.1348, - "num_input_tokens_seen": 20211640, - "step": 968 - }, - { - "epoch": 0.11651536102927915, - "flos": 14854618707360.0, - "grad_norm": 4.746747559776087, - "learning_rate": 3.922088596811526e-06, - "loss": 1.0193, - "num_input_tokens_seen": 20230490, - "step": 969 - }, - { - "epoch": 0.11663560391991823, - "flos": 11682959516040.0, - "grad_norm": 6.052080146758072, - "learning_rate": 3.9218731476373e-06, - "loss": 1.0867, - "num_input_tokens_seen": 20246395, - "step": 970 - }, - { - "epoch": 0.11675584681055733, - "flos": 14095124279640.0, - "grad_norm": 4.953239945001533, - "learning_rate": 3.9216574069164455e-06, - "loss": 1.0824, - "num_input_tokens_seen": 20265090, - "step": 971 - }, - { - "epoch": 0.11687608970119642, - "flos": 
15589455692760.0, - "grad_norm": 3.4376822683389108, - "learning_rate": 3.921441374681691e-06, - "loss": 1.0348, - "num_input_tokens_seen": 20284870, - "step": 972 - }, - { - "epoch": 0.1169963325918355, - "flos": 17110836149880.0, - "grad_norm": 5.198487010338226, - "learning_rate": 3.921225050965808e-06, - "loss": 0.8735, - "num_input_tokens_seen": 20304475, - "step": 973 - }, - { - "epoch": 0.1171165754824746, - "flos": 16612817663520.0, - "grad_norm": 3.566621638099953, - "learning_rate": 3.921008435801612e-06, - "loss": 0.9622, - "num_input_tokens_seen": 20323280, - "step": 974 - }, - { - "epoch": 0.11723681837311369, - "flos": 13151744301600.0, - "grad_norm": 5.8797445415616005, - "learning_rate": 3.920791529221963e-06, - "loss": 0.9799, - "num_input_tokens_seen": 20341675, - "step": 975 - }, - { - "epoch": 0.11735706126375278, - "flos": 16743892911360.0, - "grad_norm": 4.043719029492225, - "learning_rate": 3.920574331259768e-06, - "loss": 0.9933, - "num_input_tokens_seen": 20362595, - "step": 976 - }, - { - "epoch": 0.11747730415439187, - "flos": 15903557967240.0, - "grad_norm": 6.006616758646577, - "learning_rate": 3.9203568419479716e-06, - "loss": 1.0127, - "num_input_tokens_seen": 20382870, - "step": 977 - }, - { - "epoch": 0.11759754704503096, - "flos": 15773831828040.0, - "grad_norm": 27.942971054411093, - "learning_rate": 3.92013906131957e-06, - "loss": 0.9697, - "num_input_tokens_seen": 20401520, - "step": 978 - }, - { - "epoch": 0.11771778993567006, - "flos": 15852127434960.0, - "grad_norm": 10.232382916811215, - "learning_rate": 3.9199209894076e-06, - "loss": 1.0484, - "num_input_tokens_seen": 20421555, - "step": 979 - }, - { - "epoch": 0.11783803282630914, - "flos": 15117934342320.0, - "grad_norm": 4.435682327439707, - "learning_rate": 3.919702626245142e-06, - "loss": 1.1259, - "num_input_tokens_seen": 20440930, - "step": 980 - }, - { - "epoch": 0.11795827571694824, - "flos": 18052161803400.0, - "grad_norm": 4.646015979044741, - "learning_rate": 3.919483971865322e-06, - "loss": 0.8828, - "num_input_tokens_seen": 20460645, - "step": 981 - }, - { - "epoch": 0.11807851860758732, - "flos": 16795231458960.0, - "grad_norm": 6.850318299854845, - "learning_rate": 3.91926502630131e-06, - "loss": 1.0988, - "num_input_tokens_seen": 20480980, - "step": 982 - }, - { - "epoch": 0.11819876149822642, - "flos": 17765691142560.0, - "grad_norm": 11.724971005011456, - "learning_rate": 3.91904578958632e-06, - "loss": 0.9507, - "num_input_tokens_seen": 20500115, - "step": 983 - }, - { - "epoch": 0.11831900438886551, - "flos": 16350544521600.0, - "grad_norm": 5.627022273742077, - "learning_rate": 3.918826261753608e-06, - "loss": 1.0774, - "num_input_tokens_seen": 20519415, - "step": 984 - }, - { - "epoch": 0.1184392472795046, - "flos": 19916563934160.0, - "grad_norm": 3.720363234967475, - "learning_rate": 3.918606442836478e-06, - "loss": 0.9252, - "num_input_tokens_seen": 20541355, - "step": 985 - }, - { - "epoch": 0.1185594901701437, - "flos": 14118769890480.0, - "grad_norm": 5.60336500293164, - "learning_rate": 3.918386332868277e-06, - "loss": 1.0142, - "num_input_tokens_seen": 20559045, - "step": 986 - }, - { - "epoch": 0.11867973306078278, - "flos": 13409908794480.0, - "grad_norm": 9.492976352894866, - "learning_rate": 3.918165931882394e-06, - "loss": 1.1778, - "num_input_tokens_seen": 20577165, - "step": 987 - }, - { - "epoch": 0.11879997595142187, - "flos": 12023742895800.0, - "grad_norm": 5.813165113890833, - "learning_rate": 3.917945239912264e-06, - "loss": 0.9799, - 
"num_input_tokens_seen": 20594360, - "step": 988 - }, - { - "epoch": 0.11892021884206096, - "flos": 12417305916480.0, - "grad_norm": 4.793126732446265, - "learning_rate": 3.917724256991367e-06, - "loss": 0.993, - "num_input_tokens_seen": 20612825, - "step": 989 - }, - { - "epoch": 0.11904046173270005, - "flos": 22065811663080.0, - "grad_norm": 3.570635958320392, - "learning_rate": 3.9175029831532245e-06, - "loss": 1.0385, - "num_input_tokens_seen": 20632060, - "step": 990 - }, - { - "epoch": 0.11916070462333915, - "flos": 14305169688720.0, - "grad_norm": 3.6108979154073526, - "learning_rate": 3.917281418431404e-06, - "loss": 1.1044, - "num_input_tokens_seen": 20650825, - "step": 991 - }, - { - "epoch": 0.11928094751397823, - "flos": 16744383496320.0, - "grad_norm": 2.9055204398677454, - "learning_rate": 3.917059562859516e-06, - "loss": 0.9927, - "num_input_tokens_seen": 20669870, - "step": 992 - }, - { - "epoch": 0.11940119040461733, - "flos": 17000156387520.0, - "grad_norm": 5.031104044248413, - "learning_rate": 3.916837416471218e-06, - "loss": 1.1189, - "num_input_tokens_seen": 20686210, - "step": 993 - }, - { - "epoch": 0.11952143329525641, - "flos": 9768261330720.0, - "grad_norm": 5.4098467025614365, - "learning_rate": 3.916614979300207e-06, - "loss": 0.9431, - "num_input_tokens_seen": 20700775, - "step": 994 - }, - { - "epoch": 0.11964167618589551, - "flos": 19233954681600.0, - "grad_norm": 5.419461727370045, - "learning_rate": 3.9163922513802274e-06, - "loss": 1.0116, - "num_input_tokens_seen": 20722830, - "step": 995 - }, - { - "epoch": 0.1197619190765346, - "flos": 8850581288040.0, - "grad_norm": 4.389177728690558, - "learning_rate": 3.916169232745067e-06, - "loss": 1.0537, - "num_input_tokens_seen": 20740225, - "step": 996 - }, - { - "epoch": 0.11988216196717369, - "flos": 11970963254880.0, - "grad_norm": 9.256617472976597, - "learning_rate": 3.915945923428559e-06, - "loss": 1.1312, - "num_input_tokens_seen": 20756470, - "step": 997 - }, - { - "epoch": 0.12000240485781279, - "flos": 11473067414760.0, - "grad_norm": 4.920171146052271, - "learning_rate": 3.915722323464577e-06, - "loss": 1.0609, - "num_input_tokens_seen": 20774795, - "step": 998 - }, - { - "epoch": 0.12012264774845187, - "flos": 35281655269320.0, - "grad_norm": 16.45761490553556, - "learning_rate": 3.91549843288704e-06, - "loss": 0.9303, - "num_input_tokens_seen": 20798195, - "step": 999 - }, - { - "epoch": 0.12024289063909097, - "flos": 19208070776880.0, - "grad_norm": 7.14429791769747, - "learning_rate": 3.915274251729916e-06, - "loss": 1.0309, - "num_input_tokens_seen": 20819205, - "step": 1000 - }, - { - "epoch": 0.12036313352973005, - "flos": 13859685550800.0, - "grad_norm": 5.573011089725299, - "learning_rate": 3.91504978002721e-06, - "loss": 1.1362, - "num_input_tokens_seen": 20837980, - "step": 1001 - }, - { - "epoch": 0.12048337642036915, - "flos": 12228023931600.0, - "grad_norm": 3.4044009448288923, - "learning_rate": 3.914825017812974e-06, - "loss": 0.9981, - "num_input_tokens_seen": 20854350, - "step": 1002 - }, - { - "epoch": 0.12060361931100824, - "flos": 16245383840040.0, - "grad_norm": 8.509767642227784, - "learning_rate": 3.9145999651213065e-06, - "loss": 0.9598, - "num_input_tokens_seen": 20873310, - "step": 1003 - }, - { - "epoch": 0.12072386220164733, - "flos": 11839673376120.0, - "grad_norm": 7.152908811222881, - "learning_rate": 3.9143746219863465e-06, - "loss": 1.1035, - "num_input_tokens_seen": 20890135, - "step": 1004 - }, - { - "epoch": 0.12084410509228642, - "flos": 39454159750200.0, - 
"grad_norm": 1.0379253273971014, - "learning_rate": 3.914148988442278e-06, - "loss": 0.9491, - "num_input_tokens_seen": 20945645, - "step": 1005 - }, - { - "epoch": 0.1209643479829255, - "flos": 19050498393120.0, - "grad_norm": 6.630116052776981, - "learning_rate": 3.91392306452333e-06, - "loss": 1.1771, - "num_input_tokens_seen": 20962440, - "step": 1006 - }, - { - "epoch": 0.1210845908735646, - "flos": 7749107679720.0, - "grad_norm": 7.944152207175508, - "learning_rate": 3.913696850263774e-06, - "loss": 0.8844, - "num_input_tokens_seen": 20976525, - "step": 1007 - }, - { - "epoch": 0.1212048337642037, - "flos": 14540332463520.0, - "grad_norm": 3.8021518788190107, - "learning_rate": 3.913470345697929e-06, - "loss": 1.0183, - "num_input_tokens_seen": 20994875, - "step": 1008 - }, - { - "epoch": 0.12132507665484278, - "flos": 15878195309040.0, - "grad_norm": 5.740607842350687, - "learning_rate": 3.913243550860153e-06, - "loss": 1.0884, - "num_input_tokens_seen": 21012360, - "step": 1009 - }, - { - "epoch": 0.12144531954548188, - "flos": 20886625017480.0, - "grad_norm": 10.205585551080825, - "learning_rate": 3.913016465784852e-06, - "loss": 0.9976, - "num_input_tokens_seen": 21032755, - "step": 1010 - }, - { - "epoch": 0.12156556243612096, - "flos": 14540853710040.0, - "grad_norm": 5.3569453212104445, - "learning_rate": 3.912789090506474e-06, - "loss": 0.9489, - "num_input_tokens_seen": 21051735, - "step": 1011 - }, - { - "epoch": 0.12168580532676006, - "flos": 11656646349480.0, - "grad_norm": 4.401797594990113, - "learning_rate": 3.9125614250595114e-06, - "loss": 0.9524, - "num_input_tokens_seen": 21067665, - "step": 1012 - }, - { - "epoch": 0.12180604821739914, - "flos": 10844065665240.0, - "grad_norm": 3.719480426764008, - "learning_rate": 3.912333469478502e-06, - "loss": 1.1196, - "num_input_tokens_seen": 21085350, - "step": 1013 - }, - { - "epoch": 0.12192629110803824, - "flos": 13702051843920.0, - "grad_norm": 3.68356294103301, - "learning_rate": 3.912105223798025e-06, - "loss": 1.0083, - "num_input_tokens_seen": 21104490, - "step": 1014 - }, - { - "epoch": 0.12204653399867733, - "flos": 34120742913480.0, - "grad_norm": 1.032296025908216, - "learning_rate": 3.9118766880527065e-06, - "loss": 0.9493, - "num_input_tokens_seen": 21158645, - "step": 1015 - }, - { - "epoch": 0.12216677688931642, - "flos": 12914097940440.0, - "grad_norm": 4.121153685509066, - "learning_rate": 3.9116478622772145e-06, - "loss": 0.9569, - "num_input_tokens_seen": 21176940, - "step": 1016 - }, - { - "epoch": 0.12228701977995551, - "flos": 19600468658280.0, - "grad_norm": 3.632045082333442, - "learning_rate": 3.911418746506261e-06, - "loss": 1.1091, - "num_input_tokens_seen": 21196790, - "step": 1017 - }, - { - "epoch": 0.1224072626705946, - "flos": 15484325672760.0, - "grad_norm": 5.872978024829222, - "learning_rate": 3.911189340774604e-06, - "loss": 1.0141, - "num_input_tokens_seen": 21216640, - "step": 1018 - }, - { - "epoch": 0.1225275055612337, - "flos": 14697843524160.0, - "grad_norm": 6.4508729565738205, - "learning_rate": 3.910959645117043e-06, - "loss": 1.0265, - "num_input_tokens_seen": 21235695, - "step": 1019 - }, - { - "epoch": 0.12264774845187278, - "flos": 41324970147000.0, - "grad_norm": 0.8018707307388223, - "learning_rate": 3.910729659568423e-06, - "loss": 0.8252, - "num_input_tokens_seen": 21292600, - "step": 1020 - }, - { - "epoch": 0.12276799134251187, - "flos": 18789053113320.0, - "grad_norm": 4.727999281236107, - "learning_rate": 3.9104993841636344e-06, - "loss": 1.0556, - 
"num_input_tokens_seen": 21312890, - "step": 1021 - }, - { - "epoch": 0.12288823423315097, - "flos": 14956283971080.0, - "grad_norm": 3.4625949722651024, - "learning_rate": 3.910268818937608e-06, - "loss": 1.0393, - "num_input_tokens_seen": 21330765, - "step": 1022 - }, - { - "epoch": 0.12300847712379005, - "flos": 8667216984240.0, - "grad_norm": 7.970396582334376, - "learning_rate": 3.9100379639253196e-06, - "loss": 1.1112, - "num_input_tokens_seen": 21347205, - "step": 1023 - }, - { - "epoch": 0.12312872001442915, - "flos": 11865005372760.0, - "grad_norm": 11.036467358506123, - "learning_rate": 3.909806819161791e-06, - "loss": 1.0826, - "num_input_tokens_seen": 21362400, - "step": 1024 - }, - { - "epoch": 0.12324896290506823, - "flos": 13045694434800.0, - "grad_norm": 7.398381814072377, - "learning_rate": 3.909575384682086e-06, - "loss": 1.0859, - "num_input_tokens_seen": 21381000, - "step": 1025 - }, - { - "epoch": 0.12336920579570733, - "flos": 13412515027080.0, - "grad_norm": 4.277753084009631, - "learning_rate": 3.9093436605213144e-06, - "loss": 0.9116, - "num_input_tokens_seen": 21401220, - "step": 1026 - }, - { - "epoch": 0.12348944868634643, - "flos": 16979423624880.0, - "grad_norm": 3.5834578706849785, - "learning_rate": 3.909111646714627e-06, - "loss": 1.0135, - "num_input_tokens_seen": 21421785, - "step": 1027 - }, - { - "epoch": 0.12360969157698551, - "flos": 13492221065760.0, - "grad_norm": 6.58991263579141, - "learning_rate": 3.9088793432972206e-06, - "loss": 0.9426, - "num_input_tokens_seen": 21440325, - "step": 1028 - }, - { - "epoch": 0.1237299344676246, - "flos": 9322777192800.0, - "grad_norm": 169.58288672136112, - "learning_rate": 3.908646750304336e-06, - "loss": 1.0479, - "num_input_tokens_seen": 21457730, - "step": 1029 - }, - { - "epoch": 0.12385017735826369, - "flos": 14541221648760.0, - "grad_norm": 3.064578849396752, - "learning_rate": 3.908413867771257e-06, - "loss": 1.091, - "num_input_tokens_seen": 21476360, - "step": 1030 - }, - { - "epoch": 0.12397042024890279, - "flos": 12286108022400.0, - "grad_norm": 6.483041502097601, - "learning_rate": 3.908180695733311e-06, - "loss": 1.0325, - "num_input_tokens_seen": 21495570, - "step": 1031 - }, - { - "epoch": 0.12409066313954187, - "flos": 14488472669400.0, - "grad_norm": 6.274051013937349, - "learning_rate": 3.907947234225871e-06, - "loss": 1.0534, - "num_input_tokens_seen": 21514300, - "step": 1032 - }, - { - "epoch": 0.12421090603018096, - "flos": 14721029211600.0, - "grad_norm": 4.959830092934762, - "learning_rate": 3.907713483284352e-06, - "loss": 1.0969, - "num_input_tokens_seen": 21533495, - "step": 1033 - }, - { - "epoch": 0.12433114892082006, - "flos": 17425367686200.0, - "grad_norm": 9.696528995498692, - "learning_rate": 3.907479442944216e-06, - "loss": 1.2069, - "num_input_tokens_seen": 21551620, - "step": 1034 - }, - { - "epoch": 0.12445139181145914, - "flos": 13963773077760.0, - "grad_norm": 5.5717898171375255, - "learning_rate": 3.907245113240963e-06, - "loss": 1.1532, - "num_input_tokens_seen": 21569460, - "step": 1035 - }, - { - "epoch": 0.12457163470209824, - "flos": 33181415809440.0, - "grad_norm": 7.254857770707387, - "learning_rate": 3.907010494210144e-06, - "loss": 0.9767, - "num_input_tokens_seen": 21591840, - "step": 1036 - }, - { - "epoch": 0.12469187759273732, - "flos": 14462619426240.0, - "grad_norm": 5.583175125537607, - "learning_rate": 3.9067755858873495e-06, - "loss": 1.1562, - "num_input_tokens_seen": 21608360, - "step": 1037 - }, - { - "epoch": 0.12481212048337642, - "flos": 
49573113692640.0, - "grad_norm": 0.8576984350525761, - "learning_rate": 3.906540388308214e-06, - "loss": 0.8928, - "num_input_tokens_seen": 21667665, - "step": 1038 - }, - { - "epoch": 0.12493236337401552, - "flos": 12915600356880.0, - "grad_norm": 11.569338534082704, - "learning_rate": 3.906304901508417e-06, - "loss": 1.0477, - "num_input_tokens_seen": 21686285, - "step": 1039 - }, - { - "epoch": 0.12505260626465461, - "flos": 21410803362600.0, - "grad_norm": 6.656880093691572, - "learning_rate": 3.9060691255236835e-06, - "loss": 0.9909, - "num_input_tokens_seen": 21706570, - "step": 1040 - }, - { - "epoch": 0.1251728491552937, - "flos": 17379854835000.0, - "grad_norm": 220.01840299449952, - "learning_rate": 3.905833060389778e-06, - "loss": 1.0474, - "num_input_tokens_seen": 21730410, - "step": 1041 - }, - { - "epoch": 0.12529309204593278, - "flos": 19308202962600.0, - "grad_norm": 16.099526799989924, - "learning_rate": 3.905596706142513e-06, - "loss": 0.9938, - "num_input_tokens_seen": 21751540, - "step": 1042 - }, - { - "epoch": 0.12541333493657186, - "flos": 21935288323320.0, - "grad_norm": 8.99523884615712, - "learning_rate": 3.9053600628177435e-06, - "loss": 1.0736, - "num_input_tokens_seen": 21770870, - "step": 1043 - }, - { - "epoch": 0.12553357782721097, - "flos": 16822188518280.0, - "grad_norm": 15.582937034966873, - "learning_rate": 3.905123130451367e-06, - "loss": 1.0772, - "num_input_tokens_seen": 21791690, - "step": 1044 - }, - { - "epoch": 0.12565382071785006, - "flos": 17687640828120.0, - "grad_norm": 11.9789364646133, - "learning_rate": 3.904885909079326e-06, - "loss": 1.0223, - "num_input_tokens_seen": 21810195, - "step": 1045 - }, - { - "epoch": 0.12577406360848914, - "flos": 15170070090480.0, - "grad_norm": 13.937887006783603, - "learning_rate": 3.904648398737607e-06, - "loss": 1.0085, - "num_input_tokens_seen": 21828480, - "step": 1046 - }, - { - "epoch": 0.12589430649912825, - "flos": 25790292644640.0, - "grad_norm": 9.570104828741714, - "learning_rate": 3.9044105994622406e-06, - "loss": 1.0052, - "num_input_tokens_seen": 21849345, - "step": 1047 - }, - { - "epoch": 0.12601454938976733, - "flos": 18028178915400.0, - "grad_norm": 42.347700749734486, - "learning_rate": 3.9041725112893005e-06, - "loss": 1.0427, - "num_input_tokens_seen": 21870290, - "step": 1048 - }, - { - "epoch": 0.12613479228040642, - "flos": 11001300771840.0, - "grad_norm": 9.061408036314551, - "learning_rate": 3.903934134254904e-06, - "loss": 0.9816, - "num_input_tokens_seen": 21887800, - "step": 1049 - }, - { - "epoch": 0.1262550351710455, - "flos": 15249040251720.0, - "grad_norm": 13.727192221077624, - "learning_rate": 3.903695468395213e-06, - "loss": 1.079, - "num_input_tokens_seen": 21905390, - "step": 1050 - }, - { - "epoch": 0.1263752780616846, - "flos": 22512062340000.0, - "grad_norm": 10.446958721714331, - "learning_rate": 3.903456513746434e-06, - "loss": 0.7971, - "num_input_tokens_seen": 21926085, - "step": 1051 - }, - { - "epoch": 0.1264955209523237, - "flos": 20493767212680.0, - "grad_norm": 4.599373003480636, - "learning_rate": 3.903217270344815e-06, - "loss": 1.0993, - "num_input_tokens_seen": 21946055, - "step": 1052 - }, - { - "epoch": 0.12661576384296278, - "flos": 20833446776280.0, - "grad_norm": 9.764477276305792, - "learning_rate": 3.902977738226648e-06, - "loss": 1.0466, - "num_input_tokens_seen": 21966510, - "step": 1053 - }, - { - "epoch": 0.12673600673360189, - "flos": 14803280159760.0, - "grad_norm": 5.415591950048821, - "learning_rate": 3.902737917428273e-06, - "loss": 
1.1463, - "num_input_tokens_seen": 21984395, - "step": 1054 - }, - { - "epoch": 0.12685624962424097, - "flos": 17974878027960.0, - "grad_norm": 3.7209141949235924, - "learning_rate": 3.902497807986068e-06, - "loss": 1.0606, - "num_input_tokens_seen": 22004135, - "step": 1055 - }, - { - "epoch": 0.12697649251488005, - "flos": 19601725782240.0, - "grad_norm": 7.446788469420948, - "learning_rate": 3.902257409936458e-06, - "loss": 1.061, - "num_input_tokens_seen": 22024620, - "step": 1056 - }, - { - "epoch": 0.12709673540551916, - "flos": 15091866468240.0, - "grad_norm": 8.41922708475011, - "learning_rate": 3.902016723315912e-06, - "loss": 1.061, - "num_input_tokens_seen": 22042280, - "step": 1057 - }, - { - "epoch": 0.12721697829615825, - "flos": 18028209576960.0, - "grad_norm": 5.810455290769834, - "learning_rate": 3.901775748160941e-06, - "loss": 0.9134, - "num_input_tokens_seen": 22061180, - "step": 1058 - }, - { - "epoch": 0.12733722118679733, - "flos": 44342276418360.0, - "grad_norm": 0.8234950861619217, - "learning_rate": 3.901534484508101e-06, - "loss": 0.8676, - "num_input_tokens_seen": 22123575, - "step": 1059 - }, - { - "epoch": 0.1274574640774364, - "flos": 19205771159880.0, - "grad_norm": 4.724710703879293, - "learning_rate": 3.901292932393991e-06, - "loss": 0.9664, - "num_input_tokens_seen": 22142175, - "step": 1060 - }, - { - "epoch": 0.12757770696807552, - "flos": 15799286470920.0, - "grad_norm": 13.235835151335193, - "learning_rate": 3.9010510918552555e-06, - "loss": 1.0859, - "num_input_tokens_seen": 22160970, - "step": 1061 - }, - { - "epoch": 0.1276979498587146, - "flos": 20335244320560.0, - "grad_norm": 9.241398291536433, - "learning_rate": 3.900808962928581e-06, - "loss": 0.9748, - "num_input_tokens_seen": 22178305, - "step": 1062 - }, - { - "epoch": 0.1278181927493537, - "flos": 12338151785880.0, - "grad_norm": 10.906247901369785, - "learning_rate": 3.900566545650698e-06, - "loss": 1.1156, - "num_input_tokens_seen": 22195695, - "step": 1063 - }, - { - "epoch": 0.1279384356399928, - "flos": 15009983458800.0, - "grad_norm": 8.432078336232378, - "learning_rate": 3.900323840058381e-06, - "loss": 1.0393, - "num_input_tokens_seen": 22213125, - "step": 1064 - }, - { - "epoch": 0.12805867853063188, - "flos": 18918472636920.0, - "grad_norm": 8.484885297010088, - "learning_rate": 3.900080846188449e-06, - "loss": 1.0407, - "num_input_tokens_seen": 22231435, - "step": 1065 - }, - { - "epoch": 0.12817892142127096, - "flos": 11631007737240.0, - "grad_norm": 3.4493663924728963, - "learning_rate": 3.8998375640777625e-06, - "loss": 1.0335, - "num_input_tokens_seen": 22249025, - "step": 1066 - }, - { - "epoch": 0.12829916431191005, - "flos": 43487223925440.0, - "grad_norm": 1.6117273564370083, - "learning_rate": 3.899593993763229e-06, - "loss": 0.7825, - "num_input_tokens_seen": 22309705, - "step": 1067 - }, - { - "epoch": 0.12841940720254916, - "flos": 21225476718960.0, - "grad_norm": 8.398501998298844, - "learning_rate": 3.899350135281796e-06, - "loss": 1.023, - "num_input_tokens_seen": 22330425, - "step": 1068 - }, - { - "epoch": 0.12853965009318824, - "flos": 18468757203720.0, - "grad_norm": 15.349757368425445, - "learning_rate": 3.8991059886704585e-06, - "loss": 1.0259, - "num_input_tokens_seen": 22349650, - "step": 1069 - }, - { - "epoch": 0.12865989298382732, - "flos": 21987516056160.0, - "grad_norm": 3.8926187944752004, - "learning_rate": 3.898861553966252e-06, - "loss": 1.0433, - "num_input_tokens_seen": 22369020, - "step": 1070 - }, - { - "epoch": 0.12878013587446643, - "flos": 
18422201859480.0, - "grad_norm": 4.245105617903201, - "learning_rate": 3.898616831206257e-06, - "loss": 1.1079, - "num_input_tokens_seen": 22389165, - "step": 1071 - }, - { - "epoch": 0.12890037876510552, - "flos": 16587853605600.0, - "grad_norm": 15.865884882369024, - "learning_rate": 3.8983718204276e-06, - "loss": 0.9908, - "num_input_tokens_seen": 22411105, - "step": 1072 - }, - { - "epoch": 0.1290206216557446, - "flos": 16770298062600.0, - "grad_norm": 5.4083286765401315, - "learning_rate": 3.898126521667446e-06, - "loss": 1.0692, - "num_input_tokens_seen": 22430980, - "step": 1073 - }, - { - "epoch": 0.12914086454638368, - "flos": 17189499695520.0, - "grad_norm": 8.224947257293138, - "learning_rate": 3.897880934963007e-06, - "loss": 1.073, - "num_input_tokens_seen": 22450250, - "step": 1074 - }, - { - "epoch": 0.1292611074370228, - "flos": 14384231834640.0, - "grad_norm": 9.165221314200833, - "learning_rate": 3.89763506035154e-06, - "loss": 1.0078, - "num_input_tokens_seen": 22467820, - "step": 1075 - }, - { - "epoch": 0.12938135032766188, - "flos": 19493866883400.0, - "grad_norm": 3.6668695624886447, - "learning_rate": 3.897388897870343e-06, - "loss": 1.0499, - "num_input_tokens_seen": 22488180, - "step": 1076 - }, - { - "epoch": 0.12950159321830096, - "flos": 20859238696320.0, - "grad_norm": 33.52423039291879, - "learning_rate": 3.89714244755676e-06, - "loss": 0.9639, - "num_input_tokens_seen": 22509260, - "step": 1077 - }, - { - "epoch": 0.12962183610894007, - "flos": 17451220929360.0, - "grad_norm": 9.425856930254612, - "learning_rate": 3.896895709448175e-06, - "loss": 1.0896, - "num_input_tokens_seen": 22528730, - "step": 1078 - }, - { - "epoch": 0.12974207899957915, - "flos": 7878925803600.0, - "grad_norm": 7.294164117270582, - "learning_rate": 3.896648683582019e-06, - "loss": 0.9973, - "num_input_tokens_seen": 22543785, - "step": 1079 - }, - { - "epoch": 0.12986232189021824, - "flos": 17582817423720.0, - "grad_norm": 10.72628901633793, - "learning_rate": 3.896401369995766e-06, - "loss": 1.0276, - "num_input_tokens_seen": 22563310, - "step": 1080 - }, - { - "epoch": 0.12998256478085732, - "flos": 17006074068600.0, - "grad_norm": 3.6685378473083508, - "learning_rate": 3.896153768726932e-06, - "loss": 1.011, - "num_input_tokens_seen": 22583340, - "step": 1081 - }, - { - "epoch": 0.13010280767149643, - "flos": 13203328141680.0, - "grad_norm": 14.052558334764283, - "learning_rate": 3.8959058798130806e-06, - "loss": 1.1013, - "num_input_tokens_seen": 22601035, - "step": 1082 - }, - { - "epoch": 0.1302230505621355, - "flos": 16192941476280.0, - "grad_norm": 3.6045705021463856, - "learning_rate": 3.895657703291814e-06, - "loss": 0.9698, - "num_input_tokens_seen": 22620860, - "step": 1083 - }, - { - "epoch": 0.1303432934527746, - "flos": 15143266338960.0, - "grad_norm": 6.033102274873353, - "learning_rate": 3.895409239200781e-06, - "loss": 1.0269, - "num_input_tokens_seen": 22636465, - "step": 1084 - }, - { - "epoch": 0.1304635363434137, - "flos": 14855017307640.0, - "grad_norm": 10.070619328384256, - "learning_rate": 3.895160487577673e-06, - "loss": 1.148, - "num_input_tokens_seen": 22653755, - "step": 1085 - }, - { - "epoch": 0.1305837792340528, - "flos": 45276764544000.0, - "grad_norm": 0.8422921973777254, - "learning_rate": 3.894911448460226e-06, - "loss": 0.8633, - "num_input_tokens_seen": 22712790, - "step": 1086 - }, - { - "epoch": 0.13070402212469187, - "flos": 18811932185160.0, - "grad_norm": 7.918634779045944, - "learning_rate": 3.8946621218862195e-06, - "loss": 0.9562, - 
"num_input_tokens_seen": 22733510, - "step": 1087 - }, - { - "epoch": 0.13082426501533098, - "flos": 19706917125360.0, - "grad_norm": 5.437542443822314, - "learning_rate": 3.894412507893475e-06, - "loss": 1.1096, - "num_input_tokens_seen": 22753510, - "step": 1088 - }, - { - "epoch": 0.13094450790597006, - "flos": 17660990384400.0, - "grad_norm": 15.716847238214635, - "learning_rate": 3.894162606519859e-06, - "loss": 0.9417, - "num_input_tokens_seen": 22772180, - "step": 1089 - }, - { - "epoch": 0.13106475079660915, - "flos": 13518472909200.0, - "grad_norm": 7.214826301921241, - "learning_rate": 3.893912417803282e-06, - "loss": 1.0004, - "num_input_tokens_seen": 22791615, - "step": 1090 - }, - { - "epoch": 0.13118499368724823, - "flos": 20597824078080.0, - "grad_norm": 4.531112142636232, - "learning_rate": 3.8936619417816975e-06, - "loss": 0.9884, - "num_input_tokens_seen": 22811665, - "step": 1091 - }, - { - "epoch": 0.13130523657788734, - "flos": 10083804698520.0, - "grad_norm": 3.5299445245722514, - "learning_rate": 3.8934111784931015e-06, - "loss": 0.9491, - "num_input_tokens_seen": 22828835, - "step": 1092 - }, - { - "epoch": 0.13142547946852642, - "flos": 50255630960520.0, - "grad_norm": 0.9867100948579353, - "learning_rate": 3.893160127975535e-06, - "loss": 0.8733, - "num_input_tokens_seen": 22889245, - "step": 1093 - }, - { - "epoch": 0.1315457223591655, - "flos": 32738353934760.0, - "grad_norm": 7.116525797227705, - "learning_rate": 3.8929087902670826e-06, - "loss": 1.0435, - "num_input_tokens_seen": 22910595, - "step": 1094 - }, - { - "epoch": 0.13166596524980462, - "flos": 45014859340800.0, - "grad_norm": 0.8881566828446179, - "learning_rate": 3.8926571654058715e-06, - "loss": 0.8554, - "num_input_tokens_seen": 22966960, - "step": 1095 - }, - { - "epoch": 0.1317862081404437, - "flos": 16769684831400.0, - "grad_norm": 19.996928932914845, - "learning_rate": 3.892405253430074e-06, - "loss": 0.9825, - "num_input_tokens_seen": 22984200, - "step": 1096 - }, - { - "epoch": 0.13190645103108278, - "flos": 14378774076960.0, - "grad_norm": 3.8802933992965047, - "learning_rate": 3.892153054377904e-06, - "loss": 1.0539, - "num_input_tokens_seen": 23001325, - "step": 1097 - }, - { - "epoch": 0.13202669392172187, - "flos": 38239093531320.0, - "grad_norm": 0.9743032949513095, - "learning_rate": 3.891900568287619e-06, - "loss": 0.8676, - "num_input_tokens_seen": 23053430, - "step": 1098 - }, - { - "epoch": 0.13214693681236098, - "flos": 11210487657240.0, - "grad_norm": 7.776295014989866, - "learning_rate": 3.891647795197523e-06, - "loss": 0.9521, - "num_input_tokens_seen": 23069190, - "step": 1099 - }, - { - "epoch": 0.13226717970300006, - "flos": 13727690456160.0, - "grad_norm": 3.735993230221351, - "learning_rate": 3.8913947351459605e-06, - "loss": 0.9077, - "num_input_tokens_seen": 23086450, - "step": 1100 - }, - { - "epoch": 0.13238742259363914, - "flos": 14692998997680.0, - "grad_norm": 4.069412219744884, - "learning_rate": 3.89114138817132e-06, - "loss": 0.9053, - "num_input_tokens_seen": 23102835, - "step": 1101 - }, - { - "epoch": 0.13250766548427825, - "flos": 14934110115120.0, - "grad_norm": 3.9369494340869577, - "learning_rate": 3.890887754312035e-06, - "loss": 1.0752, - "num_input_tokens_seen": 23120800, - "step": 1102 - }, - { - "epoch": 0.13262790837491734, - "flos": 16087903440960.0, - "grad_norm": 5.559876069533325, - "learning_rate": 3.890633833606581e-06, - "loss": 1.1021, - "num_input_tokens_seen": 23140210, - "step": 1103 - }, - { - "epoch": 0.13274815126555642, - "flos": 
13964478293640.0, - "grad_norm": 3.966289448366094, - "learning_rate": 3.890379626093477e-06, - "loss": 0.9197, - "num_input_tokens_seen": 23159680, - "step": 1104 - }, - { - "epoch": 0.1328683941561955, - "flos": 15138207181560.0, - "grad_norm": 10.234043634306369, - "learning_rate": 3.890125131811287e-06, - "loss": 1.1341, - "num_input_tokens_seen": 23177450, - "step": 1105 - }, - { - "epoch": 0.1329886370468346, - "flos": 9663836526600.0, - "grad_norm": 6.218079190520179, - "learning_rate": 3.889870350798618e-06, - "loss": 0.9842, - "num_input_tokens_seen": 23194515, - "step": 1106 - }, - { - "epoch": 0.1331088799374737, - "flos": 14934079453560.0, - "grad_norm": 3.32312897549044, - "learning_rate": 3.889615283094119e-06, - "loss": 1.0176, - "num_input_tokens_seen": 23213425, - "step": 1107 - }, - { - "epoch": 0.13322912282811278, - "flos": 12942036169680.0, - "grad_norm": 9.322602188233637, - "learning_rate": 3.889359928736485e-06, - "loss": 1.0739, - "num_input_tokens_seen": 23231090, - "step": 1108 - }, - { - "epoch": 0.1333493657187519, - "flos": 17397889380360.0, - "grad_norm": 7.1514564684658, - "learning_rate": 3.889104287764451e-06, - "loss": 1.1275, - "num_input_tokens_seen": 23251185, - "step": 1109 - }, - { - "epoch": 0.13346960860939097, - "flos": 15743072735280.0, - "grad_norm": 3.633891283876983, - "learning_rate": 3.888848360216798e-06, - "loss": 1.1313, - "num_input_tokens_seen": 23268550, - "step": 1110 - }, - { - "epoch": 0.13358985150003005, - "flos": 48646260295440.0, - "grad_norm": 0.799747376490419, - "learning_rate": 3.888592146132351e-06, - "loss": 0.8121, - "num_input_tokens_seen": 23329540, - "step": 1111 - }, - { - "epoch": 0.13371009439066917, - "flos": 19103860603680.0, - "grad_norm": 8.171129940341979, - "learning_rate": 3.888335645549978e-06, - "loss": 0.9999, - "num_input_tokens_seen": 23349680, - "step": 1112 - }, - { - "epoch": 0.13383033728130825, - "flos": 18736212149280.0, - "grad_norm": 17.240298874249177, - "learning_rate": 3.888078858508588e-06, - "loss": 1.0493, - "num_input_tokens_seen": 23369260, - "step": 1113 - }, - { - "epoch": 0.13395058017194733, - "flos": 16034633215080.0, - "grad_norm": 4.273778926174639, - "learning_rate": 3.8878217850471365e-06, - "loss": 1.0749, - "num_input_tokens_seen": 23388895, - "step": 1114 - }, - { - "epoch": 0.13407082306258641, - "flos": 18368165094600.0, - "grad_norm": 3.4036047226928985, - "learning_rate": 3.887564425204621e-06, - "loss": 0.9631, - "num_input_tokens_seen": 23410300, - "step": 1115 - }, - { - "epoch": 0.13419106595322552, - "flos": 48934729505760.0, - "grad_norm": 0.8063654025974226, - "learning_rate": 3.887306779020083e-06, - "loss": 0.799, - "num_input_tokens_seen": 23464675, - "step": 1116 - }, - { - "epoch": 0.1343113088438646, - "flos": 14514908482200.0, - "grad_norm": 5.170637368734168, - "learning_rate": 3.887048846532608e-06, - "loss": 0.928, - "num_input_tokens_seen": 23481370, - "step": 1117 - }, - { - "epoch": 0.1344315517345037, - "flos": 48253009438440.0, - "grad_norm": 0.7483034416025787, - "learning_rate": 3.8867906277813224e-06, - "loss": 0.8271, - "num_input_tokens_seen": 23539245, - "step": 1118 - }, - { - "epoch": 0.1345517946251428, - "flos": 29093517668760.0, - "grad_norm": 4.609676441745451, - "learning_rate": 3.886532122805399e-06, - "loss": 0.9709, - "num_input_tokens_seen": 23561445, - "step": 1119 - }, - { - "epoch": 0.13467203751578188, - "flos": 16216065840600.0, - "grad_norm": 6.953468708587019, - "learning_rate": 3.886273331644053e-06, - "loss": 1.1215, - 
"num_input_tokens_seen": 23580035, - "step": 1120 - }, - { - "epoch": 0.13479228040642097, - "flos": 12101823871800.0, - "grad_norm": 4.565427703041013, - "learning_rate": 3.886014254336542e-06, - "loss": 1.0563, - "num_input_tokens_seen": 23596230, - "step": 1121 - }, - { - "epoch": 0.13491252329706005, - "flos": 16872913834680.0, - "grad_norm": 3.4381359552438764, - "learning_rate": 3.885754890922168e-06, - "loss": 1.1485, - "num_input_tokens_seen": 23616280, - "step": 1122 - }, - { - "epoch": 0.13503276618769916, - "flos": 24344448254040.0, - "grad_norm": 4.810113488521964, - "learning_rate": 3.885495241440277e-06, - "loss": 1.0176, - "num_input_tokens_seen": 23640095, - "step": 1123 - }, - { - "epoch": 0.13515300907833824, - "flos": 12548013225600.0, - "grad_norm": 8.02006874568709, - "learning_rate": 3.885235305930257e-06, - "loss": 0.974, - "num_input_tokens_seen": 23658015, - "step": 1124 - }, - { - "epoch": 0.13527325196897733, - "flos": 14378866061640.0, - "grad_norm": 5.438596417611105, - "learning_rate": 3.884975084431539e-06, - "loss": 1.0971, - "num_input_tokens_seen": 23672685, - "step": 1125 - }, - { - "epoch": 0.13539349485961644, - "flos": 12888765943800.0, - "grad_norm": 6.105744905382911, - "learning_rate": 3.8847145769836e-06, - "loss": 1.1498, - "num_input_tokens_seen": 23688825, - "step": 1126 - }, - { - "epoch": 0.13551373775025552, - "flos": 13701561258960.0, - "grad_norm": 8.826686829758094, - "learning_rate": 3.884453783625959e-06, - "loss": 0.9076, - "num_input_tokens_seen": 23706155, - "step": 1127 - }, - { - "epoch": 0.1356339806408946, - "flos": 14803678760040.0, - "grad_norm": 4.337897660113611, - "learning_rate": 3.884192704398176e-06, - "loss": 1.0831, - "num_input_tokens_seen": 23723075, - "step": 1128 - }, - { - "epoch": 0.13575422353153369, - "flos": 36094603892280.0, - "grad_norm": 4.9809512633914865, - "learning_rate": 3.883931339339858e-06, - "loss": 0.966, - "num_input_tokens_seen": 23747180, - "step": 1129 - }, - { - "epoch": 0.1358744664221728, - "flos": 12863219316240.0, - "grad_norm": 8.616097984018916, - "learning_rate": 3.883669688490654e-06, - "loss": 1.0147, - "num_input_tokens_seen": 23764670, - "step": 1130 - }, - { - "epoch": 0.13599470931281188, - "flos": 13073234063760.0, - "grad_norm": 7.031708426895906, - "learning_rate": 3.883407751890256e-06, - "loss": 1.0827, - "num_input_tokens_seen": 23782995, - "step": 1131 - }, - { - "epoch": 0.13611495220345096, - "flos": 18992506287000.0, - "grad_norm": 37.95299152148204, - "learning_rate": 3.8831455295783994e-06, - "loss": 1.0762, - "num_input_tokens_seen": 23801965, - "step": 1132 - }, - { - "epoch": 0.13623519509409007, - "flos": 15403577141040.0, - "grad_norm": 7.698443555200455, - "learning_rate": 3.882883021594864e-06, - "loss": 0.9754, - "num_input_tokens_seen": 23819825, - "step": 1133 - }, - { - "epoch": 0.13635543798472916, - "flos": 10476877134240.0, - "grad_norm": 6.583634703615058, - "learning_rate": 3.8826202279794705e-06, - "loss": 1.0973, - "num_input_tokens_seen": 23836605, - "step": 1134 - }, - { - "epoch": 0.13647568087536824, - "flos": 16268937466200.0, - "grad_norm": 7.039348092673371, - "learning_rate": 3.882357148772085e-06, - "loss": 0.9269, - "num_input_tokens_seen": 23853750, - "step": 1135 - }, - { - "epoch": 0.13659592376600732, - "flos": 14147505320280.0, - "grad_norm": 4.854510555719877, - "learning_rate": 3.882093784012617e-06, - "loss": 1.0766, - "num_input_tokens_seen": 23872110, - "step": 1136 - }, - { - "epoch": 0.13671616665664643, - "flos": 
15218097189600.0, - "grad_norm": 3.889716352501488, - "learning_rate": 3.881830133741019e-06, - "loss": 1.0613, - "num_input_tokens_seen": 23890695, - "step": 1137 - }, - { - "epoch": 0.13683640954728551, - "flos": 16187943642000.0, - "grad_norm": 7.424849578669533, - "learning_rate": 3.881566197997285e-06, - "loss": 0.9933, - "num_input_tokens_seen": 23906850, - "step": 1138 - }, - { - "epoch": 0.1369566524379246, - "flos": 15432496540200.0, - "grad_norm": 7.17368428464902, - "learning_rate": 3.881301976821456e-06, - "loss": 0.9798, - "num_input_tokens_seen": 23926600, - "step": 1139 - }, - { - "epoch": 0.1370768953285637, - "flos": 13203542772600.0, - "grad_norm": 4.384776064853941, - "learning_rate": 3.881037470253612e-06, - "loss": 1.1266, - "num_input_tokens_seen": 23945835, - "step": 1140 - }, - { - "epoch": 0.1371971382192028, - "flos": 10555264725840.0, - "grad_norm": 8.17754988260662, - "learning_rate": 3.88077267833388e-06, - "loss": 1.0273, - "num_input_tokens_seen": 23962070, - "step": 1141 - }, - { - "epoch": 0.13731738110984187, - "flos": 13490258725920.0, - "grad_norm": 2.768473047738093, - "learning_rate": 3.880507601102427e-06, - "loss": 1.064, - "num_input_tokens_seen": 23979725, - "step": 1142 - }, - { - "epoch": 0.13743762400048098, - "flos": 12889593805920.0, - "grad_norm": 3.0184212009974454, - "learning_rate": 3.880242238599467e-06, - "loss": 1.052, - "num_input_tokens_seen": 23995970, - "step": 1143 - }, - { - "epoch": 0.13755786689112007, - "flos": 14933711514840.0, - "grad_norm": 3.7215042813640475, - "learning_rate": 3.879976590865254e-06, - "loss": 1.0672, - "num_input_tokens_seen": 24015145, - "step": 1144 - }, - { - "epoch": 0.13767810978175915, - "flos": 15169947444240.0, - "grad_norm": 5.639663572084671, - "learning_rate": 3.879710657940087e-06, - "loss": 1.1049, - "num_input_tokens_seen": 24033815, - "step": 1145 - }, - { - "epoch": 0.13779835267239823, - "flos": 21804489029520.0, - "grad_norm": 10.115742536774674, - "learning_rate": 3.879444439864308e-06, - "loss": 0.9302, - "num_input_tokens_seen": 24053110, - "step": 1146 - }, - { - "epoch": 0.13791859556303734, - "flos": 16110291927840.0, - "grad_norm": 2.9776638126232213, - "learning_rate": 3.879177936678301e-06, - "loss": 1.0858, - "num_input_tokens_seen": 24071835, - "step": 1147 - }, - { - "epoch": 0.13803883845367643, - "flos": 25133260681200.0, - "grad_norm": 4.05970104338306, - "learning_rate": 3.878911148422496e-06, - "loss": 1.0071, - "num_input_tokens_seen": 24093030, - "step": 1148 - }, - { - "epoch": 0.1381590813443155, - "flos": 22826716522560.0, - "grad_norm": 5.926283350514548, - "learning_rate": 3.878644075137364e-06, - "loss": 0.9314, - "num_input_tokens_seen": 24113400, - "step": 1149 - }, - { - "epoch": 0.13827932423495462, - "flos": 12626738094360.0, - "grad_norm": 5.453638440638592, - "learning_rate": 3.878376716863418e-06, - "loss": 1.0235, - "num_input_tokens_seen": 24129420, - "step": 1150 - }, - { - "epoch": 0.1383995671255937, - "flos": 13780378112400.0, - "grad_norm": 4.171789504185983, - "learning_rate": 3.878109073641219e-06, - "loss": 0.9272, - "num_input_tokens_seen": 24148170, - "step": 1151 - }, - { - "epoch": 0.13851981001623279, - "flos": 20155191465240.0, - "grad_norm": 18.05221275228902, - "learning_rate": 3.877841145511366e-06, - "loss": 1.0391, - "num_input_tokens_seen": 24170630, - "step": 1152 - }, - { - "epoch": 0.13864005290687187, - "flos": 15063682946520.0, - "grad_norm": 2.610728112892927, - "learning_rate": 3.8775729325145035e-06, - "loss": 1.0578, - 
"num_input_tokens_seen": 24189585, - "step": 1153 - }, - { - "epoch": 0.13876029579751098, - "flos": 51321403416840.0, - "grad_norm": 0.7910033425213473, - "learning_rate": 3.877304434691321e-06, - "loss": 0.8778, - "num_input_tokens_seen": 24256155, - "step": 1154 - }, - { - "epoch": 0.13888053868815006, - "flos": 15586880121720.0, - "grad_norm": 3.8896978414904804, - "learning_rate": 3.877035652082548e-06, - "loss": 1.0246, - "num_input_tokens_seen": 24275320, - "step": 1155 - }, - { - "epoch": 0.13900078157878915, - "flos": 13910717482800.0, - "grad_norm": 4.966276884891952, - "learning_rate": 3.87676658472896e-06, - "loss": 1.0706, - "num_input_tokens_seen": 24293850, - "step": 1156 - }, - { - "epoch": 0.13912102446942826, - "flos": 16088056748760.0, - "grad_norm": 4.138315845277922, - "learning_rate": 3.876497232671372e-06, - "loss": 1.0761, - "num_input_tokens_seen": 24313525, - "step": 1157 - }, - { - "epoch": 0.13924126736006734, - "flos": 21122615654400.0, - "grad_norm": 5.161597051466154, - "learning_rate": 3.876227595950647e-06, - "loss": 1.0528, - "num_input_tokens_seen": 24332675, - "step": 1158 - }, - { - "epoch": 0.13936151025070642, - "flos": 19523062236600.0, - "grad_norm": 2.9853113113699643, - "learning_rate": 3.875957674607686e-06, - "loss": 1.0149, - "num_input_tokens_seen": 24354670, - "step": 1159 - }, - { - "epoch": 0.1394817531413455, - "flos": 11604449278200.0, - "grad_norm": 3.484686456966988, - "learning_rate": 3.8756874686834386e-06, - "loss": 1.1111, - "num_input_tokens_seen": 24372605, - "step": 1160 - }, - { - "epoch": 0.13960199603198462, - "flos": 22042074067560.0, - "grad_norm": 3.2369274443189417, - "learning_rate": 3.875416978218893e-06, - "loss": 1.0316, - "num_input_tokens_seen": 24395520, - "step": 1161 - }, - { - "epoch": 0.1397222389226237, - "flos": 12836538210960.0, - "grad_norm": 12.786387262192163, - "learning_rate": 3.8751462032550835e-06, - "loss": 1.0609, - "num_input_tokens_seen": 24412245, - "step": 1162 - }, - { - "epoch": 0.13984248181326278, - "flos": 11944588765200.0, - "grad_norm": 3.586924830461308, - "learning_rate": 3.874875143833085e-06, - "loss": 1.0447, - "num_input_tokens_seen": 24430205, - "step": 1163 - }, - { - "epoch": 0.1399627247039019, - "flos": 38714023863000.0, - "grad_norm": 4.097505666695121, - "learning_rate": 3.874603799994019e-06, - "loss": 0.9162, - "num_input_tokens_seen": 24453460, - "step": 1164 - }, - { - "epoch": 0.14008296759454097, - "flos": 8274205871640.0, - "grad_norm": 5.040001691484925, - "learning_rate": 3.874332171779046e-06, - "loss": 1.1083, - "num_input_tokens_seen": 24468060, - "step": 1165 - }, - { - "epoch": 0.14020321048518006, - "flos": 15642051364320.0, - "grad_norm": 4.167193926193724, - "learning_rate": 3.874060259229373e-06, - "loss": 0.9917, - "num_input_tokens_seen": 24489355, - "step": 1166 - }, - { - "epoch": 0.14032345337581917, - "flos": 16638762891360.0, - "grad_norm": 4.199988119925999, - "learning_rate": 3.873788062386249e-06, - "loss": 1.1465, - "num_input_tokens_seen": 24507335, - "step": 1167 - }, - { - "epoch": 0.14044369626645825, - "flos": 21124700640480.0, - "grad_norm": 12.555884597952842, - "learning_rate": 3.873515581290965e-06, - "loss": 1.0593, - "num_input_tokens_seen": 24531860, - "step": 1168 - }, - { - "epoch": 0.14056393915709733, - "flos": 12994110594720.0, - "grad_norm": 4.563271862216953, - "learning_rate": 3.8732428159848575e-06, - "loss": 0.9876, - "num_input_tokens_seen": 24550555, - "step": 1169 - }, - { - "epoch": 0.14068418204773642, - "flos": 
18998270660280.0, - "grad_norm": 2.8396674393988173, - "learning_rate": 3.872969766509304e-06, - "loss": 1.0215, - "num_input_tokens_seen": 24570830, - "step": 1170 - }, - { - "epoch": 0.14080442493837553, - "flos": 46724877890040.0, - "grad_norm": 0.8493676554268994, - "learning_rate": 3.872696432905726e-06, - "loss": 0.8175, - "num_input_tokens_seen": 24631370, - "step": 1171 - }, - { - "epoch": 0.1409246678290146, - "flos": 18343875591000.0, - "grad_norm": 4.910703545100368, - "learning_rate": 3.872422815215589e-06, - "loss": 0.93, - "num_input_tokens_seen": 24650170, - "step": 1172 - }, - { - "epoch": 0.1410449107196537, - "flos": 15534560404200.0, - "grad_norm": 4.192168409124207, - "learning_rate": 3.8721489134803994e-06, - "loss": 0.9711, - "num_input_tokens_seen": 24668680, - "step": 1173 - }, - { - "epoch": 0.1411651536102928, - "flos": 11808178405920.0, - "grad_norm": 5.517151705037787, - "learning_rate": 3.871874727741707e-06, - "loss": 0.951, - "num_input_tokens_seen": 24685630, - "step": 1174 - }, - { - "epoch": 0.1412853965009319, - "flos": 14904730792560.0, - "grad_norm": 2.874395389837087, - "learning_rate": 3.871600258041108e-06, - "loss": 1.1958, - "num_input_tokens_seen": 24704875, - "step": 1175 - }, - { - "epoch": 0.14140563939157097, - "flos": 14432534887800.0, - "grad_norm": 4.821287381158558, - "learning_rate": 3.871325504420238e-06, - "loss": 1.0797, - "num_input_tokens_seen": 24723585, - "step": 1176 - }, - { - "epoch": 0.14152588228221005, - "flos": 14960239312320.0, - "grad_norm": 4.21068746358782, - "learning_rate": 3.871050466920776e-06, - "loss": 1.051, - "num_input_tokens_seen": 24743210, - "step": 1177 - }, - { - "epoch": 0.14164612517284916, - "flos": 12915140433480.0, - "grad_norm": 4.262234748350779, - "learning_rate": 3.870775145584447e-06, - "loss": 1.032, - "num_input_tokens_seen": 24760710, - "step": 1178 - }, - { - "epoch": 0.14176636806348825, - "flos": 16164114061800.0, - "grad_norm": 19.74040510602556, - "learning_rate": 3.8704995404530145e-06, - "loss": 0.8647, - "num_input_tokens_seen": 24776055, - "step": 1179 - }, - { - "epoch": 0.14188661095412733, - "flos": 15695014974600.0, - "grad_norm": 6.688512137284024, - "learning_rate": 3.87022365156829e-06, - "loss": 1.0759, - "num_input_tokens_seen": 24796490, - "step": 1180 - }, - { - "epoch": 0.14200685384476644, - "flos": 17320053696840.0, - "grad_norm": 9.326909942980722, - "learning_rate": 3.869947478972123e-06, - "loss": 1.0386, - "num_input_tokens_seen": 24817235, - "step": 1181 - }, - { - "epoch": 0.14212709673540552, - "flos": 17082376674120.0, - "grad_norm": 4.7880571564056975, - "learning_rate": 3.869671022706412e-06, - "loss": 1.0446, - "num_input_tokens_seen": 24835685, - "step": 1182 - }, - { - "epoch": 0.1422473396260446, - "flos": 18814446433080.0, - "grad_norm": 15.36552571483202, - "learning_rate": 3.869394282813092e-06, - "loss": 0.8857, - "num_input_tokens_seen": 24854605, - "step": 1183 - }, - { - "epoch": 0.1423675825166837, - "flos": 12075786659280.0, - "grad_norm": 5.944632332150839, - "learning_rate": 3.869117259334147e-06, - "loss": 1.1106, - "num_input_tokens_seen": 24872250, - "step": 1184 - }, - { - "epoch": 0.1424878254073228, - "flos": 12703837900440.0, - "grad_norm": 5.880908997528106, - "learning_rate": 3.868839952311599e-06, - "loss": 1.0519, - "num_input_tokens_seen": 24889925, - "step": 1185 - }, - { - "epoch": 0.14260806829796188, - "flos": 14410422354960.0, - "grad_norm": 6.839966167790135, - "learning_rate": 3.868562361787516e-06, - "loss": 1.0301, - 
"num_input_tokens_seen": 24908775, - "step": 1186 - }, - { - "epoch": 0.14272831118860096, - "flos": 16481711754120.0, - "grad_norm": 4.347472427745777, - "learning_rate": 3.868284487804009e-06, - "loss": 0.9246, - "num_input_tokens_seen": 24927725, - "step": 1187 - }, - { - "epoch": 0.14284855407924008, - "flos": 19389380756160.0, - "grad_norm": 5.8197688111206425, - "learning_rate": 3.86800633040323e-06, - "loss": 1.0223, - "num_input_tokens_seen": 24948035, - "step": 1188 - }, - { - "epoch": 0.14296879696987916, - "flos": 20074013671680.0, - "grad_norm": 4.049282462523911, - "learning_rate": 3.867727889627376e-06, - "loss": 1.0138, - "num_input_tokens_seen": 24967370, - "step": 1189 - }, - { - "epoch": 0.14308903986051824, - "flos": 13753880976480.0, - "grad_norm": 3.0462592652077825, - "learning_rate": 3.867449165518687e-06, - "loss": 1.0109, - "num_input_tokens_seen": 24983560, - "step": 1190 - }, - { - "epoch": 0.14320928275115732, - "flos": 12364955537400.0, - "grad_norm": 4.6881511998839835, - "learning_rate": 3.867170158119444e-06, - "loss": 0.9253, - "num_input_tokens_seen": 25002280, - "step": 1191 - }, - { - "epoch": 0.14332952564179643, - "flos": 15245176895160.0, - "grad_norm": 5.177154888474732, - "learning_rate": 3.866890867471972e-06, - "loss": 0.9836, - "num_input_tokens_seen": 25020470, - "step": 1192 - }, - { - "epoch": 0.14344976853243552, - "flos": 11602456276800.0, - "grad_norm": 6.915542633765885, - "learning_rate": 3.86661129361864e-06, - "loss": 1.1179, - "num_input_tokens_seen": 25034680, - "step": 1193 - }, - { - "epoch": 0.1435700114230746, - "flos": 13413649504800.0, - "grad_norm": 3.1745606125900028, - "learning_rate": 3.866331436601859e-06, - "loss": 1.0798, - "num_input_tokens_seen": 25052395, - "step": 1194 - }, - { - "epoch": 0.1436902543137137, - "flos": 14016307426200.0, - "grad_norm": 4.2509443692426165, - "learning_rate": 3.866051296464083e-06, - "loss": 0.9741, - "num_input_tokens_seen": 25070950, - "step": 1195 - }, - { - "epoch": 0.1438104972043528, - "flos": 10372299022320.0, - "grad_norm": 5.491506954913583, - "learning_rate": 3.86577087324781e-06, - "loss": 1.0773, - "num_input_tokens_seen": 25087160, - "step": 1196 - }, - { - "epoch": 0.14393074009499188, - "flos": 12102467764560.0, - "grad_norm": 5.06942294506699, - "learning_rate": 3.865490166995578e-06, - "loss": 0.9951, - "num_input_tokens_seen": 25105110, - "step": 1197 - }, - { - "epoch": 0.144050982985631, - "flos": 21720337064640.0, - "grad_norm": 6.037510286247384, - "learning_rate": 3.86520917774997e-06, - "loss": 1.058, - "num_input_tokens_seen": 25124265, - "step": 1198 - }, - { - "epoch": 0.14417122587627007, - "flos": 12653081922480.0, - "grad_norm": 4.278735682479971, - "learning_rate": 3.864927905553614e-06, - "loss": 0.9788, - "num_input_tokens_seen": 25141895, - "step": 1199 - }, - { - "epoch": 0.14429146876690915, - "flos": 15351686685360.0, - "grad_norm": 3.7960298327528856, - "learning_rate": 3.8646463504491765e-06, - "loss": 1.1126, - "num_input_tokens_seen": 25161750, - "step": 1200 - }, - { - "epoch": 0.14441171165754824, - "flos": 16533663532920.0, - "grad_norm": 7.262767730347741, - "learning_rate": 3.8643645124793705e-06, - "loss": 1.0664, - "num_input_tokens_seen": 25180370, - "step": 1201 - }, - { - "epoch": 0.14453195454818735, - "flos": 30617443035360.0, - "grad_norm": 4.2796917606484355, - "learning_rate": 3.8640823916869515e-06, - "loss": 0.9718, - "num_input_tokens_seen": 25204400, - "step": 1202 - }, - { - "epoch": 0.14465219743882643, - "flos": 
19392508235280.0, - "grad_norm": 2.752634866928835, - "learning_rate": 3.863799988114714e-06, - "loss": 1.0113, - "num_input_tokens_seen": 25226150, - "step": 1203 - }, - { - "epoch": 0.1447724403294655, - "flos": 11813789471400.0, - "grad_norm": 4.944604711518791, - "learning_rate": 3.863517301805502e-06, - "loss": 0.9303, - "num_input_tokens_seen": 25244260, - "step": 1204 - }, - { - "epoch": 0.14489268322010462, - "flos": 14243743487880.0, - "grad_norm": 6.8267010703312705, - "learning_rate": 3.863234332802196e-06, - "loss": 1.1937, - "num_input_tokens_seen": 25256185, - "step": 1205 - }, - { - "epoch": 0.1450129261107437, - "flos": 19313170135320.0, - "grad_norm": 5.0851883343803665, - "learning_rate": 3.862951081147723e-06, - "loss": 0.9464, - "num_input_tokens_seen": 25276070, - "step": 1206 - }, - { - "epoch": 0.1451331690013828, - "flos": 18290237426400.0, - "grad_norm": 5.490507256814601, - "learning_rate": 3.862667546885053e-06, - "loss": 1.0106, - "num_input_tokens_seen": 25294340, - "step": 1207 - }, - { - "epoch": 0.14525341189202187, - "flos": 18315232145880.0, - "grad_norm": 8.355751806799669, - "learning_rate": 3.8623837300571965e-06, - "loss": 0.9595, - "num_input_tokens_seen": 25313045, - "step": 1208 - }, - { - "epoch": 0.14537365478266098, - "flos": 16401024545520.0, - "grad_norm": 5.254018426427923, - "learning_rate": 3.8620996307072085e-06, - "loss": 1.0741, - "num_input_tokens_seen": 25333470, - "step": 1209 - }, - { - "epoch": 0.14549389767330007, - "flos": 14619793209720.0, - "grad_norm": 4.023775016183109, - "learning_rate": 3.861815248878188e-06, - "loss": 0.8744, - "num_input_tokens_seen": 25350675, - "step": 1210 - }, - { - "epoch": 0.14561414056393915, - "flos": 10686247989000.0, - "grad_norm": 4.1806023567990565, - "learning_rate": 3.861530584613274e-06, - "loss": 1.029, - "num_input_tokens_seen": 25368395, - "step": 1211 - }, - { - "epoch": 0.14573438345457826, - "flos": 13780715389560.0, - "grad_norm": 6.030993836650378, - "learning_rate": 3.86124563795565e-06, - "loss": 1.0373, - "num_input_tokens_seen": 25386930, - "step": 1212 - }, - { - "epoch": 0.14585462634521734, - "flos": 17662400816160.0, - "grad_norm": 2.84263136104729, - "learning_rate": 3.860960408948543e-06, - "loss": 0.9401, - "num_input_tokens_seen": 25408400, - "step": 1213 - }, - { - "epoch": 0.14597486923585642, - "flos": 10920920178840.0, - "grad_norm": 5.02191813081879, - "learning_rate": 3.860674897635222e-06, - "loss": 1.1301, - "num_input_tokens_seen": 25424605, - "step": 1214 - }, - { - "epoch": 0.1460951121264955, - "flos": 11788365490080.0, - "grad_norm": 8.087275327129458, - "learning_rate": 3.860389104058998e-06, - "loss": 1.0677, - "num_input_tokens_seen": 25442555, - "step": 1215 - }, - { - "epoch": 0.14621535501713462, - "flos": 17687242227840.0, - "grad_norm": 3.7888996521867426, - "learning_rate": 3.860103028263227e-06, - "loss": 0.9454, - "num_input_tokens_seen": 25465380, - "step": 1216 - }, - { - "epoch": 0.1463355979077737, - "flos": 17949331400400.0, - "grad_norm": 13.304401352068197, - "learning_rate": 3.859816670291304e-06, - "loss": 0.9201, - "num_input_tokens_seen": 25484195, - "step": 1217 - }, - { - "epoch": 0.14645584079841278, - "flos": 15668395192440.0, - "grad_norm": 12.02946225049702, - "learning_rate": 3.859530030186672e-06, - "loss": 1.1279, - "num_input_tokens_seen": 25500925, - "step": 1218 - }, - { - "epoch": 0.1465760836890519, - "flos": 16796641890720.0, - "grad_norm": 8.969350548817797, - "learning_rate": 3.859243107992813e-06, - "loss": 1.0527, - 
"num_input_tokens_seen": 25519450, - "step": 1219 - }, - { - "epoch": 0.14669632657969098, - "flos": 26701932360000.0, - "grad_norm": 6.111579048659812, - "learning_rate": 3.858955903753252e-06, - "loss": 1.013, - "num_input_tokens_seen": 25537810, - "step": 1220 - }, - { - "epoch": 0.14681656947033006, - "flos": 20204506349880.0, - "grad_norm": 2.983852800149328, - "learning_rate": 3.858668417511559e-06, - "loss": 1.0607, - "num_input_tokens_seen": 25560280, - "step": 1221 - }, - { - "epoch": 0.14693681236096917, - "flos": 13099485907200.0, - "grad_norm": 5.732641472582585, - "learning_rate": 3.8583806493113445e-06, - "loss": 0.9856, - "num_input_tokens_seen": 25578345, - "step": 1222 - }, - { - "epoch": 0.14705705525160825, - "flos": 14751205734720.0, - "grad_norm": 5.550033143090616, - "learning_rate": 3.858092599196263e-06, - "loss": 1.0584, - "num_input_tokens_seen": 25596020, - "step": 1223 - }, - { - "epoch": 0.14717729814224734, - "flos": 21330453431160.0, - "grad_norm": 15.774747751579946, - "learning_rate": 3.857804267210012e-06, - "loss": 1.0547, - "num_input_tokens_seen": 25615040, - "step": 1224 - }, - { - "epoch": 0.14729754103288642, - "flos": 14226260850600.0, - "grad_norm": 5.1096764338524405, - "learning_rate": 3.857515653396331e-06, - "loss": 1.115, - "num_input_tokens_seen": 25631970, - "step": 1225 - }, - { - "epoch": 0.14741778392352553, - "flos": 13675800000480.0, - "grad_norm": 5.657256484713937, - "learning_rate": 3.857226757799002e-06, - "loss": 1.0988, - "num_input_tokens_seen": 25649245, - "step": 1226 - }, - { - "epoch": 0.1475380268141646, - "flos": 18081019879440.0, - "grad_norm": 10.731238027575952, - "learning_rate": 3.85693758046185e-06, - "loss": 0.9604, - "num_input_tokens_seen": 25667255, - "step": 1227 - }, - { - "epoch": 0.1476582697048037, - "flos": 14800980542760.0, - "grad_norm": 6.97839645766571, - "learning_rate": 3.8566481214287435e-06, - "loss": 1.0596, - "num_input_tokens_seen": 25685095, - "step": 1228 - }, - { - "epoch": 0.1477785125954428, - "flos": 9900348410040.0, - "grad_norm": 3.9763773517187375, - "learning_rate": 3.8563583807435935e-06, - "loss": 1.1367, - "num_input_tokens_seen": 25700960, - "step": 1229 - }, - { - "epoch": 0.1478987554860819, - "flos": 14562782273520.0, - "grad_norm": 3.858140137835995, - "learning_rate": 3.856068358450353e-06, - "loss": 1.0043, - "num_input_tokens_seen": 25720630, - "step": 1230 - }, - { - "epoch": 0.14801899837672097, - "flos": 12651824798520.0, - "grad_norm": 4.344256035398769, - "learning_rate": 3.8557780545930186e-06, - "loss": 1.0913, - "num_input_tokens_seen": 25738765, - "step": 1231 - }, - { - "epoch": 0.14813924126736006, - "flos": 14824871446080.0, - "grad_norm": 5.52422667049487, - "learning_rate": 3.855487469215628e-06, - "loss": 1.0132, - "num_input_tokens_seen": 25757415, - "step": 1232 - }, - { - "epoch": 0.14825948415799917, - "flos": 26707206148320.0, - "grad_norm": 8.2913305575493, - "learning_rate": 3.855196602362264e-06, - "loss": 0.9517, - "num_input_tokens_seen": 25780055, - "step": 1233 - }, - { - "epoch": 0.14837972704863825, - "flos": 15639598439520.0, - "grad_norm": 4.130869071353868, - "learning_rate": 3.854905454077051e-06, - "loss": 1.1736, - "num_input_tokens_seen": 25797385, - "step": 1234 - }, - { - "epoch": 0.14849996993927733, - "flos": 14908195548840.0, - "grad_norm": 11.328655698775837, - "learning_rate": 3.854614024404155e-06, - "loss": 1.0976, - "num_input_tokens_seen": 25815415, - "step": 1235 - }, - { - "epoch": 0.14862021282991644, - "flos": 
14226690112440.0, - "grad_norm": 8.135999540645921, - "learning_rate": 3.8543223133877865e-06, - "loss": 1.1268, - "num_input_tokens_seen": 25833730, - "step": 1236 - }, - { - "epoch": 0.14874045572055553, - "flos": 16141357636200.0, - "grad_norm": 4.565117699065736, - "learning_rate": 3.854030321072198e-06, - "loss": 1.1018, - "num_input_tokens_seen": 25853355, - "step": 1237 - }, - { - "epoch": 0.1488606986111946, - "flos": 18081173187240.0, - "grad_norm": 8.395675872260123, - "learning_rate": 3.853738047501682e-06, - "loss": 0.9641, - "num_input_tokens_seen": 25873635, - "step": 1238 - }, - { - "epoch": 0.1489809415018337, - "flos": 12049504154280.0, - "grad_norm": 6.952226833668285, - "learning_rate": 3.85344549272058e-06, - "loss": 1.0106, - "num_input_tokens_seen": 25891335, - "step": 1239 - }, - { - "epoch": 0.1491011843924728, - "flos": 24009245278200.0, - "grad_norm": 3.3305115358994444, - "learning_rate": 3.853152656773269e-06, - "loss": 1.0564, - "num_input_tokens_seen": 25912490, - "step": 1240 - }, - { - "epoch": 0.14922142728311188, - "flos": 15039516089160.0, - "grad_norm": 4.744647172164657, - "learning_rate": 3.852859539704174e-06, - "loss": 1.0665, - "num_input_tokens_seen": 25931510, - "step": 1241 - }, - { - "epoch": 0.14934167017375097, - "flos": 21278501652360.0, - "grad_norm": 8.920234275444148, - "learning_rate": 3.85256614155776e-06, - "loss": 0.9866, - "num_input_tokens_seen": 25951360, - "step": 1242 - }, - { - "epoch": 0.14946191306439008, - "flos": 12049902754560.0, - "grad_norm": 6.090277518728269, - "learning_rate": 3.852272462378535e-06, - "loss": 0.9788, - "num_input_tokens_seen": 25968955, - "step": 1243 - }, - { - "epoch": 0.14958215595502916, - "flos": 11079535055640.0, - "grad_norm": 8.347219367369723, - "learning_rate": 3.85197850221105e-06, - "loss": 1.0118, - "num_input_tokens_seen": 25984975, - "step": 1244 - }, - { - "epoch": 0.14970239884566824, - "flos": 23612892055560.0, - "grad_norm": 4.215483580663985, - "learning_rate": 3.851684261099899e-06, - "loss": 0.9943, - "num_input_tokens_seen": 26006435, - "step": 1245 - }, - { - "epoch": 0.14982264173630733, - "flos": 12626216847840.0, - "grad_norm": 4.4814518030501596, - "learning_rate": 3.851389739089718e-06, - "loss": 1.0955, - "num_input_tokens_seen": 26022775, - "step": 1246 - }, - { - "epoch": 0.14994288462694644, - "flos": 23109753088680.0, - "grad_norm": 4.858259557531352, - "learning_rate": 3.851094936225186e-06, - "loss": 1.038, - "num_input_tokens_seen": 26043380, - "step": 1247 - }, - { - "epoch": 0.15006312751758552, - "flos": 22668561569160.0, - "grad_norm": 2.9392337958040162, - "learning_rate": 3.850799852551024e-06, - "loss": 1.0024, - "num_input_tokens_seen": 26065520, - "step": 1248 - }, - { - "epoch": 0.1501833704082246, - "flos": 11761592400120.0, - "grad_norm": 5.24292059830649, - "learning_rate": 3.850504488111995e-06, - "loss": 1.0836, - "num_input_tokens_seen": 26081915, - "step": 1249 - }, - { - "epoch": 0.15030361329886371, - "flos": 16685992789920.0, - "grad_norm": 3.402360708060853, - "learning_rate": 3.850208842952907e-06, - "loss": 1.0561, - "num_input_tokens_seen": 26100440, - "step": 1250 - }, - { - "epoch": 0.1504238561895028, - "flos": 18237825724200.0, - "grad_norm": 3.8259772055984325, - "learning_rate": 3.849912917118608e-06, - "loss": 1.0247, - "num_input_tokens_seen": 26121200, - "step": 1251 - }, - { - "epoch": 0.15054409908014188, - "flos": 37265010235560.0, - "grad_norm": 1.0181697363066962, - "learning_rate": 3.849616710653992e-06, - "loss": 0.859, - 
"num_input_tokens_seen": 26182390, - "step": 1252 - }, - { - "epoch": 0.150664341970781, - "flos": 13387428322920.0, - "grad_norm": 7.01641311126975, - "learning_rate": 3.84932022360399e-06, - "loss": 0.9842, - "num_input_tokens_seen": 26200775, - "step": 1253 - }, - { - "epoch": 0.15078458486142007, - "flos": 15746752122480.0, - "grad_norm": 6.5077415728052666, - "learning_rate": 3.849023456013581e-06, - "loss": 1.0591, - "num_input_tokens_seen": 26218055, - "step": 1254 - }, - { - "epoch": 0.15090482775205916, - "flos": 18972785355840.0, - "grad_norm": 5.276411921572583, - "learning_rate": 3.848726407927784e-06, - "loss": 0.8477, - "num_input_tokens_seen": 26238160, - "step": 1255 - }, - { - "epoch": 0.15102507064269824, - "flos": 15485061550200.0, - "grad_norm": 4.487522841371513, - "learning_rate": 3.84842907939166e-06, - "loss": 1.0957, - "num_input_tokens_seen": 26257105, - "step": 1256 - }, - { - "epoch": 0.15114531353333735, - "flos": 16219530596880.0, - "grad_norm": 5.128835220019388, - "learning_rate": 3.8481314704503146e-06, - "loss": 0.9344, - "num_input_tokens_seen": 26276655, - "step": 1257 - }, - { - "epoch": 0.15126555642397643, - "flos": 13964079693360.0, - "grad_norm": 8.309760309060843, - "learning_rate": 3.847833581148895e-06, - "loss": 1.1029, - "num_input_tokens_seen": 26295285, - "step": 1258 - }, - { - "epoch": 0.15138579931461552, - "flos": 20462670842760.0, - "grad_norm": 6.081836185255021, - "learning_rate": 3.84753541153259e-06, - "loss": 1.0254, - "num_input_tokens_seen": 26314575, - "step": 1259 - }, - { - "epoch": 0.15150604220525463, - "flos": 15720745571520.0, - "grad_norm": 3.8198196935791455, - "learning_rate": 3.847236961646633e-06, - "loss": 1.0621, - "num_input_tokens_seen": 26333275, - "step": 1260 - }, - { - "epoch": 0.1516262850958937, - "flos": 9138799657800.0, - "grad_norm": 12.216362058858808, - "learning_rate": 3.846938231536296e-06, - "loss": 1.0202, - "num_input_tokens_seen": 26348615, - "step": 1261 - }, - { - "epoch": 0.1517465279865328, - "flos": 15483467149080.0, - "grad_norm": 4.3125840330920475, - "learning_rate": 3.8466392212468995e-06, - "loss": 1.0384, - "num_input_tokens_seen": 26368525, - "step": 1262 - }, - { - "epoch": 0.15186677087717187, - "flos": 41632326878280.0, - "grad_norm": 0.9495102862463322, - "learning_rate": 3.8463399308238e-06, - "loss": 0.8778, - "num_input_tokens_seen": 26427350, - "step": 1263 - }, - { - "epoch": 0.15198701376781099, - "flos": 23297348687760.0, - "grad_norm": 6.103910612219259, - "learning_rate": 3.846040360312402e-06, - "loss": 0.8763, - "num_input_tokens_seen": 26450330, - "step": 1264 - }, - { - "epoch": 0.15210725665845007, - "flos": 20230880839560.0, - "grad_norm": 5.527574772943682, - "learning_rate": 3.8457405097581485e-06, - "loss": 1.0452, - "num_input_tokens_seen": 26469040, - "step": 1265 - }, - { - "epoch": 0.15222749954908915, - "flos": 14147719951200.0, - "grad_norm": 4.932377149518554, - "learning_rate": 3.8454403792065275e-06, - "loss": 1.0156, - "num_input_tokens_seen": 26487580, - "step": 1266 - }, - { - "epoch": 0.15234774243972826, - "flos": 15143787585480.0, - "grad_norm": 4.398774860785546, - "learning_rate": 3.845139968703068e-06, - "loss": 1.081, - "num_input_tokens_seen": 26504820, - "step": 1267 - }, - { - "epoch": 0.15246798533036734, - "flos": 18473847022680.0, - "grad_norm": 4.723489970156488, - "learning_rate": 3.844839278293342e-06, - "loss": 1.0593, - "num_input_tokens_seen": 26525390, - "step": 1268 - }, - { - "epoch": 0.15258822822100643, - "flos": 
18368992956720.0, - "grad_norm": 9.256916244195514, - "learning_rate": 3.8445383080229654e-06, - "loss": 0.9911, - "num_input_tokens_seen": 26541125, - "step": 1269 - }, - { - "epoch": 0.1527084711116455, - "flos": 17976380444400.0, - "grad_norm": 6.870129056159791, - "learning_rate": 3.844237057937593e-06, - "loss": 0.969, - "num_input_tokens_seen": 26559850, - "step": 1270 - }, - { - "epoch": 0.15282871400228462, - "flos": 20832680237280.0, - "grad_norm": 7.962890385470318, - "learning_rate": 3.843935528082926e-06, - "loss": 1.0072, - "num_input_tokens_seen": 26580595, - "step": 1271 - }, - { - "epoch": 0.1529489568929237, - "flos": 14826189893160.0, - "grad_norm": 3.682814773604185, - "learning_rate": 3.843633718504704e-06, - "loss": 1.0798, - "num_input_tokens_seen": 26598760, - "step": 1272 - }, - { - "epoch": 0.1530691997835628, - "flos": 14252359386240.0, - "grad_norm": 12.262181045163825, - "learning_rate": 3.843331629248715e-06, - "loss": 1.1241, - "num_input_tokens_seen": 26616080, - "step": 1273 - }, - { - "epoch": 0.1531894426742019, - "flos": 20486592407640.0, - "grad_norm": 4.428580117107254, - "learning_rate": 3.843029260360782e-06, - "loss": 1.0033, - "num_input_tokens_seen": 26634170, - "step": 1274 - }, - { - "epoch": 0.15330968556484098, - "flos": 15799255809360.0, - "grad_norm": 4.929692142925706, - "learning_rate": 3.8427266118867755e-06, - "loss": 1.0133, - "num_input_tokens_seen": 26653640, - "step": 1275 - }, - { - "epoch": 0.15342992845548006, - "flos": 19602063059400.0, - "grad_norm": 5.1487753587928555, - "learning_rate": 3.842423683872608e-06, - "loss": 1.0516, - "num_input_tokens_seen": 26673935, - "step": 1276 - }, - { - "epoch": 0.15355017134611917, - "flos": 13911545344920.0, - "grad_norm": 5.738395591100438, - "learning_rate": 3.842120476364232e-06, - "loss": 1.0056, - "num_input_tokens_seen": 26692105, - "step": 1277 - }, - { - "epoch": 0.15367041423675826, - "flos": 13098719368200.0, - "grad_norm": 4.288851706739503, - "learning_rate": 3.841816989407644e-06, - "loss": 1.0578, - "num_input_tokens_seen": 26707315, - "step": 1278 - }, - { - "epoch": 0.15379065712739734, - "flos": 29591720124480.0, - "grad_norm": 5.171928105845174, - "learning_rate": 3.841513223048884e-06, - "loss": 0.9942, - "num_input_tokens_seen": 26727720, - "step": 1279 - }, - { - "epoch": 0.15391090001803642, - "flos": 15668456515560.0, - "grad_norm": 24.495585141127982, - "learning_rate": 3.841209177334031e-06, - "loss": 0.9953, - "num_input_tokens_seen": 26745800, - "step": 1280 - }, - { - "epoch": 0.15403114290867553, - "flos": 10922391933720.0, - "grad_norm": 5.35330363441549, - "learning_rate": 3.84090485230921e-06, - "loss": 0.9817, - "num_input_tokens_seen": 26763760, - "step": 1281 - }, - { - "epoch": 0.15415138579931462, - "flos": 12703561946400.0, - "grad_norm": 7.246179560644192, - "learning_rate": 3.840600248020588e-06, - "loss": 0.9863, - "num_input_tokens_seen": 26780420, - "step": 1282 - }, - { - "epoch": 0.1542716286899537, - "flos": 8031531014640.0, - "grad_norm": 4.79013074501326, - "learning_rate": 3.840295364514371e-06, - "loss": 1.0166, - "num_input_tokens_seen": 26797520, - "step": 1283 - }, - { - "epoch": 0.1543918715805928, - "flos": 12338151785880.0, - "grad_norm": 8.642578787076246, - "learning_rate": 3.83999020183681e-06, - "loss": 1.008, - "num_input_tokens_seen": 26815935, - "step": 1284 - }, - { - "epoch": 0.1545121144712319, - "flos": 12601375436160.0, - "grad_norm": 5.898716557450992, - "learning_rate": 3.839684760034199e-06, - "loss": 1.0082, - 
"num_input_tokens_seen": 26833860, - "step": 1285 - }, - { - "epoch": 0.15463235736187098, - "flos": 20099805591720.0, - "grad_norm": 10.059241104775445, - "learning_rate": 3.8393790391528716e-06, - "loss": 0.8701, - "num_input_tokens_seen": 26854275, - "step": 1286 - }, - { - "epoch": 0.15475260025251006, - "flos": 16245322516920.0, - "grad_norm": 8.547087448799262, - "learning_rate": 3.8390730392392075e-06, - "loss": 1.1157, - "num_input_tokens_seen": 26873975, - "step": 1287 - }, - { - "epoch": 0.15487284314314917, - "flos": 12469349679960.0, - "grad_norm": 6.014192326354297, - "learning_rate": 3.838766760339626e-06, - "loss": 1.0362, - "num_input_tokens_seen": 26892220, - "step": 1288 - }, - { - "epoch": 0.15499308603378825, - "flos": 14250826308240.0, - "grad_norm": 11.549993235928754, - "learning_rate": 3.838460202500587e-06, - "loss": 1.004, - "num_input_tokens_seen": 26907730, - "step": 1289 - }, - { - "epoch": 0.15511332892442733, - "flos": 11257625571120.0, - "grad_norm": 5.075026097723845, - "learning_rate": 3.838153365768599e-06, - "loss": 0.9654, - "num_input_tokens_seen": 26923960, - "step": 1290 - }, - { - "epoch": 0.15523357181506645, - "flos": 29487479289720.0, - "grad_norm": 6.084836417062673, - "learning_rate": 3.837846250190206e-06, - "loss": 0.9711, - "num_input_tokens_seen": 26946545, - "step": 1291 - }, - { - "epoch": 0.15535381470570553, - "flos": 13098872676000.0, - "grad_norm": 16.280910730030392, - "learning_rate": 3.837538855811998e-06, - "loss": 0.9933, - "num_input_tokens_seen": 26964440, - "step": 1292 - }, - { - "epoch": 0.1554740575963446, - "flos": 9821255602560.0, - "grad_norm": 19.46878291780925, - "learning_rate": 3.837231182680606e-06, - "loss": 0.9386, - "num_input_tokens_seen": 26982125, - "step": 1293 - }, - { - "epoch": 0.1555943004869837, - "flos": 14800489957800.0, - "grad_norm": 6.322449953387211, - "learning_rate": 3.836923230842706e-06, - "loss": 0.9991, - "num_input_tokens_seen": 27000960, - "step": 1294 - }, - { - "epoch": 0.1557145433776228, - "flos": 15690569048400.0, - "grad_norm": 11.30891319443032, - "learning_rate": 3.836615000345011e-06, - "loss": 1.0358, - "num_input_tokens_seen": 27018860, - "step": 1295 - }, - { - "epoch": 0.1558347862682619, - "flos": 14042620592760.0, - "grad_norm": 12.352820663505423, - "learning_rate": 3.836306491234282e-06, - "loss": 1.0132, - "num_input_tokens_seen": 27036430, - "step": 1296 - }, - { - "epoch": 0.15595502915890097, - "flos": 12206340660600.0, - "grad_norm": 5.7432187769721965, - "learning_rate": 3.835997703557317e-06, - "loss": 0.9704, - "num_input_tokens_seen": 27052890, - "step": 1297 - }, - { - "epoch": 0.15607527204954008, - "flos": 13990607490840.0, - "grad_norm": 11.749624043395295, - "learning_rate": 3.83568863736096e-06, - "loss": 1.03, - "num_input_tokens_seen": 27071480, - "step": 1298 - }, - { - "epoch": 0.15619551494017916, - "flos": 13125431135040.0, - "grad_norm": 11.050442261281757, - "learning_rate": 3.8353792926920975e-06, - "loss": 1.1121, - "num_input_tokens_seen": 27089850, - "step": 1299 - }, - { - "epoch": 0.15631575783081825, - "flos": 14121866708040.0, - "grad_norm": 5.902057431217246, - "learning_rate": 3.835069669597655e-06, - "loss": 1.0391, - "num_input_tokens_seen": 27107960, - "step": 1300 - }, - { - "epoch": 0.15643600072145733, - "flos": 14751083088480.0, - "grad_norm": 13.335376265101631, - "learning_rate": 3.834759768124603e-06, - "loss": 1.0319, - "num_input_tokens_seen": 27126555, - "step": 1301 - }, - { - "epoch": 0.15655624361209644, - "flos": 
13146746467320.0, - "grad_norm": 5.0053642108420755, - "learning_rate": 3.834449588319953e-06, - "loss": 0.9811, - "num_input_tokens_seen": 27144310, - "step": 1302 - }, - { - "epoch": 0.15667648650273552, - "flos": 17950649847480.0, - "grad_norm": 6.6508529345550915, - "learning_rate": 3.834139130230758e-06, - "loss": 1.0764, - "num_input_tokens_seen": 27163335, - "step": 1303 - }, - { - "epoch": 0.1567967293933746, - "flos": 17661266338440.0, - "grad_norm": 5.254970740500894, - "learning_rate": 3.833828393904117e-06, - "loss": 1.0393, - "num_input_tokens_seen": 27183335, - "step": 1304 - }, - { - "epoch": 0.15691697228401372, - "flos": 13590728188800.0, - "grad_norm": 30.968069059428476, - "learning_rate": 3.833517379387165e-06, - "loss": 0.9977, - "num_input_tokens_seen": 27199510, - "step": 1305 - }, - { - "epoch": 0.1570372151746528, - "flos": 17635321110600.0, - "grad_norm": 5.338858702635246, - "learning_rate": 3.833206086727085e-06, - "loss": 1.1175, - "num_input_tokens_seen": 27218580, - "step": 1306 - }, - { - "epoch": 0.15715745806529188, - "flos": 17687548843440.0, - "grad_norm": 6.6040505319807385, - "learning_rate": 3.8328945159710994e-06, - "loss": 0.9315, - "num_input_tokens_seen": 27238480, - "step": 1307 - }, - { - "epoch": 0.157277700955931, - "flos": 15537657221760.0, - "grad_norm": 5.695115241675694, - "learning_rate": 3.832582667166473e-06, - "loss": 1.1063, - "num_input_tokens_seen": 27258010, - "step": 1308 - }, - { - "epoch": 0.15739794384657008, - "flos": 17449902482280.0, - "grad_norm": 6.251866176468956, - "learning_rate": 3.8322705403605125e-06, - "loss": 1.0444, - "num_input_tokens_seen": 27278075, - "step": 1309 - }, - { - "epoch": 0.15751818673720916, - "flos": 12571260236160.0, - "grad_norm": 10.579865936410625, - "learning_rate": 3.831958135600568e-06, - "loss": 1.0437, - "num_input_tokens_seen": 27295345, - "step": 1310 - }, - { - "epoch": 0.15763842962784824, - "flos": 12653112584040.0, - "grad_norm": 8.733044572477734, - "learning_rate": 3.831645452934032e-06, - "loss": 1.0298, - "num_input_tokens_seen": 27313495, - "step": 1311 - }, - { - "epoch": 0.15775867251848735, - "flos": 19209327900840.0, - "grad_norm": 6.131877230137482, - "learning_rate": 3.831332492408336e-06, - "loss": 1.031, - "num_input_tokens_seen": 27334625, - "step": 1312 - }, - { - "epoch": 0.15787891540912644, - "flos": 13645623477360.0, - "grad_norm": 4.945242497097846, - "learning_rate": 3.831019254070957e-06, - "loss": 0.9108, - "num_input_tokens_seen": 27352130, - "step": 1313 - }, - { - "epoch": 0.15799915829976552, - "flos": 19417656262560.0, - "grad_norm": 8.78611582548444, - "learning_rate": 3.8307057379694135e-06, - "loss": 1.168, - "num_input_tokens_seen": 27371185, - "step": 1314 - }, - { - "epoch": 0.15811940119040463, - "flos": 14482646973000.0, - "grad_norm": 7.157035418058359, - "learning_rate": 3.830391944151264e-06, - "loss": 1.0464, - "num_input_tokens_seen": 27386785, - "step": 1315 - }, - { - "epoch": 0.1582396440810437, - "flos": 23247144617880.0, - "grad_norm": 5.364360836809918, - "learning_rate": 3.830077872664114e-06, - "loss": 0.8992, - "num_input_tokens_seen": 27407630, - "step": 1316 - }, - { - "epoch": 0.1583598869716828, - "flos": 24110052018240.0, - "grad_norm": 5.908708333255286, - "learning_rate": 3.829763523555604e-06, - "loss": 0.9696, - "num_input_tokens_seen": 27427750, - "step": 1317 - }, - { - "epoch": 0.15848012986232188, - "flos": 17556534918720.0, - "grad_norm": 9.38851199124057, - "learning_rate": 3.829448896873423e-06, - "loss": 1.0127, - 
"num_input_tokens_seen": 27446570, - "step": 1318 - }, - { - "epoch": 0.158600372752961, - "flos": 16062264828720.0, - "grad_norm": 8.512744101274809, - "learning_rate": 3.829133992665299e-06, - "loss": 1.0216, - "num_input_tokens_seen": 27465415, - "step": 1319 - }, - { - "epoch": 0.15872061564360007, - "flos": 19889729521080.0, - "grad_norm": 6.605920921653887, - "learning_rate": 3.828818810979002e-06, - "loss": 1.099, - "num_input_tokens_seen": 27483465, - "step": 1320 - }, - { - "epoch": 0.15884085853423915, - "flos": 16849053592920.0, - "grad_norm": 4.698226862081375, - "learning_rate": 3.8285033518623454e-06, - "loss": 1.0319, - "num_input_tokens_seen": 27503435, - "step": 1321 - }, - { - "epoch": 0.15896110142487826, - "flos": 16428932113200.0, - "grad_norm": 8.61218503866102, - "learning_rate": 3.8281876153631845e-06, - "loss": 1.0382, - "num_input_tokens_seen": 27519910, - "step": 1322 - }, - { - "epoch": 0.15908134431551735, - "flos": 10372666961040.0, - "grad_norm": 9.915220994049495, - "learning_rate": 3.827871601529416e-06, - "loss": 0.8711, - "num_input_tokens_seen": 27538150, - "step": 1323 - }, - { - "epoch": 0.15920158720615643, - "flos": 14331298885920.0, - "grad_norm": 21.33184133716507, - "learning_rate": 3.827555310408979e-06, - "loss": 1.0343, - "num_input_tokens_seen": 27557265, - "step": 1324 - }, - { - "epoch": 0.1593218300967955, - "flos": 17660867738160.0, - "grad_norm": 5.898322380913041, - "learning_rate": 3.827238742049854e-06, - "loss": 1.0501, - "num_input_tokens_seen": 27577280, - "step": 1325 - }, - { - "epoch": 0.15944207298743462, - "flos": 20177671936800.0, - "grad_norm": 7.313537784849403, - "learning_rate": 3.826921896500066e-06, - "loss": 0.7567, - "num_input_tokens_seen": 27598285, - "step": 1326 - }, - { - "epoch": 0.1595623158780737, - "flos": 16322698277040.0, - "grad_norm": 7.984304528569263, - "learning_rate": 3.826604773807678e-06, - "loss": 1.0055, - "num_input_tokens_seen": 27615980, - "step": 1327 - }, - { - "epoch": 0.1596825587687128, - "flos": 13982942100840.0, - "grad_norm": 12.465821192286006, - "learning_rate": 3.826287374020798e-06, - "loss": 0.9602, - "num_input_tokens_seen": 27630505, - "step": 1328 - }, - { - "epoch": 0.1598028016593519, - "flos": 16087780794720.0, - "grad_norm": 12.494608822759881, - "learning_rate": 3.825969697187575e-06, - "loss": 1.0513, - "num_input_tokens_seen": 27649555, - "step": 1329 - }, - { - "epoch": 0.15992304454999098, - "flos": 14538860708640.0, - "grad_norm": 6.622276178064793, - "learning_rate": 3.8256517433562015e-06, - "loss": 0.9286, - "num_input_tokens_seen": 27667215, - "step": 1330 - }, - { - "epoch": 0.16004328744063007, - "flos": 12522006674640.0, - "grad_norm": 18.37561698448645, - "learning_rate": 3.82533351257491e-06, - "loss": 1.1461, - "num_input_tokens_seen": 27684885, - "step": 1331 - }, - { - "epoch": 0.16016353033126918, - "flos": 17137977178560.0, - "grad_norm": 16.35071515960292, - "learning_rate": 3.825015004891975e-06, - "loss": 1.1146, - "num_input_tokens_seen": 27703345, - "step": 1332 - }, - { - "epoch": 0.16028377322190826, - "flos": 19678917573000.0, - "grad_norm": 7.965053284317782, - "learning_rate": 3.824696220355716e-06, - "loss": 0.9817, - "num_input_tokens_seen": 27724655, - "step": 1333 - }, - { - "epoch": 0.16040401611254734, - "flos": 14883139506240.0, - "grad_norm": 6.3137879521285285, - "learning_rate": 3.824377159014491e-06, - "loss": 1.0205, - "num_input_tokens_seen": 27745270, - "step": 1334 - }, - { - "epoch": 0.16052425900318643, - "flos": 
15087757819200.0, - "grad_norm": 4.3576118431248325, - "learning_rate": 3.824057820916702e-06, - "loss": 1.0776, - "num_input_tokens_seen": 27762195, - "step": 1335 - }, - { - "epoch": 0.16064450189382554, - "flos": 10975416867120.0, - "grad_norm": 9.890320301635601, - "learning_rate": 3.8237382061107904e-06, - "loss": 0.947, - "num_input_tokens_seen": 27778635, - "step": 1336 - }, - { - "epoch": 0.16076474478446462, - "flos": 15039086827320.0, - "grad_norm": 30.652653132199973, - "learning_rate": 3.823418314645243e-06, - "loss": 1.0119, - "num_input_tokens_seen": 27797230, - "step": 1337 - }, - { - "epoch": 0.1608849876751037, - "flos": 13017786867120.0, - "grad_norm": 6.613137330924409, - "learning_rate": 3.823098146568588e-06, - "loss": 0.9754, - "num_input_tokens_seen": 27816655, - "step": 1338 - }, - { - "epoch": 0.1610052305657428, - "flos": 21017424311280.0, - "grad_norm": 12.681787266449646, - "learning_rate": 3.822777701929394e-06, - "loss": 0.9474, - "num_input_tokens_seen": 27838200, - "step": 1339 - }, - { - "epoch": 0.1611254734563819, - "flos": 19077700744920.0, - "grad_norm": 8.826411528266094, - "learning_rate": 3.8224569807762714e-06, - "loss": 0.964, - "num_input_tokens_seen": 27857240, - "step": 1340 - }, - { - "epoch": 0.16124571634702098, - "flos": 15931097596200.0, - "grad_norm": 16.51724359381734, - "learning_rate": 3.822135983157873e-06, - "loss": 0.9987, - "num_input_tokens_seen": 27876235, - "step": 1341 - }, - { - "epoch": 0.16136595923766006, - "flos": 7723377082800.0, - "grad_norm": 14.198311202943293, - "learning_rate": 3.821814709122896e-06, - "loss": 1.0709, - "num_input_tokens_seen": 27894005, - "step": 1342 - }, - { - "epoch": 0.16148620212829917, - "flos": 15065308009200.0, - "grad_norm": 11.119142854986164, - "learning_rate": 3.821493158720076e-06, - "loss": 1.0807, - "num_input_tokens_seen": 27912830, - "step": 1343 - }, - { - "epoch": 0.16160644501893826, - "flos": 11861785908960.0, - "grad_norm": 34.411869432108745, - "learning_rate": 3.821171331998191e-06, - "loss": 0.9499, - "num_input_tokens_seen": 27929080, - "step": 1344 - }, - { - "epoch": 0.16172668790957734, - "flos": 46138046881680.0, - "grad_norm": 0.819774996544797, - "learning_rate": 3.820849229006064e-06, - "loss": 0.8245, - "num_input_tokens_seen": 27996550, - "step": 1345 - }, - { - "epoch": 0.16184693080021645, - "flos": 16533295594200.0, - "grad_norm": 32.766323535114715, - "learning_rate": 3.8205268497925564e-06, - "loss": 0.9381, - "num_input_tokens_seen": 28016740, - "step": 1346 - }, - { - "epoch": 0.16196717369085553, - "flos": 12359497779720.0, - "grad_norm": 22.533464426114524, - "learning_rate": 3.8202041944065725e-06, - "loss": 1.0198, - "num_input_tokens_seen": 28032280, - "step": 1347 - }, - { - "epoch": 0.16208741658149461, - "flos": 16975161668040.0, - "grad_norm": 11.30437637248948, - "learning_rate": 3.819881262897061e-06, - "loss": 0.9682, - "num_input_tokens_seen": 28050135, - "step": 1348 - }, - { - "epoch": 0.1622076594721337, - "flos": 18421373997360.0, - "grad_norm": 15.289001121651197, - "learning_rate": 3.819558055313008e-06, - "loss": 0.9665, - "num_input_tokens_seen": 28070540, - "step": 1349 - }, - { - "epoch": 0.1623279023627728, - "flos": 15298232490120.0, - "grad_norm": 12.363078222069129, - "learning_rate": 3.819234571703444e-06, - "loss": 0.997, - "num_input_tokens_seen": 28089085, - "step": 1350 - }, - { - "epoch": 0.1624481452534119, - "flos": 15690170448120.0, - "grad_norm": 13.016446787157031, - "learning_rate": 3.8189108121174435e-06, - "loss": 
1.0749, - "num_input_tokens_seen": 28108570, - "step": 1351 - }, - { - "epoch": 0.16256838814405097, - "flos": 19285538521680.0, - "grad_norm": 12.094759955608422, - "learning_rate": 3.818586776604118e-06, - "loss": 1.0652, - "num_input_tokens_seen": 28128930, - "step": 1352 - }, - { - "epoch": 0.16268863103469008, - "flos": 14278611229680.0, - "grad_norm": 15.82492457849449, - "learning_rate": 3.818262465212625e-06, - "loss": 0.8496, - "num_input_tokens_seen": 28148775, - "step": 1353 - }, - { - "epoch": 0.16280887392532917, - "flos": 12993957286920.0, - "grad_norm": 42.57649509495013, - "learning_rate": 3.817937877992161e-06, - "loss": 1.0055, - "num_input_tokens_seen": 28165790, - "step": 1354 - }, - { - "epoch": 0.16292911681596825, - "flos": 8352317509200.0, - "grad_norm": 21.218079771263174, - "learning_rate": 3.817613014991967e-06, - "loss": 1.0842, - "num_input_tokens_seen": 28181650, - "step": 1355 - }, - { - "epoch": 0.16304935970660733, - "flos": 18578486457720.0, - "grad_norm": 16.652284647590402, - "learning_rate": 3.817287876261323e-06, - "loss": 0.982, - "num_input_tokens_seen": 28201705, - "step": 1356 - }, - { - "epoch": 0.16316960259724644, - "flos": 20913275461200.0, - "grad_norm": 7.718215244317639, - "learning_rate": 3.816962461849553e-06, - "loss": 1.024, - "num_input_tokens_seen": 28223295, - "step": 1357 - }, - { - "epoch": 0.16328984548788553, - "flos": 14829440018520.0, - "grad_norm": 27.179493253473748, - "learning_rate": 3.8166367718060235e-06, - "loss": 1.0682, - "num_input_tokens_seen": 28242905, - "step": 1358 - }, - { - "epoch": 0.1634100883785246, - "flos": 12784341139680.0, - "grad_norm": 18.15237527232576, - "learning_rate": 3.816310806180139e-06, - "loss": 0.9704, - "num_input_tokens_seen": 28261035, - "step": 1359 - }, - { - "epoch": 0.16353033126916372, - "flos": 17478086004000.0, - "grad_norm": 19.608461445353285, - "learning_rate": 3.81598456502135e-06, - "loss": 1.0345, - "num_input_tokens_seen": 28280775, - "step": 1360 - }, - { - "epoch": 0.1636505741598028, - "flos": 14113986687120.0, - "grad_norm": 17.076240568209368, - "learning_rate": 3.8156580483791455e-06, - "loss": 1.0941, - "num_input_tokens_seen": 28295685, - "step": 1361 - }, - { - "epoch": 0.16377081705044189, - "flos": 20230911501120.0, - "grad_norm": 14.511796033609821, - "learning_rate": 3.815331256303059e-06, - "loss": 0.9989, - "num_input_tokens_seen": 28315435, - "step": 1362 - }, - { - "epoch": 0.163891059941081, - "flos": 15563387818680.0, - "grad_norm": 16.029716552208296, - "learning_rate": 3.815004188842665e-06, - "loss": 0.9848, - "num_input_tokens_seen": 28333195, - "step": 1363 - }, - { - "epoch": 0.16401130283172008, - "flos": 19077486114000.0, - "grad_norm": 6.791372312318143, - "learning_rate": 3.814676846047578e-06, - "loss": 1.018, - "num_input_tokens_seen": 28353790, - "step": 1364 - }, - { - "epoch": 0.16413154572235916, - "flos": 23532787416600.0, - "grad_norm": 10.458986016281385, - "learning_rate": 3.8143492279674565e-06, - "loss": 0.9297, - "num_input_tokens_seen": 28376205, - "step": 1365 - }, - { - "epoch": 0.16425178861299825, - "flos": 28652197954920.0, - "grad_norm": 0.944663747324187, - "learning_rate": 3.8140213346519997e-06, - "loss": 0.8879, - "num_input_tokens_seen": 28426520, - "step": 1366 - }, - { - "epoch": 0.16437203150363736, - "flos": 18107149076640.0, - "grad_norm": 7.166579608240601, - "learning_rate": 3.813693166150948e-06, - "loss": 0.9913, - "num_input_tokens_seen": 28446450, - "step": 1367 - }, - { - "epoch": 0.16449227439427644, - "flos": 
16690867977960.0, - "grad_norm": 28.58357379541559, - "learning_rate": 3.813364722514086e-06, - "loss": 1.0699, - "num_input_tokens_seen": 28464505, - "step": 1368 - }, - { - "epoch": 0.16461251728491552, - "flos": 9552144932760.0, - "grad_norm": 60.2361077085414, - "learning_rate": 3.8130360037912368e-06, - "loss": 1.0315, - "num_input_tokens_seen": 28480670, - "step": 1369 - }, - { - "epoch": 0.16473276017555463, - "flos": 16350605844720.0, - "grad_norm": 31.709758577802198, - "learning_rate": 3.812707010032268e-06, - "loss": 1.0395, - "num_input_tokens_seen": 28499445, - "step": 1370 - }, - { - "epoch": 0.16485300306619372, - "flos": 17635106479680.0, - "grad_norm": 10.386899810968885, - "learning_rate": 3.8123777412870863e-06, - "loss": 1.0263, - "num_input_tokens_seen": 28518665, - "step": 1371 - }, - { - "epoch": 0.1649732459568328, - "flos": 14987472325680.0, - "grad_norm": 7.711231106286502, - "learning_rate": 3.812048197605643e-06, - "loss": 1.015, - "num_input_tokens_seen": 28537280, - "step": 1372 - }, - { - "epoch": 0.16509348884747188, - "flos": 14383863895920.0, - "grad_norm": 10.58858289709037, - "learning_rate": 3.8117183790379277e-06, - "loss": 1.0343, - "num_input_tokens_seen": 28555450, - "step": 1373 - }, - { - "epoch": 0.165213731738111, - "flos": 7749506280000.0, - "grad_norm": 37.614757677236035, - "learning_rate": 3.811388285633976e-06, - "loss": 1.1626, - "num_input_tokens_seen": 28571155, - "step": 1374 - }, - { - "epoch": 0.16533397462875007, - "flos": 21358912906920.0, - "grad_norm": 13.79680145347753, - "learning_rate": 3.811057917443861e-06, - "loss": 0.8537, - "num_input_tokens_seen": 28590140, - "step": 1375 - }, - { - "epoch": 0.16545421751938916, - "flos": 46939399886880.0, - "grad_norm": 1.0110249736504884, - "learning_rate": 3.8107272745177e-06, - "loss": 0.9361, - "num_input_tokens_seen": 28662190, - "step": 1376 - }, - { - "epoch": 0.16557446041002827, - "flos": 15983386652160.0, - "grad_norm": 12.33283548810888, - "learning_rate": 3.8103963569056513e-06, - "loss": 1.014, - "num_input_tokens_seen": 28681045, - "step": 1377 - }, - { - "epoch": 0.16569470330066735, - "flos": 17499278690040.0, - "grad_norm": 10.015815816361899, - "learning_rate": 3.8100651646579146e-06, - "loss": 1.1067, - "num_input_tokens_seen": 28699975, - "step": 1378 - }, - { - "epoch": 0.16581494619130643, - "flos": 10602341316600.0, - "grad_norm": 6.536974282944588, - "learning_rate": 3.8097336978247317e-06, - "loss": 1.1502, - "num_input_tokens_seen": 28716400, - "step": 1379 - }, - { - "epoch": 0.16593518908194552, - "flos": 12338151785880.0, - "grad_norm": 6.9962172505326325, - "learning_rate": 3.8094019564563854e-06, - "loss": 1.1084, - "num_input_tokens_seen": 28733050, - "step": 1380 - }, - { - "epoch": 0.16605543197258463, - "flos": 14488717961880.0, - "grad_norm": 22.65344810260681, - "learning_rate": 3.809069940603201e-06, - "loss": 0.9911, - "num_input_tokens_seen": 28750725, - "step": 1381 - }, - { - "epoch": 0.1661756748632237, - "flos": 10030779765120.0, - "grad_norm": 9.633351851536817, - "learning_rate": 3.8087376503155452e-06, - "loss": 1.0059, - "num_input_tokens_seen": 28767930, - "step": 1382 - }, - { - "epoch": 0.1662959177538628, - "flos": 47312940908880.0, - "grad_norm": 0.9541508204103543, - "learning_rate": 3.808405085643826e-06, - "loss": 0.8398, - "num_input_tokens_seen": 28832530, - "step": 1383 - }, - { - "epoch": 0.1664161606445019, - "flos": 14724340660080.0, - "grad_norm": 16.494174738160403, - "learning_rate": 3.8080722466384925e-06, - "loss": 
1.1193, - "num_input_tokens_seen": 28850100, - "step": 1384 - }, - { - "epoch": 0.166536403535141, - "flos": 17972517087840.0, - "grad_norm": 14.842426955840246, - "learning_rate": 3.8077391333500376e-06, - "loss": 0.9291, - "num_input_tokens_seen": 28868960, - "step": 1385 - }, - { - "epoch": 0.16665664642578007, - "flos": 18107118415080.0, - "grad_norm": 9.440194458979107, - "learning_rate": 3.8074057458289934e-06, - "loss": 0.9921, - "num_input_tokens_seen": 28889370, - "step": 1386 - }, - { - "epoch": 0.16677688931641918, - "flos": 15773862489600.0, - "grad_norm": 7.9558009833774355, - "learning_rate": 3.807072084125934e-06, - "loss": 1.0398, - "num_input_tokens_seen": 28910940, - "step": 1387 - }, - { - "epoch": 0.16689713220705826, - "flos": 11997215098320.0, - "grad_norm": 11.450880551524124, - "learning_rate": 3.806738148291477e-06, - "loss": 1.0134, - "num_input_tokens_seen": 28927485, - "step": 1388 - }, - { - "epoch": 0.16701737509769735, - "flos": 25866319296120.0, - "grad_norm": 11.971102570479031, - "learning_rate": 3.8064039383762793e-06, - "loss": 0.9485, - "num_input_tokens_seen": 28949570, - "step": 1389 - }, - { - "epoch": 0.16713761798833643, - "flos": 16534123456320.0, - "grad_norm": 17.98293937037095, - "learning_rate": 3.8060694544310396e-06, - "loss": 1.0155, - "num_input_tokens_seen": 28967800, - "step": 1390 - }, - { - "epoch": 0.16725786087897554, - "flos": 18003092211240.0, - "grad_norm": 9.69110788411131, - "learning_rate": 3.8057346965065006e-06, - "loss": 1.0101, - "num_input_tokens_seen": 28988750, - "step": 1391 - }, - { - "epoch": 0.16737810376961462, - "flos": 22695120028200.0, - "grad_norm": 22.657326213663072, - "learning_rate": 3.805399664653443e-06, - "loss": 1.0804, - "num_input_tokens_seen": 29010610, - "step": 1392 - }, - { - "epoch": 0.1674983466602537, - "flos": 19913743070640.0, - "grad_norm": 10.861563263110641, - "learning_rate": 3.805064358922692e-06, - "loss": 0.983, - "num_input_tokens_seen": 29028620, - "step": 1393 - }, - { - "epoch": 0.16761858955089282, - "flos": 15458963014560.0, - "grad_norm": 20.508052825972793, - "learning_rate": 3.8047287793651136e-06, - "loss": 1.0255, - "num_input_tokens_seen": 29049785, - "step": 1394 - }, - { - "epoch": 0.1677388324415319, - "flos": 16927073245800.0, - "grad_norm": 7.709899028507777, - "learning_rate": 3.8043929260316137e-06, - "loss": 1.1164, - "num_input_tokens_seen": 29067660, - "step": 1395 - }, - { - "epoch": 0.16785907533217098, - "flos": 14593847981880.0, - "grad_norm": 13.182417771625358, - "learning_rate": 3.8040567989731417e-06, - "loss": 1.0717, - "num_input_tokens_seen": 29085325, - "step": 1396 - }, - { - "epoch": 0.16797931822281006, - "flos": 11080056302160.0, - "grad_norm": 11.001817379721329, - "learning_rate": 3.8037203982406876e-06, - "loss": 1.0376, - "num_input_tokens_seen": 29103210, - "step": 1397 - }, - { - "epoch": 0.16809956111344918, - "flos": 11705777264760.0, - "grad_norm": 10.187074991733743, - "learning_rate": 3.8033837238852835e-06, - "loss": 0.9564, - "num_input_tokens_seen": 29119630, - "step": 1398 - }, - { - "epoch": 0.16821980400408826, - "flos": 16533632871360.0, - "grad_norm": 27.913267682149154, - "learning_rate": 3.8030467759580017e-06, - "loss": 0.9222, - "num_input_tokens_seen": 29140270, - "step": 1399 - }, - { - "epoch": 0.16834004689472734, - "flos": 14748599502120.0, - "grad_norm": 20.082127367378423, - "learning_rate": 3.802709554509958e-06, - "loss": 1.1025, - "num_input_tokens_seen": 29157790, - "step": 1400 - }, - { - "epoch": 
0.16846028978536645, - "flos": 18998086690920.0, - "grad_norm": 19.875411503208557, - "learning_rate": 3.8023720595923083e-06, - "loss": 1.0179, - "num_input_tokens_seen": 29176765, - "step": 1401 - }, - { - "epoch": 0.16858053267600553, - "flos": 13360808540760.0, - "grad_norm": 13.788380272606068, - "learning_rate": 3.80203429125625e-06, - "loss": 1.1018, - "num_input_tokens_seen": 29194660, - "step": 1402 - }, - { - "epoch": 0.16870077556664462, - "flos": 19757795749560.0, - "grad_norm": 9.449642227101238, - "learning_rate": 3.8016962495530225e-06, - "loss": 0.9347, - "num_input_tokens_seen": 29213570, - "step": 1403 - }, - { - "epoch": 0.1688210184572837, - "flos": 9685918397880.0, - "grad_norm": 11.521600754042247, - "learning_rate": 3.8013579345339063e-06, - "loss": 0.9947, - "num_input_tokens_seen": 29228155, - "step": 1404 - }, - { - "epoch": 0.1689412613479228, - "flos": 18841096876800.0, - "grad_norm": 19.7749422209965, - "learning_rate": 3.801019346250224e-06, - "loss": 0.929, - "num_input_tokens_seen": 29248020, - "step": 1405 - }, - { - "epoch": 0.1690615042385619, - "flos": 15010167428160.0, - "grad_norm": 9.09680627805436, - "learning_rate": 3.8006804847533395e-06, - "loss": 1.0489, - "num_input_tokens_seen": 29267255, - "step": 1406 - }, - { - "epoch": 0.16918174712920098, - "flos": 14802421636080.0, - "grad_norm": 12.71032465774472, - "learning_rate": 3.8003413500946556e-06, - "loss": 1.077, - "num_input_tokens_seen": 29287085, - "step": 1407 - }, - { - "epoch": 0.1693019900198401, - "flos": 12024141496080.0, - "grad_norm": 19.941789984677122, - "learning_rate": 3.8000019423256216e-06, - "loss": 1.0612, - "num_input_tokens_seen": 29304570, - "step": 1408 - }, - { - "epoch": 0.16942223291047917, - "flos": 19076872882800.0, - "grad_norm": 15.143244057208214, - "learning_rate": 3.7996622614977234e-06, - "loss": 1.1081, - "num_input_tokens_seen": 29325480, - "step": 1409 - }, - { - "epoch": 0.16954247580111825, - "flos": 13173672865080.0, - "grad_norm": 16.75677262216497, - "learning_rate": 3.799322307662492e-06, - "loss": 1.0314, - "num_input_tokens_seen": 29343020, - "step": 1410 - }, - { - "epoch": 0.16966271869175734, - "flos": 9867320361840.0, - "grad_norm": 14.671289466817177, - "learning_rate": 3.798982080871496e-06, - "loss": 1.0716, - "num_input_tokens_seen": 29357880, - "step": 1411 - }, - { - "epoch": 0.16978296158239645, - "flos": 26759893804560.0, - "grad_norm": 15.55699115365445, - "learning_rate": 3.798641581176349e-06, - "loss": 0.9067, - "num_input_tokens_seen": 29379880, - "step": 1412 - }, - { - "epoch": 0.16990320447303553, - "flos": 20177886567720.0, - "grad_norm": 17.560456398005435, - "learning_rate": 3.7983008086287044e-06, - "loss": 0.9673, - "num_input_tokens_seen": 29400920, - "step": 1413 - }, - { - "epoch": 0.1700234473636746, - "flos": 14326668990360.0, - "grad_norm": 16.51626617428539, - "learning_rate": 3.797959763280257e-06, - "loss": 1.0206, - "num_input_tokens_seen": 29419325, - "step": 1414 - }, - { - "epoch": 0.17014369025431372, - "flos": 17683286886600.0, - "grad_norm": 28.05607983526309, - "learning_rate": 3.797618445182743e-06, - "loss": 1.0228, - "num_input_tokens_seen": 29440440, - "step": 1415 - }, - { - "epoch": 0.1702639331449528, - "flos": 11652568362000.0, - "grad_norm": 21.307996167720553, - "learning_rate": 3.79727685438794e-06, - "loss": 1.0744, - "num_input_tokens_seen": 29454350, - "step": 1416 - }, - { - "epoch": 0.1703841760355919, - "flos": 37559090511360.0, - "grad_norm": 1.4488874606861364, - "learning_rate": 
3.796934990947667e-06, - "loss": 0.8667, - "num_input_tokens_seen": 29515755, - "step": 1417 - }, - { - "epoch": 0.170504418926231, - "flos": 35303639607840.0, - "grad_norm": 0.9366965347722785, - "learning_rate": 3.7965928549137854e-06, - "loss": 0.8876, - "num_input_tokens_seen": 29572290, - "step": 1418 - }, - { - "epoch": 0.17062466181687008, - "flos": 18395919354480.0, - "grad_norm": 82.5893012186279, - "learning_rate": 3.7962504463381953e-06, - "loss": 0.9828, - "num_input_tokens_seen": 29593500, - "step": 1419 - }, - { - "epoch": 0.17074490470750917, - "flos": 14882464951920.0, - "grad_norm": 14.974848195932102, - "learning_rate": 3.7959077652728412e-06, - "loss": 1.0108, - "num_input_tokens_seen": 29611675, - "step": 1420 - }, - { - "epoch": 0.17086514759814825, - "flos": 14881913043840.0, - "grad_norm": 12.712390556351963, - "learning_rate": 3.795564811769707e-06, - "loss": 0.9912, - "num_input_tokens_seen": 29629750, - "step": 1421 - }, - { - "epoch": 0.17098539048878736, - "flos": 20282066079360.0, - "grad_norm": 19.399266557495157, - "learning_rate": 3.795221585880818e-06, - "loss": 1.0081, - "num_input_tokens_seen": 29650150, - "step": 1422 - }, - { - "epoch": 0.17110563337942644, - "flos": 11525908378800.0, - "grad_norm": 13.760405885787002, - "learning_rate": 3.794878087658242e-06, - "loss": 1.141, - "num_input_tokens_seen": 29667640, - "step": 1423 - }, - { - "epoch": 0.17122587627006552, - "flos": 21144053632920.0, - "grad_norm": 13.925214202164787, - "learning_rate": 3.7945343171540873e-06, - "loss": 1.0056, - "num_input_tokens_seen": 29688235, - "step": 1424 - }, - { - "epoch": 0.17134611916070464, - "flos": 18028792146600.0, - "grad_norm": 13.216355127139996, - "learning_rate": 3.7941902744205033e-06, - "loss": 1.0041, - "num_input_tokens_seen": 29708990, - "step": 1425 - }, - { - "epoch": 0.17146636205134372, - "flos": 9846802230120.0, - "grad_norm": 15.829695327149876, - "learning_rate": 3.7938459595096817e-06, - "loss": 1.0581, - "num_input_tokens_seen": 29727255, - "step": 1426 - }, - { - "epoch": 0.1715866049419828, - "flos": 17005951422360.0, - "grad_norm": 14.725945465355792, - "learning_rate": 3.7935013724738545e-06, - "loss": 1.0855, - "num_input_tokens_seen": 29747475, - "step": 1427 - }, - { - "epoch": 0.17170684783262188, - "flos": 16139088680760.0, - "grad_norm": 20.133397595300917, - "learning_rate": 3.7931565133652945e-06, - "loss": 1.0026, - "num_input_tokens_seen": 29767270, - "step": 1428 - }, - { - "epoch": 0.171827090723261, - "flos": 18944755141920.0, - "grad_norm": 29.966721451157284, - "learning_rate": 3.792811382236317e-06, - "loss": 0.8973, - "num_input_tokens_seen": 29785500, - "step": 1429 - }, - { - "epoch": 0.17194733361390008, - "flos": 20048467044120.0, - "grad_norm": 12.502696852634122, - "learning_rate": 3.792465979139279e-06, - "loss": 0.9928, - "num_input_tokens_seen": 29807825, - "step": 1430 - }, - { - "epoch": 0.17206757650453916, - "flos": 46917139594320.0, - "grad_norm": 1.0651418714992376, - "learning_rate": 3.792120304126576e-06, - "loss": 0.9735, - "num_input_tokens_seen": 29870920, - "step": 1431 - }, - { - "epoch": 0.17218781939517827, - "flos": 15826090222440.0, - "grad_norm": 13.76129962283978, - "learning_rate": 3.791774357250649e-06, - "loss": 1.0637, - "num_input_tokens_seen": 29889470, - "step": 1432 - }, - { - "epoch": 0.17230806228581735, - "flos": 9978797324760.0, - "grad_norm": 38.118906403392856, - "learning_rate": 3.7914281385639757e-06, - "loss": 1.0079, - "num_input_tokens_seen": 29907065, - "step": 1433 - }, - 
{ - "epoch": 0.17242830517645644, - "flos": 14698763370960.0, - "grad_norm": 11.57740935920913, - "learning_rate": 3.7910816481190784e-06, - "loss": 1.007, - "num_input_tokens_seen": 29926600, - "step": 1434 - }, - { - "epoch": 0.17254854806709552, - "flos": 21935656262040.0, - "grad_norm": 10.366060749103372, - "learning_rate": 3.7907348859685193e-06, - "loss": 0.9823, - "num_input_tokens_seen": 29948025, - "step": 1435 - }, - { - "epoch": 0.17266879095773463, - "flos": 18945981604320.0, - "grad_norm": 9.761410027881315, - "learning_rate": 3.790387852164902e-06, - "loss": 1.0193, - "num_input_tokens_seen": 29968475, - "step": 1436 - }, - { - "epoch": 0.1727890338483737, - "flos": 14383495957200.0, - "grad_norm": 10.771669061483331, - "learning_rate": 3.7900405467608707e-06, - "loss": 0.992, - "num_input_tokens_seen": 29987740, - "step": 1437 - }, - { - "epoch": 0.1729092767390128, - "flos": 12883062893640.0, - "grad_norm": 18.404934329304616, - "learning_rate": 3.7896929698091114e-06, - "loss": 0.9995, - "num_input_tokens_seen": 30000275, - "step": 1438 - }, - { - "epoch": 0.1730295196296519, - "flos": 19049885161920.0, - "grad_norm": 15.308281266745684, - "learning_rate": 3.7893451213623518e-06, - "loss": 0.9268, - "num_input_tokens_seen": 30017225, - "step": 1439 - }, - { - "epoch": 0.173149762520291, - "flos": 16953754351080.0, - "grad_norm": 11.103280113762237, - "learning_rate": 3.7889970014733606e-06, - "loss": 1.0485, - "num_input_tokens_seen": 30036050, - "step": 1440 - }, - { - "epoch": 0.17327000541093007, - "flos": 16612848325080.0, - "grad_norm": 9.659067872371844, - "learning_rate": 3.7886486101949463e-06, - "loss": 0.995, - "num_input_tokens_seen": 30056950, - "step": 1441 - }, - { - "epoch": 0.17339024830156918, - "flos": 12913546032360.0, - "grad_norm": 11.007655167656692, - "learning_rate": 3.7882999475799594e-06, - "loss": 1.1005, - "num_input_tokens_seen": 30074705, - "step": 1442 - }, - { - "epoch": 0.17351049119220827, - "flos": 16586903097240.0, - "grad_norm": 8.57976534364727, - "learning_rate": 3.787951013681293e-06, - "loss": 1.0462, - "num_input_tokens_seen": 30092470, - "step": 1443 - }, - { - "epoch": 0.17363073408284735, - "flos": 16925356198440.0, - "grad_norm": 11.57247576945272, - "learning_rate": 3.787601808551879e-06, - "loss": 1.01, - "num_input_tokens_seen": 30112005, - "step": 1444 - }, - { - "epoch": 0.17375097697348643, - "flos": 13125124519440.0, - "grad_norm": 13.143943267867, - "learning_rate": 3.7872523322446926e-06, - "loss": 1.0585, - "num_input_tokens_seen": 30130610, - "step": 1445 - }, - { - "epoch": 0.17387121986412554, - "flos": 27759824795400.0, - "grad_norm": 13.403314300146647, - "learning_rate": 3.7869025848127478e-06, - "loss": 0.8262, - "num_input_tokens_seen": 30154525, - "step": 1446 - }, - { - "epoch": 0.17399146275476463, - "flos": 14462128841280.0, - "grad_norm": 27.959037245081866, - "learning_rate": 3.786552566309102e-06, - "loss": 1.0129, - "num_input_tokens_seen": 30172455, - "step": 1447 - }, - { - "epoch": 0.1741117056454037, - "flos": 13590544219440.0, - "grad_norm": 13.526442451204314, - "learning_rate": 3.7862022767868517e-06, - "loss": 1.1005, - "num_input_tokens_seen": 30189765, - "step": 1448 - }, - { - "epoch": 0.17423194853604282, - "flos": 18054522743520.0, - "grad_norm": 15.507459260775422, - "learning_rate": 3.7858517162991367e-06, - "loss": 1.0726, - "num_input_tokens_seen": 30209560, - "step": 1449 - }, - { - "epoch": 0.1743521914266819, - "flos": 17923416834120.0, - "grad_norm": 11.852650380607786, - 
"learning_rate": 3.7855008848991363e-06, - "loss": 0.8379, - "num_input_tokens_seen": 30227485, - "step": 1450 - }, - { - "epoch": 0.17447243431732098, - "flos": 18264629475720.0, - "grad_norm": 8.25877791926424, - "learning_rate": 3.7851497826400714e-06, - "loss": 1.0019, - "num_input_tokens_seen": 30247345, - "step": 1451 - }, - { - "epoch": 0.17459267720796007, - "flos": 25893429663240.0, - "grad_norm": 12.786182598966025, - "learning_rate": 3.7847984095752034e-06, - "loss": 0.9817, - "num_input_tokens_seen": 30270520, - "step": 1452 - }, - { - "epoch": 0.17471292009859918, - "flos": 14200131653400.0, - "grad_norm": 40.39634419131807, - "learning_rate": 3.784446765757836e-06, - "loss": 1.0213, - "num_input_tokens_seen": 30288885, - "step": 1453 - }, - { - "epoch": 0.17483316298923826, - "flos": 19811709868200.0, - "grad_norm": 12.80200718401037, - "learning_rate": 3.7840948512413133e-06, - "loss": 1.0043, - "num_input_tokens_seen": 30306190, - "step": 1454 - }, - { - "epoch": 0.17495340587987734, - "flos": 31453270730160.0, - "grad_norm": 20.853114550939612, - "learning_rate": 3.7837426660790196e-06, - "loss": 1.0024, - "num_input_tokens_seen": 30327325, - "step": 1455 - }, - { - "epoch": 0.17507364877051645, - "flos": 14828642817960.0, - "grad_norm": 11.299318505726248, - "learning_rate": 3.783390210324382e-06, - "loss": 1.0452, - "num_input_tokens_seen": 30346770, - "step": 1456 - }, - { - "epoch": 0.17519389166115554, - "flos": 17582572131240.0, - "grad_norm": 12.038781125037637, - "learning_rate": 3.7830374840308676e-06, - "loss": 0.9442, - "num_input_tokens_seen": 30366645, - "step": 1457 - }, - { - "epoch": 0.17531413455179462, - "flos": 16920787626000.0, - "grad_norm": 12.697945640391131, - "learning_rate": 3.7826844872519842e-06, - "loss": 1.0383, - "num_input_tokens_seen": 30384220, - "step": 1458 - }, - { - "epoch": 0.1754343774424337, - "flos": 17478300634920.0, - "grad_norm": 8.776976499187244, - "learning_rate": 3.782331220041282e-06, - "loss": 0.9524, - "num_input_tokens_seen": 30404005, - "step": 1459 - }, - { - "epoch": 0.17555462033307281, - "flos": 12837243426840.0, - "grad_norm": 9.423656693712745, - "learning_rate": 3.7819776824523504e-06, - "loss": 1.0619, - "num_input_tokens_seen": 30421590, - "step": 1460 - }, - { - "epoch": 0.1756748632237119, - "flos": 20204598334560.0, - "grad_norm": 11.264455366857026, - "learning_rate": 3.7816238745388213e-06, - "loss": 1.0639, - "num_input_tokens_seen": 30440855, - "step": 1461 - }, - { - "epoch": 0.17579510611435098, - "flos": 18311430112440.0, - "grad_norm": 19.33499918809193, - "learning_rate": 3.781269796354367e-06, - "loss": 1.105, - "num_input_tokens_seen": 30460195, - "step": 1462 - }, - { - "epoch": 0.1759153490049901, - "flos": 13177413575400.0, - "grad_norm": 9.139268145152757, - "learning_rate": 3.7809154479527006e-06, - "loss": 1.1013, - "num_input_tokens_seen": 30479120, - "step": 1463 - }, - { - "epoch": 0.17603559189562917, - "flos": 13199158169520.0, - "grad_norm": 13.528079183372691, - "learning_rate": 3.780560829387577e-06, - "loss": 1.0632, - "num_input_tokens_seen": 30497340, - "step": 1464 - }, - { - "epoch": 0.17615583478626826, - "flos": 43323611214360.0, - "grad_norm": 0.8448687306981548, - "learning_rate": 3.7802059407127915e-06, - "loss": 0.843, - "num_input_tokens_seen": 30555610, - "step": 1465 - }, - { - "epoch": 0.17627607767690734, - "flos": 16790110978440.0, - "grad_norm": 14.257789490563626, - "learning_rate": 3.7798507819821797e-06, - "loss": 1.0863, - "num_input_tokens_seen": 30572455, - 
"step": 1466 - }, - { - "epoch": 0.17639632056754645, - "flos": 12495325569360.0, - "grad_norm": 22.16533540251217, - "learning_rate": 3.7794953532496197e-06, - "loss": 1.0234, - "num_input_tokens_seen": 30588080, - "step": 1467 - }, - { - "epoch": 0.17651656345818553, - "flos": 41460246028560.0, - "grad_norm": 0.9166961289497305, - "learning_rate": 3.7791396545690295e-06, - "loss": 0.8687, - "num_input_tokens_seen": 30649035, - "step": 1468 - }, - { - "epoch": 0.17663680634882462, - "flos": 16297458265080.0, - "grad_norm": 9.89690124103523, - "learning_rate": 3.7787836859943685e-06, - "loss": 1.0209, - "num_input_tokens_seen": 30667480, - "step": 1469 - }, - { - "epoch": 0.17675704923946373, - "flos": 16087995425640.0, - "grad_norm": 12.950486009823145, - "learning_rate": 3.7784274475796363e-06, - "loss": 1.017, - "num_input_tokens_seen": 30685830, - "step": 1470 - }, - { - "epoch": 0.1768772921301028, - "flos": 19313936674320.0, - "grad_norm": 12.366565642925332, - "learning_rate": 3.7780709393788745e-06, - "loss": 0.9922, - "num_input_tokens_seen": 30706025, - "step": 1471 - }, - { - "epoch": 0.1769975350207419, - "flos": 13597197777960.0, - "grad_norm": 11.7882722927264, - "learning_rate": 3.777714161446165e-06, - "loss": 0.9801, - "num_input_tokens_seen": 30725450, - "step": 1472 - }, - { - "epoch": 0.177117777911381, - "flos": 25787625088920.0, - "grad_norm": 13.267415243783416, - "learning_rate": 3.7773571138356304e-06, - "loss": 0.8964, - "num_input_tokens_seen": 30745340, - "step": 1473 - }, - { - "epoch": 0.17723802080202009, - "flos": 15694585712760.0, - "grad_norm": 16.879191464409292, - "learning_rate": 3.776999796601435e-06, - "loss": 1.1179, - "num_input_tokens_seen": 30763820, - "step": 1474 - }, - { - "epoch": 0.17735826369265917, - "flos": 21538383192600.0, - "grad_norm": 5.02470284717341, - "learning_rate": 3.776642209797783e-06, - "loss": 0.9345, - "num_input_tokens_seen": 30785370, - "step": 1475 - }, - { - "epoch": 0.17747850658329825, - "flos": 15196567226400.0, - "grad_norm": 7.875909419757976, - "learning_rate": 3.7762843534789205e-06, - "loss": 0.9912, - "num_input_tokens_seen": 30803840, - "step": 1476 - }, - { - "epoch": 0.17759874947393736, - "flos": 12024202819200.0, - "grad_norm": 7.374922412528847, - "learning_rate": 3.7759262276991343e-06, - "loss": 1.0925, - "num_input_tokens_seen": 30821170, - "step": 1477 - }, - { - "epoch": 0.17771899236457644, - "flos": 8116449518520.0, - "grad_norm": 8.354881148254883, - "learning_rate": 3.7755678325127506e-06, - "loss": 1.0237, - "num_input_tokens_seen": 30838570, - "step": 1478 - }, - { - "epoch": 0.17783923525521553, - "flos": 13335384559440.0, - "grad_norm": 10.540935827762164, - "learning_rate": 3.7752091679741393e-06, - "loss": 0.9711, - "num_input_tokens_seen": 30856080, - "step": 1479 - }, - { - "epoch": 0.17795947814585464, - "flos": 21672279303960.0, - "grad_norm": 10.555564996369363, - "learning_rate": 3.774850234137708e-06, - "loss": 1.0002, - "num_input_tokens_seen": 30873095, - "step": 1480 - }, - { - "epoch": 0.17807972103649372, - "flos": 17346428186520.0, - "grad_norm": 9.399297533774424, - "learning_rate": 3.7744910310579076e-06, - "loss": 1.0482, - "num_input_tokens_seen": 30891740, - "step": 1481 - }, - { - "epoch": 0.1781999639271328, - "flos": 14408674646040.0, - "grad_norm": 8.942594770015196, - "learning_rate": 3.774131558789229e-06, - "loss": 1.0831, - "num_input_tokens_seen": 30910790, - "step": 1482 - }, - { - "epoch": 0.1783202068177719, - "flos": 11263113990360.0, - "grad_norm": 
9.876951387000538, - "learning_rate": 3.773771817386203e-06, - "loss": 0.9219, - "num_input_tokens_seen": 30927840, - "step": 1483 - }, - { - "epoch": 0.178440449708411, - "flos": 14646229022520.0, - "grad_norm": 5.787598459547369, - "learning_rate": 3.773411806903403e-06, - "loss": 1.0175, - "num_input_tokens_seen": 30946640, - "step": 1484 - }, - { - "epoch": 0.17856069259905008, - "flos": 15403423833240.0, - "grad_norm": 5.995799183521834, - "learning_rate": 3.7730515273954415e-06, - "loss": 1.1637, - "num_input_tokens_seen": 30964970, - "step": 1485 - }, - { - "epoch": 0.17868093548968916, - "flos": 18915253173120.0, - "grad_norm": 12.708407309061503, - "learning_rate": 3.772690978916973e-06, - "loss": 1.062, - "num_input_tokens_seen": 30984445, - "step": 1486 - }, - { - "epoch": 0.17880117838032827, - "flos": 13171373248080.0, - "grad_norm": 7.8169955327437375, - "learning_rate": 3.772330161522693e-06, - "loss": 1.0928, - "num_input_tokens_seen": 31002075, - "step": 1487 - }, - { - "epoch": 0.17892142127096736, - "flos": 18893079317160.0, - "grad_norm": 6.996094022630508, - "learning_rate": 3.7719690752673365e-06, - "loss": 1.0306, - "num_input_tokens_seen": 31022590, - "step": 1488 - }, - { - "epoch": 0.17904166416160644, - "flos": 16974517775280.0, - "grad_norm": 4.930728953480947, - "learning_rate": 3.7716077202056796e-06, - "loss": 1.0052, - "num_input_tokens_seen": 31040785, - "step": 1489 - }, - { - "epoch": 0.17916190705224552, - "flos": 13569934103040.0, - "grad_norm": 11.987537906654417, - "learning_rate": 3.7712460963925404e-06, - "loss": 1.1583, - "num_input_tokens_seen": 31056445, - "step": 1490 - }, - { - "epoch": 0.17928214994288463, - "flos": 17894405450280.0, - "grad_norm": 8.168814499942611, - "learning_rate": 3.7708842038827775e-06, - "loss": 0.9782, - "num_input_tokens_seen": 31075125, - "step": 1491 - }, - { - "epoch": 0.17940239283352372, - "flos": 15906194861400.0, - "grad_norm": 7.936373172584659, - "learning_rate": 3.770522042731288e-06, - "loss": 1.0836, - "num_input_tokens_seen": 31096740, - "step": 1492 - }, - { - "epoch": 0.1795226357241628, - "flos": 16475702088360.0, - "grad_norm": 4.9115857309732665, - "learning_rate": 3.7701596129930122e-06, - "loss": 1.1086, - "num_input_tokens_seen": 31115185, - "step": 1493 - }, - { - "epoch": 0.1796428786148019, - "flos": 15694432404960.0, - "grad_norm": 3.8434224803298283, - "learning_rate": 3.7697969147229315e-06, - "loss": 0.957, - "num_input_tokens_seen": 31133065, - "step": 1494 - }, - { - "epoch": 0.179763121505441, - "flos": 15144124862640.0, - "grad_norm": 7.615459038667919, - "learning_rate": 3.7694339479760647e-06, - "loss": 1.0871, - "num_input_tokens_seen": 31151815, - "step": 1495 - }, - { - "epoch": 0.17988336439608008, - "flos": 48808682753760.0, - "grad_norm": 0.8553570539461357, - "learning_rate": 3.769070712807476e-06, - "loss": 0.8367, - "num_input_tokens_seen": 31213565, - "step": 1496 - }, - { - "epoch": 0.18000360728671919, - "flos": 15590038262400.0, - "grad_norm": 4.470844978158556, - "learning_rate": 3.768707209272266e-06, - "loss": 1.0092, - "num_input_tokens_seen": 31233415, - "step": 1497 - }, - { - "epoch": 0.18012385017735827, - "flos": 13463209681920.0, - "grad_norm": 8.288171028139997, - "learning_rate": 3.768343437425579e-06, - "loss": 0.9891, - "num_input_tokens_seen": 31251705, - "step": 1498 - }, - { - "epoch": 0.18024409306799735, - "flos": 14010083129520.0, - "grad_norm": 8.482343398856994, - "learning_rate": 3.7679793973225987e-06, - "loss": 1.0776, - "num_input_tokens_seen": 
31267235, - "step": 1499 - }, - { - "epoch": 0.18036433595863643, - "flos": 48138828710160.0, - "grad_norm": 0.8629223176619796, - "learning_rate": 3.767615089018549e-06, - "loss": 0.8683, - "num_input_tokens_seen": 31329300, - "step": 1500 - }, - { - "epoch": 0.18048457884927555, - "flos": 12884473325400.0, - "grad_norm": 13.699340715346715, - "learning_rate": 3.7672505125686966e-06, - "loss": 1.0784, - "num_input_tokens_seen": 31345385, - "step": 1501 - }, - { - "epoch": 0.18060482173991463, - "flos": 11183377290120.0, - "grad_norm": 9.185549574406897, - "learning_rate": 3.7668856680283455e-06, - "loss": 1.0788, - "num_input_tokens_seen": 31362130, - "step": 1502 - }, - { - "epoch": 0.1807250646305537, - "flos": 13176831005760.0, - "grad_norm": 9.480514620562701, - "learning_rate": 3.7665205554528437e-06, - "loss": 1.0486, - "num_input_tokens_seen": 31381205, - "step": 1503 - }, - { - "epoch": 0.18084530752119282, - "flos": 16455398587560.0, - "grad_norm": 11.985625240359543, - "learning_rate": 3.7661551748975782e-06, - "loss": 0.9741, - "num_input_tokens_seen": 31399100, - "step": 1504 - }, - { - "epoch": 0.1809655504118319, - "flos": 42800965947240.0, - "grad_norm": 0.8189520681163085, - "learning_rate": 3.7657895264179772e-06, - "loss": 0.8474, - "num_input_tokens_seen": 31454795, - "step": 1505 - }, - { - "epoch": 0.181085793302471, - "flos": 31585725748200.0, - "grad_norm": 4.086990016655318, - "learning_rate": 3.765423610069509e-06, - "loss": 0.9696, - "num_input_tokens_seen": 31479905, - "step": 1506 - }, - { - "epoch": 0.18120603619311007, - "flos": 24899477676600.0, - "grad_norm": 5.878772903907149, - "learning_rate": 3.765057425907683e-06, - "loss": 0.9569, - "num_input_tokens_seen": 31501085, - "step": 1507 - }, - { - "epoch": 0.18132627908374918, - "flos": 15274985479560.0, - "grad_norm": 5.285715776980863, - "learning_rate": 3.764690973988048e-06, - "loss": 1.0167, - "num_input_tokens_seen": 31521145, - "step": 1508 - }, - { - "epoch": 0.18144652197438826, - "flos": 20701482343200.0, - "grad_norm": 4.742760612634773, - "learning_rate": 3.7643242543661967e-06, - "loss": 0.97, - "num_input_tokens_seen": 31543525, - "step": 1509 - }, - { - "epoch": 0.18156676486502735, - "flos": 49177833624600.0, - "grad_norm": 0.8699265601961669, - "learning_rate": 3.7639572670977573e-06, - "loss": 0.8748, - "num_input_tokens_seen": 31598740, - "step": 1510 - }, - { - "epoch": 0.18168700775566646, - "flos": 18843427155360.0, - "grad_norm": 5.870902072927324, - "learning_rate": 3.7635900122384042e-06, - "loss": 0.9973, - "num_input_tokens_seen": 31621455, - "step": 1511 - }, - { - "epoch": 0.18180725064630554, - "flos": 10601881393200.0, - "grad_norm": 10.44004160379473, - "learning_rate": 3.7632224898438477e-06, - "loss": 1.0888, - "num_input_tokens_seen": 31637650, - "step": 1512 - }, - { - "epoch": 0.18192749353694462, - "flos": 13963834400880.0, - "grad_norm": 4.456943949079276, - "learning_rate": 3.762854699969842e-06, - "loss": 1.0232, - "num_input_tokens_seen": 31657880, - "step": 1513 - }, - { - "epoch": 0.1820477364275837, - "flos": 14696954338920.0, - "grad_norm": 10.989271654286249, - "learning_rate": 3.762486642672179e-06, - "loss": 0.9564, - "num_input_tokens_seen": 31674540, - "step": 1514 - }, - { - "epoch": 0.18216797931822282, - "flos": 12128596961760.0, - "grad_norm": 5.805704718239692, - "learning_rate": 3.7621183180066946e-06, - "loss": 1.0856, - "num_input_tokens_seen": 31692220, - "step": 1515 - }, - { - "epoch": 0.1822882222088619, - "flos": 20834673238680.0, - 
"grad_norm": 6.402854354448284, - "learning_rate": 3.7617497260292625e-06, - "loss": 0.964, - "num_input_tokens_seen": 31713995, - "step": 1516 - }, - { - "epoch": 0.18240846509950098, - "flos": 12539826589080.0, - "grad_norm": 8.305274978485764, - "learning_rate": 3.7613808667957967e-06, - "loss": 1.0162, - "num_input_tokens_seen": 31726405, - "step": 1517 - }, - { - "epoch": 0.1825287079901401, - "flos": 10447344503880.0, - "grad_norm": 70.61591133444183, - "learning_rate": 3.7610117403622547e-06, - "loss": 1.1435, - "num_input_tokens_seen": 31742685, - "step": 1518 - }, - { - "epoch": 0.18264895088077918, - "flos": 15591050093880.0, - "grad_norm": 5.207081799501446, - "learning_rate": 3.7606423467846313e-06, - "loss": 1.1276, - "num_input_tokens_seen": 31762010, - "step": 1519 - }, - { - "epoch": 0.18276919377141826, - "flos": 14829440018520.0, - "grad_norm": 5.475279479536028, - "learning_rate": 3.760272686118964e-06, - "loss": 1.016, - "num_input_tokens_seen": 31779950, - "step": 1520 - }, - { - "epoch": 0.18288943666205737, - "flos": 15248427020520.0, - "grad_norm": 6.474672122150701, - "learning_rate": 3.7599027584213297e-06, - "loss": 1.1499, - "num_input_tokens_seen": 31798550, - "step": 1521 - }, - { - "epoch": 0.18300967955269645, - "flos": 15297895212960.0, - "grad_norm": 4.114713848635448, - "learning_rate": 3.7595325637478465e-06, - "loss": 1.0086, - "num_input_tokens_seen": 31816295, - "step": 1522 - }, - { - "epoch": 0.18312992244333554, - "flos": 20571204295920.0, - "grad_norm": 8.0710229108606, - "learning_rate": 3.7591621021546723e-06, - "loss": 1.0468, - "num_input_tokens_seen": 31838010, - "step": 1523 - }, - { - "epoch": 0.18325016533397462, - "flos": 14278396598760.0, - "grad_norm": 5.78392898760794, - "learning_rate": 3.7587913736980062e-06, - "loss": 1.0405, - "num_input_tokens_seen": 31857370, - "step": 1524 - }, - { - "epoch": 0.18337040822461373, - "flos": 16584480834000.0, - "grad_norm": 3.1908581565382197, - "learning_rate": 3.7584203784340865e-06, - "loss": 1.075, - "num_input_tokens_seen": 31876260, - "step": 1525 - }, - { - "epoch": 0.1834906511152528, - "flos": 17792402909400.0, - "grad_norm": 7.745968169840591, - "learning_rate": 3.7580491164191938e-06, - "loss": 1.0875, - "num_input_tokens_seen": 31894290, - "step": 1526 - }, - { - "epoch": 0.1836108940058919, - "flos": 48154655623200.0, - "grad_norm": 0.7590815349907518, - "learning_rate": 3.757677587709648e-06, - "loss": 0.8624, - "num_input_tokens_seen": 31957275, - "step": 1527 - }, - { - "epoch": 0.183731136896531, - "flos": 18316366623600.0, - "grad_norm": 4.117413567085759, - "learning_rate": 3.7573057923618095e-06, - "loss": 0.9808, - "num_input_tokens_seen": 31977090, - "step": 1528 - }, - { - "epoch": 0.1838513797871701, - "flos": 14515184436240.0, - "grad_norm": 7.6161470142728716, - "learning_rate": 3.7569337304320793e-06, - "loss": 0.9651, - "num_input_tokens_seen": 31996395, - "step": 1529 - }, - { - "epoch": 0.18397162267780917, - "flos": 49098771478680.0, - "grad_norm": 0.8418823340421472, - "learning_rate": 3.756561401976899e-06, - "loss": 0.8901, - "num_input_tokens_seen": 32055820, - "step": 1530 - }, - { - "epoch": 0.18409186556844825, - "flos": 22773875558520.0, - "grad_norm": 22.08764391400484, - "learning_rate": 3.7561888070527514e-06, - "loss": 1.0601, - "num_input_tokens_seen": 32077580, - "step": 1531 - }, - { - "epoch": 0.18421210845908736, - "flos": 14278764537480.0, - "grad_norm": 4.3128779148978325, - "learning_rate": 3.7558159457161577e-06, - "loss": 1.0179, - 
"num_input_tokens_seen": 32095265, - "step": 1532 - }, - { - "epoch": 0.18433235134972645, - "flos": 16427337712080.0, - "grad_norm": 8.197375217056905, - "learning_rate": 3.755442818023681e-06, - "loss": 1.0006, - "num_input_tokens_seen": 32114610, - "step": 1533 - }, - { - "epoch": 0.18445259424036553, - "flos": 12963504809760.0, - "grad_norm": 9.458507071312386, - "learning_rate": 3.7550694240319246e-06, - "loss": 0.985, - "num_input_tokens_seen": 32132205, - "step": 1534 - }, - { - "epoch": 0.18457283713100464, - "flos": 15143664939240.0, - "grad_norm": 8.283050917090707, - "learning_rate": 3.7546957637975326e-06, - "loss": 0.9872, - "num_input_tokens_seen": 32149335, - "step": 1535 - }, - { - "epoch": 0.18469308002164372, - "flos": 14462098179720.0, - "grad_norm": 24.50517872164346, - "learning_rate": 3.7543218373771873e-06, - "loss": 0.9726, - "num_input_tokens_seen": 32168380, - "step": 1536 - }, - { - "epoch": 0.1848133229122828, - "flos": 18817941850920.0, - "grad_norm": 3.0954940966003153, - "learning_rate": 3.753947644827615e-06, - "loss": 1.0153, - "num_input_tokens_seen": 32191560, - "step": 1537 - }, - { - "epoch": 0.1849335658029219, - "flos": 50525048245920.0, - "grad_norm": 0.8048705846407977, - "learning_rate": 3.753573186205579e-06, - "loss": 0.8316, - "num_input_tokens_seen": 32259400, - "step": 1538 - }, - { - "epoch": 0.185053808693561, - "flos": 12312175896480.0, - "grad_norm": 5.919497211343834, - "learning_rate": 3.753198461567885e-06, - "loss": 0.9965, - "num_input_tokens_seen": 32276365, - "step": 1539 - }, - { - "epoch": 0.18517405158420008, - "flos": 20650266441840.0, - "grad_norm": 3.803763087335061, - "learning_rate": 3.7528234709713783e-06, - "loss": 1.1461, - "num_input_tokens_seen": 32298830, - "step": 1540 - }, - { - "epoch": 0.18529429447483917, - "flos": 19075217158560.0, - "grad_norm": 8.282065297425534, - "learning_rate": 3.7524482144729447e-06, - "loss": 1.0645, - "num_input_tokens_seen": 32318005, - "step": 1541 - }, - { - "epoch": 0.18541453736547828, - "flos": 9578274129960.0, - "grad_norm": 8.20303617883461, - "learning_rate": 3.7520726921295106e-06, - "loss": 1.0631, - "num_input_tokens_seen": 32334445, - "step": 1542 - }, - { - "epoch": 0.18553478025611736, - "flos": 17085258860760.0, - "grad_norm": 5.189715360505939, - "learning_rate": 3.751696903998042e-06, - "loss": 0.9623, - "num_input_tokens_seen": 32352800, - "step": 1543 - }, - { - "epoch": 0.18565502314675644, - "flos": 18421680612960.0, - "grad_norm": 4.840294177360367, - "learning_rate": 3.7513208501355456e-06, - "loss": 0.9208, - "num_input_tokens_seen": 32373625, - "step": 1544 - }, - { - "epoch": 0.18577526603739553, - "flos": 13911943945200.0, - "grad_norm": 7.227781780586174, - "learning_rate": 3.750944530599069e-06, - "loss": 1.0566, - "num_input_tokens_seen": 32392915, - "step": 1545 - }, - { - "epoch": 0.18589550892803464, - "flos": 13095561227520.0, - "grad_norm": 6.641786433360169, - "learning_rate": 3.7505679454456992e-06, - "loss": 1.0252, - "num_input_tokens_seen": 32409245, - "step": 1546 - }, - { - "epoch": 0.18601575181867372, - "flos": 16743402326400.0, - "grad_norm": 4.06159329199516, - "learning_rate": 3.750191094732564e-06, - "loss": 0.9126, - "num_input_tokens_seen": 32429830, - "step": 1547 - }, - { - "epoch": 0.1861359947093128, - "flos": 18763169208600.0, - "grad_norm": 8.303655752290016, - "learning_rate": 3.7498139785168313e-06, - "loss": 0.9799, - "num_input_tokens_seen": 32450155, - "step": 1548 - }, - { - "epoch": 0.1862562375999519, - "flos": 
16586197881360.0, - "grad_norm": 7.398535207165668, - "learning_rate": 3.749436596855709e-06, - "loss": 1.0058, - "num_input_tokens_seen": 32469175, - "step": 1549 - }, - { - "epoch": 0.186376480490591, - "flos": 11782447809000.0, - "grad_norm": 10.61783816243459, - "learning_rate": 3.749058949806446e-06, - "loss": 1.145, - "num_input_tokens_seen": 32485620, - "step": 1550 - }, - { - "epoch": 0.18649672338123008, - "flos": 15247384527480.0, - "grad_norm": 4.420604725546762, - "learning_rate": 3.748681037426331e-06, - "loss": 1.0645, - "num_input_tokens_seen": 32504550, - "step": 1551 - }, - { - "epoch": 0.1866169662718692, - "flos": 8667462276720.0, - "grad_norm": 15.221881896682897, - "learning_rate": 3.7483028597726936e-06, - "loss": 1.1402, - "num_input_tokens_seen": 32521040, - "step": 1552 - }, - { - "epoch": 0.18673720916250827, - "flos": 16765208243640.0, - "grad_norm": 4.8237464580933125, - "learning_rate": 3.7479244169029017e-06, - "loss": 0.854, - "num_input_tokens_seen": 32540550, - "step": 1553 - }, - { - "epoch": 0.18685745205314735, - "flos": 13989626320920.0, - "grad_norm": 5.085661287322438, - "learning_rate": 3.7475457088743658e-06, - "loss": 0.942, - "num_input_tokens_seen": 32557520, - "step": 1554 - }, - { - "epoch": 0.18697769494378644, - "flos": 24400386035640.0, - "grad_norm": 5.466971669566236, - "learning_rate": 3.7471667357445348e-06, - "loss": 0.9737, - "num_input_tokens_seen": 32577070, - "step": 1555 - }, - { - "epoch": 0.18709793783442555, - "flos": 24427220448720.0, - "grad_norm": 5.430056859907736, - "learning_rate": 3.7467874975709e-06, - "loss": 0.953, - "num_input_tokens_seen": 32597595, - "step": 1556 - }, - { - "epoch": 0.18721818072506463, - "flos": 29093609653440.0, - "grad_norm": 6.799855668275247, - "learning_rate": 3.7464079944109904e-06, - "loss": 1.0176, - "num_input_tokens_seen": 32619175, - "step": 1557 - }, - { - "epoch": 0.18733842361570371, - "flos": 15741754288200.0, - "grad_norm": 6.7093494032792185, - "learning_rate": 3.746028226322376e-06, - "loss": 1.0006, - "num_input_tokens_seen": 32634775, - "step": 1558 - }, - { - "epoch": 0.18745866650634282, - "flos": 13412116426800.0, - "grad_norm": 5.7023055650608745, - "learning_rate": 3.745648193362669e-06, - "loss": 0.9849, - "num_input_tokens_seen": 32653850, - "step": 1559 - }, - { - "epoch": 0.1875789093969819, - "flos": 13699138995720.0, - "grad_norm": 4.852201115691131, - "learning_rate": 3.745267895589518e-06, - "loss": 0.9607, - "num_input_tokens_seen": 32672110, - "step": 1560 - }, - { - "epoch": 0.187699152287621, - "flos": 12049688123640.0, - "grad_norm": 5.300379491211407, - "learning_rate": 3.7448873330606154e-06, - "loss": 1.0364, - "num_input_tokens_seen": 32689600, - "step": 1561 - }, - { - "epoch": 0.18781939517826007, - "flos": 15878471263080.0, - "grad_norm": 5.32738500921202, - "learning_rate": 3.7445065058336914e-06, - "loss": 1.0936, - "num_input_tokens_seen": 32708190, - "step": 1562 - }, - { - "epoch": 0.18793963806889918, - "flos": 10319550042960.0, - "grad_norm": 4.033737952584078, - "learning_rate": 3.7441254139665176e-06, - "loss": 1.0947, - "num_input_tokens_seen": 32724095, - "step": 1563 - }, - { - "epoch": 0.18805988095953827, - "flos": 12364587598680.0, - "grad_norm": 9.592839002069663, - "learning_rate": 3.743744057516905e-06, - "loss": 1.0464, - "num_input_tokens_seen": 32741875, - "step": 1564 - }, - { - "epoch": 0.18818012385017735, - "flos": 10629574329960.0, - "grad_norm": 9.708317790748914, - "learning_rate": 3.743362436542706e-06, - "loss": 1.099, - 
"num_input_tokens_seen": 32756285, - "step": 1565 - }, - { - "epoch": 0.18830036674081646, - "flos": 33992432754120.0, - "grad_norm": 79.63413347832542, - "learning_rate": 3.7429805511018115e-06, - "loss": 0.9917, - "num_input_tokens_seen": 32777665, - "step": 1566 - }, - { - "epoch": 0.18842060963145554, - "flos": 21411017993520.0, - "grad_norm": 4.535925826985066, - "learning_rate": 3.7425984012521524e-06, - "loss": 1.0026, - "num_input_tokens_seen": 32797585, - "step": 1567 - }, - { - "epoch": 0.18854085252209463, - "flos": 50359043933160.0, - "grad_norm": 0.7311788361311043, - "learning_rate": 3.7422159870517025e-06, - "loss": 0.8559, - "num_input_tokens_seen": 32862560, - "step": 1568 - }, - { - "epoch": 0.1886610954127337, - "flos": 15118915512240.0, - "grad_norm": 6.215728792654014, - "learning_rate": 3.7418333085584717e-06, - "loss": 1.0143, - "num_input_tokens_seen": 32883465, - "step": 1569 - }, - { - "epoch": 0.18878133830337282, - "flos": 12226797469200.0, - "grad_norm": 6.498222107928048, - "learning_rate": 3.7414503658305128e-06, - "loss": 1.1401, - "num_input_tokens_seen": 32900420, - "step": 1570 - }, - { - "epoch": 0.1889015811940119, - "flos": 18342587805480.0, - "grad_norm": 11.366163909025593, - "learning_rate": 3.7410671589259185e-06, - "loss": 1.0067, - "num_input_tokens_seen": 32918740, - "step": 1571 - }, - { - "epoch": 0.18902182408465099, - "flos": 14933895484200.0, - "grad_norm": 6.300189109264895, - "learning_rate": 3.7406836879028205e-06, - "loss": 1.0102, - "num_input_tokens_seen": 32938685, - "step": 1572 - }, - { - "epoch": 0.1891420669752901, - "flos": 15824925083160.0, - "grad_norm": 14.633662861536656, - "learning_rate": 3.7402999528193907e-06, - "loss": 1.0004, - "num_input_tokens_seen": 32957905, - "step": 1573 - }, - { - "epoch": 0.18926230986592918, - "flos": 15642296656800.0, - "grad_norm": 9.775341260177553, - "learning_rate": 3.739915953733842e-06, - "loss": 1.0686, - "num_input_tokens_seen": 32975670, - "step": 1574 - }, - { - "epoch": 0.18938255275656826, - "flos": 17399422458360.0, - "grad_norm": 5.4915706890481255, - "learning_rate": 3.7395316907044264e-06, - "loss": 1.0436, - "num_input_tokens_seen": 32996175, - "step": 1575 - }, - { - "epoch": 0.18950279564720737, - "flos": 17373937153920.0, - "grad_norm": 12.523612951047962, - "learning_rate": 3.7391471637894364e-06, - "loss": 1.0253, - "num_input_tokens_seen": 33018160, - "step": 1576 - }, - { - "epoch": 0.18962303853784646, - "flos": 14017073965200.0, - "grad_norm": 5.252757120498088, - "learning_rate": 3.738762373047205e-06, - "loss": 1.0791, - "num_input_tokens_seen": 33037800, - "step": 1577 - }, - { - "epoch": 0.18974328142848554, - "flos": 14933772837960.0, - "grad_norm": 3.5998887415753824, - "learning_rate": 3.738377318536103e-06, - "loss": 1.0624, - "num_input_tokens_seen": 33057405, - "step": 1578 - }, - { - "epoch": 0.18986352431912462, - "flos": 9136592025480.0, - "grad_norm": 5.0582379897222065, - "learning_rate": 3.7379920003145447e-06, - "loss": 0.9414, - "num_input_tokens_seen": 33071400, - "step": 1579 - }, - { - "epoch": 0.18998376720976373, - "flos": 16899901555560.0, - "grad_norm": 13.088051951372899, - "learning_rate": 3.7376064184409817e-06, - "loss": 1.0703, - "num_input_tokens_seen": 33090700, - "step": 1580 - }, - { - "epoch": 0.19010401010040281, - "flos": 16323710108520.0, - "grad_norm": 4.0463412017490175, - "learning_rate": 3.7372205729739063e-06, - "loss": 1.1036, - "num_input_tokens_seen": 33112235, - "step": 1581 - }, - { - "epoch": 0.1902242529910419, - 
"flos": 13570976596080.0, - "grad_norm": 5.3120657350655245, - "learning_rate": 3.7368344639718514e-06, - "loss": 0.9537, - "num_input_tokens_seen": 33129890, - "step": 1582 - }, - { - "epoch": 0.190344495881681, - "flos": 18133339596960.0, - "grad_norm": 6.482155587800491, - "learning_rate": 3.7364480914933895e-06, - "loss": 1.0354, - "num_input_tokens_seen": 33149850, - "step": 1583 - }, - { - "epoch": 0.1904647387723201, - "flos": 19073408126520.0, - "grad_norm": 8.519603786658193, - "learning_rate": 3.7360614555971325e-06, - "loss": 1.0474, - "num_input_tokens_seen": 33169225, - "step": 1584 - }, - { - "epoch": 0.19058498166295917, - "flos": 17058025847400.0, - "grad_norm": 5.189841419950459, - "learning_rate": 3.735674556341733e-06, - "loss": 1.0755, - "num_input_tokens_seen": 33188560, - "step": 1585 - }, - { - "epoch": 0.19070522455359826, - "flos": 20152370601720.0, - "grad_norm": 6.36714016949433, - "learning_rate": 3.7352873937858835e-06, - "loss": 1.0642, - "num_input_tokens_seen": 33209815, - "step": 1586 - }, - { - "epoch": 0.19082546744423737, - "flos": 18259202379600.0, - "grad_norm": 5.9624677421289025, - "learning_rate": 3.734899967988316e-06, - "loss": 0.9416, - "num_input_tokens_seen": 33227715, - "step": 1587 - }, - { - "epoch": 0.19094571033487645, - "flos": 13990576829280.0, - "grad_norm": 5.352486946841934, - "learning_rate": 3.7345122790078026e-06, - "loss": 1.0678, - "num_input_tokens_seen": 33245000, - "step": 1588 - }, - { - "epoch": 0.19106595322551553, - "flos": 15353741009880.0, - "grad_norm": 9.395907269939022, - "learning_rate": 3.7341243269031556e-06, - "loss": 1.1526, - "num_input_tokens_seen": 33263710, - "step": 1589 - }, - { - "epoch": 0.19118619611615464, - "flos": 21304600188000.0, - "grad_norm": 3.2606703849443184, - "learning_rate": 3.7337361117332275e-06, - "loss": 1.0015, - "num_input_tokens_seen": 33285170, - "step": 1590 - }, - { - "epoch": 0.19130643900679373, - "flos": 12232439196240.0, - "grad_norm": 7.472618218587879, - "learning_rate": 3.7333476335569087e-06, - "loss": 0.9922, - "num_input_tokens_seen": 33302890, - "step": 1591 - }, - { - "epoch": 0.1914266818974328, - "flos": 17660622445680.0, - "grad_norm": 5.108629644780896, - "learning_rate": 3.7329588924331325e-06, - "loss": 0.8875, - "num_input_tokens_seen": 33323815, - "step": 1592 - }, - { - "epoch": 0.1915469247880719, - "flos": 13439226793920.0, - "grad_norm": 6.394397620014849, - "learning_rate": 3.732569888420871e-06, - "loss": 1.0425, - "num_input_tokens_seen": 33343070, - "step": 1593 - }, - { - "epoch": 0.191667167678711, - "flos": 14934539376960.0, - "grad_norm": 6.321444076506755, - "learning_rate": 3.732180621579134e-06, - "loss": 1.0537, - "num_input_tokens_seen": 33362005, - "step": 1594 - }, - { - "epoch": 0.1917874105693501, - "flos": 24424092969600.0, - "grad_norm": 4.8298536249257085, - "learning_rate": 3.7317910919669745e-06, - "loss": 1.0317, - "num_input_tokens_seen": 33382920, - "step": 1595 - }, - { - "epoch": 0.19190765345998917, - "flos": 16743954234480.0, - "grad_norm": 9.359060750280522, - "learning_rate": 3.7314012996434826e-06, - "loss": 0.9863, - "num_input_tokens_seen": 33401370, - "step": 1596 - }, - { - "epoch": 0.19202789635062828, - "flos": 14091966138960.0, - "grad_norm": 4.145103249730667, - "learning_rate": 3.7310112446677907e-06, - "loss": 1.0279, - "num_input_tokens_seen": 33419000, - "step": 1597 - }, - { - "epoch": 0.19214813924126736, - "flos": 14855415907920.0, - "grad_norm": 4.026705760723928, - "learning_rate": 3.7306209270990695e-06, - 
"loss": 0.9169, - "num_input_tokens_seen": 33436725, - "step": 1598 - }, - { - "epoch": 0.19226838213190645, - "flos": 18762770608320.0, - "grad_norm": 3.7483846364143747, - "learning_rate": 3.7302303469965292e-06, - "loss": 1.0891, - "num_input_tokens_seen": 33455985, - "step": 1599 - }, - { - "epoch": 0.19238862502254553, - "flos": 14803433467560.0, - "grad_norm": 4.697310493873541, - "learning_rate": 3.7298395044194206e-06, - "loss": 0.9314, - "num_input_tokens_seen": 33474515, - "step": 1600 - }, - { - "epoch": 0.19250886791318464, - "flos": 15432649848000.0, - "grad_norm": 5.080098211885989, - "learning_rate": 3.7294483994270356e-06, - "loss": 1.1552, - "num_input_tokens_seen": 33492560, - "step": 1601 - }, - { - "epoch": 0.19262911080382372, - "flos": 16612541709480.0, - "grad_norm": 5.803067336774296, - "learning_rate": 3.7290570320787033e-06, - "loss": 0.9908, - "num_input_tokens_seen": 33511860, - "step": 1602 - }, - { - "epoch": 0.1927493536944628, - "flos": 15588719815320.0, - "grad_norm": 8.315807630962599, - "learning_rate": 3.728665402433793e-06, - "loss": 0.9436, - "num_input_tokens_seen": 33530150, - "step": 1603 - }, - { - "epoch": 0.19286959658510192, - "flos": 11709609959760.0, - "grad_norm": 6.499550437052478, - "learning_rate": 3.7282735105517164e-06, - "loss": 1.0885, - "num_input_tokens_seen": 33547995, - "step": 1604 - }, - { - "epoch": 0.192989839475741, - "flos": 15354292917960.0, - "grad_norm": 9.512462272017824, - "learning_rate": 3.727881356491922e-06, - "loss": 0.9096, - "num_input_tokens_seen": 33566125, - "step": 1605 - }, - { - "epoch": 0.19311008236638008, - "flos": 13675800000480.0, - "grad_norm": 5.686310025324117, - "learning_rate": 3.7274889403139002e-06, - "loss": 0.9916, - "num_input_tokens_seen": 33583470, - "step": 1606 - }, - { - "epoch": 0.1932303252570192, - "flos": 20409645909360.0, - "grad_norm": 8.041765861360632, - "learning_rate": 3.727096262077179e-06, - "loss": 1.019, - "num_input_tokens_seen": 33602185, - "step": 1607 - }, - { - "epoch": 0.19335056814765827, - "flos": 13019933176320.0, - "grad_norm": 11.54650168015424, - "learning_rate": 3.7267033218413285e-06, - "loss": 1.0788, - "num_input_tokens_seen": 33619700, - "step": 1608 - }, - { - "epoch": 0.19347081103829736, - "flos": 9348814405320.0, - "grad_norm": 7.557394526960519, - "learning_rate": 3.726310119665957e-06, - "loss": 1.0434, - "num_input_tokens_seen": 33635755, - "step": 1609 - }, - { - "epoch": 0.19359105392893644, - "flos": 14407448183640.0, - "grad_norm": 5.670987819087454, - "learning_rate": 3.725916655610713e-06, - "loss": 1.0827, - "num_input_tokens_seen": 33654805, - "step": 1610 - }, - { - "epoch": 0.19371129681957555, - "flos": 14540301801960.0, - "grad_norm": 11.5569679016634, - "learning_rate": 3.725522929735284e-06, - "loss": 0.9723, - "num_input_tokens_seen": 33671460, - "step": 1611 - }, - { - "epoch": 0.19383153971021463, - "flos": 21699052393920.0, - "grad_norm": 6.801111063226062, - "learning_rate": 3.725128942099399e-06, - "loss": 0.9614, - "num_input_tokens_seen": 33691580, - "step": 1612 - }, - { - "epoch": 0.19395178260085372, - "flos": 17476215648840.0, - "grad_norm": 6.1835009049212655, - "learning_rate": 3.7247346927628245e-06, - "loss": 1.0341, - "num_input_tokens_seen": 33711235, - "step": 1613 - }, - { - "epoch": 0.19407202549149283, - "flos": 20624842460520.0, - "grad_norm": 4.547925690056445, - "learning_rate": 3.7243401817853694e-06, - "loss": 1.0119, - "num_input_tokens_seen": 33731645, - "step": 1614 - }, - { - "epoch": 0.1941922683821319, - 
"flos": 12758027973120.0, - "grad_norm": 5.576709728055712, - "learning_rate": 3.723945409226879e-06, - "loss": 0.9482, - "num_input_tokens_seen": 33749855, - "step": 1615 - }, - { - "epoch": 0.194312511272771, - "flos": 6804102639000.0, - "grad_norm": 7.3811759305462825, - "learning_rate": 3.723550375147241e-06, - "loss": 1.0406, - "num_input_tokens_seen": 33764350, - "step": 1616 - }, - { - "epoch": 0.19443275416341008, - "flos": 19234537251240.0, - "grad_norm": 6.881979988315159, - "learning_rate": 3.7231550796063816e-06, - "loss": 1.0251, - "num_input_tokens_seen": 33784080, - "step": 1617 - }, - { - "epoch": 0.1945529970540492, - "flos": 11206501654440.0, - "grad_norm": 4.851825111149379, - "learning_rate": 3.722759522664266e-06, - "loss": 0.8742, - "num_input_tokens_seen": 33801100, - "step": 1618 - }, - { - "epoch": 0.19467323994468827, - "flos": 13699445611320.0, - "grad_norm": 7.573351428229694, - "learning_rate": 3.7223637043809016e-06, - "loss": 1.0414, - "num_input_tokens_seen": 33819800, - "step": 1619 - }, - { - "epoch": 0.19479348283532735, - "flos": 17162511974640.0, - "grad_norm": 7.395762297239913, - "learning_rate": 3.7219676248163322e-06, - "loss": 1.0913, - "num_input_tokens_seen": 33836685, - "step": 1620 - }, - { - "epoch": 0.19491372572596646, - "flos": 18342955744200.0, - "grad_norm": 3.747210164227859, - "learning_rate": 3.721571284030643e-06, - "loss": 1.1449, - "num_input_tokens_seen": 33856215, - "step": 1621 - }, - { - "epoch": 0.19503396861660555, - "flos": 13937030649360.0, - "grad_norm": 13.366046511247083, - "learning_rate": 3.7211746820839587e-06, - "loss": 1.0079, - "num_input_tokens_seen": 33873030, - "step": 1622 - }, - { - "epoch": 0.19515421150724463, - "flos": 14934600700080.0, - "grad_norm": 5.960165671279747, - "learning_rate": 3.7207778190364437e-06, - "loss": 1.0478, - "num_input_tokens_seen": 33891175, - "step": 1623 - }, - { - "epoch": 0.1952744543978837, - "flos": 23506842188760.0, - "grad_norm": 5.469653943149648, - "learning_rate": 3.720380694948302e-06, - "loss": 0.9727, - "num_input_tokens_seen": 33913780, - "step": 1624 - }, - { - "epoch": 0.19539469728852282, - "flos": 45850196450640.0, - "grad_norm": 0.9843178384321114, - "learning_rate": 3.719983309879777e-06, - "loss": 0.9765, - "num_input_tokens_seen": 33973280, - "step": 1625 - }, - { - "epoch": 0.1955149401791619, - "flos": 9427968535920.0, - "grad_norm": 5.079383107372867, - "learning_rate": 3.719585663891151e-06, - "loss": 1.0079, - "num_input_tokens_seen": 33990535, - "step": 1626 - }, - { - "epoch": 0.195635183069801, - "flos": 13277913699840.0, - "grad_norm": 10.294745310393035, - "learning_rate": 3.719187757042747e-06, - "loss": 1.0091, - "num_input_tokens_seen": 34008075, - "step": 1627 - }, - { - "epoch": 0.1957554259604401, - "flos": 49639507066200.0, - "grad_norm": 0.9113811714571376, - "learning_rate": 3.7187895893949275e-06, - "loss": 0.8275, - "num_input_tokens_seen": 34074265, - "step": 1628 - }, - { - "epoch": 0.19587566885107918, - "flos": 14960607251040.0, - "grad_norm": 7.200934550774713, - "learning_rate": 3.7183911610080937e-06, - "loss": 0.9672, - "num_input_tokens_seen": 34090850, - "step": 1629 - }, - { - "epoch": 0.19599591174171827, - "flos": 15768650024400.0, - "grad_norm": 8.173505864689716, - "learning_rate": 3.7179924719426872e-06, - "loss": 0.9742, - "num_input_tokens_seen": 34108465, - "step": 1630 - }, - { - "epoch": 0.19611615463235738, - "flos": 16900300155840.0, - "grad_norm": 9.488579561232743, - "learning_rate": 3.7175935222591885e-06, - 
"loss": 0.9764, - "num_input_tokens_seen": 34127485, - "step": 1631 - }, - { - "epoch": 0.19623639752299646, - "flos": 20385417728880.0, - "grad_norm": 6.775556151588597, - "learning_rate": 3.717194312018118e-06, - "loss": 0.9869, - "num_input_tokens_seen": 34146190, - "step": 1632 - }, - { - "epoch": 0.19635664041363554, - "flos": 14934324746040.0, - "grad_norm": 6.468643261309889, - "learning_rate": 3.716794841280036e-06, - "loss": 0.9813, - "num_input_tokens_seen": 34164615, - "step": 1633 - }, - { - "epoch": 0.19647688330427462, - "flos": 13386999061080.0, - "grad_norm": 7.842161466073046, - "learning_rate": 3.7163951101055407e-06, - "loss": 0.9974, - "num_input_tokens_seen": 34182395, - "step": 1634 - }, - { - "epoch": 0.19659712619491373, - "flos": 17241022212480.0, - "grad_norm": 6.529736503203597, - "learning_rate": 3.715995118555273e-06, - "loss": 1.0113, - "num_input_tokens_seen": 34202090, - "step": 1635 - }, - { - "epoch": 0.19671736908555282, - "flos": 17582418823440.0, - "grad_norm": 38.871351064681946, - "learning_rate": 3.71559486668991e-06, - "loss": 1.067, - "num_input_tokens_seen": 34220670, - "step": 1636 - }, - { - "epoch": 0.1968376119761919, - "flos": 16953539720160.0, - "grad_norm": 4.11726301149326, - "learning_rate": 3.715194354570169e-06, - "loss": 0.9998, - "num_input_tokens_seen": 34240395, - "step": 1637 - }, - { - "epoch": 0.196957854866831, - "flos": 12836660857200.0, - "grad_norm": 7.947927999092579, - "learning_rate": 3.714793582256809e-06, - "loss": 1.0562, - "num_input_tokens_seen": 34257180, - "step": 1638 - }, - { - "epoch": 0.1970780977574701, - "flos": 15380238145800.0, - "grad_norm": 4.726733193586202, - "learning_rate": 3.7143925498106253e-06, - "loss": 1.0669, - "num_input_tokens_seen": 34275440, - "step": 1639 - }, - { - "epoch": 0.19719834064810918, - "flos": 14775311268960.0, - "grad_norm": 14.363211666101504, - "learning_rate": 3.7139912572924558e-06, - "loss": 1.0224, - "num_input_tokens_seen": 34294190, - "step": 1640 - }, - { - "epoch": 0.19731858353874826, - "flos": 16659863592720.0, - "grad_norm": 7.44382752603153, - "learning_rate": 3.7135897047631744e-06, - "loss": 1.0452, - "num_input_tokens_seen": 34311795, - "step": 1641 - }, - { - "epoch": 0.19743882642938737, - "flos": 17058393786120.0, - "grad_norm": 5.060310361187156, - "learning_rate": 3.713187892283698e-06, - "loss": 0.9712, - "num_input_tokens_seen": 34331125, - "step": 1642 - }, - { - "epoch": 0.19755906932002645, - "flos": 10600746915480.0, - "grad_norm": 17.468974485643916, - "learning_rate": 3.71278581991498e-06, - "loss": 1.0853, - "num_input_tokens_seen": 34346705, - "step": 1643 - }, - { - "epoch": 0.19767931221066554, - "flos": 13828037272800.0, - "grad_norm": 17.44432322682072, - "learning_rate": 3.712383487718015e-06, - "loss": 1.0208, - "num_input_tokens_seen": 34364665, - "step": 1644 - }, - { - "epoch": 0.19779955510130465, - "flos": 18315661407720.0, - "grad_norm": 8.793199113347352, - "learning_rate": 3.7119808957538365e-06, - "loss": 1.0973, - "num_input_tokens_seen": 34383380, - "step": 1645 - }, - { - "epoch": 0.19791979799194373, - "flos": 14751113750040.0, - "grad_norm": 8.392784707356848, - "learning_rate": 3.711578044083517e-06, - "loss": 1.0217, - "num_input_tokens_seen": 34399900, - "step": 1646 - }, - { - "epoch": 0.1980400408825828, - "flos": 18208262432280.0, - "grad_norm": 8.80388853684215, - "learning_rate": 3.7111749327681698e-06, - "loss": 0.9801, - "num_input_tokens_seen": 34419655, - "step": 1647 - }, - { - "epoch": 0.1981602837732219, - 
"flos": 16717487760120.0, - "grad_norm": 7.937848181109153, - "learning_rate": 3.7107715618689455e-06, - "loss": 1.0778, - "num_input_tokens_seen": 34438350, - "step": 1648 - }, - { - "epoch": 0.198280526663861, - "flos": 16481405138520.0, - "grad_norm": 3.6122221843190543, - "learning_rate": 3.710367931447035e-06, - "loss": 1.0548, - "num_input_tokens_seen": 34459850, - "step": 1649 - }, - { - "epoch": 0.1984007695545001, - "flos": 15406336681440.0, - "grad_norm": 8.257530705634233, - "learning_rate": 3.70996404156367e-06, - "loss": 1.0842, - "num_input_tokens_seen": 34479205, - "step": 1650 - }, - { - "epoch": 0.19852101244513917, - "flos": 25736930434080.0, - "grad_norm": 4.280967536017249, - "learning_rate": 3.7095598922801187e-06, - "loss": 0.9514, - "num_input_tokens_seen": 34501000, - "step": 1651 - }, - { - "epoch": 0.19864125533577828, - "flos": 16423413032400.0, - "grad_norm": 4.440481983864301, - "learning_rate": 3.7091554836576914e-06, - "loss": 0.9917, - "num_input_tokens_seen": 34517395, - "step": 1652 - }, - { - "epoch": 0.19876149822641737, - "flos": 17504215201200.0, - "grad_norm": 6.4919524626528, - "learning_rate": 3.708750815757736e-06, - "loss": 1.0521, - "num_input_tokens_seen": 34537885, - "step": 1653 - }, - { - "epoch": 0.19888174111705645, - "flos": 22957546477920.0, - "grad_norm": 12.205788061695618, - "learning_rate": 3.7083458886416407e-06, - "loss": 0.9415, - "num_input_tokens_seen": 34556800, - "step": 1654 - }, - { - "epoch": 0.19900198400769553, - "flos": 17504184539640.0, - "grad_norm": 5.708000711741734, - "learning_rate": 3.707940702370832e-06, - "loss": 1.1022, - "num_input_tokens_seen": 34577365, - "step": 1655 - }, - { - "epoch": 0.19912222689833464, - "flos": 48633106486200.0, - "grad_norm": 0.781155618688636, - "learning_rate": 3.707535257006777e-06, - "loss": 0.8258, - "num_input_tokens_seen": 34642710, - "step": 1656 - }, - { - "epoch": 0.19924246978897373, - "flos": 11132253373440.0, - "grad_norm": 19.24689250486296, - "learning_rate": 3.707129552610981e-06, - "loss": 1.1099, - "num_input_tokens_seen": 34661080, - "step": 1657 - }, - { - "epoch": 0.1993627126796128, - "flos": 12443281805880.0, - "grad_norm": 7.208891507038645, - "learning_rate": 3.70672358924499e-06, - "loss": 0.9622, - "num_input_tokens_seen": 34680040, - "step": 1658 - }, - { - "epoch": 0.19948295557025192, - "flos": 29172119891280.0, - "grad_norm": 5.1840141258165024, - "learning_rate": 3.706317366970386e-06, - "loss": 1.0089, - "num_input_tokens_seen": 34700760, - "step": 1659 - }, - { - "epoch": 0.199603198460891, - "flos": 17844753288480.0, - "grad_norm": 9.36666499598297, - "learning_rate": 3.705910885848795e-06, - "loss": 1.0739, - "num_input_tokens_seen": 34718855, - "step": 1660 - }, - { - "epoch": 0.19972344135153008, - "flos": 14252604678720.0, - "grad_norm": 10.006032649961215, - "learning_rate": 3.705504145941879e-06, - "loss": 1.0602, - "num_input_tokens_seen": 34736745, - "step": 1661 - }, - { - "epoch": 0.1998436842421692, - "flos": 16870307602080.0, - "grad_norm": 7.407602693667927, - "learning_rate": 3.7050971473113403e-06, - "loss": 1.0111, - "num_input_tokens_seen": 34756240, - "step": 1662 - }, - { - "epoch": 0.19996392713280828, - "flos": 25764746017080.0, - "grad_norm": 3.551839974905461, - "learning_rate": 3.7046898900189196e-06, - "loss": 1.0285, - "num_input_tokens_seen": 34780295, - "step": 1663 - }, - { - "epoch": 0.20008417002344736, - "flos": 16820563455600.0, - "grad_norm": 11.155751449600938, - "learning_rate": 3.704282374126398e-06, - "loss": 
1.0663, - "num_input_tokens_seen": 34799695, - "step": 1664 - }, - { - "epoch": 0.20020441291408644, - "flos": 15537595898640.0, - "grad_norm": 3.4218995254543474, - "learning_rate": 3.7038745996955954e-06, - "loss": 1.1069, - "num_input_tokens_seen": 34818760, - "step": 1665 - }, - { - "epoch": 0.20032465580472555, - "flos": 16476560612040.0, - "grad_norm": 7.612520605601217, - "learning_rate": 3.703466566788371e-06, - "loss": 0.9391, - "num_input_tokens_seen": 34837610, - "step": 1666 - }, - { - "epoch": 0.20044489869536464, - "flos": 16975284314280.0, - "grad_norm": 20.410707603198897, - "learning_rate": 3.703058275466622e-06, - "loss": 0.9759, - "num_input_tokens_seen": 34856565, - "step": 1667 - }, - { - "epoch": 0.20056514158600372, - "flos": 15590191570200.0, - "grad_norm": 5.244818173757909, - "learning_rate": 3.7026497257922877e-06, - "loss": 0.9989, - "num_input_tokens_seen": 34876595, - "step": 1668 - }, - { - "epoch": 0.20068538447664283, - "flos": 16980098179200.0, - "grad_norm": 4.762883833188383, - "learning_rate": 3.7022409178273436e-06, - "loss": 1.0698, - "num_input_tokens_seen": 34897295, - "step": 1669 - }, - { - "epoch": 0.2008056273672819, - "flos": 13073111417520.0, - "grad_norm": 6.09659545194991, - "learning_rate": 3.7018318516338054e-06, - "loss": 1.0132, - "num_input_tokens_seen": 34916175, - "step": 1670 - }, - { - "epoch": 0.200925870257921, - "flos": 16822157856720.0, - "grad_norm": 4.436753331080015, - "learning_rate": 3.7014225272737284e-06, - "loss": 1.0279, - "num_input_tokens_seen": 34935120, - "step": 1671 - }, - { - "epoch": 0.20104611314856008, - "flos": 11473527338160.0, - "grad_norm": 5.372061169978359, - "learning_rate": 3.701012944809207e-06, - "loss": 0.9597, - "num_input_tokens_seen": 34951955, - "step": 1672 - }, - { - "epoch": 0.2011663560391992, - "flos": 15196383257040.0, - "grad_norm": 11.278586456252924, - "learning_rate": 3.700603104302374e-06, - "loss": 1.0182, - "num_input_tokens_seen": 34971485, - "step": 1673 - }, - { - "epoch": 0.20128659892983827, - "flos": 44547170685360.0, - "grad_norm": 0.8716946569170687, - "learning_rate": 3.7001930058154027e-06, - "loss": 0.815, - "num_input_tokens_seen": 35036165, - "step": 1674 - }, - { - "epoch": 0.20140684182047736, - "flos": 20256826067400.0, - "grad_norm": 6.477431806104513, - "learning_rate": 3.6997826494105037e-06, - "loss": 1.0213, - "num_input_tokens_seen": 35056330, - "step": 1675 - }, - { - "epoch": 0.20152708471111647, - "flos": 19994767556400.0, - "grad_norm": 5.0435386944342815, - "learning_rate": 3.6993720351499286e-06, - "loss": 0.9188, - "num_input_tokens_seen": 35077175, - "step": 1676 - }, - { - "epoch": 0.20164732760175555, - "flos": 16743218357040.0, - "grad_norm": 3.281441237916716, - "learning_rate": 3.6989611630959666e-06, - "loss": 0.9964, - "num_input_tokens_seen": 35095450, - "step": 1677 - }, - { - "epoch": 0.20176757049239463, - "flos": 50923394469960.0, - "grad_norm": 0.6931986877787784, - "learning_rate": 3.6985500333109474e-06, - "loss": 0.843, - "num_input_tokens_seen": 35163500, - "step": 1678 - }, - { - "epoch": 0.20188781338303372, - "flos": 15219446298240.0, - "grad_norm": 4.775205294205333, - "learning_rate": 3.6981386458572385e-06, - "loss": 0.9937, - "num_input_tokens_seen": 35181195, - "step": 1679 - }, - { - "epoch": 0.20200805627367283, - "flos": 8116541503200.0, - "grad_norm": 4.539655374226996, - "learning_rate": 3.6977270007972468e-06, - "loss": 0.9777, - "num_input_tokens_seen": 35198450, - "step": 1680 - }, - { - "epoch": 0.2021282991643119, - 
"flos": 20333098011360.0, - "grad_norm": 6.420959852961986, - "learning_rate": 3.6973150981934196e-06, - "loss": 0.9462, - "num_input_tokens_seen": 35219400, - "step": 1681 - }, - { - "epoch": 0.202248542054951, - "flos": 12698686758360.0, - "grad_norm": 9.321529235463334, - "learning_rate": 3.6969029381082415e-06, - "loss": 1.0591, - "num_input_tokens_seen": 35235115, - "step": 1682 - }, - { - "epoch": 0.2023687849455901, - "flos": 14094787002480.0, - "grad_norm": 3.7328866203169904, - "learning_rate": 3.696490520604237e-06, - "loss": 1.0307, - "num_input_tokens_seen": 35253525, - "step": 1683 - }, - { - "epoch": 0.20248902783622919, - "flos": 16035307769400.0, - "grad_norm": 8.888548060346738, - "learning_rate": 3.696077845743968e-06, - "loss": 1.0392, - "num_input_tokens_seen": 35272835, - "step": 1684 - }, - { - "epoch": 0.20260927072686827, - "flos": 16139395296360.0, - "grad_norm": 4.880119569908258, - "learning_rate": 3.69566491359004e-06, - "loss": 0.949, - "num_input_tokens_seen": 35289200, - "step": 1685 - }, - { - "epoch": 0.20272951361750738, - "flos": 36488504190120.0, - "grad_norm": 4.410211824619391, - "learning_rate": 3.695251724205092e-06, - "loss": 0.9316, - "num_input_tokens_seen": 35313280, - "step": 1686 - }, - { - "epoch": 0.20284975650814646, - "flos": 18919668437760.0, - "grad_norm": 2.6475684616559536, - "learning_rate": 3.6948382776518054e-06, - "loss": 1.0866, - "num_input_tokens_seen": 35333705, - "step": 1687 - }, - { - "epoch": 0.20296999939878554, - "flos": 11342022828480.0, - "grad_norm": 9.032370105171559, - "learning_rate": 3.6944245739929e-06, - "loss": 1.0253, - "num_input_tokens_seen": 35349585, - "step": 1688 - }, - { - "epoch": 0.20309024228942463, - "flos": 13618942372080.0, - "grad_norm": 7.3427154461609785, - "learning_rate": 3.6940106132911332e-06, - "loss": 0.9522, - "num_input_tokens_seen": 35366490, - "step": 1689 - }, - { - "epoch": 0.20321048518006374, - "flos": 16219683904680.0, - "grad_norm": 5.582526855677485, - "learning_rate": 3.6935963956093037e-06, - "loss": 1.1057, - "num_input_tokens_seen": 35386295, - "step": 1690 - }, - { - "epoch": 0.20333072807070282, - "flos": 13542394474080.0, - "grad_norm": 4.175655409484227, - "learning_rate": 3.6931819210102474e-06, - "loss": 0.9284, - "num_input_tokens_seen": 35405410, - "step": 1691 - }, - { - "epoch": 0.2034509709613419, - "flos": 12884044063560.0, - "grad_norm": 4.362468537924918, - "learning_rate": 3.6927671895568402e-06, - "loss": 1.0826, - "num_input_tokens_seen": 35424190, - "step": 1692 - }, - { - "epoch": 0.20357121385198101, - "flos": 16292675061720.0, - "grad_norm": 3.7932769933001715, - "learning_rate": 3.692352201311996e-06, - "loss": 1.0996, - "num_input_tokens_seen": 35442760, - "step": 1693 - }, - { - "epoch": 0.2036914567426201, - "flos": 14854710692040.0, - "grad_norm": 4.114592630239035, - "learning_rate": 3.6919369563386687e-06, - "loss": 0.9897, - "num_input_tokens_seen": 35462280, - "step": 1694 - }, - { - "epoch": 0.20381169963325918, - "flos": 10971277556520.0, - "grad_norm": 4.5844399705689245, - "learning_rate": 3.69152145469985e-06, - "loss": 1.0257, - "num_input_tokens_seen": 35479045, - "step": 1695 - }, - { - "epoch": 0.20393194252389826, - "flos": 20539341387000.0, - "grad_norm": 3.5749590998090617, - "learning_rate": 3.691105696458572e-06, - "loss": 1.0491, - "num_input_tokens_seen": 35496060, - "step": 1696 - }, - { - "epoch": 0.20405218541453737, - "flos": 15980136526800.0, - "grad_norm": 6.741534540730855, - "learning_rate": 3.690689681677904e-06, - 
"loss": 0.9048, - "num_input_tokens_seen": 35514250, - "step": 1697 - }, - { - "epoch": 0.20417242830517646, - "flos": 18054614728200.0, - "grad_norm": 3.818982475933774, - "learning_rate": 3.690273410420956e-06, - "loss": 1.1107, - "num_input_tokens_seen": 35533735, - "step": 1698 - }, - { - "epoch": 0.20429267119581554, - "flos": 10449950736480.0, - "grad_norm": 3.8866544631932185, - "learning_rate": 3.689856882750875e-06, - "loss": 0.996, - "num_input_tokens_seen": 35548655, - "step": 1699 - }, - { - "epoch": 0.20441291408645465, - "flos": 12597512079600.0, - "grad_norm": 5.945107495336272, - "learning_rate": 3.6894400987308486e-06, - "loss": 1.0138, - "num_input_tokens_seen": 35565895, - "step": 1700 - }, - { - "epoch": 0.20453315697709373, - "flos": 11630547813840.0, - "grad_norm": 4.152000377162556, - "learning_rate": 3.6890230584241024e-06, - "loss": 1.0724, - "num_input_tokens_seen": 35582545, - "step": 1701 - }, - { - "epoch": 0.20465339986773282, - "flos": 47768911300320.0, - "grad_norm": 0.9189264698914207, - "learning_rate": 3.6886057618939016e-06, - "loss": 0.9307, - "num_input_tokens_seen": 35645085, - "step": 1702 - }, - { - "epoch": 0.2047736427583719, - "flos": 29984332636800.0, - "grad_norm": 4.697842796855779, - "learning_rate": 3.6881882092035492e-06, - "loss": 0.917, - "num_input_tokens_seen": 35666190, - "step": 1703 - }, - { - "epoch": 0.204893885649011, - "flos": 50086039245240.0, - "grad_norm": 1.0201004018174917, - "learning_rate": 3.6877704004163873e-06, - "loss": 0.9148, - "num_input_tokens_seen": 35726315, - "step": 1704 - }, - { - "epoch": 0.2050141285396501, - "flos": 15773218596840.0, - "grad_norm": 7.319496915445408, - "learning_rate": 3.6873523355957984e-06, - "loss": 1.0138, - "num_input_tokens_seen": 35745035, - "step": 1705 - }, - { - "epoch": 0.20513437143028918, - "flos": 33087073110480.0, - "grad_norm": 1.0552317027711038, - "learning_rate": 3.686934014805201e-06, - "loss": 0.9545, - "num_input_tokens_seen": 35795385, - "step": 1706 - }, - { - "epoch": 0.20525461432092829, - "flos": 15560199016440.0, - "grad_norm": 8.354117706485143, - "learning_rate": 3.6865154381080552e-06, - "loss": 1.0471, - "num_input_tokens_seen": 35815790, - "step": 1707 - }, - { - "epoch": 0.20537485721156737, - "flos": 15064694778000.0, - "grad_norm": 12.508495038177742, - "learning_rate": 3.6860966055678585e-06, - "loss": 1.0606, - "num_input_tokens_seen": 35831865, - "step": 1708 - }, - { - "epoch": 0.20549510010220645, - "flos": 14329244561400.0, - "grad_norm": 4.427530399263382, - "learning_rate": 3.685677517248147e-06, - "loss": 1.0974, - "num_input_tokens_seen": 35850475, - "step": 1709 - }, - { - "epoch": 0.20561534299284553, - "flos": 12047756445360.0, - "grad_norm": 3.404510473837213, - "learning_rate": 3.6852581732124967e-06, - "loss": 1.0416, - "num_input_tokens_seen": 35867540, - "step": 1710 - }, - { - "epoch": 0.20573558588348465, - "flos": 16269060112440.0, - "grad_norm": 4.293658464058529, - "learning_rate": 3.6848385735245213e-06, - "loss": 0.9923, - "num_input_tokens_seen": 35886350, - "step": 1711 - }, - { - "epoch": 0.20585582877412373, - "flos": 17528841981960.0, - "grad_norm": 6.205315114395506, - "learning_rate": 3.6844187182478734e-06, - "loss": 1.0948, - "num_input_tokens_seen": 35906925, - "step": 1712 - }, - { - "epoch": 0.2059760716647628, - "flos": 17215598231160.0, - "grad_norm": 5.4521228120759515, - "learning_rate": 3.683998607446246e-06, - "loss": 0.9859, - "num_input_tokens_seen": 35925295, - "step": 1713 - }, - { - "epoch": 
0.20609631455540192, - "flos": 14357029482840.0, - "grad_norm": 5.774418273955212, - "learning_rate": 3.6835782411833686e-06, - "loss": 0.9732, - "num_input_tokens_seen": 35944535, - "step": 1714 - }, - { - "epoch": 0.206216557446041, - "flos": 14095062956520.0, - "grad_norm": 3.0201390137161583, - "learning_rate": 3.68315761952301e-06, - "loss": 0.9808, - "num_input_tokens_seen": 35961485, - "step": 1715 - }, - { - "epoch": 0.2063368003366801, - "flos": 17135922854040.0, - "grad_norm": 4.084236330627666, - "learning_rate": 3.6827367425289797e-06, - "loss": 1.0652, - "num_input_tokens_seen": 35980980, - "step": 1716 - }, - { - "epoch": 0.2064570432273192, - "flos": 14436704859960.0, - "grad_norm": 4.2494840869796375, - "learning_rate": 3.6823156102651225e-06, - "loss": 0.9556, - "num_input_tokens_seen": 35998855, - "step": 1717 - }, - { - "epoch": 0.20657728611795828, - "flos": 14565909752640.0, - "grad_norm": 4.016866173017059, - "learning_rate": 3.6818942227953257e-06, - "loss": 0.9429, - "num_input_tokens_seen": 36019120, - "step": 1718 - }, - { - "epoch": 0.20669752900859736, - "flos": 15485306842680.0, - "grad_norm": 5.211701161305297, - "learning_rate": 3.681472580183512e-06, - "loss": 0.9243, - "num_input_tokens_seen": 36037490, - "step": 1719 - }, - { - "epoch": 0.20681777189923645, - "flos": 10686431958360.0, - "grad_norm": 5.3833344182976886, - "learning_rate": 3.6810506824936455e-06, - "loss": 1.0995, - "num_input_tokens_seen": 36055290, - "step": 1720 - }, - { - "epoch": 0.20693801478987556, - "flos": 40413152010360.0, - "grad_norm": 1.1916332591426961, - "learning_rate": 3.680628529789726e-06, - "loss": 0.8934, - "num_input_tokens_seen": 36107420, - "step": 1721 - }, - { - "epoch": 0.20705825768051464, - "flos": 15351931977840.0, - "grad_norm": 3.9023453385022417, - "learning_rate": 3.680206122135796e-06, - "loss": 1.0852, - "num_input_tokens_seen": 36127745, - "step": 1722 - }, - { - "epoch": 0.20717850057115372, - "flos": 18395643400440.0, - "grad_norm": 3.126430031688218, - "learning_rate": 3.6797834595959323e-06, - "loss": 1.0131, - "num_input_tokens_seen": 36147365, - "step": 1723 - }, - { - "epoch": 0.20729874346179283, - "flos": 20754047353200.0, - "grad_norm": 5.695730437340214, - "learning_rate": 3.679360542234254e-06, - "loss": 1.0073, - "num_input_tokens_seen": 36166430, - "step": 1724 - }, - { - "epoch": 0.20741898635243192, - "flos": 20677100854920.0, - "grad_norm": 3.3868981196192873, - "learning_rate": 3.678937370114916e-06, - "loss": 0.957, - "num_input_tokens_seen": 36185955, - "step": 1725 - }, - { - "epoch": 0.207539229243071, - "flos": 11000748863760.0, - "grad_norm": 6.32469102911514, - "learning_rate": 3.678513943302114e-06, - "loss": 1.0292, - "num_input_tokens_seen": 36202450, - "step": 1726 - }, - { - "epoch": 0.20765947213371008, - "flos": 14566522983840.0, - "grad_norm": 3.817536825965473, - "learning_rate": 3.678090261860082e-06, - "loss": 1.0829, - "num_input_tokens_seen": 36221900, - "step": 1727 - }, - { - "epoch": 0.2077797150243492, - "flos": 13728181041120.0, - "grad_norm": 7.890416873415453, - "learning_rate": 3.6776663258530906e-06, - "loss": 1.0114, - "num_input_tokens_seen": 36240270, - "step": 1728 - }, - { - "epoch": 0.20789995791498828, - "flos": 15506560851840.0, - "grad_norm": 5.993093330123787, - "learning_rate": 3.6772421353454516e-06, - "loss": 0.9528, - "num_input_tokens_seen": 36258585, - "step": 1729 - }, - { - "epoch": 0.20802020080562736, - "flos": 16455153295080.0, - "grad_norm": 5.934987389825088, - "learning_rate": 
3.6768176904015153e-06, - "loss": 1.1054, - "num_input_tokens_seen": 36278110, - "step": 1730 - }, - { - "epoch": 0.20814044369626647, - "flos": 16399982052480.0, - "grad_norm": 8.335306874869227, - "learning_rate": 3.6763929910856674e-06, - "loss": 0.8365, - "num_input_tokens_seen": 36296280, - "step": 1731 - }, - { - "epoch": 0.20826068658690555, - "flos": 13910196236280.0, - "grad_norm": 6.217307071921291, - "learning_rate": 3.6759680374623365e-06, - "loss": 1.0054, - "num_input_tokens_seen": 36313915, - "step": 1732 - }, - { - "epoch": 0.20838092947754464, - "flos": 18054308112600.0, - "grad_norm": 9.206238613393543, - "learning_rate": 3.675542829595986e-06, - "loss": 0.9704, - "num_input_tokens_seen": 36333300, - "step": 1733 - }, - { - "epoch": 0.20850117236818372, - "flos": 17111326734840.0, - "grad_norm": 2.3831897874654957, - "learning_rate": 3.6751173675511213e-06, - "loss": 1.0216, - "num_input_tokens_seen": 36355065, - "step": 1734 - }, - { - "epoch": 0.20862141525882283, - "flos": 14247208244160.0, - "grad_norm": 5.5877752522168525, - "learning_rate": 3.674691651392283e-06, - "loss": 1.1132, - "num_input_tokens_seen": 36372455, - "step": 1735 - }, - { - "epoch": 0.2087416581494619, - "flos": 27857320086960.0, - "grad_norm": 4.663805674985123, - "learning_rate": 3.674265681184053e-06, - "loss": 0.9917, - "num_input_tokens_seen": 36395435, - "step": 1736 - }, - { - "epoch": 0.208861901040101, - "flos": 18577229333760.0, - "grad_norm": 8.647223989642617, - "learning_rate": 3.6738394569910504e-06, - "loss": 1.1022, - "num_input_tokens_seen": 36415695, - "step": 1737 - }, - { - "epoch": 0.2089821439307401, - "flos": 20227600052640.0, - "grad_norm": 6.5381783419767725, - "learning_rate": 3.6734129788779333e-06, - "loss": 1.0556, - "num_input_tokens_seen": 36434590, - "step": 1738 - }, - { - "epoch": 0.2091023868213792, - "flos": 14960913866640.0, - "grad_norm": 6.476978026382986, - "learning_rate": 3.6729862469093976e-06, - "loss": 1.1369, - "num_input_tokens_seen": 36453405, - "step": 1739 - }, - { - "epoch": 0.20922262971201827, - "flos": 15956828193120.0, - "grad_norm": 9.34429378904664, - "learning_rate": 3.6725592611501782e-06, - "loss": 1.0414, - "num_input_tokens_seen": 36471800, - "step": 1740 - }, - { - "epoch": 0.20934287260265738, - "flos": 19834956878760.0, - "grad_norm": 3.8621480145609324, - "learning_rate": 3.6721320216650496e-06, - "loss": 0.9972, - "num_input_tokens_seen": 36492135, - "step": 1741 - }, - { - "epoch": 0.20946311549329646, - "flos": 11630241198240.0, - "grad_norm": 8.928899294691709, - "learning_rate": 3.6717045285188215e-06, - "loss": 1.0766, - "num_input_tokens_seen": 36509550, - "step": 1742 - }, - { - "epoch": 0.20958335838393555, - "flos": 15983693267760.0, - "grad_norm": 11.13427724739138, - "learning_rate": 3.671276781776346e-06, - "loss": 1.0947, - "num_input_tokens_seen": 36527925, - "step": 1743 - }, - { - "epoch": 0.20970360127457463, - "flos": 17946571860000.0, - "grad_norm": 4.1469261735462295, - "learning_rate": 3.6708487815025128e-06, - "loss": 0.8994, - "num_input_tokens_seen": 36548225, - "step": 1744 - }, - { - "epoch": 0.20982384416521374, - "flos": 13099209953160.0, - "grad_norm": 3.1657032421267655, - "learning_rate": 3.6704205277622463e-06, - "loss": 0.9706, - "num_input_tokens_seen": 36566385, - "step": 1745 - }, - { - "epoch": 0.20994408705585282, - "flos": 18053878850760.0, - "grad_norm": 2.964013587817554, - "learning_rate": 3.6699920206205146e-06, - "loss": 1.0304, - "num_input_tokens_seen": 36586845, - "step": 1746 - }, - { 
- "epoch": 0.2100643299464919, - "flos": 15140997383520.0, - "grad_norm": 4.325601992304099, - "learning_rate": 3.669563260142321e-06, - "loss": 1.0613, - "num_input_tokens_seen": 36605455, - "step": 1747 - }, - { - "epoch": 0.21018457283713102, - "flos": 13728150379560.0, - "grad_norm": 4.557778410576472, - "learning_rate": 3.6691342463927083e-06, - "loss": 1.0656, - "num_input_tokens_seen": 36624170, - "step": 1748 - }, - { - "epoch": 0.2103048157277701, - "flos": 20178806414520.0, - "grad_norm": 11.704332276681518, - "learning_rate": 3.668704979436758e-06, - "loss": 1.0523, - "num_input_tokens_seen": 36643985, - "step": 1749 - }, - { - "epoch": 0.21042505861840918, - "flos": 12364771568040.0, - "grad_norm": 5.294368904982989, - "learning_rate": 3.668275459339588e-06, - "loss": 1.0239, - "num_input_tokens_seen": 36662185, - "step": 1750 - }, - { - "epoch": 0.21054530150904827, - "flos": 10030841088240.0, - "grad_norm": 2.9192749531581006, - "learning_rate": 3.667845686166358e-06, - "loss": 1.0344, - "num_input_tokens_seen": 36678830, - "step": 1751 - }, - { - "epoch": 0.21066554439968738, - "flos": 13198207661160.0, - "grad_norm": 3.3552305694351716, - "learning_rate": 3.6674156599822634e-06, - "loss": 1.0919, - "num_input_tokens_seen": 36694345, - "step": 1752 - }, - { - "epoch": 0.21078578729032646, - "flos": 16821605948640.0, - "grad_norm": 3.6412825747760467, - "learning_rate": 3.666985380852539e-06, - "loss": 1.0411, - "num_input_tokens_seen": 36713070, - "step": 1753 - }, - { - "epoch": 0.21090603018096554, - "flos": 20908522919400.0, - "grad_norm": 4.533642157152067, - "learning_rate": 3.6665548488424576e-06, - "loss": 0.9835, - "num_input_tokens_seen": 36731550, - "step": 1754 - }, - { - "epoch": 0.21102627307160465, - "flos": 16536331088640.0, - "grad_norm": 3.7228316018594265, - "learning_rate": 3.6661240640173307e-06, - "loss": 1.1111, - "num_input_tokens_seen": 36752740, - "step": 1755 - }, - { - "epoch": 0.21114651596224374, - "flos": 39087124527000.0, - "grad_norm": 0.9392556599580242, - "learning_rate": 3.6656930264425085e-06, - "loss": 0.8676, - "num_input_tokens_seen": 36816505, - "step": 1756 - }, - { - "epoch": 0.21126675885288282, - "flos": 15301267984560.0, - "grad_norm": 3.4510296984986137, - "learning_rate": 3.665261736183378e-06, - "loss": 0.9877, - "num_input_tokens_seen": 36836260, - "step": 1757 - }, - { - "epoch": 0.2113870017435219, - "flos": 7696512008160.0, - "grad_norm": 12.452219471860856, - "learning_rate": 3.664830193305366e-06, - "loss": 1.1039, - "num_input_tokens_seen": 36853755, - "step": 1758 - }, - { - "epoch": 0.211507244634161, - "flos": 11787231012360.0, - "grad_norm": 8.414859036258282, - "learning_rate": 3.6643983978739373e-06, - "loss": 0.9919, - "num_input_tokens_seen": 36870090, - "step": 1759 - }, - { - "epoch": 0.2116274875248001, - "flos": 14877068517360.0, - "grad_norm": 3.297094545692814, - "learning_rate": 3.663966349954596e-06, - "loss": 1.0429, - "num_input_tokens_seen": 36889990, - "step": 1760 - }, - { - "epoch": 0.21174773041543918, - "flos": 48833708134800.0, - "grad_norm": 0.7874171788699807, - "learning_rate": 3.6635340496128816e-06, - "loss": 0.8464, - "num_input_tokens_seen": 36946640, - "step": 1761 - }, - { - "epoch": 0.2118679733060783, - "flos": 14672511527520.0, - "grad_norm": 2.5620342854894025, - "learning_rate": 3.6631014969143747e-06, - "loss": 1.1525, - "num_input_tokens_seen": 36966050, - "step": 1762 - }, - { - "epoch": 0.21198821619671737, - "flos": 16508975429040.0, - "grad_norm": 2.6560918424307634, - 
"learning_rate": 3.662668691924693e-06, - "loss": 1.1104, - "num_input_tokens_seen": 36986820, - "step": 1763 - }, - { - "epoch": 0.21210845908735645, - "flos": 17425122393720.0, - "grad_norm": 3.2505428154341702, - "learning_rate": 3.6622356347094927e-06, - "loss": 0.9382, - "num_input_tokens_seen": 37008105, - "step": 1764 - }, - { - "epoch": 0.21222870197799554, - "flos": 19287500861520.0, - "grad_norm": 3.3881677961716488, - "learning_rate": 3.6618023253344684e-06, - "loss": 1.022, - "num_input_tokens_seen": 37026685, - "step": 1765 - }, - { - "epoch": 0.21234894486863465, - "flos": 11945508612000.0, - "grad_norm": 2.817876974102013, - "learning_rate": 3.6613687638653527e-06, - "loss": 1.0768, - "num_input_tokens_seen": 37044575, - "step": 1766 - }, - { - "epoch": 0.21246918775927373, - "flos": 16689212253720.0, - "grad_norm": 2.666561353113904, - "learning_rate": 3.660934950367916e-06, - "loss": 1.0132, - "num_input_tokens_seen": 37063540, - "step": 1767 - }, - { - "epoch": 0.21258943064991281, - "flos": 15904201860000.0, - "grad_norm": 3.379533901242661, - "learning_rate": 3.660500884907968e-06, - "loss": 1.0649, - "num_input_tokens_seen": 37084000, - "step": 1768 - }, - { - "epoch": 0.21270967354055192, - "flos": 42540348529560.0, - "grad_norm": 0.887078266950563, - "learning_rate": 3.660066567551356e-06, - "loss": 0.8523, - "num_input_tokens_seen": 37143865, - "step": 1769 - }, - { - "epoch": 0.212829916431191, - "flos": 15301881215760.0, - "grad_norm": 4.861740468776813, - "learning_rate": 3.6596319983639657e-06, - "loss": 1.0596, - "num_input_tokens_seen": 37162165, - "step": 1770 - }, - { - "epoch": 0.2129501593218301, - "flos": 20651615550480.0, - "grad_norm": 3.1214435939919905, - "learning_rate": 3.6591971774117214e-06, - "loss": 1.0939, - "num_input_tokens_seen": 37184860, - "step": 1771 - }, - { - "epoch": 0.2130704022124692, - "flos": 13334740666680.0, - "grad_norm": 3.430421094708917, - "learning_rate": 3.6587621047605833e-06, - "loss": 1.0369, - "num_input_tokens_seen": 37201750, - "step": 1772 - }, - { - "epoch": 0.21319064510310828, - "flos": 9821255602560.0, - "grad_norm": 3.3716539821364155, - "learning_rate": 3.6583267804765542e-06, - "loss": 1.1091, - "num_input_tokens_seen": 37215805, - "step": 1773 - }, - { - "epoch": 0.21331088799374737, - "flos": 14881851720720.0, - "grad_norm": 4.526833953987341, - "learning_rate": 3.6578912046256702e-06, - "loss": 1.0877, - "num_input_tokens_seen": 37234045, - "step": 1774 - }, - { - "epoch": 0.21343113088438645, - "flos": 13203696080400.0, - "grad_norm": 4.175323546887284, - "learning_rate": 3.6574553772740083e-06, - "loss": 0.9982, - "num_input_tokens_seen": 37251695, - "step": 1775 - }, - { - "epoch": 0.21355137377502556, - "flos": 48272939452440.0, - "grad_norm": 0.9083519926400292, - "learning_rate": 3.657019298487684e-06, - "loss": 0.8937, - "num_input_tokens_seen": 37316425, - "step": 1776 - }, - { - "epoch": 0.21367161666566464, - "flos": 24636039395400.0, - "grad_norm": 4.561498035514531, - "learning_rate": 3.6565829683328495e-06, - "loss": 1.0458, - "num_input_tokens_seen": 37338770, - "step": 1777 - }, - { - "epoch": 0.21379185955630373, - "flos": 13465785252960.0, - "grad_norm": 3.494221549379063, - "learning_rate": 3.6561463868756965e-06, - "loss": 1.0898, - "num_input_tokens_seen": 37357190, - "step": 1778 - }, - { - "epoch": 0.21391210244694284, - "flos": 20098364498400.0, - "grad_norm": 3.014552899259537, - "learning_rate": 3.655709554182452e-06, - "loss": 1.0186, - "num_input_tokens_seen": 37377250, - "step": 
1779 - }, - { - "epoch": 0.21403234533758192, - "flos": 12362870551320.0, - "grad_norm": 3.3864161654314735, - "learning_rate": 3.6552724703193855e-06, - "loss": 1.0912, - "num_input_tokens_seen": 37394160, - "step": 1780 - }, - { - "epoch": 0.214152588228221, - "flos": 36933522856560.0, - "grad_norm": 0.7878075619716448, - "learning_rate": 3.654835135352801e-06, - "loss": 0.814, - "num_input_tokens_seen": 37448690, - "step": 1781 - }, - { - "epoch": 0.21427283111886009, - "flos": 13829478366120.0, - "grad_norm": 2.5837428065819488, - "learning_rate": 3.654397549349043e-06, - "loss": 1.1151, - "num_input_tokens_seen": 37465785, - "step": 1782 - }, - { - "epoch": 0.2143930740094992, - "flos": 14253187248360.0, - "grad_norm": 4.282966214093272, - "learning_rate": 3.653959712374491e-06, - "loss": 0.9726, - "num_input_tokens_seen": 37483610, - "step": 1783 - }, - { - "epoch": 0.21451331690013828, - "flos": 15484724273040.0, - "grad_norm": 3.0502549424479626, - "learning_rate": 3.6535216244955663e-06, - "loss": 1.0584, - "num_input_tokens_seen": 37503225, - "step": 1784 - }, - { - "epoch": 0.21463355979077736, - "flos": 23429957013600.0, - "grad_norm": 3.172004584310329, - "learning_rate": 3.653083285778726e-06, - "loss": 0.9327, - "num_input_tokens_seen": 37524315, - "step": 1785 - }, - { - "epoch": 0.21475380268141647, - "flos": 15301697246400.0, - "grad_norm": 5.9121184082768785, - "learning_rate": 3.6526446962904653e-06, - "loss": 1.0357, - "num_input_tokens_seen": 37542750, - "step": 1786 - }, - { - "epoch": 0.21487404557205556, - "flos": 22930405449240.0, - "grad_norm": 3.3749275199736783, - "learning_rate": 3.652205856097318e-06, - "loss": 0.9716, - "num_input_tokens_seen": 37565655, - "step": 1787 - }, - { - "epoch": 0.21499428846269464, - "flos": 8925841400520.0, - "grad_norm": 17.095951140844907, - "learning_rate": 3.651766765265856e-06, - "loss": 1.0135, - "num_input_tokens_seen": 37582385, - "step": 1788 - }, - { - "epoch": 0.21511453135333372, - "flos": 16686330067080.0, - "grad_norm": 4.997898041453394, - "learning_rate": 3.65132742386269e-06, - "loss": 1.0391, - "num_input_tokens_seen": 37597325, - "step": 1789 - }, - { - "epoch": 0.21523477424397283, - "flos": 19182370841520.0, - "grad_norm": 3.1973783721339335, - "learning_rate": 3.6508878319544656e-06, - "loss": 1.0754, - "num_input_tokens_seen": 37617260, - "step": 1790 - }, - { - "epoch": 0.21535501713461191, - "flos": 13413158919840.0, - "grad_norm": 8.286942968736412, - "learning_rate": 3.65044798960787e-06, - "loss": 1.0393, - "num_input_tokens_seen": 37635320, - "step": 1791 - }, - { - "epoch": 0.215475260025251, - "flos": 12679425750600.0, - "grad_norm": 3.659044550855658, - "learning_rate": 3.650007896889627e-06, - "loss": 1.0129, - "num_input_tokens_seen": 37653620, - "step": 1792 - }, - { - "epoch": 0.2155955029158901, - "flos": 11787629612640.0, - "grad_norm": 4.603874623053913, - "learning_rate": 3.6495675538664974e-06, - "loss": 1.0345, - "num_input_tokens_seen": 37672355, - "step": 1793 - }, - { - "epoch": 0.2157157458065292, - "flos": 16794372935280.0, - "grad_norm": 5.017120423606623, - "learning_rate": 3.649126960605282e-06, - "loss": 1.0564, - "num_input_tokens_seen": 37693060, - "step": 1794 - }, - { - "epoch": 0.21583598869716827, - "flos": 15720806894640.0, - "grad_norm": 6.040606421034058, - "learning_rate": 3.6486861171728174e-06, - "loss": 1.0494, - "num_input_tokens_seen": 37711175, - "step": 1795 - }, - { - "epoch": 0.21595623158780738, - "flos": 16665352011960.0, - "grad_norm": 2.3741451531941045, - 
"learning_rate": 3.6482450236359803e-06, - "loss": 1.0216, - "num_input_tokens_seen": 37732750, - "step": 1796 - }, - { - "epoch": 0.21607647447844647, - "flos": 19155659074680.0, - "grad_norm": 7.131770882067555, - "learning_rate": 3.647803680061683e-06, - "loss": 0.9975, - "num_input_tokens_seen": 37752885, - "step": 1797 - }, - { - "epoch": 0.21619671736908555, - "flos": 10235214108720.0, - "grad_norm": 5.756010148708152, - "learning_rate": 3.6473620865168776e-06, - "loss": 0.9661, - "num_input_tokens_seen": 37769475, - "step": 1798 - }, - { - "epoch": 0.21631696025972463, - "flos": 12705217670640.0, - "grad_norm": 6.8455259204953585, - "learning_rate": 3.646920243068554e-06, - "loss": 1.0478, - "num_input_tokens_seen": 37787090, - "step": 1799 - }, - { - "epoch": 0.21643720315036374, - "flos": 17399177165880.0, - "grad_norm": 4.468027763423876, - "learning_rate": 3.6464781497837384e-06, - "loss": 0.9676, - "num_input_tokens_seen": 37808785, - "step": 1800 - }, - { - "epoch": 0.21655744604100283, - "flos": 20282372694960.0, - "grad_norm": 4.846403318139165, - "learning_rate": 3.6460358067294965e-06, - "loss": 0.9636, - "num_input_tokens_seen": 37829735, - "step": 1801 - }, - { - "epoch": 0.2166776889316419, - "flos": 14301520963080.0, - "grad_norm": 3.9335996898617642, - "learning_rate": 3.645593213972932e-06, - "loss": 1.0047, - "num_input_tokens_seen": 37848360, - "step": 1802 - }, - { - "epoch": 0.21679793182228102, - "flos": 10737586536600.0, - "grad_norm": 5.207979337742099, - "learning_rate": 3.6451503715811852e-06, - "loss": 1.0165, - "num_input_tokens_seen": 37866390, - "step": 1803 - }, - { - "epoch": 0.2169181747129201, - "flos": 12312451850520.0, - "grad_norm": 4.597396337637984, - "learning_rate": 3.6447072796214345e-06, - "loss": 1.0403, - "num_input_tokens_seen": 37884675, - "step": 1804 - }, - { - "epoch": 0.21703841760355919, - "flos": 42054104082240.0, - "grad_norm": 0.9442429915943991, - "learning_rate": 3.644263938160898e-06, - "loss": 0.9007, - "num_input_tokens_seen": 37940360, - "step": 1805 - }, - { - "epoch": 0.21715866049419827, - "flos": 15930147087840.0, - "grad_norm": 7.828634462510522, - "learning_rate": 3.6438203472668293e-06, - "loss": 0.954, - "num_input_tokens_seen": 37959725, - "step": 1806 - }, - { - "epoch": 0.21727890338483738, - "flos": 12206892568680.0, - "grad_norm": 6.241607218901941, - "learning_rate": 3.6433765070065206e-06, - "loss": 1.0559, - "num_input_tokens_seen": 37977235, - "step": 1807 - }, - { - "epoch": 0.21739914627547646, - "flos": 9472653525000.0, - "grad_norm": 10.844430687727133, - "learning_rate": 3.6429324174473025e-06, - "loss": 1.1165, - "num_input_tokens_seen": 37990495, - "step": 1808 - }, - { - "epoch": 0.21751938916611555, - "flos": 14881698412920.0, - "grad_norm": 3.605757448617319, - "learning_rate": 3.6424880786565425e-06, - "loss": 1.0729, - "num_input_tokens_seen": 38006360, - "step": 1809 - }, - { - "epoch": 0.21763963205675466, - "flos": 19654045499760.0, - "grad_norm": 8.242651238435153, - "learning_rate": 3.6420434907016482e-06, - "loss": 1.0141, - "num_input_tokens_seen": 38025770, - "step": 1810 - }, - { - "epoch": 0.21775987494739374, - "flos": 15219660929160.0, - "grad_norm": 9.096713183060313, - "learning_rate": 3.6415986536500606e-06, - "loss": 1.0491, - "num_input_tokens_seen": 38043820, - "step": 1811 - }, - { - "epoch": 0.21788011783803282, - "flos": 12994110594720.0, - "grad_norm": 4.025907539742398, - "learning_rate": 3.641153567569263e-06, - "loss": 1.0473, - "num_input_tokens_seen": 38061855, - 
"step": 1812 - }, - { - "epoch": 0.2180003607286719, - "flos": 21567670530480.0, - "grad_norm": 4.672281815285781, - "learning_rate": 3.640708232526774e-06, - "loss": 1.1813, - "num_input_tokens_seen": 38080230, - "step": 1813 - }, - { - "epoch": 0.21812060361931102, - "flos": 18130212117840.0, - "grad_norm": 4.477018376384049, - "learning_rate": 3.6402626485901504e-06, - "loss": 1.0116, - "num_input_tokens_seen": 38099045, - "step": 1814 - }, - { - "epoch": 0.2182408465099501, - "flos": 15563479803360.0, - "grad_norm": 5.4738231561184, - "learning_rate": 3.639816815826988e-06, - "loss": 1.0032, - "num_input_tokens_seen": 38118090, - "step": 1815 - }, - { - "epoch": 0.21836108940058918, - "flos": 16820287501560.0, - "grad_norm": 8.702396930806398, - "learning_rate": 3.6393707343049176e-06, - "loss": 1.0049, - "num_input_tokens_seen": 38138140, - "step": 1816 - }, - { - "epoch": 0.2184813322912283, - "flos": 17556258964680.0, - "grad_norm": 5.034956425740589, - "learning_rate": 3.6389244040916104e-06, - "loss": 0.9804, - "num_input_tokens_seen": 38156935, - "step": 1817 - }, - { - "epoch": 0.21860157518186737, - "flos": 18916694266440.0, - "grad_norm": 3.7815101470992705, - "learning_rate": 3.6384778252547747e-06, - "loss": 1.0194, - "num_input_tokens_seen": 38172535, - "step": 1818 - }, - { - "epoch": 0.21872181807250646, - "flos": 14829041418240.0, - "grad_norm": 7.8113780932729435, - "learning_rate": 3.638030997862155e-06, - "loss": 1.0108, - "num_input_tokens_seen": 38191190, - "step": 1819 - }, - { - "epoch": 0.21884206096314554, - "flos": 43814326701360.0, - "grad_norm": 0.8376618186283146, - "learning_rate": 3.6375839219815356e-06, - "loss": 0.8553, - "num_input_tokens_seen": 38248710, - "step": 1820 - }, - { - "epoch": 0.21896230385378465, - "flos": 16688261745360.0, - "grad_norm": 3.680412852721756, - "learning_rate": 3.6371365976807375e-06, - "loss": 1.0646, - "num_input_tokens_seen": 38268825, - "step": 1821 - }, - { - "epoch": 0.21908254674442373, - "flos": 17844324026640.0, - "grad_norm": 4.814023854151012, - "learning_rate": 3.6366890250276185e-06, - "loss": 1.0565, - "num_input_tokens_seen": 38289500, - "step": 1822 - }, - { - "epoch": 0.21920278963506282, - "flos": 16717426437000.0, - "grad_norm": 7.732773123652651, - "learning_rate": 3.6362412040900764e-06, - "loss": 1.127, - "num_input_tokens_seen": 38309010, - "step": 1823 - }, - { - "epoch": 0.21932303252570193, - "flos": 20834397284640.0, - "grad_norm": 3.8850616756536587, - "learning_rate": 3.635793134936044e-06, - "loss": 1.0364, - "num_input_tokens_seen": 38329740, - "step": 1824 - }, - { - "epoch": 0.219443275416341, - "flos": 14771079973680.0, - "grad_norm": 4.080500937358093, - "learning_rate": 3.635344817633494e-06, - "loss": 0.9642, - "num_input_tokens_seen": 38348775, - "step": 1825 - }, - { - "epoch": 0.2195635183069801, - "flos": 10240487897040.0, - "grad_norm": 4.152675841465007, - "learning_rate": 3.634896252250436e-06, - "loss": 0.9876, - "num_input_tokens_seen": 38365260, - "step": 1826 - }, - { - "epoch": 0.2196837611976192, - "flos": 17241850074600.0, - "grad_norm": 4.737277812327138, - "learning_rate": 3.6344474388549157e-06, - "loss": 1.053, - "num_input_tokens_seen": 38384635, - "step": 1827 - }, - { - "epoch": 0.2198040040882583, - "flos": 12807956088960.0, - "grad_norm": 4.314756604664407, - "learning_rate": 3.6339983775150183e-06, - "loss": 1.0471, - "num_input_tokens_seen": 38400915, - "step": 1828 - }, - { - "epoch": 0.21992424697889737, - "flos": 12599505081000.0, - "grad_norm": 
7.006636334809413, - "learning_rate": 3.6335490682988664e-06, - "loss": 1.0762, - "num_input_tokens_seen": 38416245, - "step": 1829 - }, - { - "epoch": 0.22004448986953645, - "flos": 12494834984400.0, - "grad_norm": 6.283703073539221, - "learning_rate": 3.63309951127462e-06, - "loss": 1.0585, - "num_input_tokens_seen": 38432875, - "step": 1830 - }, - { - "epoch": 0.22016473276017556, - "flos": 15827837931360.0, - "grad_norm": 4.778099867341113, - "learning_rate": 3.6326497065104757e-06, - "loss": 0.9771, - "num_input_tokens_seen": 38453060, - "step": 1831 - }, - { - "epoch": 0.22028497565081465, - "flos": 18185230052640.0, - "grad_norm": 4.637191992553863, - "learning_rate": 3.6321996540746697e-06, - "loss": 1.0063, - "num_input_tokens_seen": 38471855, - "step": 1832 - }, - { - "epoch": 0.22040521854145373, - "flos": 25868802882480.0, - "grad_norm": 5.381461938811512, - "learning_rate": 3.6317493540354733e-06, - "loss": 1.0246, - "num_input_tokens_seen": 38494990, - "step": 1833 - }, - { - "epoch": 0.22052546143209284, - "flos": 8326586912280.0, - "grad_norm": 4.869476710723576, - "learning_rate": 3.6312988064611976e-06, - "loss": 1.0014, - "num_input_tokens_seen": 38513020, - "step": 1834 - }, - { - "epoch": 0.22064570432273192, - "flos": 17217560571000.0, - "grad_norm": 3.126521388693434, - "learning_rate": 3.6308480114201896e-06, - "loss": 1.0363, - "num_input_tokens_seen": 38534660, - "step": 1835 - }, - { - "epoch": 0.220765947213371, - "flos": 12705432301560.0, - "grad_norm": 4.792889358870291, - "learning_rate": 3.630396968980835e-06, - "loss": 0.9965, - "num_input_tokens_seen": 38552255, - "step": 1836 - }, - { - "epoch": 0.2208861901040101, - "flos": 19047769514280.0, - "grad_norm": 11.898319516814917, - "learning_rate": 3.6299456792115575e-06, - "loss": 1.0775, - "num_input_tokens_seen": 38573230, - "step": 1837 - }, - { - "epoch": 0.2210064329946492, - "flos": 12623426645880.0, - "grad_norm": 2.742280442273979, - "learning_rate": 3.629494142180815e-06, - "loss": 1.0404, - "num_input_tokens_seen": 38591695, - "step": 1838 - }, - { - "epoch": 0.22112667588528828, - "flos": 12731285544720.0, - "grad_norm": 5.218807928254939, - "learning_rate": 3.6290423579571075e-06, - "loss": 1.0805, - "num_input_tokens_seen": 38607955, - "step": 1839 - }, - { - "epoch": 0.22124691877592736, - "flos": 13020117145680.0, - "grad_norm": 3.642194417834821, - "learning_rate": 3.6285903266089694e-06, - "loss": 1.0248, - "num_input_tokens_seen": 38626950, - "step": 1840 - }, - { - "epoch": 0.22136716166656648, - "flos": 14776568392920.0, - "grad_norm": 5.846756268941626, - "learning_rate": 3.628138048204974e-06, - "loss": 1.0023, - "num_input_tokens_seen": 38647355, - "step": 1841 - }, - { - "epoch": 0.22148740455720556, - "flos": 12521669397480.0, - "grad_norm": 7.830649572250611, - "learning_rate": 3.6276855228137304e-06, - "loss": 1.0006, - "num_input_tokens_seen": 38665280, - "step": 1842 - }, - { - "epoch": 0.22160764744784464, - "flos": 15433201756080.0, - "grad_norm": 4.867604049565824, - "learning_rate": 3.6272327505038874e-06, - "loss": 1.0452, - "num_input_tokens_seen": 38681465, - "step": 1843 - }, - { - "epoch": 0.22172789033848372, - "flos": 16897172676720.0, - "grad_norm": 5.178699059628326, - "learning_rate": 3.626779731344131e-06, - "loss": 1.0198, - "num_input_tokens_seen": 38700975, - "step": 1844 - }, - { - "epoch": 0.22184813322912283, - "flos": 12023313633960.0, - "grad_norm": 9.731638124109079, - "learning_rate": 3.6263264654031814e-06, - "loss": 1.0871, - "num_input_tokens_seen": 
38717595, - "step": 1845 - }, - { - "epoch": 0.22196837611976192, - "flos": 44254996974360.0, - "grad_norm": 0.6881563348244769, - "learning_rate": 3.6258729527498008e-06, - "loss": 0.8271, - "num_input_tokens_seen": 38778160, - "step": 1846 - }, - { - "epoch": 0.222088619010401, - "flos": 18186732469080.0, - "grad_norm": 10.180292306237071, - "learning_rate": 3.6254191934527854e-06, - "loss": 0.8546, - "num_input_tokens_seen": 38797235, - "step": 1847 - }, - { - "epoch": 0.2222088619010401, - "flos": 13702542428880.0, - "grad_norm": 13.095360413347498, - "learning_rate": 3.6249651875809715e-06, - "loss": 0.8755, - "num_input_tokens_seen": 38816835, - "step": 1848 - }, - { - "epoch": 0.2223291047916792, - "flos": 13544908722000.0, - "grad_norm": 4.832968840612471, - "learning_rate": 3.62451093520323e-06, - "loss": 1.1221, - "num_input_tokens_seen": 38834460, - "step": 1849 - }, - { - "epoch": 0.22244934768231828, - "flos": 14803341482880.0, - "grad_norm": 8.074415288488483, - "learning_rate": 3.6240564363884714e-06, - "loss": 1.1249, - "num_input_tokens_seen": 38854125, - "step": 1850 - }, - { - "epoch": 0.2225695905729574, - "flos": 11053620489360.0, - "grad_norm": 4.761402297985512, - "learning_rate": 3.623601691205643e-06, - "loss": 0.9319, - "num_input_tokens_seen": 38872920, - "step": 1851 - }, - { - "epoch": 0.22268983346359647, - "flos": 18053909512320.0, - "grad_norm": 6.349682368947556, - "learning_rate": 3.623146699723729e-06, - "loss": 1.0451, - "num_input_tokens_seen": 38892100, - "step": 1852 - }, - { - "epoch": 0.22281007635423555, - "flos": 9349304990280.0, - "grad_norm": 5.825761003677774, - "learning_rate": 3.6226914620117507e-06, - "loss": 1.0195, - "num_input_tokens_seen": 38910440, - "step": 1853 - }, - { - "epoch": 0.22293031924487464, - "flos": 10843421772480.0, - "grad_norm": 7.214442433512437, - "learning_rate": 3.622235978138768e-06, - "loss": 1.0413, - "num_input_tokens_seen": 38927785, - "step": 1854 - }, - { - "epoch": 0.22305056213551375, - "flos": 16035246446280.0, - "grad_norm": 5.489765875988606, - "learning_rate": 3.621780248173877e-06, - "loss": 1.0475, - "num_input_tokens_seen": 38945705, - "step": 1855 - }, - { - "epoch": 0.22317080502615283, - "flos": 46450677401280.0, - "grad_norm": 0.8784135242665668, - "learning_rate": 3.6213242721862125e-06, - "loss": 0.8874, - "num_input_tokens_seen": 39003880, - "step": 1856 - }, - { - "epoch": 0.2232910479167919, - "flos": 18343262359800.0, - "grad_norm": 8.658705600620838, - "learning_rate": 3.620868050244945e-06, - "loss": 0.9763, - "num_input_tokens_seen": 39024080, - "step": 1857 - }, - { - "epoch": 0.22341129080743102, - "flos": 16528267098360.0, - "grad_norm": 6.893549637982462, - "learning_rate": 3.6204115824192817e-06, - "loss": 1.0023, - "num_input_tokens_seen": 39041275, - "step": 1858 - }, - { - "epoch": 0.2235315336980701, - "flos": 15064940070480.0, - "grad_norm": 7.129857064161321, - "learning_rate": 3.619954868778471e-06, - "loss": 0.9957, - "num_input_tokens_seen": 39057690, - "step": 1859 - }, - { - "epoch": 0.2236517765887092, - "flos": 14121713400240.0, - "grad_norm": 3.051115713700264, - "learning_rate": 3.6194979093917944e-06, - "loss": 1.0521, - "num_input_tokens_seen": 39076825, - "step": 1860 - }, - { - "epoch": 0.22377201947934827, - "flos": 16501647316200.0, - "grad_norm": 6.642046033617602, - "learning_rate": 3.6190407043285724e-06, - "loss": 1.1173, - "num_input_tokens_seen": 39094280, - "step": 1861 - }, - { - "epoch": 0.22389226236998738, - "flos": 19074603927360.0, - "grad_norm": 
4.367186626509304, - "learning_rate": 3.618583253658163e-06, - "loss": 0.9709, - "num_input_tokens_seen": 39114100, - "step": 1862 - }, - { - "epoch": 0.22401250526062647, - "flos": 17189285064600.0, - "grad_norm": 5.992166173466775, - "learning_rate": 3.618125557449961e-06, - "loss": 1.0959, - "num_input_tokens_seen": 39131875, - "step": 1863 - }, - { - "epoch": 0.22413274815126555, - "flos": 11862889725120.0, - "grad_norm": 4.794604431956357, - "learning_rate": 3.6176676157733983e-06, - "loss": 1.063, - "num_input_tokens_seen": 39146605, - "step": 1864 - }, - { - "epoch": 0.22425299104190466, - "flos": 15169579505520.0, - "grad_norm": 6.622520414603922, - "learning_rate": 3.6172094286979443e-06, - "loss": 0.9898, - "num_input_tokens_seen": 39163695, - "step": 1865 - }, - { - "epoch": 0.22437323393254374, - "flos": 22935924530040.0, - "grad_norm": 3.1592071579176606, - "learning_rate": 3.6167509962931064e-06, - "loss": 1.0405, - "num_input_tokens_seen": 39189115, - "step": 1866 - }, - { - "epoch": 0.22449347682318282, - "flos": 12756586879800.0, - "grad_norm": 6.465389244829595, - "learning_rate": 3.6162923186284276e-06, - "loss": 1.0105, - "num_input_tokens_seen": 39204795, - "step": 1867 - }, - { - "epoch": 0.2246137197138219, - "flos": 13255862490120.0, - "grad_norm": 11.88321313427186, - "learning_rate": 3.6158333957734888e-06, - "loss": 1.0862, - "num_input_tokens_seen": 39223105, - "step": 1868 - }, - { - "epoch": 0.22473396260446102, - "flos": 11022462796320.0, - "grad_norm": 5.145522547100036, - "learning_rate": 3.6153742277979088e-06, - "loss": 1.0663, - "num_input_tokens_seen": 39240255, - "step": 1869 - }, - { - "epoch": 0.2248542054951001, - "flos": 10214634653880.0, - "grad_norm": 4.004083456444418, - "learning_rate": 3.6149148147713434e-06, - "loss": 1.0189, - "num_input_tokens_seen": 39258210, - "step": 1870 - }, - { - "epoch": 0.22497444838573918, - "flos": 13648597648680.0, - "grad_norm": 5.417311449066007, - "learning_rate": 3.614455156763484e-06, - "loss": 1.1016, - "num_input_tokens_seen": 39276235, - "step": 1871 - }, - { - "epoch": 0.2250946912763783, - "flos": 11971699132320.0, - "grad_norm": 5.153958452974311, - "learning_rate": 3.613995253844061e-06, - "loss": 0.937, - "num_input_tokens_seen": 39293635, - "step": 1872 - }, - { - "epoch": 0.22521493416701738, - "flos": 17556565580280.0, - "grad_norm": 10.560955950498936, - "learning_rate": 3.6135351060828414e-06, - "loss": 1.0345, - "num_input_tokens_seen": 39313830, - "step": 1873 - }, - { - "epoch": 0.22533517705765646, - "flos": 12626830079040.0, - "grad_norm": 5.732519031331695, - "learning_rate": 3.6130747135496285e-06, - "loss": 0.9135, - "num_input_tokens_seen": 39332550, - "step": 1874 - }, - { - "epoch": 0.22545541994829554, - "flos": 24031633765080.0, - "grad_norm": 5.215178221073895, - "learning_rate": 3.6126140763142646e-06, - "loss": 0.8948, - "num_input_tokens_seen": 39357300, - "step": 1875 - }, - { - "epoch": 0.22557566283893465, - "flos": 13596737854560.0, - "grad_norm": 5.875627524768265, - "learning_rate": 3.6121531944466275e-06, - "loss": 1.0821, - "num_input_tokens_seen": 39374345, - "step": 1876 - }, - { - "epoch": 0.22569590572957374, - "flos": 14747342378160.0, - "grad_norm": 5.3793078170841255, - "learning_rate": 3.611692068016633e-06, - "loss": 1.0147, - "num_input_tokens_seen": 39390395, - "step": 1877 - }, - { - "epoch": 0.22581614862021282, - "flos": 13073019432840.0, - "grad_norm": 14.089552971250987, - "learning_rate": 3.611230697094233e-06, - "loss": 0.9782, - 
"num_input_tokens_seen": 39406815, - "step": 1878 - }, - { - "epoch": 0.22593639151085193, - "flos": 14227027389600.0, - "grad_norm": 3.716065920759372, - "learning_rate": 3.6107690817494173e-06, - "loss": 1.1084, - "num_input_tokens_seen": 39426755, - "step": 1879 - }, - { - "epoch": 0.226056634401491, - "flos": 9244082985600.0, - "grad_norm": 5.667347511828106, - "learning_rate": 3.6103072220522117e-06, - "loss": 0.9411, - "num_input_tokens_seen": 39442005, - "step": 1880 - }, - { - "epoch": 0.2261768772921301, - "flos": 13464957390840.0, - "grad_norm": 32.36828025098414, - "learning_rate": 3.609845118072682e-06, - "loss": 1.1439, - "num_input_tokens_seen": 39460395, - "step": 1881 - }, - { - "epoch": 0.2262971201827692, - "flos": 14173419886560.0, - "grad_norm": 71.3344243521755, - "learning_rate": 3.6093827698809276e-06, - "loss": 1.0213, - "num_input_tokens_seen": 39479215, - "step": 1882 - }, - { - "epoch": 0.2264173630734083, - "flos": 11787844243560.0, - "grad_norm": 7.928642316818, - "learning_rate": 3.6089201775470864e-06, - "loss": 1.0825, - "num_input_tokens_seen": 39494390, - "step": 1883 - }, - { - "epoch": 0.22653760596404737, - "flos": 17347072079280.0, - "grad_norm": 4.798666951722093, - "learning_rate": 3.6084573411413334e-06, - "loss": 1.0151, - "num_input_tokens_seen": 39513505, - "step": 1884 - }, - { - "epoch": 0.22665784885468646, - "flos": 12993742656000.0, - "grad_norm": 10.509411181825168, - "learning_rate": 3.607994260733881e-06, - "loss": 1.0305, - "num_input_tokens_seen": 39532465, - "step": 1885 - }, - { - "epoch": 0.22677809174532557, - "flos": 17108229917280.0, - "grad_norm": 4.1033166821718075, - "learning_rate": 3.6075309363949776e-06, - "loss": 0.9751, - "num_input_tokens_seen": 39551355, - "step": 1886 - }, - { - "epoch": 0.22689833463596465, - "flos": 14460871717320.0, - "grad_norm": 6.251602882840234, - "learning_rate": 3.6070673681949094e-06, - "loss": 1.0356, - "num_input_tokens_seen": 39569440, - "step": 1887 - }, - { - "epoch": 0.22701857752660373, - "flos": 21465851958960.0, - "grad_norm": 12.273357932587407, - "learning_rate": 3.606603556203999e-06, - "loss": 1.034, - "num_input_tokens_seen": 39594105, - "step": 1888 - }, - { - "epoch": 0.22713882041724284, - "flos": 15983233344360.0, - "grad_norm": 6.65354757118204, - "learning_rate": 3.6061395004926066e-06, - "loss": 1.0726, - "num_input_tokens_seen": 39612760, - "step": 1889 - }, - { - "epoch": 0.22725906330788193, - "flos": 14566921584120.0, - "grad_norm": 9.821192299118755, - "learning_rate": 3.605675201131129e-06, - "loss": 1.0699, - "num_input_tokens_seen": 39630940, - "step": 1890 - }, - { - "epoch": 0.227379306198521, - "flos": 13466275837920.0, - "grad_norm": 7.541169910685356, - "learning_rate": 3.60521065819e-06, - "loss": 1.0257, - "num_input_tokens_seen": 39647970, - "step": 1891 - }, - { - "epoch": 0.2274995490891601, - "flos": 15484509642120.0, - "grad_norm": 7.977514615525187, - "learning_rate": 3.60474587173969e-06, - "loss": 1.0986, - "num_input_tokens_seen": 39666175, - "step": 1892 - }, - { - "epoch": 0.2276197919797992, - "flos": 13515100137600.0, - "grad_norm": 11.48307749366806, - "learning_rate": 3.6042808418507084e-06, - "loss": 1.0634, - "num_input_tokens_seen": 39683580, - "step": 1893 - }, - { - "epoch": 0.22774003487043828, - "flos": 13334556697320.0, - "grad_norm": 8.474981401361184, - "learning_rate": 3.6038155685935976e-06, - "loss": 1.0053, - "num_input_tokens_seen": 39699870, - "step": 1894 - }, - { - "epoch": 0.22786027776107737, - "flos": 16349961951960.0, - 
"grad_norm": 54.376712886095596, - "learning_rate": 3.6033500520389404e-06, - "loss": 0.933, - "num_input_tokens_seen": 39716260, - "step": 1895 - }, - { - "epoch": 0.22798052065171648, - "flos": 47763606850440.0, - "grad_norm": 0.8266547124585412, - "learning_rate": 3.6028842922573553e-06, - "loss": 0.9197, - "num_input_tokens_seen": 39780125, - "step": 1896 - }, - { - "epoch": 0.22810076354235556, - "flos": 44440047663960.0, - "grad_norm": 0.8111239298006435, - "learning_rate": 3.602418289319497e-06, - "loss": 0.8944, - "num_input_tokens_seen": 39838400, - "step": 1897 - }, - { - "epoch": 0.22822100643299464, - "flos": 16977706577520.0, - "grad_norm": 4.248233203528557, - "learning_rate": 3.601952043296059e-06, - "loss": 0.9676, - "num_input_tokens_seen": 39858115, - "step": 1898 - }, - { - "epoch": 0.22834124932363373, - "flos": 14903933592000.0, - "grad_norm": 5.77216809349067, - "learning_rate": 3.6014855542577696e-06, - "loss": 1.0405, - "num_input_tokens_seen": 39875045, - "step": 1899 - }, - { - "epoch": 0.22846149221427284, - "flos": 17714996487720.0, - "grad_norm": 4.778153357911271, - "learning_rate": 3.6010188222753943e-06, - "loss": 1.0657, - "num_input_tokens_seen": 39895535, - "step": 1900 - }, - { - "epoch": 0.22858173510491192, - "flos": 40164707232000.0, - "grad_norm": 0.9871852059139924, - "learning_rate": 3.6005518474197372e-06, - "loss": 0.9062, - "num_input_tokens_seen": 39947300, - "step": 1901 - }, - { - "epoch": 0.228701977995551, - "flos": 17189683664880.0, - "grad_norm": 6.796764434265936, - "learning_rate": 3.6000846297616373e-06, - "loss": 1.0179, - "num_input_tokens_seen": 39965320, - "step": 1902 - }, - { - "epoch": 0.22882222088619011, - "flos": 15188288605200.0, - "grad_norm": 6.407081810517184, - "learning_rate": 3.5996171693719717e-06, - "loss": 0.9684, - "num_input_tokens_seen": 39981135, - "step": 1903 - }, - { - "epoch": 0.2289424637768292, - "flos": 46241827793040.0, - "grad_norm": 0.8612257708503445, - "learning_rate": 3.5991494663216528e-06, - "loss": 0.9001, - "num_input_tokens_seen": 40043840, - "step": 1904 - }, - { - "epoch": 0.22906270666746828, - "flos": 15746905430280.0, - "grad_norm": 4.378765338736895, - "learning_rate": 3.5986815206816314e-06, - "loss": 1.0978, - "num_input_tokens_seen": 40062380, - "step": 1905 - }, - { - "epoch": 0.2291829495581074, - "flos": 18340134880680.0, - "grad_norm": 3.910295268663834, - "learning_rate": 3.598213332522895e-06, - "loss": 0.9733, - "num_input_tokens_seen": 40082130, - "step": 1906 - }, - { - "epoch": 0.22930319244874647, - "flos": 22221391045440.0, - "grad_norm": 8.978993802866448, - "learning_rate": 3.597744901916466e-06, - "loss": 1.0092, - "num_input_tokens_seen": 40103135, - "step": 1907 - }, - { - "epoch": 0.22942343533938556, - "flos": 16530229438200.0, - "grad_norm": 4.356138497763166, - "learning_rate": 3.5972762289334058e-06, - "loss": 0.9908, - "num_input_tokens_seen": 40122485, - "step": 1908 - }, - { - "epoch": 0.22954367823002464, - "flos": 10319059458000.0, - "grad_norm": 7.006571001633233, - "learning_rate": 3.5968073136448116e-06, - "loss": 1.0803, - "num_input_tokens_seen": 40140225, - "step": 1909 - }, - { - "epoch": 0.22966392112066375, - "flos": 11866170512040.0, - "grad_norm": 4.9887861386153665, - "learning_rate": 3.596338156121818e-06, - "loss": 1.1358, - "num_input_tokens_seen": 40158830, - "step": 1910 - }, - { - "epoch": 0.22978416401130283, - "flos": 48315263501400.0, - "grad_norm": 0.7745884125412871, - "learning_rate": 3.595868756435595e-06, - "loss": 0.8469, - 
"num_input_tokens_seen": 40226230, - "step": 1911 - }, - { - "epoch": 0.22990440690194192, - "flos": 14095216264320.0, - "grad_norm": 14.141047451389905, - "learning_rate": 3.5953991146573504e-06, - "loss": 1.0312, - "num_input_tokens_seen": 40244595, - "step": 1912 - }, - { - "epoch": 0.23002464979258103, - "flos": 9368106074640.0, - "grad_norm": 13.709555775285502, - "learning_rate": 3.5949292308583294e-06, - "loss": 1.0512, - "num_input_tokens_seen": 40257560, - "step": 1913 - }, - { - "epoch": 0.2301448926832201, - "flos": 15747212045880.0, - "grad_norm": 5.480663989417028, - "learning_rate": 3.594459105109811e-06, - "loss": 1.031, - "num_input_tokens_seen": 40276460, - "step": 1914 - }, - { - "epoch": 0.2302651355738592, - "flos": 14698456755360.0, - "grad_norm": 7.239508305076221, - "learning_rate": 3.593988737483115e-06, - "loss": 1.0446, - "num_input_tokens_seen": 40296120, - "step": 1915 - }, - { - "epoch": 0.23038537846449827, - "flos": 13178149452840.0, - "grad_norm": 7.55892128247079, - "learning_rate": 3.5935181280495947e-06, - "loss": 1.0149, - "num_input_tokens_seen": 40314420, - "step": 1916 - }, - { - "epoch": 0.23050562135513739, - "flos": 45979186712400.0, - "grad_norm": 0.8760786560510919, - "learning_rate": 3.5930472768806412e-06, - "loss": 0.8181, - "num_input_tokens_seen": 40372810, - "step": 1917 - }, - { - "epoch": 0.23062586424577647, - "flos": 12261634549440.0, - "grad_norm": 3.4754057942304106, - "learning_rate": 3.5925761840476826e-06, - "loss": 0.9922, - "num_input_tokens_seen": 40391140, - "step": 1918 - }, - { - "epoch": 0.23074610713641555, - "flos": 19838145681000.0, - "grad_norm": 4.661833212783207, - "learning_rate": 3.592104849622183e-06, - "loss": 1.0527, - "num_input_tokens_seen": 40413115, - "step": 1919 - }, - { - "epoch": 0.23086635002705466, - "flos": 20546730822960.0, - "grad_norm": 3.6572858859090482, - "learning_rate": 3.591633273675644e-06, - "loss": 0.9584, - "num_input_tokens_seen": 40435070, - "step": 1920 - }, - { - "epoch": 0.23098659291769374, - "flos": 45041300701680.0, - "grad_norm": 0.9723730019557328, - "learning_rate": 3.591161456279602e-06, - "loss": 0.8598, - "num_input_tokens_seen": 40480335, - "step": 1921 - }, - { - "epoch": 0.23110683580833283, - "flos": 16690806654840.0, - "grad_norm": 3.858645103716846, - "learning_rate": 3.590689397505633e-06, - "loss": 1.0279, - "num_input_tokens_seen": 40500965, - "step": 1922 - }, - { - "epoch": 0.2312270786989719, - "flos": 19417625601000.0, - "grad_norm": 3.5762194761048955, - "learning_rate": 3.590217097425347e-06, - "loss": 1.0965, - "num_input_tokens_seen": 40520585, - "step": 1923 - }, - { - "epoch": 0.23134732158961102, - "flos": 9349213005600.0, - "grad_norm": 5.575299399508967, - "learning_rate": 3.589744556110391e-06, - "loss": 0.9295, - "num_input_tokens_seen": 40538295, - "step": 1924 - }, - { - "epoch": 0.2314675644802501, - "flos": 26393747766600.0, - "grad_norm": 3.988611698938881, - "learning_rate": 3.58927177363245e-06, - "loss": 1.0764, - "num_input_tokens_seen": 40560840, - "step": 1925 - }, - { - "epoch": 0.2315878073708892, - "flos": 16953478397040.0, - "grad_norm": 3.6149404974085932, - "learning_rate": 3.5887987500632447e-06, - "loss": 0.9622, - "num_input_tokens_seen": 40578565, - "step": 1926 - }, - { - "epoch": 0.2317080502615283, - "flos": 16376520411000.0, - "grad_norm": 7.585869864448098, - "learning_rate": 3.5883254854745325e-06, - "loss": 1.066, - "num_input_tokens_seen": 40596675, - "step": 1927 - }, - { - "epoch": 0.23182829315216738, - "flos": 
[Flattened excerpt of a deleted training-log JSON (trainer_state-style "log_history"; the file's diff header falls outside this excerpt). It spans roughly 346 consecutive entries covering steps 1928-2273 (the entries at both excerpt boundaries are truncated), epochs ≈0.2319-0.2734. Each entry records "epoch", "flos", "grad_norm", "learning_rate" (decaying from ≈3.5879e-06 to ≈3.4101e-06), "loss" (≈0.80-1.23), "num_input_tokens_seen" (≈40.61M to ≈48.15M), and "step".]
1.0549, - "num_input_tokens_seen": 48172910, - "step": 2274 - }, - { - "epoch": 0.2735525762039319, - "flos": 17346734802120.0, - "grad_norm": 29.751754067906443, - "learning_rate": 3.4095238837127215e-06, - "loss": 1.0695, - "num_input_tokens_seen": 48191770, - "step": 2275 - }, - { - "epoch": 0.27367281909457103, - "flos": 10214512007640.0, - "grad_norm": 5.147955864706003, - "learning_rate": 3.4089711401544355e-06, - "loss": 1.0165, - "num_input_tokens_seen": 48209085, - "step": 2276 - }, - { - "epoch": 0.27379306198521014, - "flos": 16690959962640.0, - "grad_norm": 6.6913198809552945, - "learning_rate": 3.4084181828563486e-06, - "loss": 0.9049, - "num_input_tokens_seen": 48225525, - "step": 2277 - }, - { - "epoch": 0.2739133048758492, - "flos": 12365507445480.0, - "grad_norm": 2.732228946487049, - "learning_rate": 3.4078650119023428e-06, - "loss": 0.9303, - "num_input_tokens_seen": 48243560, - "step": 2278 - }, - { - "epoch": 0.2740335477664883, - "flos": 13668625195440.0, - "grad_norm": 14.911876967941737, - "learning_rate": 3.4073116273763337e-06, - "loss": 0.9754, - "num_input_tokens_seen": 48257725, - "step": 2279 - }, - { - "epoch": 0.2741537906571274, - "flos": 18579559612320.0, - "grad_norm": 43.64062220282841, - "learning_rate": 3.40675802936227e-06, - "loss": 1.0433, - "num_input_tokens_seen": 48278230, - "step": 2280 - }, - { - "epoch": 0.27427403354776647, - "flos": 24371221344000.0, - "grad_norm": 8.653144361943397, - "learning_rate": 3.4062042179441318e-06, - "loss": 0.9445, - "num_input_tokens_seen": 48298420, - "step": 2281 - }, - { - "epoch": 0.2743942764384056, - "flos": 13305514651920.0, - "grad_norm": 7.2315275384631414, - "learning_rate": 3.4056501932059314e-06, - "loss": 1.0411, - "num_input_tokens_seen": 48316215, - "step": 2282 - }, - { - "epoch": 0.2745145193290447, - "flos": 46465921744680.0, - "grad_norm": 0.853652873232295, - "learning_rate": 3.405095955231715e-06, - "loss": 0.8471, - "num_input_tokens_seen": 48367590, - "step": 2283 - }, - { - "epoch": 0.27463476221968375, - "flos": 11414922000840.0, - "grad_norm": 6.154824028815413, - "learning_rate": 3.4045415041055585e-06, - "loss": 1.17, - "num_input_tokens_seen": 48382950, - "step": 2284 - }, - { - "epoch": 0.27475500511032286, - "flos": 7275777297240.0, - "grad_norm": 7.9050021788891485, - "learning_rate": 3.4039868399115728e-06, - "loss": 1.0079, - "num_input_tokens_seen": 48397310, - "step": 2285 - }, - { - "epoch": 0.27487524800096197, - "flos": 12259641548040.0, - "grad_norm": 5.545212703439932, - "learning_rate": 3.4034319627339003e-06, - "loss": 1.0344, - "num_input_tokens_seen": 48413895, - "step": 2286 - }, - { - "epoch": 0.274995490891601, - "flos": 19308540239760.0, - "grad_norm": 7.121330788790051, - "learning_rate": 3.402876872656715e-06, - "loss": 0.9287, - "num_input_tokens_seen": 48431935, - "step": 2287 - }, - { - "epoch": 0.27511573378224013, - "flos": 16660967408880.0, - "grad_norm": 7.800218618214707, - "learning_rate": 3.402321569764223e-06, - "loss": 1.1222, - "num_input_tokens_seen": 48450960, - "step": 2288 - }, - { - "epoch": 0.2752359766728792, - "flos": 11835595388640.0, - "grad_norm": 14.182083238556723, - "learning_rate": 3.4017660541406635e-06, - "loss": 1.0604, - "num_input_tokens_seen": 48466745, - "step": 2289 - }, - { - "epoch": 0.2753562195635183, - "flos": 17999014223760.0, - "grad_norm": 5.074149397801178, - "learning_rate": 3.4012103258703092e-06, - "loss": 0.9766, - "num_input_tokens_seen": 48485220, - "step": 2290 - }, - { - "epoch": 0.2754764624541574, - "flos": 
19465744684800.0, - "grad_norm": 6.609995999933784, - "learning_rate": 3.4006543850374616e-06, - "loss": 1.0666, - "num_input_tokens_seen": 48499990, - "step": 2291 - }, - { - "epoch": 0.27559670534479647, - "flos": 12207444476760.0, - "grad_norm": 5.175473900072537, - "learning_rate": 3.400098231726458e-06, - "loss": 0.9849, - "num_input_tokens_seen": 48516810, - "step": 2292 - }, - { - "epoch": 0.2757169482354356, - "flos": 15584948443440.0, - "grad_norm": 4.361315005018331, - "learning_rate": 3.3995418660216657e-06, - "loss": 1.1087, - "num_input_tokens_seen": 48533985, - "step": 2293 - }, - { - "epoch": 0.2758371911260747, - "flos": 14803556113800.0, - "grad_norm": 10.377301431960316, - "learning_rate": 3.3989852880074848e-06, - "loss": 1.0445, - "num_input_tokens_seen": 48555135, - "step": 2294 - }, - { - "epoch": 0.27595743401671374, - "flos": 49606111079280.0, - "grad_norm": 1.3393938738427225, - "learning_rate": 3.398428497768348e-06, - "loss": 0.8743, - "num_input_tokens_seen": 48620025, - "step": 2295 - }, - { - "epoch": 0.27607767690735285, - "flos": 15065338670760.0, - "grad_norm": 9.528560871928093, - "learning_rate": 3.3978714953887205e-06, - "loss": 0.9409, - "num_input_tokens_seen": 48639500, - "step": 2296 - }, - { - "epoch": 0.27619791979799196, - "flos": 17659763922000.0, - "grad_norm": 3.4695649435515645, - "learning_rate": 3.397314280953098e-06, - "loss": 1.0939, - "num_input_tokens_seen": 48660045, - "step": 2297 - }, - { - "epoch": 0.276318162688631, - "flos": 17608333389720.0, - "grad_norm": 4.994857241372804, - "learning_rate": 3.3967568545460108e-06, - "loss": 1.0359, - "num_input_tokens_seen": 48679305, - "step": 2298 - }, - { - "epoch": 0.27643840557927013, - "flos": 12862544761920.0, - "grad_norm": 3.9591090329026835, - "learning_rate": 3.3961992162520185e-06, - "loss": 1.0235, - "num_input_tokens_seen": 48697650, - "step": 2299 - }, - { - "epoch": 0.27655864846990924, - "flos": 17658568121160.0, - "grad_norm": 4.284178685783163, - "learning_rate": 3.3956413661557156e-06, - "loss": 0.9453, - "num_input_tokens_seen": 48717545, - "step": 2300 - }, - { - "epoch": 0.2766788913605483, - "flos": 14383741249680.0, - "grad_norm": 39.03160027865243, - "learning_rate": 3.3950833043417273e-06, - "loss": 0.8873, - "num_input_tokens_seen": 48735410, - "step": 2301 - }, - { - "epoch": 0.2767991342511874, - "flos": 15248764297680.0, - "grad_norm": 4.779475662011448, - "learning_rate": 3.3945250308947105e-06, - "loss": 0.9525, - "num_input_tokens_seen": 48751435, - "step": 2302 - }, - { - "epoch": 0.2769193771418265, - "flos": 47255991295800.0, - "grad_norm": 1.338369776553023, - "learning_rate": 3.3939665458993556e-06, - "loss": 0.9355, - "num_input_tokens_seen": 48805575, - "step": 2303 - }, - { - "epoch": 0.27703962003246557, - "flos": 14698426093800.0, - "grad_norm": 7.982997230457626, - "learning_rate": 3.3934078494403843e-06, - "loss": 0.9857, - "num_input_tokens_seen": 48824870, - "step": 2304 - }, - { - "epoch": 0.2771598629231047, - "flos": 16297795542240.0, - "grad_norm": 9.255979100499673, - "learning_rate": 3.3928489416025495e-06, - "loss": 1.0385, - "num_input_tokens_seen": 48845435, - "step": 2305 - }, - { - "epoch": 0.27728010581374374, - "flos": 13020331776600.0, - "grad_norm": 6.884994178387211, - "learning_rate": 3.392289822470638e-06, - "loss": 1.005, - "num_input_tokens_seen": 48863135, - "step": 2306 - }, - { - "epoch": 0.27740034870438285, - "flos": 13780838035800.0, - "grad_norm": 5.8067752990605115, - "learning_rate": 3.3917304921294674e-06, - "loss": 
0.9867, - "num_input_tokens_seen": 48881020, - "step": 2307 - }, - { - "epoch": 0.27752059159502196, - "flos": 15352422562800.0, - "grad_norm": 4.36532525772283, - "learning_rate": 3.3911709506638876e-06, - "loss": 1.0292, - "num_input_tokens_seen": 48900050, - "step": 2308 - }, - { - "epoch": 0.277640834485661, - "flos": 18940247892600.0, - "grad_norm": 9.82167003780635, - "learning_rate": 3.390611198158781e-06, - "loss": 1.0394, - "num_input_tokens_seen": 48917645, - "step": 2309 - }, - { - "epoch": 0.2777610773763001, - "flos": 13826626841040.0, - "grad_norm": 9.15108742844515, - "learning_rate": 3.3900512346990612e-06, - "loss": 1.1269, - "num_input_tokens_seen": 48933355, - "step": 2310 - }, - { - "epoch": 0.27788132026693924, - "flos": 27337311714000.0, - "grad_norm": 9.115378127549242, - "learning_rate": 3.389491060369674e-06, - "loss": 0.8776, - "num_input_tokens_seen": 48958750, - "step": 2311 - }, - { - "epoch": 0.2780015631575783, - "flos": 15904416490920.0, - "grad_norm": 4.0071292763313044, - "learning_rate": 3.388930675255598e-06, - "loss": 1.113, - "num_input_tokens_seen": 48978320, - "step": 2312 - }, - { - "epoch": 0.2781218060482174, - "flos": 8588737407960.0, - "grad_norm": 5.037503733290432, - "learning_rate": 3.388370079441843e-06, - "loss": 1.0065, - "num_input_tokens_seen": 48993555, - "step": 2313 - }, - { - "epoch": 0.2782420489388565, - "flos": 12831295084200.0, - "grad_norm": 9.01105837144743, - "learning_rate": 3.3878092730134505e-06, - "loss": 1.1619, - "num_input_tokens_seen": 49011260, - "step": 2314 - }, - { - "epoch": 0.27836229182949557, - "flos": 13124664596040.0, - "grad_norm": 4.361332498318452, - "learning_rate": 3.3872482560554947e-06, - "loss": 1.0334, - "num_input_tokens_seen": 49029755, - "step": 2315 - }, - { - "epoch": 0.2784825347201347, - "flos": 48032226935280.0, - "grad_norm": 0.8177107104289174, - "learning_rate": 3.386687028653082e-06, - "loss": 0.8259, - "num_input_tokens_seen": 49092320, - "step": 2316 - }, - { - "epoch": 0.2786027776107738, - "flos": 16082139067680.0, - "grad_norm": 4.125241359238591, - "learning_rate": 3.386125590891349e-06, - "loss": 1.0756, - "num_input_tokens_seen": 49108915, - "step": 2317 - }, - { - "epoch": 0.27872302050141284, - "flos": 11158474555320.0, - "grad_norm": 8.169645687343992, - "learning_rate": 3.3855639428554657e-06, - "loss": 1.05, - "num_input_tokens_seen": 49126165, - "step": 2318 - }, - { - "epoch": 0.27884326339205195, - "flos": 15720714909960.0, - "grad_norm": 3.9762472391223884, - "learning_rate": 3.385002084630635e-06, - "loss": 1.0405, - "num_input_tokens_seen": 49144855, - "step": 2319 - }, - { - "epoch": 0.278963506282691, - "flos": 14593725335640.0, - "grad_norm": 6.728960378533887, - "learning_rate": 3.384440016302088e-06, - "loss": 1.0711, - "num_input_tokens_seen": 49163250, - "step": 2320 - }, - { - "epoch": 0.2790837491733301, - "flos": 15588229230360.0, - "grad_norm": 4.647693324426111, - "learning_rate": 3.3838777379550923e-06, - "loss": 0.8439, - "num_input_tokens_seen": 49182415, - "step": 2321 - }, - { - "epoch": 0.27920399206396923, - "flos": 18710358906120.0, - "grad_norm": 3.5518664222035095, - "learning_rate": 3.383315249674944e-06, - "loss": 1.0334, - "num_input_tokens_seen": 49200700, - "step": 2322 - }, - { - "epoch": 0.2793242349546083, - "flos": 18072373319520.0, - "grad_norm": 3.710690781272235, - "learning_rate": 3.3827525515469715e-06, - "loss": 1.0951, - "num_input_tokens_seen": 49215325, - "step": 2323 - }, - { - "epoch": 0.2794444778452474, - "flos": 
14803280159760.0, - "grad_norm": 3.3168252022072355, - "learning_rate": 3.3821896436565367e-06, - "loss": 0.9345, - "num_input_tokens_seen": 49234705, - "step": 2324 - }, - { - "epoch": 0.2795647207358865, - "flos": 15324453672000.0, - "grad_norm": 2.9421991948702115, - "learning_rate": 3.381626526089032e-06, - "loss": 0.933, - "num_input_tokens_seen": 49253990, - "step": 2325 - }, - { - "epoch": 0.27968496362652556, - "flos": 15250021421640.0, - "grad_norm": 3.657426172164516, - "learning_rate": 3.3810631989298815e-06, - "loss": 1.0205, - "num_input_tokens_seen": 49273320, - "step": 2326 - }, - { - "epoch": 0.2798052065171647, - "flos": 16534154117880.0, - "grad_norm": 5.305963601514864, - "learning_rate": 3.3804996622645423e-06, - "loss": 1.0778, - "num_input_tokens_seen": 49291040, - "step": 2327 - }, - { - "epoch": 0.2799254494078038, - "flos": 15297864551400.0, - "grad_norm": 3.4856506202252766, - "learning_rate": 3.3799359161785015e-06, - "loss": 1.1152, - "num_input_tokens_seen": 49310410, - "step": 2328 - }, - { - "epoch": 0.28004569229844284, - "flos": 18787029450360.0, - "grad_norm": 3.2993799609670686, - "learning_rate": 3.3793719607572798e-06, - "loss": 1.0829, - "num_input_tokens_seen": 49331095, - "step": 2329 - }, - { - "epoch": 0.28016593518908195, - "flos": 23928650054280.0, - "grad_norm": 6.455991685542882, - "learning_rate": 3.378807796086428e-06, - "loss": 0.9936, - "num_input_tokens_seen": 49353675, - "step": 2330 - }, - { - "epoch": 0.28028617807972106, - "flos": 10843667064960.0, - "grad_norm": 3.5522629738108384, - "learning_rate": 3.37824342225153e-06, - "loss": 0.9928, - "num_input_tokens_seen": 49369815, - "step": 2331 - }, - { - "epoch": 0.2804064209703601, - "flos": 18159438132600.0, - "grad_norm": 5.091432008887386, - "learning_rate": 3.3776788393382006e-06, - "loss": 0.9982, - "num_input_tokens_seen": 49389015, - "step": 2332 - }, - { - "epoch": 0.2805266638609992, - "flos": 20913122153400.0, - "grad_norm": 4.9384301190778865, - "learning_rate": 3.3771140474320872e-06, - "loss": 0.9978, - "num_input_tokens_seen": 49408685, - "step": 2333 - }, - { - "epoch": 0.28064690675163834, - "flos": 15243183893760.0, - "grad_norm": 4.89973084625295, - "learning_rate": 3.3765490466188664e-06, - "loss": 1.0093, - "num_input_tokens_seen": 49425805, - "step": 2334 - }, - { - "epoch": 0.2807671496422774, - "flos": 14907612979200.0, - "grad_norm": 4.9898611268803625, - "learning_rate": 3.3759838369842508e-06, - "loss": 0.9549, - "num_input_tokens_seen": 49443600, - "step": 2335 - }, - { - "epoch": 0.2808873925329165, - "flos": 15274832171760.0, - "grad_norm": 4.0878187702714985, - "learning_rate": 3.375418418613981e-06, - "loss": 0.954, - "num_input_tokens_seen": 49462345, - "step": 2336 - }, - { - "epoch": 0.28100763542355556, - "flos": 11368335995040.0, - "grad_norm": 5.2176464992805975, - "learning_rate": 3.374852791593831e-06, - "loss": 1.0625, - "num_input_tokens_seen": 49478265, - "step": 2337 - }, - { - "epoch": 0.28112787831419467, - "flos": 13518472909200.0, - "grad_norm": 6.5779806017034375, - "learning_rate": 3.374286956009605e-06, - "loss": 0.758, - "num_input_tokens_seen": 49496550, - "step": 2338 - }, - { - "epoch": 0.2812481212048338, - "flos": 9034650807720.0, - "grad_norm": 4.128678733309936, - "learning_rate": 3.3737209119471405e-06, - "loss": 0.9966, - "num_input_tokens_seen": 49512780, - "step": 2339 - }, - { - "epoch": 0.28136836409547283, - "flos": 11053743135600.0, - "grad_norm": 32.75991694438685, - "learning_rate": 3.373154659492306e-06, - "loss": 
0.8831, - "num_input_tokens_seen": 49530640, - "step": 2340 - }, - { - "epoch": 0.28148860698611194, - "flos": 14143917917760.0, - "grad_norm": 4.931553023665033, - "learning_rate": 3.3725881987310016e-06, - "loss": 1.0711, - "num_input_tokens_seen": 49547895, - "step": 2341 - }, - { - "epoch": 0.28160884987675106, - "flos": 12364710244920.0, - "grad_norm": 2.7156487677865244, - "learning_rate": 3.372021529749159e-06, - "loss": 1.0996, - "num_input_tokens_seen": 49566675, - "step": 2342 - }, - { - "epoch": 0.2817290927673901, - "flos": 11917049136240.0, - "grad_norm": 4.517232747281818, - "learning_rate": 3.3714546526327405e-06, - "loss": 1.1487, - "num_input_tokens_seen": 49584395, - "step": 2343 - }, - { - "epoch": 0.2818493356580292, - "flos": 10896354721200.0, - "grad_norm": 4.245381033143803, - "learning_rate": 3.3708875674677423e-06, - "loss": 1.0956, - "num_input_tokens_seen": 49602090, - "step": 2344 - }, - { - "epoch": 0.28196957854866833, - "flos": 14488564654080.0, - "grad_norm": 3.927647571183088, - "learning_rate": 3.37032027434019e-06, - "loss": 1.0567, - "num_input_tokens_seen": 49621330, - "step": 2345 - }, - { - "epoch": 0.2820898214393074, - "flos": 14172622686000.0, - "grad_norm": 5.712363089959636, - "learning_rate": 3.369752773336141e-06, - "loss": 1.0615, - "num_input_tokens_seen": 49640530, - "step": 2346 - }, - { - "epoch": 0.2822100643299465, - "flos": 16009147910640.0, - "grad_norm": 3.2794198881595578, - "learning_rate": 3.3691850645416864e-06, - "loss": 1.0054, - "num_input_tokens_seen": 49659960, - "step": 2347 - }, - { - "epoch": 0.2823303072205856, - "flos": 8116940103480.0, - "grad_norm": 3.0733444347167302, - "learning_rate": 3.368617148042945e-06, - "loss": 1.0644, - "num_input_tokens_seen": 49677350, - "step": 2348 - }, - { - "epoch": 0.28245055011122466, - "flos": 12941576246280.0, - "grad_norm": 4.115211200694122, - "learning_rate": 3.368049023926071e-06, - "loss": 1.0755, - "num_input_tokens_seen": 49696065, - "step": 2349 - }, - { - "epoch": 0.2825707930018638, - "flos": 17503877924040.0, - "grad_norm": 3.796964793458221, - "learning_rate": 3.3674806922772476e-06, - "loss": 1.0636, - "num_input_tokens_seen": 49716670, - "step": 2350 - }, - { - "epoch": 0.28269103589250283, - "flos": 17948472876720.0, - "grad_norm": 2.880429986538231, - "learning_rate": 3.3669121531826904e-06, - "loss": 0.9765, - "num_input_tokens_seen": 49737370, - "step": 2351 - }, - { - "epoch": 0.28281127878314194, - "flos": 13675830662040.0, - "grad_norm": 5.668609435705578, - "learning_rate": 3.366343406728647e-06, - "loss": 1.0584, - "num_input_tokens_seen": 49756540, - "step": 2352 - }, - { - "epoch": 0.28293152167378105, - "flos": 16979454286440.0, - "grad_norm": 4.845604129442733, - "learning_rate": 3.3657744530013946e-06, - "loss": 0.9123, - "num_input_tokens_seen": 49775495, - "step": 2353 - }, - { - "epoch": 0.2830517645644201, - "flos": 31344093384240.0, - "grad_norm": 13.226933969237258, - "learning_rate": 3.3652052920872437e-06, - "loss": 0.9479, - "num_input_tokens_seen": 49798080, - "step": 2354 - }, - { - "epoch": 0.2831720074550592, - "flos": 18972662709600.0, - "grad_norm": 5.407910747897617, - "learning_rate": 3.3646359240725355e-06, - "loss": 1.088, - "num_input_tokens_seen": 49816990, - "step": 2355 - }, - { - "epoch": 0.2832922503456983, - "flos": 22119204535200.0, - "grad_norm": 10.067203157120565, - "learning_rate": 3.364066349043643e-06, - "loss": 0.9128, - "num_input_tokens_seen": 49837915, - "step": 2356 - }, - { - "epoch": 0.2834124932363374, - "flos": 
14482800280800.0, - "grad_norm": 3.286389204568785, - "learning_rate": 3.363496567086969e-06, - "loss": 1.0575, - "num_input_tokens_seen": 49854730, - "step": 2357 - }, - { - "epoch": 0.2835327361269765, - "flos": 28123640554800.0, - "grad_norm": 5.6799890577184025, - "learning_rate": 3.3629265782889506e-06, - "loss": 0.986, - "num_input_tokens_seen": 49876275, - "step": 2358 - }, - { - "epoch": 0.2836529790176156, - "flos": 21566689360560.0, - "grad_norm": 3.933215883624343, - "learning_rate": 3.362356382736054e-06, - "loss": 0.9322, - "num_input_tokens_seen": 49896600, - "step": 2359 - }, - { - "epoch": 0.28377322190825466, - "flos": 8929858064880.0, - "grad_norm": 4.066928503852058, - "learning_rate": 3.361785980514777e-06, - "loss": 1.1356, - "num_input_tokens_seen": 49912520, - "step": 2360 - }, - { - "epoch": 0.28389346479889377, - "flos": 12968042720640.0, - "grad_norm": 4.264636026394523, - "learning_rate": 3.361215371711649e-06, - "loss": 1.0033, - "num_input_tokens_seen": 49931335, - "step": 2361 - }, - { - "epoch": 0.2840137076895329, - "flos": 14483904096960.0, - "grad_norm": 5.149352482948934, - "learning_rate": 3.3606445564132326e-06, - "loss": 1.0638, - "num_input_tokens_seen": 49948350, - "step": 2362 - }, - { - "epoch": 0.28413395058017193, - "flos": 14226720774000.0, - "grad_norm": 3.2009914829149184, - "learning_rate": 3.360073534706118e-06, - "loss": 1.0475, - "num_input_tokens_seen": 49965225, - "step": 2363 - }, - { - "epoch": 0.28425419347081105, - "flos": 26885756587200.0, - "grad_norm": 7.925482836915502, - "learning_rate": 3.35950230667693e-06, - "loss": 0.9924, - "num_input_tokens_seen": 49986640, - "step": 2364 - }, - { - "epoch": 0.28437443636145016, - "flos": 9768721254120.0, - "grad_norm": 4.667479421593734, - "learning_rate": 3.358930872412323e-06, - "loss": 1.0822, - "num_input_tokens_seen": 50003525, - "step": 2365 - }, - { - "epoch": 0.2844946792520892, - "flos": 16166965586880.0, - "grad_norm": 5.5768015597838, - "learning_rate": 3.3583592319989825e-06, - "loss": 1.0369, - "num_input_tokens_seen": 50022615, - "step": 2366 - }, - { - "epoch": 0.2846149221427283, - "flos": 23115241507920.0, - "grad_norm": 9.891581460467407, - "learning_rate": 3.357787385523627e-06, - "loss": 0.9127, - "num_input_tokens_seen": 50043740, - "step": 2367 - }, - { - "epoch": 0.2847351650333674, - "flos": 20283691142040.0, - "grad_norm": 2.8190672600751108, - "learning_rate": 3.3572153330730048e-06, - "loss": 1.0672, - "num_input_tokens_seen": 50064555, - "step": 2368 - }, - { - "epoch": 0.2848554079240065, - "flos": 39890506743240.0, - "grad_norm": 0.8042650688000047, - "learning_rate": 3.3566430747338956e-06, - "loss": 0.8993, - "num_input_tokens_seen": 50119480, - "step": 2369 - }, - { - "epoch": 0.2849756508146456, - "flos": 8325268465200.0, - "grad_norm": 45.09086014865222, - "learning_rate": 3.35607061059311e-06, - "loss": 1.0944, - "num_input_tokens_seen": 50134130, - "step": 2370 - }, - { - "epoch": 0.28509589370528465, - "flos": 17897042344440.0, - "grad_norm": 5.852035116302824, - "learning_rate": 3.3554979407374917e-06, - "loss": 0.9723, - "num_input_tokens_seen": 50155960, - "step": 2371 - }, - { - "epoch": 0.28521613659592376, - "flos": 14174063779320.0, - "grad_norm": 10.723802093261943, - "learning_rate": 3.3549250652539134e-06, - "loss": 0.9769, - "num_input_tokens_seen": 50174775, - "step": 2372 - }, - { - "epoch": 0.2853363794865629, - "flos": 16612664355720.0, - "grad_norm": 3.195688985642461, - "learning_rate": 3.3543519842292794e-06, - "loss": 1.0449, - 
"num_input_tokens_seen": 50194150, - "step": 2373 - }, - { - "epoch": 0.28545662237720193, - "flos": 14092487385480.0, - "grad_norm": 2.6287742910382925, - "learning_rate": 3.353778697750527e-06, - "loss": 1.0692, - "num_input_tokens_seen": 50212275, - "step": 2374 - }, - { - "epoch": 0.28557686526784104, - "flos": 16979975532960.0, - "grad_norm": 2.589221263891414, - "learning_rate": 3.353205205904622e-06, - "loss": 1.1203, - "num_input_tokens_seen": 50231105, - "step": 2375 - }, - { - "epoch": 0.28569710815848015, - "flos": 32079850216440.0, - "grad_norm": 5.4036645605536435, - "learning_rate": 3.3526315087785637e-06, - "loss": 0.9548, - "num_input_tokens_seen": 50251940, - "step": 2376 - }, - { - "epoch": 0.2858173510491192, - "flos": 19099997247120.0, - "grad_norm": 2.475916444283754, - "learning_rate": 3.3520576064593805e-06, - "loss": 1.0374, - "num_input_tokens_seen": 50271615, - "step": 2377 - }, - { - "epoch": 0.2859375939397583, - "flos": 16455214618200.0, - "grad_norm": 3.0191560144435696, - "learning_rate": 3.3514834990341337e-06, - "loss": 1.0523, - "num_input_tokens_seen": 50291660, - "step": 2378 - }, - { - "epoch": 0.2860578368303974, - "flos": 8535927105480.0, - "grad_norm": 5.651274626774615, - "learning_rate": 3.3509091865899144e-06, - "loss": 1.1722, - "num_input_tokens_seen": 50306570, - "step": 2379 - }, - { - "epoch": 0.2861780797210365, - "flos": 14147597304960.0, - "grad_norm": 33.70923282914768, - "learning_rate": 3.350334669213846e-06, - "loss": 0.9322, - "num_input_tokens_seen": 50323695, - "step": 2380 - }, - { - "epoch": 0.2862983226116756, - "flos": 19627548363840.0, - "grad_norm": 5.602491144856745, - "learning_rate": 3.3497599469930816e-06, - "loss": 1.0063, - "num_input_tokens_seen": 50341625, - "step": 2381 - }, - { - "epoch": 0.28641856550231465, - "flos": 15668241884640.0, - "grad_norm": 4.539011234381321, - "learning_rate": 3.349185020014807e-06, - "loss": 1.0619, - "num_input_tokens_seen": 50358610, - "step": 2382 - }, - { - "epoch": 0.28653880839295376, - "flos": 15901197027120.0, - "grad_norm": 4.255797975262689, - "learning_rate": 3.348609888366237e-06, - "loss": 0.976, - "num_input_tokens_seen": 50377345, - "step": 2383 - }, - { - "epoch": 0.28665905128359287, - "flos": 16612664355720.0, - "grad_norm": 10.192648493546134, - "learning_rate": 3.348034552134619e-06, - "loss": 0.8562, - "num_input_tokens_seen": 50396470, - "step": 2384 - }, - { - "epoch": 0.2867792941742319, - "flos": 14825116738560.0, - "grad_norm": 3.429927600285563, - "learning_rate": 3.3474590114072316e-06, - "loss": 1.0814, - "num_input_tokens_seen": 50414190, - "step": 2385 - }, - { - "epoch": 0.28689953706487104, - "flos": 14668832140320.0, - "grad_norm": 2.767264790589099, - "learning_rate": 3.3468832662713836e-06, - "loss": 1.0541, - "num_input_tokens_seen": 50432155, - "step": 2386 - }, - { - "epoch": 0.28701977995551015, - "flos": 8927926386600.0, - "grad_norm": 4.516603568411717, - "learning_rate": 3.346307316814415e-06, - "loss": 1.0721, - "num_input_tokens_seen": 50447045, - "step": 2387 - }, - { - "epoch": 0.2871400228461492, - "flos": 15092295730080.0, - "grad_norm": 4.673314922004454, - "learning_rate": 3.3457311631236965e-06, - "loss": 0.9768, - "num_input_tokens_seen": 50467750, - "step": 2388 - }, - { - "epoch": 0.2872602657367883, - "flos": 17871281085960.0, - "grad_norm": 3.8025708587083615, - "learning_rate": 3.345154805286631e-06, - "loss": 1.0796, - "num_input_tokens_seen": 50487730, - "step": 2389 - }, - { - "epoch": 0.2873805086274274, - "flos": 
11781068038800.0, - "grad_norm": 4.4945168468258485, - "learning_rate": 3.344578243390651e-06, - "loss": 0.9875, - "num_input_tokens_seen": 50503010, - "step": 2390 - }, - { - "epoch": 0.2875007515180665, - "flos": 12337967816520.0, - "grad_norm": 6.635141922928427, - "learning_rate": 3.3440014775232206e-06, - "loss": 1.0057, - "num_input_tokens_seen": 50520785, - "step": 2391 - }, - { - "epoch": 0.2876209944087056, - "flos": 16659342346200.0, - "grad_norm": 7.18509773246327, - "learning_rate": 3.343424507771834e-06, - "loss": 0.9523, - "num_input_tokens_seen": 50538715, - "step": 2392 - }, - { - "epoch": 0.2877412372993447, - "flos": 9689965723800.0, - "grad_norm": 4.792274281773534, - "learning_rate": 3.342847334224018e-06, - "loss": 1.0964, - "num_input_tokens_seen": 50555835, - "step": 2393 - }, - { - "epoch": 0.28786148018998375, - "flos": 41566111926000.0, - "grad_norm": 0.8948755726019153, - "learning_rate": 3.342269956967329e-06, - "loss": 0.896, - "num_input_tokens_seen": 50617460, - "step": 2394 - }, - { - "epoch": 0.28798172308062286, - "flos": 16660078223640.0, - "grad_norm": 51.200504872693884, - "learning_rate": 3.341692376089355e-06, - "loss": 0.9406, - "num_input_tokens_seen": 50632735, - "step": 2395 - }, - { - "epoch": 0.288101965971262, - "flos": 17863401065040.0, - "grad_norm": 3.152568823284731, - "learning_rate": 3.3411145916777146e-06, - "loss": 1.0654, - "num_input_tokens_seen": 50646615, - "step": 2396 - }, - { - "epoch": 0.28822220886190103, - "flos": 11498859334800.0, - "grad_norm": 3.6951562312112167, - "learning_rate": 3.3405366038200566e-06, - "loss": 1.13, - "num_input_tokens_seen": 50665270, - "step": 2397 - }, - { - "epoch": 0.28834245175254014, - "flos": 17451864822120.0, - "grad_norm": 9.20033333215985, - "learning_rate": 3.3399584126040617e-06, - "loss": 1.0707, - "num_input_tokens_seen": 50684490, - "step": 2398 - }, - { - "epoch": 0.2884626946431792, - "flos": 17477503434360.0, - "grad_norm": 3.1227932577113897, - "learning_rate": 3.339380018117441e-06, - "loss": 1.1288, - "num_input_tokens_seen": 50705045, - "step": 2399 - }, - { - "epoch": 0.2885829375338183, - "flos": 11708812759200.0, - "grad_norm": 7.035058918416796, - "learning_rate": 3.3388014204479366e-06, - "loss": 1.0014, - "num_input_tokens_seen": 50722570, - "step": 2400 - }, - { - "epoch": 0.2887031804244574, - "flos": 17110897473000.0, - "grad_norm": 14.75580960965301, - "learning_rate": 3.338222619683321e-06, - "loss": 1.1507, - "num_input_tokens_seen": 50742255, - "step": 2401 - }, - { - "epoch": 0.2888234233150965, - "flos": 16585768619520.0, - "grad_norm": 5.812721822464202, - "learning_rate": 3.337643615911398e-06, - "loss": 0.9569, - "num_input_tokens_seen": 50761600, - "step": 2402 - }, - { - "epoch": 0.2889436662057356, - "flos": 15825476991240.0, - "grad_norm": 2.7447417880212623, - "learning_rate": 3.3370644092200026e-06, - "loss": 1.0117, - "num_input_tokens_seen": 50778595, - "step": 2403 - }, - { - "epoch": 0.2890639090963747, - "flos": 15354262256400.0, - "grad_norm": 2.496498707858637, - "learning_rate": 3.3364849996969985e-06, - "loss": 1.0205, - "num_input_tokens_seen": 50798335, - "step": 2404 - }, - { - "epoch": 0.28918415198701375, - "flos": 20362446672360.0, - "grad_norm": 4.73940017961134, - "learning_rate": 3.335905387430283e-06, - "loss": 1.0801, - "num_input_tokens_seen": 50819490, - "step": 2405 - }, - { - "epoch": 0.28930439487765286, - "flos": 15589731646800.0, - "grad_norm": 10.593536464823396, - "learning_rate": 3.335325572507782e-06, - "loss": 1.0525, - 
"num_input_tokens_seen": 50839710, - "step": 2406 - }, - { - "epoch": 0.28942463776829197, - "flos": 13676137277640.0, - "grad_norm": 3.5463017071788805, - "learning_rate": 3.3347455550174537e-06, - "loss": 0.9782, - "num_input_tokens_seen": 50858770, - "step": 2407 - }, - { - "epoch": 0.289544880658931, - "flos": 10343839546560.0, - "grad_norm": 2.752324595670293, - "learning_rate": 3.3341653350472864e-06, - "loss": 0.9147, - "num_input_tokens_seen": 50875320, - "step": 2408 - }, - { - "epoch": 0.28966512354957014, - "flos": 20388545208000.0, - "grad_norm": 5.2113725060301, - "learning_rate": 3.333584912685298e-06, - "loss": 0.9312, - "num_input_tokens_seen": 50893660, - "step": 2409 - }, - { - "epoch": 0.28978536644020925, - "flos": 46331345530920.0, - "grad_norm": 0.9083202655680505, - "learning_rate": 3.3330042880195385e-06, - "loss": 0.8188, - "num_input_tokens_seen": 50947730, - "step": 2410 - }, - { - "epoch": 0.2899056093308483, - "flos": 13203696080400.0, - "grad_norm": 3.130808048696177, - "learning_rate": 3.3324234611380888e-06, - "loss": 1.0168, - "num_input_tokens_seen": 50966180, - "step": 2411 - }, - { - "epoch": 0.2900258522214874, - "flos": 16271850314400.0, - "grad_norm": 3.569854907338426, - "learning_rate": 3.3318424321290596e-06, - "loss": 1.05, - "num_input_tokens_seen": 50985615, - "step": 2412 - }, - { - "epoch": 0.2901460951121265, - "flos": 50923093402440.0, - "grad_norm": 0.9189589982832846, - "learning_rate": 3.3312612010805917e-06, - "loss": 0.8763, - "num_input_tokens_seen": 51044910, - "step": 2413 - }, - { - "epoch": 0.2902663380027656, - "flos": 22932030511920.0, - "grad_norm": 3.6521907345955023, - "learning_rate": 3.330679768080858e-06, - "loss": 0.9345, - "num_input_tokens_seen": 51068515, - "step": 2414 - }, - { - "epoch": 0.2903865808934047, - "flos": 20913030168720.0, - "grad_norm": 3.4032857053028835, - "learning_rate": 3.3300981332180627e-06, - "loss": 1.0749, - "num_input_tokens_seen": 51087440, - "step": 2415 - }, - { - "epoch": 0.29050682378404374, - "flos": 12099646901040.0, - "grad_norm": 5.140460814662919, - "learning_rate": 3.3295162965804373e-06, - "loss": 1.0446, - "num_input_tokens_seen": 51105655, - "step": 2416 - }, - { - "epoch": 0.29062706667468285, - "flos": 12652867291560.0, - "grad_norm": 5.526346565581554, - "learning_rate": 3.328934258256247e-06, - "loss": 1.0037, - "num_input_tokens_seen": 51123440, - "step": 2417 - }, - { - "epoch": 0.29074730956532197, - "flos": 17267948610240.0, - "grad_norm": 4.402210521289831, - "learning_rate": 3.3283520183337856e-06, - "loss": 0.8993, - "num_input_tokens_seen": 51142865, - "step": 2418 - }, - { - "epoch": 0.290867552455961, - "flos": 15873841367520.0, - "grad_norm": 4.636657251658823, - "learning_rate": 3.3277695769013797e-06, - "loss": 0.9291, - "num_input_tokens_seen": 51162030, - "step": 2419 - }, - { - "epoch": 0.29098779534660013, - "flos": 16482079692840.0, - "grad_norm": 5.362645344220576, - "learning_rate": 3.327186934047385e-06, - "loss": 1.0075, - "num_input_tokens_seen": 51180445, - "step": 2420 - }, - { - "epoch": 0.29110803823723924, - "flos": 10817323236840.0, - "grad_norm": 5.953442502503508, - "learning_rate": 3.3266040898601877e-06, - "loss": 0.8781, - "num_input_tokens_seen": 51198000, - "step": 2421 - }, - { - "epoch": 0.2912282811278783, - "flos": 16056531117000.0, - "grad_norm": 7.0727613054683856, - "learning_rate": 3.3260210444282045e-06, - "loss": 1.0049, - "num_input_tokens_seen": 51215675, - "step": 2422 - }, - { - "epoch": 0.2913485240185174, - "flos": 
17424570485640.0, - "grad_norm": 13.911853681359737, - "learning_rate": 3.325437797839883e-06, - "loss": 0.9629, - "num_input_tokens_seen": 51233765, - "step": 2423 - }, - { - "epoch": 0.2914687669091565, - "flos": 12705156347520.0, - "grad_norm": 4.181146613013196, - "learning_rate": 3.3248543501837015e-06, - "loss": 0.9725, - "num_input_tokens_seen": 51250690, - "step": 2424 - }, - { - "epoch": 0.2915890097997956, - "flos": 16010006434320.0, - "grad_norm": 3.86902798099215, - "learning_rate": 3.3242707015481684e-06, - "loss": 1.0052, - "num_input_tokens_seen": 51270345, - "step": 2425 - }, - { - "epoch": 0.2917092526904347, - "flos": 9768843900360.0, - "grad_norm": 2.8439592556938775, - "learning_rate": 3.323686852021823e-06, - "loss": 1.0396, - "num_input_tokens_seen": 51287575, - "step": 2426 - }, - { - "epoch": 0.2918294955810738, - "flos": 16114553884680.0, - "grad_norm": 2.922383962763598, - "learning_rate": 3.323102801693235e-06, - "loss": 1.0327, - "num_input_tokens_seen": 51306060, - "step": 2427 - }, - { - "epoch": 0.29194973847171285, - "flos": 16662837764040.0, - "grad_norm": 3.8821707795836375, - "learning_rate": 3.322518550651003e-06, - "loss": 1.0262, - "num_input_tokens_seen": 51325090, - "step": 2428 - }, - { - "epoch": 0.29206998136235196, - "flos": 15564246342360.0, - "grad_norm": 2.647000713583198, - "learning_rate": 3.3219340989837586e-06, - "loss": 1.0421, - "num_input_tokens_seen": 51344800, - "step": 2429 - }, - { - "epoch": 0.292190224252991, - "flos": 16502567163000.0, - "grad_norm": 3.1875498150691484, - "learning_rate": 3.3213494467801625e-06, - "loss": 1.0387, - "num_input_tokens_seen": 51363695, - "step": 2430 - }, - { - "epoch": 0.2923104671436301, - "flos": 14724555291000.0, - "grad_norm": 4.0346024641924, - "learning_rate": 3.3207645941289063e-06, - "loss": 0.9407, - "num_input_tokens_seen": 51381760, - "step": 2431 - }, - { - "epoch": 0.29243071003426924, - "flos": 25553811422760.0, - "grad_norm": 3.313121521420282, - "learning_rate": 3.320179541118711e-06, - "loss": 1.0305, - "num_input_tokens_seen": 51403980, - "step": 2432 - }, - { - "epoch": 0.2925509529249083, - "flos": 41567736988680.0, - "grad_norm": 1.0298665911190803, - "learning_rate": 3.3195942878383293e-06, - "loss": 0.8745, - "num_input_tokens_seen": 51459800, - "step": 2433 - }, - { - "epoch": 0.2926711958155474, - "flos": 15196168626120.0, - "grad_norm": 3.516034725473408, - "learning_rate": 3.319008834376543e-06, - "loss": 1.0098, - "num_input_tokens_seen": 51479210, - "step": 2434 - }, - { - "epoch": 0.2927914387061865, - "flos": 16481773077240.0, - "grad_norm": 5.811216807493196, - "learning_rate": 3.3184231808221654e-06, - "loss": 1.1142, - "num_input_tokens_seen": 51493255, - "step": 2435 - }, - { - "epoch": 0.29291168159682557, - "flos": 15956889516240.0, - "grad_norm": 3.3794064895689484, - "learning_rate": 3.3178373272640394e-06, - "loss": 0.851, - "num_input_tokens_seen": 51512070, - "step": 2436 - }, - { - "epoch": 0.2930319244874647, - "flos": 15032371945680.0, - "grad_norm": 3.0371897597882964, - "learning_rate": 3.3172512737910387e-06, - "loss": 1.0891, - "num_input_tokens_seen": 51529300, - "step": 2437 - }, - { - "epoch": 0.2931521673781038, - "flos": 22354765910280.0, - "grad_norm": 5.234721340279345, - "learning_rate": 3.3166650204920674e-06, - "loss": 1.1108, - "num_input_tokens_seen": 51550190, - "step": 2438 - }, - { - "epoch": 0.29327241026874284, - "flos": 17210508412200.0, - "grad_norm": 3.6375739283439286, - "learning_rate": 3.316078567456059e-06, - "loss": 1.0559, - 
"num_input_tokens_seen": 51567750, - "step": 2439 - }, - { - "epoch": 0.29339265315938196, - "flos": 17241114197160.0, - "grad_norm": 3.1456753183725374, - "learning_rate": 3.3154919147719786e-06, - "loss": 0.9982, - "num_input_tokens_seen": 51588485, - "step": 2440 - }, - { - "epoch": 0.29351289605002107, - "flos": 11997797667960.0, - "grad_norm": 4.804823859536059, - "learning_rate": 3.31490506252882e-06, - "loss": 1.1035, - "num_input_tokens_seen": 51607585, - "step": 2441 - }, - { - "epoch": 0.2936331389406601, - "flos": 14069363021160.0, - "grad_norm": 2.5696287782090805, - "learning_rate": 3.31431801081561e-06, - "loss": 1.0842, - "num_input_tokens_seen": 51626240, - "step": 2442 - }, - { - "epoch": 0.29375338183129923, - "flos": 51149021499600.0, - "grad_norm": 0.9348041556770756, - "learning_rate": 3.313730759721402e-06, - "loss": 0.9188, - "num_input_tokens_seen": 51688890, - "step": 2443 - }, - { - "epoch": 0.29387362472193834, - "flos": 15668701808040.0, - "grad_norm": 3.642762305790991, - "learning_rate": 3.313143309335282e-06, - "loss": 1.0881, - "num_input_tokens_seen": 51707100, - "step": 2444 - }, - { - "epoch": 0.2939938676125774, - "flos": 16324047385680.0, - "grad_norm": 3.8524562580217188, - "learning_rate": 3.3125556597463665e-06, - "loss": 1.0721, - "num_input_tokens_seen": 51726125, - "step": 2445 - }, - { - "epoch": 0.2941141105032165, - "flos": 22355501787720.0, - "grad_norm": 2.236029912397086, - "learning_rate": 3.311967811043801e-06, - "loss": 0.8876, - "num_input_tokens_seen": 51747765, - "step": 2446 - }, - { - "epoch": 0.29423435339385556, - "flos": 16507871612880.0, - "grad_norm": 4.254935820439548, - "learning_rate": 3.3113797633167617e-06, - "loss": 1.0309, - "num_input_tokens_seen": 51765780, - "step": 2447 - }, - { - "epoch": 0.2943545962844947, - "flos": 19124348073840.0, - "grad_norm": 3.2905917927659534, - "learning_rate": 3.310791516654455e-06, - "loss": 0.9142, - "num_input_tokens_seen": 51782560, - "step": 2448 - }, - { - "epoch": 0.2944748391751338, - "flos": 14358869176440.0, - "grad_norm": 4.722070195362585, - "learning_rate": 3.3102030711461177e-06, - "loss": 1.0239, - "num_input_tokens_seen": 51801855, - "step": 2449 - }, - { - "epoch": 0.29459508206577284, - "flos": 11289335172240.0, - "grad_norm": 2.8579898668647448, - "learning_rate": 3.3096144268810156e-06, - "loss": 0.9137, - "num_input_tokens_seen": 51820335, - "step": 2450 - }, - { - "epoch": 0.29471532495641195, - "flos": 14488932592800.0, - "grad_norm": 5.314908906401841, - "learning_rate": 3.3090255839484462e-06, - "loss": 0.9513, - "num_input_tokens_seen": 51838050, - "step": 2451 - }, - { - "epoch": 0.29483556784705106, - "flos": 14462864718720.0, - "grad_norm": 3.1052106258192347, - "learning_rate": 3.3084365424377366e-06, - "loss": 1.0835, - "num_input_tokens_seen": 51856535, - "step": 2452 - }, - { - "epoch": 0.2949558107376901, - "flos": 49090676826840.0, - "grad_norm": 0.8099113454692658, - "learning_rate": 3.307847302438245e-06, - "loss": 0.816, - "num_input_tokens_seen": 51910235, - "step": 2453 - }, - { - "epoch": 0.2950760536283292, - "flos": 11394373207560.0, - "grad_norm": 10.868612584040022, - "learning_rate": 3.3072578640393562e-06, - "loss": 0.9988, - "num_input_tokens_seen": 51927290, - "step": 2454 - }, - { - "epoch": 0.29519629651896834, - "flos": 14539473939840.0, - "grad_norm": 4.027011506133578, - "learning_rate": 3.3066682273304886e-06, - "loss": 1.0251, - "num_input_tokens_seen": 51944655, - "step": 2455 - }, - { - "epoch": 0.2953165394096074, - "flos": 
13413434873880.0, - "grad_norm": 6.702315132639395, - "learning_rate": 3.3060783924010904e-06, - "loss": 1.0048, - "num_input_tokens_seen": 51962300, - "step": 2456 - }, - { - "epoch": 0.2954367823002465, - "flos": 14640004725840.0, - "grad_norm": 3.264815088617763, - "learning_rate": 3.3054883593406387e-06, - "loss": 1.0873, - "num_input_tokens_seen": 51976770, - "step": 2457 - }, - { - "epoch": 0.2955570251908856, - "flos": 22223751985560.0, - "grad_norm": 3.6800329609099682, - "learning_rate": 3.3048981282386404e-06, - "loss": 0.8629, - "num_input_tokens_seen": 51997800, - "step": 2458 - }, - { - "epoch": 0.29567726808152467, - "flos": 15377846544120.0, - "grad_norm": 5.310008283705635, - "learning_rate": 3.304307699184634e-06, - "loss": 1.0627, - "num_input_tokens_seen": 52016110, - "step": 2459 - }, - { - "epoch": 0.2957975109721638, - "flos": 17242187351760.0, - "grad_norm": 3.004642232711615, - "learning_rate": 3.3037170722681866e-06, - "loss": 1.0282, - "num_input_tokens_seen": 52036665, - "step": 2460 - }, - { - "epoch": 0.29591775386280283, - "flos": 9506080173480.0, - "grad_norm": 5.5033193455377205, - "learning_rate": 3.3031262475788956e-06, - "loss": 0.9166, - "num_input_tokens_seen": 52053325, - "step": 2461 - }, - { - "epoch": 0.29603799675344195, - "flos": 12575062269600.0, - "grad_norm": 6.542913362638703, - "learning_rate": 3.3025352252063897e-06, - "loss": 0.9701, - "num_input_tokens_seen": 52071740, - "step": 2462 - }, - { - "epoch": 0.29615823964408106, - "flos": 16192880153160.0, - "grad_norm": 5.653290706839694, - "learning_rate": 3.3019440052403252e-06, - "loss": 0.9809, - "num_input_tokens_seen": 52091325, - "step": 2463 - }, - { - "epoch": 0.2962784825347201, - "flos": 16717794375720.0, - "grad_norm": 4.0907131899542675, - "learning_rate": 3.30135258777039e-06, - "loss": 0.9415, - "num_input_tokens_seen": 52110415, - "step": 2464 - }, - { - "epoch": 0.2963987254253592, - "flos": 11578166773200.0, - "grad_norm": 3.8241155062991465, - "learning_rate": 3.3007609728863024e-06, - "loss": 0.9354, - "num_input_tokens_seen": 52128225, - "step": 2465 - }, - { - "epoch": 0.29651896831599833, - "flos": 23874122704440.0, - "grad_norm": 2.8809654707688197, - "learning_rate": 3.300169160677809e-06, - "loss": 0.9654, - "num_input_tokens_seen": 52151860, - "step": 2466 - }, - { - "epoch": 0.2966392112066374, - "flos": 16926950599560.0, - "grad_norm": 5.523098040863854, - "learning_rate": 3.2995771512346878e-06, - "loss": 0.9842, - "num_input_tokens_seen": 52169930, - "step": 2467 - }, - { - "epoch": 0.2967594540972765, - "flos": 14147965243680.0, - "grad_norm": 4.32671435121691, - "learning_rate": 3.298984944646746e-06, - "loss": 0.9515, - "num_input_tokens_seen": 52188330, - "step": 2468 - }, - { - "epoch": 0.2968796969879156, - "flos": 16874876174520.0, - "grad_norm": 5.56187829818825, - "learning_rate": 3.298392541003822e-06, - "loss": 1.0466, - "num_input_tokens_seen": 52207455, - "step": 2469 - }, - { - "epoch": 0.29699993987855466, - "flos": 16271819652840.0, - "grad_norm": 2.8209023321175426, - "learning_rate": 3.2977999403957806e-06, - "loss": 1.1237, - "num_input_tokens_seen": 52225935, - "step": 2470 - }, - { - "epoch": 0.2971201827691938, - "flos": 24132440505120.0, - "grad_norm": 4.641053870321016, - "learning_rate": 3.2972071429125207e-06, - "loss": 0.9016, - "num_input_tokens_seen": 52246875, - "step": 2471 - }, - { - "epoch": 0.2972404256598329, - "flos": 15668579161800.0, - "grad_norm": 2.9777994464288313, - "learning_rate": 3.2966141486439682e-06, - "loss": 
1.1194, - "num_input_tokens_seen": 52265785, - "step": 2472 - }, - { - "epoch": 0.29736066855047194, - "flos": 22800955264080.0, - "grad_norm": 8.946497942740116, - "learning_rate": 3.29602095768008e-06, - "loss": 0.8843, - "num_input_tokens_seen": 52286020, - "step": 2473 - }, - { - "epoch": 0.29748091144111105, - "flos": 23901754318080.0, - "grad_norm": 4.454833442494367, - "learning_rate": 3.2954275701108437e-06, - "loss": 0.8775, - "num_input_tokens_seen": 52306920, - "step": 2474 - }, - { - "epoch": 0.29760115433175016, - "flos": 29488245828720.0, - "grad_norm": 3.28646639396215, - "learning_rate": 3.294833986026275e-06, - "loss": 0.9175, - "num_input_tokens_seen": 52329880, - "step": 2475 - }, - { - "epoch": 0.2977213972223892, - "flos": 17420676467520.0, - "grad_norm": 2.8689272225351283, - "learning_rate": 3.29424020551642e-06, - "loss": 1.0715, - "num_input_tokens_seen": 52348235, - "step": 2476 - }, - { - "epoch": 0.2978416401130283, - "flos": 15115328109720.0, - "grad_norm": 3.5409355007223557, - "learning_rate": 3.2936462286713546e-06, - "loss": 0.9415, - "num_input_tokens_seen": 52366305, - "step": 2477 - }, - { - "epoch": 0.2979618830036674, - "flos": 18339981572880.0, - "grad_norm": 4.528381281432615, - "learning_rate": 3.2930520555811846e-06, - "loss": 1.0042, - "num_input_tokens_seen": 52385650, - "step": 2478 - }, - { - "epoch": 0.2980821258943065, - "flos": 16690959962640.0, - "grad_norm": 5.45222426356125, - "learning_rate": 3.292457686336046e-06, - "loss": 1.0301, - "num_input_tokens_seen": 52404690, - "step": 2479 - }, - { - "epoch": 0.2982023687849456, - "flos": 49954621172160.0, - "grad_norm": 0.8906819174799514, - "learning_rate": 3.291863121026105e-06, - "loss": 0.8824, - "num_input_tokens_seen": 52468190, - "step": 2480 - }, - { - "epoch": 0.29832261167558466, - "flos": 21253874871600.0, - "grad_norm": 2.805372659200293, - "learning_rate": 3.2912683597415547e-06, - "loss": 0.9929, - "num_input_tokens_seen": 52491995, - "step": 2481 - }, - { - "epoch": 0.29844285456622377, - "flos": 24188838210120.0, - "grad_norm": 16.682637509911597, - "learning_rate": 3.2906734025726213e-06, - "loss": 1.0066, - "num_input_tokens_seen": 52510980, - "step": 2482 - }, - { - "epoch": 0.2985630974568629, - "flos": 16978197162480.0, - "grad_norm": 3.4549229976834326, - "learning_rate": 3.290078249609559e-06, - "loss": 1.1004, - "num_input_tokens_seen": 52530120, - "step": 2483 - }, - { - "epoch": 0.29868334034750194, - "flos": 15485153534880.0, - "grad_norm": 3.4112937052755137, - "learning_rate": 3.2894829009426514e-06, - "loss": 1.0922, - "num_input_tokens_seen": 52547675, - "step": 2484 - }, - { - "epoch": 0.29880358323814105, - "flos": 18264690798840.0, - "grad_norm": 3.313767388642461, - "learning_rate": 3.288887356662213e-06, - "loss": 1.0026, - "num_input_tokens_seen": 52568730, - "step": 2485 - }, - { - "epoch": 0.29892382612878016, - "flos": 50852340539280.0, - "grad_norm": 0.7932190147813948, - "learning_rate": 3.288291616858588e-06, - "loss": 0.844, - "num_input_tokens_seen": 52623840, - "step": 2486 - }, - { - "epoch": 0.2990440690194192, - "flos": 18130947995280.0, - "grad_norm": 2.6702382389320674, - "learning_rate": 3.287695681622149e-06, - "loss": 1.0081, - "num_input_tokens_seen": 52642910, - "step": 2487 - }, - { - "epoch": 0.2991643119100583, - "flos": 16874477574240.0, - "grad_norm": 7.748166759612601, - "learning_rate": 3.2870995510432982e-06, - "loss": 1.0474, - "num_input_tokens_seen": 52661110, - "step": 2488 - }, - { - "epoch": 0.29928455480069743, - "flos": 
- [ … deleted training-log JSON (Trainer-style "log_history" records), steps 2489–2835, epochs ≈ 0.299–0.341: each entry records "epoch", "flos", "grad_norm", "learning_rate", "loss", "num_input_tokens_seen", and "step"; several hundred repetitive per-step records omitted … ]
3.0691004620819836e-06, - "loss": 1.0261, - "num_input_tokens_seen": 60177475, - "step": 2835 - }, - { - "epoch": 0.341008837852462, - "flos": 45514656197640.0, - "grad_norm": 0.8128261278440719, - "learning_rate": 3.0684420434763254e-06, - "loss": 0.8652, - "num_input_tokens_seen": 60243380, - "step": 2836 - }, - { - "epoch": 0.34112908074310105, - "flos": 14775617884560.0, - "grad_norm": 4.75707921984251, - "learning_rate": 3.06778346278886e-06, - "loss": 0.991, - "num_input_tokens_seen": 60261935, - "step": 2837 - }, - { - "epoch": 0.34124932363374016, - "flos": 17769063914160.0, - "grad_norm": 19.870389967510388, - "learning_rate": 3.0671247201194906e-06, - "loss": 1.0182, - "num_input_tokens_seen": 60283790, - "step": 2838 - }, - { - "epoch": 0.3413695665243792, - "flos": 20231218116720.0, - "grad_norm": 4.059428065939114, - "learning_rate": 3.066465815568151e-06, - "loss": 0.9771, - "num_input_tokens_seen": 60304340, - "step": 2839 - }, - { - "epoch": 0.34148980941501833, - "flos": 18002662949400.0, - "grad_norm": 3.680337188448731, - "learning_rate": 3.0658067492347947e-06, - "loss": 0.9235, - "num_input_tokens_seen": 60326700, - "step": 2840 - }, - { - "epoch": 0.34161005230565744, - "flos": 12417060624000.0, - "grad_norm": 5.103418827416318, - "learning_rate": 3.065147521219402e-06, - "loss": 0.9046, - "num_input_tokens_seen": 60345675, - "step": 2841 - }, - { - "epoch": 0.3417302951962965, - "flos": 31188514001880.0, - "grad_norm": 2.7834891403202935, - "learning_rate": 3.064488131621977e-06, - "loss": 0.9846, - "num_input_tokens_seen": 60368720, - "step": 2842 - }, - { - "epoch": 0.3418505380869356, - "flos": 22012756068120.0, - "grad_norm": 3.4721882593825106, - "learning_rate": 3.063828580542549e-06, - "loss": 0.9646, - "num_input_tokens_seen": 60389635, - "step": 2843 - }, - { - "epoch": 0.3419707809775747, - "flos": 13806844586760.0, - "grad_norm": 3.026970428090466, - "learning_rate": 3.0631688680811706e-06, - "loss": 0.9436, - "num_input_tokens_seen": 60408980, - "step": 2844 - }, - { - "epoch": 0.3420910238682138, - "flos": 20464203920760.0, - "grad_norm": 4.456930896588792, - "learning_rate": 3.062508994337921e-06, - "loss": 0.9922, - "num_input_tokens_seen": 60428305, - "step": 2845 - }, - { - "epoch": 0.3422112667588529, - "flos": 15196291272360.0, - "grad_norm": 7.556358428757693, - "learning_rate": 3.0618489594129013e-06, - "loss": 1.0138, - "num_input_tokens_seen": 60446165, - "step": 2846 - }, - { - "epoch": 0.342331509649492, - "flos": 9794850451320.0, - "grad_norm": 5.488694796474628, - "learning_rate": 3.061188763406239e-06, - "loss": 0.9323, - "num_input_tokens_seen": 60462030, - "step": 2847 - }, - { - "epoch": 0.34245175254013105, - "flos": 20388177269280.0, - "grad_norm": 7.444328443214941, - "learning_rate": 3.060528406418085e-06, - "loss": 1.0546, - "num_input_tokens_seen": 60481600, - "step": 2848 - }, - { - "epoch": 0.34257199543077016, - "flos": 24344448254040.0, - "grad_norm": 2.752488830710053, - "learning_rate": 3.0598678885486145e-06, - "loss": 0.8445, - "num_input_tokens_seen": 60503860, - "step": 2849 - }, - { - "epoch": 0.34269223832140927, - "flos": 14173389225000.0, - "grad_norm": 3.9525610480036613, - "learning_rate": 3.0592072098980282e-06, - "loss": 0.9708, - "num_input_tokens_seen": 60523240, - "step": 2850 - }, - { - "epoch": 0.3428124812120483, - "flos": 19392324265920.0, - "grad_norm": 5.58038028036498, - "learning_rate": 3.0585463705665514e-06, - "loss": 0.9512, - "num_input_tokens_seen": 60543335, - "step": 2851 - }, - { - "epoch": 
0.34293272410268744, - "flos": 17477012849400.0, - "grad_norm": 4.529190963684625, - "learning_rate": 3.0578853706544304e-06, - "loss": 0.932, - "num_input_tokens_seen": 60560445, - "step": 2852 - }, - { - "epoch": 0.34305296699332655, - "flos": 15275077464240.0, - "grad_norm": 2.857716397212462, - "learning_rate": 3.0572242102619404e-06, - "loss": 0.8826, - "num_input_tokens_seen": 60577320, - "step": 2853 - }, - { - "epoch": 0.3431732098839656, - "flos": 17107003454880.0, - "grad_norm": 5.995498876232264, - "learning_rate": 3.0565628894893784e-06, - "loss": 1.0442, - "num_input_tokens_seen": 60597675, - "step": 2854 - }, - { - "epoch": 0.3432934527746047, - "flos": 11893066248240.0, - "grad_norm": 3.5980123701916655, - "learning_rate": 3.0559014084370655e-06, - "loss": 0.9766, - "num_input_tokens_seen": 60615920, - "step": 2855 - }, - { - "epoch": 0.34341369566524377, - "flos": 16664064226440.0, - "grad_norm": 4.122035955680381, - "learning_rate": 3.055239767205349e-06, - "loss": 1.0138, - "num_input_tokens_seen": 60637390, - "step": 2856 - }, - { - "epoch": 0.3435339385558829, - "flos": 12227349377280.0, - "grad_norm": 3.1300464319557193, - "learning_rate": 3.054577965894599e-06, - "loss": 1.003, - "num_input_tokens_seen": 60653255, - "step": 2857 - }, - { - "epoch": 0.343654181446522, - "flos": 15772666688760.0, - "grad_norm": 4.7757918175671366, - "learning_rate": 3.0539160046052094e-06, - "loss": 0.948, - "num_input_tokens_seen": 60672675, - "step": 2858 - }, - { - "epoch": 0.34377442433716104, - "flos": 14121590754000.0, - "grad_norm": 7.380317452072057, - "learning_rate": 3.0532538834376003e-06, - "loss": 0.9267, - "num_input_tokens_seen": 60691955, - "step": 2859 - }, - { - "epoch": 0.34389466722780015, - "flos": 15771225595440.0, - "grad_norm": 4.094386160539356, - "learning_rate": 3.0525916024922143e-06, - "loss": 1.0045, - "num_input_tokens_seen": 60710860, - "step": 2860 - }, - { - "epoch": 0.34401491011843927, - "flos": 13203634757280.0, - "grad_norm": 5.915114436457398, - "learning_rate": 3.0519291618695193e-06, - "loss": 1.0552, - "num_input_tokens_seen": 60727980, - "step": 2861 - }, - { - "epoch": 0.3441351530090783, - "flos": 12652836630000.0, - "grad_norm": 5.350428284231743, - "learning_rate": 3.0512665616700065e-06, - "loss": 0.9747, - "num_input_tokens_seen": 60746765, - "step": 2862 - }, - { - "epoch": 0.34425539589971743, - "flos": 16429269390360.0, - "grad_norm": 7.609336398612306, - "learning_rate": 3.0506038019941933e-06, - "loss": 1.1227, - "num_input_tokens_seen": 60766495, - "step": 2863 - }, - { - "epoch": 0.34437563879035654, - "flos": 15563081203080.0, - "grad_norm": 4.305195025591062, - "learning_rate": 3.049940882942617e-06, - "loss": 0.8855, - "num_input_tokens_seen": 60785000, - "step": 2864 - }, - { - "epoch": 0.3444958816809956, - "flos": 16403600116560.0, - "grad_norm": 4.160599576583875, - "learning_rate": 3.0492778046158448e-06, - "loss": 1.0212, - "num_input_tokens_seen": 60806140, - "step": 2865 - }, - { - "epoch": 0.3446161245716347, - "flos": 15563878403640.0, - "grad_norm": 3.8753190678320206, - "learning_rate": 3.0486145671144633e-06, - "loss": 1.0009, - "num_input_tokens_seen": 60825650, - "step": 2866 - }, - { - "epoch": 0.3447363674622738, - "flos": 17865792666720.0, - "grad_norm": 4.006946492465484, - "learning_rate": 3.047951170539086e-06, - "loss": 0.9755, - "num_input_tokens_seen": 60844995, - "step": 2867 - }, - { - "epoch": 0.3448566103529129, - "flos": 8431747593840.0, - "grad_norm": 6.751818376674654, - "learning_rate": 
3.047287614990349e-06, - "loss": 1.0675, - "num_input_tokens_seen": 60862635, - "step": 2868 - }, - { - "epoch": 0.344976853243552, - "flos": 28700813171760.0, - "grad_norm": 4.908320287785359, - "learning_rate": 3.046623900568914e-06, - "loss": 0.8219, - "num_input_tokens_seen": 60884920, - "step": 2869 - }, - { - "epoch": 0.34509709613419104, - "flos": 20460647179800.0, - "grad_norm": 9.869422979522687, - "learning_rate": 3.045960027375465e-06, - "loss": 0.9281, - "num_input_tokens_seen": 60902475, - "step": 2870 - }, - { - "epoch": 0.34521733902483015, - "flos": 21355110873480.0, - "grad_norm": 8.896926229079547, - "learning_rate": 3.045295995510711e-06, - "loss": 1.0465, - "num_input_tokens_seen": 60919165, - "step": 2871 - }, - { - "epoch": 0.34533758191546926, - "flos": 19885559548920.0, - "grad_norm": 4.554572128483594, - "learning_rate": 3.0446318050753865e-06, - "loss": 0.9647, - "num_input_tokens_seen": 60939365, - "step": 2872 - }, - { - "epoch": 0.3454578248061083, - "flos": 19313384766240.0, - "grad_norm": 3.0896147246426238, - "learning_rate": 3.0439674561702474e-06, - "loss": 1.0028, - "num_input_tokens_seen": 60958585, - "step": 2873 - }, - { - "epoch": 0.3455780676967474, - "flos": 13491209234280.0, - "grad_norm": 2.8633628510925053, - "learning_rate": 3.043302948896076e-06, - "loss": 1.1015, - "num_input_tokens_seen": 60976910, - "step": 2874 - }, - { - "epoch": 0.34569831058738654, - "flos": 24610155490680.0, - "grad_norm": 7.738601916705894, - "learning_rate": 3.0426382833536756e-06, - "loss": 0.8242, - "num_input_tokens_seen": 60999985, - "step": 2875 - }, - { - "epoch": 0.3458185534780256, - "flos": 22197224188080.0, - "grad_norm": 7.339143127817167, - "learning_rate": 3.041973459643877e-06, - "loss": 1.0161, - "num_input_tokens_seen": 61019160, - "step": 2876 - }, - { - "epoch": 0.3459387963686647, - "flos": 23141493351360.0, - "grad_norm": 3.5363880687956346, - "learning_rate": 3.0413084778675334e-06, - "loss": 0.9033, - "num_input_tokens_seen": 61040130, - "step": 2877 - }, - { - "epoch": 0.3460590392593038, - "flos": 17551690392240.0, - "grad_norm": 4.578329765245297, - "learning_rate": 3.0406433381255214e-06, - "loss": 1.0601, - "num_input_tokens_seen": 61057885, - "step": 2878 - }, - { - "epoch": 0.34617928214994287, - "flos": 13335016620720.0, - "grad_norm": 6.674085335802958, - "learning_rate": 3.0399780405187425e-06, - "loss": 1.0514, - "num_input_tokens_seen": 61076600, - "step": 2879 - }, - { - "epoch": 0.346299525040582, - "flos": 17682428362920.0, - "grad_norm": 2.840554590475199, - "learning_rate": 3.0393125851481216e-06, - "loss": 1.0086, - "num_input_tokens_seen": 61096195, - "step": 2880 - }, - { - "epoch": 0.3464197679312211, - "flos": 11629903921080.0, - "grad_norm": 3.988519285983901, - "learning_rate": 3.038646972114608e-06, - "loss": 1.0856, - "num_input_tokens_seen": 61112240, - "step": 2881 - }, - { - "epoch": 0.34654001082186014, - "flos": 15904355167800.0, - "grad_norm": 3.7763887793942463, - "learning_rate": 3.037981201519174e-06, - "loss": 0.9065, - "num_input_tokens_seen": 61132560, - "step": 2882 - }, - { - "epoch": 0.34666025371249926, - "flos": 13885998717360.0, - "grad_norm": 6.7279925980498865, - "learning_rate": 3.0373152734628175e-06, - "loss": 0.9266, - "num_input_tokens_seen": 61150560, - "step": 2883 - }, - { - "epoch": 0.34678049660313837, - "flos": 10791040731840.0, - "grad_norm": 5.669092594043931, - "learning_rate": 3.0366491880465584e-06, - "loss": 0.9764, - "num_input_tokens_seen": 61168300, - "step": 2884 - }, - { - 
"epoch": 0.3469007394937774, - "flos": 15041693059920.0, - "grad_norm": 2.3799059541537466, - "learning_rate": 3.035982945371443e-06, - "loss": 1.0515, - "num_input_tokens_seen": 61189715, - "step": 2885 - }, - { - "epoch": 0.34702098238441653, - "flos": 15898376163600.0, - "grad_norm": 4.355278469428177, - "learning_rate": 3.035316545538537e-06, - "loss": 1.0893, - "num_input_tokens_seen": 61208230, - "step": 2886 - }, - { - "epoch": 0.3471412252750556, - "flos": 16297396941960.0, - "grad_norm": 3.5571656025582277, - "learning_rate": 3.034649988648935e-06, - "loss": 1.0089, - "num_input_tokens_seen": 61227715, - "step": 2887 - }, - { - "epoch": 0.3472614681656947, - "flos": 15143818247040.0, - "grad_norm": 2.4636086151725562, - "learning_rate": 3.033983274803752e-06, - "loss": 1.0392, - "num_input_tokens_seen": 61247225, - "step": 2888 - }, - { - "epoch": 0.3473817110563338, - "flos": 16689886808040.0, - "grad_norm": 6.39228601484676, - "learning_rate": 3.0333164041041283e-06, - "loss": 0.9488, - "num_input_tokens_seen": 61263260, - "step": 2889 - }, - { - "epoch": 0.34750195394697286, - "flos": 15878164647480.0, - "grad_norm": 3.3814189762898192, - "learning_rate": 3.032649376651228e-06, - "loss": 0.9435, - "num_input_tokens_seen": 61282400, - "step": 2890 - }, - { - "epoch": 0.347622196837612, - "flos": 20728408740960.0, - "grad_norm": 3.137784669887741, - "learning_rate": 3.031982192546238e-06, - "loss": 0.987, - "num_input_tokens_seen": 61305215, - "step": 2891 - }, - { - "epoch": 0.3477424397282511, - "flos": 15957042824040.0, - "grad_norm": 3.6634520975935123, - "learning_rate": 3.0313148518903696e-06, - "loss": 1.1681, - "num_input_tokens_seen": 61324760, - "step": 2892 - }, - { - "epoch": 0.34786268261889014, - "flos": 11158443893760.0, - "grad_norm": 4.283369889950638, - "learning_rate": 3.030647354784859e-06, - "loss": 1.0285, - "num_input_tokens_seen": 61341520, - "step": 2893 - }, - { - "epoch": 0.34798292550952925, - "flos": 14751052426920.0, - "grad_norm": 3.6205273363219064, - "learning_rate": 3.029979701330964e-06, - "loss": 0.9975, - "num_input_tokens_seen": 61360665, - "step": 2894 - }, - { - "epoch": 0.34810316840016836, - "flos": 14147413335600.0, - "grad_norm": 3.5068714264338356, - "learning_rate": 3.029311891629966e-06, - "loss": 1.0297, - "num_input_tokens_seen": 61378840, - "step": 2895 - }, - { - "epoch": 0.3482234112908074, - "flos": 16796151305760.0, - "grad_norm": 5.63620794338526, - "learning_rate": 3.0286439257831744e-06, - "loss": 0.9642, - "num_input_tokens_seen": 61398030, - "step": 2896 - }, - { - "epoch": 0.3483436541814465, - "flos": 17057136662160.0, - "grad_norm": 4.124799781332905, - "learning_rate": 3.0279758038919156e-06, - "loss": 0.9364, - "num_input_tokens_seen": 61415975, - "step": 2897 - }, - { - "epoch": 0.34846389707208564, - "flos": 16088302041240.0, - "grad_norm": 2.989892462898501, - "learning_rate": 3.0273075260575455e-06, - "loss": 1.0008, - "num_input_tokens_seen": 61434595, - "step": 2898 - }, - { - "epoch": 0.3485841399627247, - "flos": 15479818423440.0, - "grad_norm": 3.104433974121952, - "learning_rate": 3.0266390923814396e-06, - "loss": 1.0244, - "num_input_tokens_seen": 61452375, - "step": 2899 - }, - { - "epoch": 0.3487043828533638, - "flos": 12049749446760.0, - "grad_norm": 3.238531565453617, - "learning_rate": 3.0259705029650008e-06, - "loss": 1.0497, - "num_input_tokens_seen": 61470025, - "step": 2900 - }, - { - "epoch": 0.34882462574400286, - "flos": 16061467628160.0, - "grad_norm": 4.749196436321779, - "learning_rate": 
3.025301757909652e-06, - "loss": 0.9607, - "num_input_tokens_seen": 61489940, - "step": 2901 - }, - { - "epoch": 0.34894486863464197, - "flos": 21279329514480.0, - "grad_norm": 4.192722082016795, - "learning_rate": 3.024632857316842e-06, - "loss": 1.0362, - "num_input_tokens_seen": 61510975, - "step": 2902 - }, - { - "epoch": 0.3490651115252811, - "flos": 15716698245600.0, - "grad_norm": 6.5662171508501865, - "learning_rate": 3.0239638012880412e-06, - "loss": 0.9987, - "num_input_tokens_seen": 61530590, - "step": 2903 - }, - { - "epoch": 0.34918535441592014, - "flos": 8928907556520.0, - "grad_norm": 5.577980531162714, - "learning_rate": 3.0232945899247466e-06, - "loss": 1.0436, - "num_input_tokens_seen": 61547245, - "step": 2904 - }, - { - "epoch": 0.34930559730655925, - "flos": 16481619769440.0, - "grad_norm": 3.777807134314772, - "learning_rate": 3.022625223328476e-06, - "loss": 1.0021, - "num_input_tokens_seen": 61568705, - "step": 2905 - }, - { - "epoch": 0.34942584019719836, - "flos": 16243728115800.0, - "grad_norm": 2.603620019123277, - "learning_rate": 3.0219557016007723e-06, - "loss": 0.9214, - "num_input_tokens_seen": 61588555, - "step": 2906 - }, - { - "epoch": 0.3495460830878374, - "flos": 17373078630240.0, - "grad_norm": 2.826651679425124, - "learning_rate": 3.021286024843202e-06, - "loss": 0.9293, - "num_input_tokens_seen": 61606470, - "step": 2907 - }, - { - "epoch": 0.3496663259784765, - "flos": 50134127667480.0, - "grad_norm": 1.1332059505812702, - "learning_rate": 3.0206161931573526e-06, - "loss": 0.9364, - "num_input_tokens_seen": 61658740, - "step": 2908 - }, - { - "epoch": 0.34978656886911563, - "flos": 20439239862840.0, - "grad_norm": 2.5807203696236027, - "learning_rate": 3.0199462066448388e-06, - "loss": 1.1635, - "num_input_tokens_seen": 61680655, - "step": 2909 - }, - { - "epoch": 0.3499068117597547, - "flos": 15012988291680.0, - "grad_norm": 2.8280949037752467, - "learning_rate": 3.019276065407296e-06, - "loss": 0.9181, - "num_input_tokens_seen": 61699495, - "step": 2910 - }, - { - "epoch": 0.3500270546503938, - "flos": 16191837660120.0, - "grad_norm": 2.7213273383222027, - "learning_rate": 3.018605769546385e-06, - "loss": 1.0376, - "num_input_tokens_seen": 61719770, - "step": 2911 - }, - { - "epoch": 0.3501472975410329, - "flos": 16272126268440.0, - "grad_norm": 2.8950282196619708, - "learning_rate": 3.017935319163788e-06, - "loss": 1.0257, - "num_input_tokens_seen": 61738450, - "step": 2912 - }, - { - "epoch": 0.35026754043167196, - "flos": 18106382537640.0, - "grad_norm": 7.727002138102224, - "learning_rate": 3.017264714361213e-06, - "loss": 0.9428, - "num_input_tokens_seen": 61757820, - "step": 2913 - }, - { - "epoch": 0.3503877833223111, - "flos": 13885876071120.0, - "grad_norm": 4.224059204452481, - "learning_rate": 3.016593955240389e-06, - "loss": 1.0437, - "num_input_tokens_seen": 61776230, - "step": 2914 - }, - { - "epoch": 0.3505080262129502, - "flos": 46589663331600.0, - "grad_norm": 0.8093161926114588, - "learning_rate": 3.015923041903071e-06, - "loss": 0.886, - "num_input_tokens_seen": 61842075, - "step": 2915 - }, - { - "epoch": 0.35062826910358924, - "flos": 20886287740320.0, - "grad_norm": 3.6750597838644783, - "learning_rate": 3.0152519744510347e-06, - "loss": 1.0612, - "num_input_tokens_seen": 61861595, - "step": 2916 - }, - { - "epoch": 0.35074851199422835, - "flos": 17057995185840.0, - "grad_norm": 3.1072757861637093, - "learning_rate": 3.014580752986081e-06, - "loss": 1.0659, - "num_input_tokens_seen": 61880190, - "step": 2917 - }, - { - 
"epoch": 0.3508687548848674, - "flos": 10759085838240.0, - "grad_norm": 6.538209264730236, - "learning_rate": 3.0139093776100345e-06, - "loss": 1.0088, - "num_input_tokens_seen": 61896500, - "step": 2918 - }, - { - "epoch": 0.3509889977755065, - "flos": 15170897952600.0, - "grad_norm": 2.9400422478579205, - "learning_rate": 3.013237848424741e-06, - "loss": 0.9828, - "num_input_tokens_seen": 61915605, - "step": 2919 - }, - { - "epoch": 0.35110924066614563, - "flos": 13570731303600.0, - "grad_norm": 6.373401516239741, - "learning_rate": 3.012566165532072e-06, - "loss": 0.9844, - "num_input_tokens_seen": 61934115, - "step": 2920 - }, - { - "epoch": 0.3512294835567847, - "flos": 15615707536200.0, - "grad_norm": 3.7498645758017752, - "learning_rate": 3.0118943290339207e-06, - "loss": 0.9835, - "num_input_tokens_seen": 61954045, - "step": 2921 - }, - { - "epoch": 0.3513497264474238, - "flos": 12622844076240.0, - "grad_norm": 3.4224263622311906, - "learning_rate": 3.011222339032204e-06, - "loss": 0.8995, - "num_input_tokens_seen": 61971915, - "step": 2922 - }, - { - "epoch": 0.3514699693380629, - "flos": 19182248195280.0, - "grad_norm": 2.5625573128594517, - "learning_rate": 3.0105501956288626e-06, - "loss": 0.9148, - "num_input_tokens_seen": 61992105, - "step": 2923 - }, - { - "epoch": 0.35159021222870196, - "flos": 10790979408720.0, - "grad_norm": 5.950227287503329, - "learning_rate": 3.0098778989258602e-06, - "loss": 0.9556, - "num_input_tokens_seen": 62010435, - "step": 2924 - }, - { - "epoch": 0.35171045511934107, - "flos": 9867749623680.0, - "grad_norm": 2.8687266030946943, - "learning_rate": 3.009205449025183e-06, - "loss": 1.1082, - "num_input_tokens_seen": 62026350, - "step": 2925 - }, - { - "epoch": 0.3518306980099802, - "flos": 10083835360080.0, - "grad_norm": 4.486986204183516, - "learning_rate": 3.008532846028842e-06, - "loss": 0.8527, - "num_input_tokens_seen": 62042830, - "step": 2926 - }, - { - "epoch": 0.35195094090061924, - "flos": 19260911740920.0, - "grad_norm": 4.605377843475686, - "learning_rate": 3.0078600900388694e-06, - "loss": 0.9336, - "num_input_tokens_seen": 62062855, - "step": 2927 - }, - { - "epoch": 0.35207118379125835, - "flos": 18237703077960.0, - "grad_norm": 3.4683312515577613, - "learning_rate": 3.007187181157323e-06, - "loss": 0.9774, - "num_input_tokens_seen": 62082585, - "step": 2928 - }, - { - "epoch": 0.35219142668189746, - "flos": 12758211942480.0, - "grad_norm": 4.094370554707746, - "learning_rate": 3.006514119486282e-06, - "loss": 0.9088, - "num_input_tokens_seen": 62099135, - "step": 2929 - }, - { - "epoch": 0.3523116695725365, - "flos": 9900348410040.0, - "grad_norm": 3.1402885294730236, - "learning_rate": 3.005840905127849e-06, - "loss": 0.9199, - "num_input_tokens_seen": 62115760, - "step": 2930 - }, - { - "epoch": 0.3524319124631756, - "flos": 15222696423600.0, - "grad_norm": 2.8704638977491514, - "learning_rate": 3.0051675381841516e-06, - "loss": 1.0989, - "num_input_tokens_seen": 62132790, - "step": 2931 - }, - { - "epoch": 0.3525521553538147, - "flos": 18736150826160.0, - "grad_norm": 2.031895930951866, - "learning_rate": 3.0044940187573363e-06, - "loss": 0.9918, - "num_input_tokens_seen": 62153520, - "step": 2932 - }, - { - "epoch": 0.3526723982444538, - "flos": 15301267984560.0, - "grad_norm": 4.674058690901955, - "learning_rate": 3.003820346949578e-06, - "loss": 0.905, - "num_input_tokens_seen": 62171320, - "step": 2933 - }, - { - "epoch": 0.3527926411350929, - "flos": 16875581390400.0, - "grad_norm": 4.012833511895236, - "learning_rate": 
3.003146522863071e-06, - "loss": 1.0192, - "num_input_tokens_seen": 62191925, - "step": 2934 - }, - { - "epoch": 0.35291288402573195, - "flos": 21699021732360.0, - "grad_norm": 3.5490190520754648, - "learning_rate": 3.0024725466000345e-06, - "loss": 1.0797, - "num_input_tokens_seen": 62211600, - "step": 2935 - }, - { - "epoch": 0.35303312691637107, - "flos": 16428778805400.0, - "grad_norm": 4.448568589664347, - "learning_rate": 3.0017984182627087e-06, - "loss": 1.0153, - "num_input_tokens_seen": 62230645, - "step": 2936 - }, - { - "epoch": 0.3531533698070102, - "flos": 15511466701440.0, - "grad_norm": 4.917237243303763, - "learning_rate": 3.00112413795336e-06, - "loss": 1.0462, - "num_input_tokens_seen": 62250200, - "step": 2937 - }, - { - "epoch": 0.35327361269764923, - "flos": 11158597201560.0, - "grad_norm": 2.7374953422332573, - "learning_rate": 3.000449705774275e-06, - "loss": 1.0444, - "num_input_tokens_seen": 62268160, - "step": 2938 - }, - { - "epoch": 0.35339385558828834, - "flos": 15694432404960.0, - "grad_norm": 5.160257342818884, - "learning_rate": 2.9997751218277654e-06, - "loss": 0.9377, - "num_input_tokens_seen": 62286035, - "step": 2939 - }, - { - "epoch": 0.35351409847892745, - "flos": 17184716492160.0, - "grad_norm": 20.68788354062892, - "learning_rate": 2.999100386216166e-06, - "loss": 1.0008, - "num_input_tokens_seen": 62304695, - "step": 2940 - }, - { - "epoch": 0.3536343413695665, - "flos": 19260421155960.0, - "grad_norm": 2.9328557088003118, - "learning_rate": 2.998425499041831e-06, - "loss": 0.9714, - "num_input_tokens_seen": 62324930, - "step": 2941 - }, - { - "epoch": 0.3537545842602056, - "flos": 47251049236560.0, - "grad_norm": 0.8844410214532282, - "learning_rate": 2.997750460407142e-06, - "loss": 0.8593, - "num_input_tokens_seen": 62386005, - "step": 2942 - }, - { - "epoch": 0.35387482715084473, - "flos": 13068052260120.0, - "grad_norm": 4.756976976615999, - "learning_rate": 2.997075270414501e-06, - "loss": 0.9294, - "num_input_tokens_seen": 62402940, - "step": 2943 - }, - { - "epoch": 0.3539950700414838, - "flos": 46961635065960.0, - "grad_norm": 0.7210021187040863, - "learning_rate": 2.9963999291663347e-06, - "loss": 0.8319, - "num_input_tokens_seen": 62468440, - "step": 2944 - }, - { - "epoch": 0.3541153129321229, - "flos": 14567013568800.0, - "grad_norm": 18.95018381316483, - "learning_rate": 2.9957244367650915e-06, - "loss": 0.9732, - "num_input_tokens_seen": 62484405, - "step": 2945 - }, - { - "epoch": 0.354235555822762, - "flos": 13885722763320.0, - "grad_norm": 3.736742107390934, - "learning_rate": 2.9950487933132425e-06, - "loss": 1.0681, - "num_input_tokens_seen": 62501540, - "step": 2946 - }, - { - "epoch": 0.35435579871340106, - "flos": 14750745811320.0, - "grad_norm": 2.8346616959598534, - "learning_rate": 2.994372998913283e-06, - "loss": 0.9345, - "num_input_tokens_seen": 62519765, - "step": 2947 - }, - { - "epoch": 0.35447604160404017, - "flos": 17032264588920.0, - "grad_norm": 3.6739559688764203, - "learning_rate": 2.99369705366773e-06, - "loss": 0.8568, - "num_input_tokens_seen": 62539730, - "step": 2948 - }, - { - "epoch": 0.3545962844946792, - "flos": 16660599470160.0, - "grad_norm": 4.481981868382095, - "learning_rate": 2.9930209576791244e-06, - "loss": 1.0397, - "num_input_tokens_seen": 62557925, - "step": 2949 - }, - { - "epoch": 0.35471652738531834, - "flos": 15689925155640.0, - "grad_norm": 11.622189417886561, - "learning_rate": 2.9923447110500285e-06, - "loss": 0.8726, - "num_input_tokens_seen": 62576390, - "step": 2950 - }, - { - 
"epoch": 0.35483677027595745, - "flos": 19468228271160.0, - "grad_norm": 3.5720447036064638, - "learning_rate": 2.9916683138830295e-06, - "loss": 0.9823, - "num_input_tokens_seen": 62596775, - "step": 2951 - }, - { - "epoch": 0.3549570131665965, - "flos": 9532270693800.0, - "grad_norm": 3.216064369724649, - "learning_rate": 2.9909917662807353e-06, - "loss": 1.0227, - "num_input_tokens_seen": 62614295, - "step": 2952 - }, - { - "epoch": 0.3550772560572356, - "flos": 14830114572840.0, - "grad_norm": 6.999989373335265, - "learning_rate": 2.9903150683457783e-06, - "loss": 0.9058, - "num_input_tokens_seen": 62632560, - "step": 2953 - }, - { - "epoch": 0.3551974989478747, - "flos": 14331666824640.0, - "grad_norm": 3.815788351775349, - "learning_rate": 2.9896382201808126e-06, - "loss": 0.877, - "num_input_tokens_seen": 62649680, - "step": 2954 - }, - { - "epoch": 0.3553177418385138, - "flos": 14069148390240.0, - "grad_norm": 4.236134969230686, - "learning_rate": 2.988961221888516e-06, - "loss": 1.0216, - "num_input_tokens_seen": 62666075, - "step": 2955 - }, - { - "epoch": 0.3554379847291529, - "flos": 10475803979640.0, - "grad_norm": 4.611903024307345, - "learning_rate": 2.988284073571589e-06, - "loss": 1.017, - "num_input_tokens_seen": 62681880, - "step": 2956 - }, - { - "epoch": 0.355558227619792, - "flos": 14540945694720.0, - "grad_norm": 3.596489434411153, - "learning_rate": 2.9876067753327528e-06, - "loss": 0.9516, - "num_input_tokens_seen": 62699330, - "step": 2957 - }, - { - "epoch": 0.35567847051043106, - "flos": 26885756587200.0, - "grad_norm": 3.3819714352999153, - "learning_rate": 2.986929327274754e-06, - "loss": 1.0218, - "num_input_tokens_seen": 62719630, - "step": 2958 - }, - { - "epoch": 0.35579871340107017, - "flos": 19181941579680.0, - "grad_norm": 2.59515545006413, - "learning_rate": 2.9862517295003617e-06, - "loss": 1.0056, - "num_input_tokens_seen": 62739765, - "step": 2959 - }, - { - "epoch": 0.3559189562917093, - "flos": 20152401263280.0, - "grad_norm": 3.222742215946211, - "learning_rate": 2.9855739821123654e-06, - "loss": 0.9627, - "num_input_tokens_seen": 62761065, - "step": 2960 - }, - { - "epoch": 0.35603919918234833, - "flos": 18262483166520.0, - "grad_norm": 2.4204364072581646, - "learning_rate": 2.98489608521358e-06, - "loss": 1.0421, - "num_input_tokens_seen": 62780725, - "step": 2961 - }, - { - "epoch": 0.35615944207298744, - "flos": 16348520858640.0, - "grad_norm": 3.139523441086846, - "learning_rate": 2.9842180389068425e-06, - "loss": 1.0241, - "num_input_tokens_seen": 62797755, - "step": 2962 - }, - { - "epoch": 0.35627968496362655, - "flos": 48875168112000.0, - "grad_norm": 0.7786659229625564, - "learning_rate": 2.98353984329501e-06, - "loss": 0.8511, - "num_input_tokens_seen": 62861820, - "step": 2963 - }, - { - "epoch": 0.3563999278542656, - "flos": 16091184227880.0, - "grad_norm": 2.4303510440586926, - "learning_rate": 2.982861498480965e-06, - "loss": 0.9404, - "num_input_tokens_seen": 62883920, - "step": 2964 - }, - { - "epoch": 0.3565201707449047, - "flos": 18469339773360.0, - "grad_norm": 2.412250418921644, - "learning_rate": 2.9821830045676122e-06, - "loss": 1.0496, - "num_input_tokens_seen": 62903340, - "step": 2965 - }, - { - "epoch": 0.3566404136355438, - "flos": 20283476511120.0, - "grad_norm": 2.56573598345188, - "learning_rate": 2.9815043616578793e-06, - "loss": 0.9387, - "num_input_tokens_seen": 62923855, - "step": 2966 - }, - { - "epoch": 0.3567606565261829, - "flos": 27389324815920.0, - "grad_norm": 3.1455034162981543, - "learning_rate": 
2.9808255698547145e-06, - "loss": 1.0042, - "num_input_tokens_seen": 62946375, - "step": 2967 - }, - { - "epoch": 0.356880899416822, - "flos": 15615830182440.0, - "grad_norm": 3.065924928635694, - "learning_rate": 2.9801466292610913e-06, - "loss": 1.0161, - "num_input_tokens_seen": 62965980, - "step": 2968 - }, - { - "epoch": 0.35700114230746105, - "flos": 13465601283600.0, - "grad_norm": 3.5114345414313686, - "learning_rate": 2.979467539980003e-06, - "loss": 1.0296, - "num_input_tokens_seen": 62982490, - "step": 2969 - }, - { - "epoch": 0.35712138519810016, - "flos": 14016920657400.0, - "grad_norm": 2.66054044797505, - "learning_rate": 2.978788302114468e-06, - "loss": 0.9928, - "num_input_tokens_seen": 62999325, - "step": 2970 - }, - { - "epoch": 0.35724162808873927, - "flos": 25103053496520.0, - "grad_norm": 3.1865831440257546, - "learning_rate": 2.9781089157675255e-06, - "loss": 1.0339, - "num_input_tokens_seen": 63017505, - "step": 2971 - }, - { - "epoch": 0.3573618709793783, - "flos": 18185628652920.0, - "grad_norm": 3.8675329805433876, - "learning_rate": 2.977429381042238e-06, - "loss": 1.1166, - "num_input_tokens_seen": 63037900, - "step": 2972 - }, - { - "epoch": 0.35748211387001744, - "flos": 20755365800280.0, - "grad_norm": 11.466655654310513, - "learning_rate": 2.9767496980416913e-06, - "loss": 1.1244, - "num_input_tokens_seen": 63056915, - "step": 2973 - }, - { - "epoch": 0.35760235676065655, - "flos": 9847078184160.0, - "grad_norm": 8.480401276504955, - "learning_rate": 2.9760698668689914e-06, - "loss": 1.0419, - "num_input_tokens_seen": 63072860, - "step": 2974 - }, - { - "epoch": 0.3577225996512956, - "flos": 31453914622920.0, - "grad_norm": 2.684496509790594, - "learning_rate": 2.975389887627269e-06, - "loss": 0.9463, - "num_input_tokens_seen": 63095180, - "step": 2975 - }, - { - "epoch": 0.3578428425419347, - "flos": 12075694674600.0, - "grad_norm": 3.2291276170563976, - "learning_rate": 2.9747097604196764e-06, - "loss": 1.122, - "num_input_tokens_seen": 63111545, - "step": 2976 - }, - { - "epoch": 0.3579630854325738, - "flos": 51336249159960.0, - "grad_norm": 0.6793737015979365, - "learning_rate": 2.9740294853493875e-06, - "loss": 0.8279, - "num_input_tokens_seen": 63182825, - "step": 2977 - }, - { - "epoch": 0.3580833283232129, - "flos": 17819145337800.0, - "grad_norm": 18.064948843242938, - "learning_rate": 2.9733490625196008e-06, - "loss": 0.9087, - "num_input_tokens_seen": 63202405, - "step": 2978 - }, - { - "epoch": 0.358203571213852, - "flos": 9846188998920.0, - "grad_norm": 7.709848560869595, - "learning_rate": 2.9726684920335353e-06, - "loss": 0.9542, - "num_input_tokens_seen": 63219990, - "step": 2979 - }, - { - "epoch": 0.35832381410449105, - "flos": 14409441185040.0, - "grad_norm": 3.694817862764207, - "learning_rate": 2.971987773994432e-06, - "loss": 1.0513, - "num_input_tokens_seen": 63235895, - "step": 2980 - }, - { - "epoch": 0.35844405699513016, - "flos": 12024110834520.0, - "grad_norm": 16.548910071679853, - "learning_rate": 2.9713069085055566e-06, - "loss": 1.0526, - "num_input_tokens_seen": 63253925, - "step": 2981 - }, - { - "epoch": 0.35856429988576927, - "flos": 16503364363560.0, - "grad_norm": 7.9395821311013535, - "learning_rate": 2.9706258956701958e-06, - "loss": 1.0185, - "num_input_tokens_seen": 63273635, - "step": 2982 - }, - { - "epoch": 0.3586845427764083, - "flos": 16372381100400.0, - "grad_norm": 4.552449482988912, - "learning_rate": 2.9699447355916575e-06, - "loss": 0.9962, - "num_input_tokens_seen": 63292165, - "step": 2983 - }, - { - 
"epoch": 0.35880478566704743, - "flos": 14199763714680.0, - "grad_norm": 4.029219004867391, - "learning_rate": 2.969263428373275e-06, - "loss": 0.9718, - "num_input_tokens_seen": 63310235, - "step": 2984 - }, - { - "epoch": 0.35892502855768654, - "flos": 9663805865040.0, - "grad_norm": 3.164735774239605, - "learning_rate": 2.9685819741184007e-06, - "loss": 1.0074, - "num_input_tokens_seen": 63328395, - "step": 2985 - }, - { - "epoch": 0.3590452714483256, - "flos": 12836814165000.0, - "grad_norm": 3.738790033011978, - "learning_rate": 2.967900372930411e-06, - "loss": 0.9092, - "num_input_tokens_seen": 63346625, - "step": 2986 - }, - { - "epoch": 0.3591655143389647, - "flos": 12574694330880.0, - "grad_norm": 4.70873486092374, - "learning_rate": 2.9672186249127046e-06, - "loss": 1.0172, - "num_input_tokens_seen": 63365810, - "step": 2987 - }, - { - "epoch": 0.3592857572296038, - "flos": 17946265244400.0, - "grad_norm": 4.479465468738443, - "learning_rate": 2.9665367301687014e-06, - "loss": 1.0161, - "num_input_tokens_seen": 63383775, - "step": 2988 - }, - { - "epoch": 0.3594060001202429, - "flos": 20936062548360.0, - "grad_norm": 4.215324537062217, - "learning_rate": 2.965854688801845e-06, - "loss": 0.99, - "num_input_tokens_seen": 63405555, - "step": 2989 - }, - { - "epoch": 0.359526243010882, - "flos": 12073732334760.0, - "grad_norm": 3.916603953623728, - "learning_rate": 2.9651725009156005e-06, - "loss": 0.9868, - "num_input_tokens_seen": 63423020, - "step": 2990 - }, - { - "epoch": 0.3596464859015211, - "flos": 16322912907960.0, - "grad_norm": 3.3403529799559633, - "learning_rate": 2.964490166613454e-06, - "loss": 0.9737, - "num_input_tokens_seen": 63442665, - "step": 2991 - }, - { - "epoch": 0.35976672879216015, - "flos": 39162844562880.0, - "grad_norm": 0.8234390258774925, - "learning_rate": 2.963807685998917e-06, - "loss": 0.8306, - "num_input_tokens_seen": 63498250, - "step": 2992 - }, - { - "epoch": 0.35988697168279926, - "flos": 30821172163080.0, - "grad_norm": 1.8365025726142477, - "learning_rate": 2.9631250591755196e-06, - "loss": 1.0101, - "num_input_tokens_seen": 63520685, - "step": 2993 - }, - { - "epoch": 0.36000721457343837, - "flos": 25580032604640.0, - "grad_norm": 4.185912101642922, - "learning_rate": 2.962442286246817e-06, - "loss": 0.8136, - "num_input_tokens_seen": 63543235, - "step": 2994 - }, - { - "epoch": 0.3601274574640774, - "flos": 12963290178840.0, - "grad_norm": 2.3166649916754873, - "learning_rate": 2.9617593673163853e-06, - "loss": 0.9292, - "num_input_tokens_seen": 63561775, - "step": 2995 - }, - { - "epoch": 0.36024770035471654, - "flos": 9401348753760.0, - "grad_norm": 4.9380488995973355, - "learning_rate": 2.9610763024878216e-06, - "loss": 0.9955, - "num_input_tokens_seen": 63577000, - "step": 2996 - }, - { - "epoch": 0.3603679432453556, - "flos": 14384109188400.0, - "grad_norm": 3.006923791952964, - "learning_rate": 2.960393091864747e-06, - "loss": 1.1414, - "num_input_tokens_seen": 63595100, - "step": 2997 - }, - { - "epoch": 0.3604881861359947, - "flos": 15953118144360.0, - "grad_norm": 2.205438900298632, - "learning_rate": 2.959709735550804e-06, - "loss": 0.9688, - "num_input_tokens_seen": 63614415, - "step": 2998 - }, - { - "epoch": 0.3606084290266338, - "flos": 15668487177120.0, - "grad_norm": 4.043398980731782, - "learning_rate": 2.9590262336496575e-06, - "loss": 0.9838, - "num_input_tokens_seen": 63633865, - "step": 2999 - }, - { - "epoch": 0.36072867191727287, - "flos": 11053620489360.0, - "grad_norm": 5.884647321350676, - "learning_rate": 
2.9583425862649936e-06, - "loss": 1.079, - "num_input_tokens_seen": 63651720, - "step": 3000 - }, - { - "epoch": 0.360848914807912, - "flos": 13959235166880.0, - "grad_norm": 4.661477450823519, - "learning_rate": 2.9576587935005215e-06, - "loss": 0.9707, - "num_input_tokens_seen": 63669520, - "step": 3001 - }, - { - "epoch": 0.3609691576985511, - "flos": 13385343336840.0, - "grad_norm": 5.528049556029766, - "learning_rate": 2.9569748554599713e-06, - "loss": 0.9483, - "num_input_tokens_seen": 63684850, - "step": 3002 - }, - { - "epoch": 0.36108940058919015, - "flos": 30163251014400.0, - "grad_norm": 9.408373216038234, - "learning_rate": 2.956290772247097e-06, - "loss": 0.9594, - "num_input_tokens_seen": 63703245, - "step": 3003 - }, - { - "epoch": 0.36120964347982926, - "flos": 16481221169160.0, - "grad_norm": 4.521653319265424, - "learning_rate": 2.9556065439656724e-06, - "loss": 0.9671, - "num_input_tokens_seen": 63722015, - "step": 3004 - }, - { - "epoch": 0.36132988637046837, - "flos": 12835924979760.0, - "grad_norm": 2.3208009353467496, - "learning_rate": 2.9549221707194952e-06, - "loss": 1.0472, - "num_input_tokens_seen": 63740585, - "step": 3005 - }, - { - "epoch": 0.3614501292611074, - "flos": 19806466741440.0, - "grad_norm": 4.682347835872887, - "learning_rate": 2.954237652612384e-06, - "loss": 0.9697, - "num_input_tokens_seen": 63759355, - "step": 3006 - }, - { - "epoch": 0.36157037215174653, - "flos": 16085389193040.0, - "grad_norm": 11.326879645985846, - "learning_rate": 2.9535529897481796e-06, - "loss": 1.081, - "num_input_tokens_seen": 63776620, - "step": 3007 - }, - { - "epoch": 0.36169061504238564, - "flos": 8874012267960.0, - "grad_norm": 9.847707770219257, - "learning_rate": 2.9528681822307446e-06, - "loss": 0.9909, - "num_input_tokens_seen": 63793190, - "step": 3008 - }, - { - "epoch": 0.3618108579330247, - "flos": 18995817735480.0, - "grad_norm": 4.05080408661897, - "learning_rate": 2.952183230163964e-06, - "loss": 1.0529, - "num_input_tokens_seen": 63812485, - "step": 3009 - }, - { - "epoch": 0.3619311008236638, - "flos": 16216219148400.0, - "grad_norm": 2.5219621154033107, - "learning_rate": 2.9514981336517448e-06, - "loss": 0.9662, - "num_input_tokens_seen": 63831975, - "step": 3010 - }, - { - "epoch": 0.36205134371430286, - "flos": 18446154085920.0, - "grad_norm": 2.2820427967107206, - "learning_rate": 2.950812892798015e-06, - "loss": 1.0348, - "num_input_tokens_seen": 63852590, - "step": 3011 - }, - { - "epoch": 0.362171586604942, - "flos": 18604401024000.0, - "grad_norm": 2.887544877665493, - "learning_rate": 2.9501275077067256e-06, - "loss": 1.1017, - "num_input_tokens_seen": 63872930, - "step": 3012 - }, - { - "epoch": 0.3622918294955811, - "flos": 19994890202640.0, - "grad_norm": 2.659734616916108, - "learning_rate": 2.949441978481848e-06, - "loss": 1.1194, - "num_input_tokens_seen": 63893550, - "step": 3013 - }, - { - "epoch": 0.36241207238622014, - "flos": 14068443174360.0, - "grad_norm": 3.043048196521866, - "learning_rate": 2.9487563052273778e-06, - "loss": 1.0282, - "num_input_tokens_seen": 63910030, - "step": 3014 - }, - { - "epoch": 0.36253231527685925, - "flos": 15196199287680.0, - "grad_norm": 2.7503674868485524, - "learning_rate": 2.94807048804733e-06, - "loss": 1.0967, - "num_input_tokens_seen": 63929370, - "step": 3015 - }, - { - "epoch": 0.36265255816749836, - "flos": 13014720711120.0, - "grad_norm": 5.7765744659819935, - "learning_rate": 2.9473845270457434e-06, - "loss": 1.1276, - "num_input_tokens_seen": 63945905, - "step": 3016 - }, - { - 
"epoch": 0.3627728010581374, - "flos": 13228997415480.0, - "grad_norm": 3.385402753752461, - "learning_rate": 2.946698422326677e-06, - "loss": 0.9155, - "num_input_tokens_seen": 63963085, - "step": 3017 - }, - { - "epoch": 0.36289304394877653, - "flos": 19599303519000.0, - "grad_norm": 6.227994096129651, - "learning_rate": 2.946012173994213e-06, - "loss": 1.0336, - "num_input_tokens_seen": 63982590, - "step": 3018 - }, - { - "epoch": 0.36301328683941564, - "flos": 24637020565320.0, - "grad_norm": 1.9558481382577093, - "learning_rate": 2.945325782152454e-06, - "loss": 0.9177, - "num_input_tokens_seen": 64005345, - "step": 3019 - }, - { - "epoch": 0.3631335297300547, - "flos": 13492129081080.0, - "grad_norm": 4.873196390491843, - "learning_rate": 2.9446392469055257e-06, - "loss": 1.0265, - "num_input_tokens_seen": 64023100, - "step": 3020 - }, - { - "epoch": 0.3632537726206938, - "flos": 13859225627400.0, - "grad_norm": 2.4846602941843656, - "learning_rate": 2.9439525683575745e-06, - "loss": 1.0278, - "num_input_tokens_seen": 64041740, - "step": 3021 - }, - { - "epoch": 0.3633740155113329, - "flos": 14960055342960.0, - "grad_norm": 3.0815317887834945, - "learning_rate": 2.9432657466127694e-06, - "loss": 0.983, - "num_input_tokens_seen": 64061030, - "step": 3022 - }, - { - "epoch": 0.36349425840197197, - "flos": 14406252382800.0, - "grad_norm": 2.4212777662679144, - "learning_rate": 2.9425787817753007e-06, - "loss": 1.0074, - "num_input_tokens_seen": 64079410, - "step": 3023 - }, - { - "epoch": 0.3636145012926111, - "flos": 21175456618440.0, - "grad_norm": 2.045641811545359, - "learning_rate": 2.94189167394938e-06, - "loss": 0.9392, - "num_input_tokens_seen": 64101565, - "step": 3024 - }, - { - "epoch": 0.3637347441832502, - "flos": 15220887391560.0, - "grad_norm": 2.618529473899878, - "learning_rate": 2.941204423239241e-06, - "loss": 1.0412, - "num_input_tokens_seen": 64120160, - "step": 3025 - }, - { - "epoch": 0.36385498707388925, - "flos": 21044013431880.0, - "grad_norm": 3.8011922271061365, - "learning_rate": 2.9405170297491395e-06, - "loss": 0.9884, - "num_input_tokens_seen": 64139875, - "step": 3026 - }, - { - "epoch": 0.36397522996452836, - "flos": 15799501101840.0, - "grad_norm": 4.423502161004129, - "learning_rate": 2.939829493583353e-06, - "loss": 1.0286, - "num_input_tokens_seen": 64156240, - "step": 3027 - }, - { - "epoch": 0.3640954728551674, - "flos": 15274770848640.0, - "grad_norm": 3.821848679433974, - "learning_rate": 2.939141814846179e-06, - "loss": 1.0523, - "num_input_tokens_seen": 64173375, - "step": 3028 - }, - { - "epoch": 0.3642157157458065, - "flos": 12548473149000.0, - "grad_norm": 1.959751211063572, - "learning_rate": 2.938453993641938e-06, - "loss": 1.0597, - "num_input_tokens_seen": 64191470, - "step": 3029 - }, - { - "epoch": 0.36433595863644563, - "flos": 12495724169640.0, - "grad_norm": 2.7599166321257207, - "learning_rate": 2.937766030074973e-06, - "loss": 0.9241, - "num_input_tokens_seen": 64208445, - "step": 3030 - }, - { - "epoch": 0.3644562015270847, - "flos": 19103002080000.0, - "grad_norm": 2.102909719890168, - "learning_rate": 2.937077924249646e-06, - "loss": 1.045, - "num_input_tokens_seen": 64230755, - "step": 3031 - }, - { - "epoch": 0.3645764444177238, - "flos": 10083222128880.0, - "grad_norm": 2.7011413351830145, - "learning_rate": 2.9363896762703443e-06, - "loss": 0.9782, - "num_input_tokens_seen": 64247540, - "step": 3032 - }, - { - "epoch": 0.3646966873083629, - "flos": 14671806311640.0, - "grad_norm": 2.9593403315703792, - "learning_rate": 
2.9357012862414725e-06, - "loss": 1.0731, - "num_input_tokens_seen": 64266620, - "step": 3033 - }, - { - "epoch": 0.36481693019900197, - "flos": 19785366040080.0, - "grad_norm": 2.957367932038423, - "learning_rate": 2.9350127542674593e-06, - "loss": 0.9479, - "num_input_tokens_seen": 64288550, - "step": 3034 - }, - { - "epoch": 0.3649371730896411, - "flos": 13984689809760.0, - "grad_norm": 5.567014084137769, - "learning_rate": 2.934324080452755e-06, - "loss": 0.997, - "num_input_tokens_seen": 64306060, - "step": 3035 - }, - { - "epoch": 0.3650574159802802, - "flos": 17605727157120.0, - "grad_norm": 1.9994389820027205, - "learning_rate": 2.9336352649018307e-06, - "loss": 1.0155, - "num_input_tokens_seen": 64325850, - "step": 3036 - }, - { - "epoch": 0.36517765887091924, - "flos": 23429619736440.0, - "grad_norm": 2.00717687379203, - "learning_rate": 2.9329463077191783e-06, - "loss": 0.9378, - "num_input_tokens_seen": 64348945, - "step": 3037 - }, - { - "epoch": 0.36529790176155835, - "flos": 14278580568120.0, - "grad_norm": 3.4070839517213467, - "learning_rate": 2.9322572090093135e-06, - "loss": 0.8665, - "num_input_tokens_seen": 64367370, - "step": 3038 - }, - { - "epoch": 0.36541814465219746, - "flos": 12495754831200.0, - "grad_norm": 4.047523711655206, - "learning_rate": 2.9315679688767713e-06, - "loss": 0.9717, - "num_input_tokens_seen": 64385100, - "step": 3039 - }, - { - "epoch": 0.3655383875428365, - "flos": 16114124622840.0, - "grad_norm": 1.9687813650387833, - "learning_rate": 2.9308785874261085e-06, - "loss": 0.8916, - "num_input_tokens_seen": 64405010, - "step": 3040 - }, - { - "epoch": 0.36565863043347563, - "flos": 15615952828680.0, - "grad_norm": 3.7533143973225753, - "learning_rate": 2.9301890647619045e-06, - "loss": 1.0414, - "num_input_tokens_seen": 64424025, - "step": 3041 - }, - { - "epoch": 0.36577887332411474, - "flos": 17661726261840.0, - "grad_norm": 2.2511213475331755, - "learning_rate": 2.929499400988759e-06, - "loss": 1.0183, - "num_input_tokens_seen": 64444905, - "step": 3042 - }, - { - "epoch": 0.3658991162147538, - "flos": 20152278617040.0, - "grad_norm": 2.730869771298915, - "learning_rate": 2.9288095962112927e-06, - "loss": 0.8828, - "num_input_tokens_seen": 64465330, - "step": 3043 - }, - { - "epoch": 0.3660193591053929, - "flos": 12600670220280.0, - "grad_norm": 3.1750329744323773, - "learning_rate": 2.9281196505341503e-06, - "loss": 1.074, - "num_input_tokens_seen": 64482220, - "step": 3044 - }, - { - "epoch": 0.36613960199603196, - "flos": 7251733086120.0, - "grad_norm": 2.4099308941062345, - "learning_rate": 2.9274295640619946e-06, - "loss": 1.0183, - "num_input_tokens_seen": 64499070, - "step": 3045 - }, - { - "epoch": 0.36625984488667107, - "flos": 14016491395560.0, - "grad_norm": 2.2226620424325882, - "learning_rate": 2.9267393368995103e-06, - "loss": 1.0193, - "num_input_tokens_seen": 64518020, - "step": 3046 - }, - { - "epoch": 0.3663800877773102, - "flos": 12520442935080.0, - "grad_norm": 4.083262747848679, - "learning_rate": 2.926048969151407e-06, - "loss": 0.9645, - "num_input_tokens_seen": 64535025, - "step": 3047 - }, - { - "epoch": 0.36650033066794924, - "flos": 14410422354960.0, - "grad_norm": 2.641464100079821, - "learning_rate": 2.92535846092241e-06, - "loss": 0.9024, - "num_input_tokens_seen": 64553760, - "step": 3048 - }, - { - "epoch": 0.36662057355858835, - "flos": 17581406991960.0, - "grad_norm": 3.338752791483179, - "learning_rate": 2.9246678123172704e-06, - "loss": 1.052, - "num_input_tokens_seen": 64573570, - "step": 3049 - }, - { - 
"epoch": 0.36674081644922746, - "flos": 8719536701760.0, - "grad_norm": 4.013429271484912, - "learning_rate": 2.9239770234407596e-06, - "loss": 0.9537, - "num_input_tokens_seen": 64591595, - "step": 3050 - }, - { - "epoch": 0.3668610593398665, - "flos": 14986981740720.0, - "grad_norm": 2.138879774605404, - "learning_rate": 2.9232860943976686e-06, - "loss": 0.9121, - "num_input_tokens_seen": 64612050, - "step": 3051 - }, - { - "epoch": 0.3669813022305056, - "flos": 19048658699520.0, - "grad_norm": 2.1646298102149006, - "learning_rate": 2.9225950252928115e-06, - "loss": 1.068, - "num_input_tokens_seen": 64632620, - "step": 3052 - }, - { - "epoch": 0.36710154512114473, - "flos": 13545062029800.0, - "grad_norm": 2.8182382326268733, - "learning_rate": 2.9219038162310217e-06, - "loss": 1.0341, - "num_input_tokens_seen": 64650540, - "step": 3053 - }, - { - "epoch": 0.3672217880117838, - "flos": 14775495238320.0, - "grad_norm": 2.802647861115761, - "learning_rate": 2.921212467317157e-06, - "loss": 1.051, - "num_input_tokens_seen": 64669705, - "step": 3054 - }, - { - "epoch": 0.3673420309024229, - "flos": 9585786212160.0, - "grad_norm": 4.08917703085509, - "learning_rate": 2.920520978656093e-06, - "loss": 1.0321, - "num_input_tokens_seen": 64686390, - "step": 3055 - }, - { - "epoch": 0.367462273793062, - "flos": 20650879673040.0, - "grad_norm": 5.754201266410089, - "learning_rate": 2.919829350352729e-06, - "loss": 0.9941, - "num_input_tokens_seen": 64707715, - "step": 3056 - }, - { - "epoch": 0.36758251668370107, - "flos": 44841220299600.0, - "grad_norm": 0.8102083901205621, - "learning_rate": 2.919137582511983e-06, - "loss": 0.8461, - "num_input_tokens_seen": 64763875, - "step": 3057 - }, - { - "epoch": 0.3677027595743402, - "flos": 8955741969600.0, - "grad_norm": 4.891883566280968, - "learning_rate": 2.918445675238797e-06, - "loss": 0.8679, - "num_input_tokens_seen": 64780520, - "step": 3058 - }, - { - "epoch": 0.36782300246497923, - "flos": 17819084014680.0, - "grad_norm": 2.2256796709569824, - "learning_rate": 2.917753628638132e-06, - "loss": 0.9134, - "num_input_tokens_seen": 64800545, - "step": 3059 - }, - { - "epoch": 0.36794324535561834, - "flos": 12337446570000.0, - "grad_norm": 4.36345238550635, - "learning_rate": 2.9170614428149716e-06, - "loss": 0.9286, - "num_input_tokens_seen": 64818600, - "step": 3060 - }, - { - "epoch": 0.36806348824625745, - "flos": 17129790542040.0, - "grad_norm": 2.7738278396363483, - "learning_rate": 2.9163691178743195e-06, - "loss": 1.0926, - "num_input_tokens_seen": 64836970, - "step": 3061 - }, - { - "epoch": 0.3681837311368965, - "flos": 14567197538160.0, - "grad_norm": 2.798136676953976, - "learning_rate": 2.9156766539212006e-06, - "loss": 1.0179, - "num_input_tokens_seen": 64854335, - "step": 3062 - }, - { - "epoch": 0.3683039740275356, - "flos": 15245667480120.0, - "grad_norm": 3.070777760748603, - "learning_rate": 2.9149840510606614e-06, - "loss": 0.9464, - "num_input_tokens_seen": 64872710, - "step": 3063 - }, - { - "epoch": 0.36842421691817473, - "flos": 50402655767640.0, - "grad_norm": 1.0194011035152073, - "learning_rate": 2.914291309397769e-06, - "loss": 0.9132, - "num_input_tokens_seen": 64929900, - "step": 3064 - }, - { - "epoch": 0.3685444598088138, - "flos": 16586351189160.0, - "grad_norm": 3.8048845118717654, - "learning_rate": 2.9135984290376117e-06, - "loss": 1.0126, - "num_input_tokens_seen": 64948485, - "step": 3065 - }, - { - "epoch": 0.3686647026994529, - "flos": 16397927727960.0, - "grad_norm": 2.254664566230059, - "learning_rate": 
2.9129054100853e-06, - "loss": 1.0637, - "num_input_tokens_seen": 64967045, - "step": 3066 - }, - { - "epoch": 0.368784945590092, - "flos": 17871832994040.0, - "grad_norm": 2.080733075255605, - "learning_rate": 2.912212252645963e-06, - "loss": 0.9787, - "num_input_tokens_seen": 64989350, - "step": 3067 - }, - { - "epoch": 0.36890518848073106, - "flos": 13072651494120.0, - "grad_norm": 2.610413304778601, - "learning_rate": 2.9115189568247523e-06, - "loss": 0.9894, - "num_input_tokens_seen": 65006630, - "step": 3068 - }, - { - "epoch": 0.36902543137137017, - "flos": 11577890819160.0, - "grad_norm": 2.6120535984761597, - "learning_rate": 2.910825522726841e-06, - "loss": 1.1442, - "num_input_tokens_seen": 65023875, - "step": 3069 - }, - { - "epoch": 0.3691456742620093, - "flos": 8640689186760.0, - "grad_norm": 3.002542054503567, - "learning_rate": 2.9101319504574215e-06, - "loss": 1.0097, - "num_input_tokens_seen": 65040035, - "step": 3070 - }, - { - "epoch": 0.36926591715264834, - "flos": 12601406097720.0, - "grad_norm": 2.021866118280398, - "learning_rate": 2.909438240121709e-06, - "loss": 0.9882, - "num_input_tokens_seen": 65060030, - "step": 3071 - }, - { - "epoch": 0.36938616004328745, - "flos": 20622880120680.0, - "grad_norm": 2.4436823374559227, - "learning_rate": 2.908744391824939e-06, - "loss": 0.9316, - "num_input_tokens_seen": 65080770, - "step": 3072 - }, - { - "epoch": 0.36950640293392656, - "flos": 20807869487160.0, - "grad_norm": 26.07187869883409, - "learning_rate": 2.908050405672367e-06, - "loss": 1.0128, - "num_input_tokens_seen": 65100035, - "step": 3073 - }, - { - "epoch": 0.3696266458245656, - "flos": 17661848908080.0, - "grad_norm": 4.753262854789711, - "learning_rate": 2.9073562817692703e-06, - "loss": 1.0279, - "num_input_tokens_seen": 65118440, - "step": 3074 - }, - { - "epoch": 0.3697468887152047, - "flos": 42862913394600.0, - "grad_norm": 0.7940004148986655, - "learning_rate": 2.9066620202209468e-06, - "loss": 0.843, - "num_input_tokens_seen": 65180650, - "step": 3075 - }, - { - "epoch": 0.3698671316058438, - "flos": 18602683976640.0, - "grad_norm": 2.0968511588593186, - "learning_rate": 2.905967621132716e-06, - "loss": 1.0198, - "num_input_tokens_seen": 65197980, - "step": 3076 - }, - { - "epoch": 0.3699873744964829, - "flos": 17503326015960.0, - "grad_norm": 4.054588948781257, - "learning_rate": 2.9052730846099172e-06, - "loss": 0.9687, - "num_input_tokens_seen": 65219045, - "step": 3077 - }, - { - "epoch": 0.370107617387122, - "flos": 46454878035000.0, - "grad_norm": 0.9012428602975475, - "learning_rate": 2.9045784107579123e-06, - "loss": 0.88, - "num_input_tokens_seen": 65278870, - "step": 3078 - }, - { - "epoch": 0.37022786027776106, - "flos": 11289948403440.0, - "grad_norm": 2.913391690604492, - "learning_rate": 2.9038835996820807e-06, - "loss": 0.9085, - "num_input_tokens_seen": 65296200, - "step": 3079 - }, - { - "epoch": 0.37034810316840017, - "flos": 13147206390720.0, - "grad_norm": 3.0144954799530264, - "learning_rate": 2.903188651487826e-06, - "loss": 1.0175, - "num_input_tokens_seen": 65314475, - "step": 3080 - }, - { - "epoch": 0.3704683460590393, - "flos": 12626799417480.0, - "grad_norm": 4.2974611194709205, - "learning_rate": 2.902493566280571e-06, - "loss": 1.0963, - "num_input_tokens_seen": 65332300, - "step": 3081 - }, - { - "epoch": 0.37058858894967833, - "flos": 9976037784360.0, - "grad_norm": 3.1103041904261426, - "learning_rate": 2.9017983441657595e-06, - "loss": 1.042, - "num_input_tokens_seen": 65349350, - "step": 3082 - }, - { - "epoch": 
0.37070883184031744, - "flos": 9847354138200.0, - "grad_norm": 3.1758117298948614, - "learning_rate": 2.9011029852488564e-06, - "loss": 0.9914, - "num_input_tokens_seen": 65366305, - "step": 3083 - }, - { - "epoch": 0.37082907473095655, - "flos": 37418571503040.0, - "grad_norm": 1.0159757155309594, - "learning_rate": 2.9004074896353465e-06, - "loss": 0.9085, - "num_input_tokens_seen": 65420025, - "step": 3084 - }, - { - "epoch": 0.3709493176215956, - "flos": 11316108262200.0, - "grad_norm": 3.1698442544794294, - "learning_rate": 2.8997118574307362e-06, - "loss": 1.0391, - "num_input_tokens_seen": 65436700, - "step": 3085 - }, - { - "epoch": 0.3710695605122347, - "flos": 14881974366960.0, - "grad_norm": 4.081170008577934, - "learning_rate": 2.899016088740553e-06, - "loss": 0.982, - "num_input_tokens_seen": 65454530, - "step": 3086 - }, - { - "epoch": 0.37118980340287383, - "flos": 10135572507960.0, - "grad_norm": 3.781971507949059, - "learning_rate": 2.898320183670344e-06, - "loss": 1.0209, - "num_input_tokens_seen": 65471665, - "step": 3087 - }, - { - "epoch": 0.3713100462935129, - "flos": 18421864582320.0, - "grad_norm": 3.5139529172536297, - "learning_rate": 2.8976241423256767e-06, - "loss": 1.1163, - "num_input_tokens_seen": 65491480, - "step": 3088 - }, - { - "epoch": 0.371430289184152, - "flos": 21751893357960.0, - "grad_norm": 2.8640370535877726, - "learning_rate": 2.896927964812142e-06, - "loss": 0.9186, - "num_input_tokens_seen": 65511765, - "step": 3089 - }, - { - "epoch": 0.37155053207479105, - "flos": 11132161388760.0, - "grad_norm": 6.589637667198473, - "learning_rate": 2.8962316512353465e-06, - "loss": 0.9685, - "num_input_tokens_seen": 65529030, - "step": 3090 - }, - { - "epoch": 0.37167077496543016, - "flos": 16638548260440.0, - "grad_norm": 2.2357461630195283, - "learning_rate": 2.8955352017009233e-06, - "loss": 0.9859, - "num_input_tokens_seen": 65547995, - "step": 3091 - }, - { - "epoch": 0.3717910178560693, - "flos": 15692929988520.0, - "grad_norm": 3.37293264861598, - "learning_rate": 2.8948386163145212e-06, - "loss": 0.9994, - "num_input_tokens_seen": 65566925, - "step": 3092 - }, - { - "epoch": 0.3719112607467083, - "flos": 19179304685520.0, - "grad_norm": 2.2130075206671895, - "learning_rate": 2.8941418951818135e-06, - "loss": 1.0177, - "num_input_tokens_seen": 65586205, - "step": 3093 - }, - { - "epoch": 0.37203150363734744, - "flos": 8562332256720.0, - "grad_norm": 4.591333285562194, - "learning_rate": 2.8934450384084903e-06, - "loss": 0.9387, - "num_input_tokens_seen": 65603440, - "step": 3094 - }, - { - "epoch": 0.37215174652798655, - "flos": 16849728147240.0, - "grad_norm": 4.743987645163699, - "learning_rate": 2.8927480461002653e-06, - "loss": 0.9403, - "num_input_tokens_seen": 65623130, - "step": 3095 - }, - { - "epoch": 0.3722719894186256, - "flos": 12673140130800.0, - "grad_norm": 7.053604330086155, - "learning_rate": 2.892050918362872e-06, - "loss": 1.0645, - "num_input_tokens_seen": 65637905, - "step": 3096 - }, - { - "epoch": 0.3723922323092647, - "flos": 44680183159560.0, - "grad_norm": 0.9148522901910104, - "learning_rate": 2.8913536553020626e-06, - "loss": 0.8368, - "num_input_tokens_seen": 65691680, - "step": 3097 - }, - { - "epoch": 0.3725124751999038, - "flos": 16376796365040.0, - "grad_norm": 2.292251812761397, - "learning_rate": 2.8906562570236137e-06, - "loss": 1.0888, - "num_input_tokens_seen": 65709310, - "step": 3098 - }, - { - "epoch": 0.3726327180905429, - "flos": 14853361583400.0, - "grad_norm": 1.6910352057655902, - "learning_rate": 
2.889958723633318e-06, - "loss": 0.9968, - "num_input_tokens_seen": 65727970, - "step": 3099 - }, - { - "epoch": 0.372752960981182, - "flos": 21804550352640.0, - "grad_norm": 3.0110223462236223, - "learning_rate": 2.889261055236992e-06, - "loss": 0.967, - "num_input_tokens_seen": 65749905, - "step": 3100 - }, - { - "epoch": 0.3728732038718211, - "flos": 17869134776760.0, - "grad_norm": 2.0049658162159116, - "learning_rate": 2.8885632519404704e-06, - "loss": 1.0594, - "num_input_tokens_seen": 65769895, - "step": 3101 - }, - { - "epoch": 0.37299344676246016, - "flos": 18002693610960.0, - "grad_norm": 2.44392048272795, - "learning_rate": 2.8878653138496107e-06, - "loss": 0.9948, - "num_input_tokens_seen": 65790110, - "step": 3102 - }, - { - "epoch": 0.37311368965309927, - "flos": 16953754351080.0, - "grad_norm": 2.726886107032848, - "learning_rate": 2.8871672410702878e-06, - "loss": 0.9794, - "num_input_tokens_seen": 65807190, - "step": 3103 - }, - { - "epoch": 0.3732339325437384, - "flos": 18369023618280.0, - "grad_norm": 2.149098242678391, - "learning_rate": 2.8864690337084008e-06, - "loss": 1.0473, - "num_input_tokens_seen": 65826185, - "step": 3104 - }, - { - "epoch": 0.37335417543437743, - "flos": 18653317308360.0, - "grad_norm": 3.1838705305297483, - "learning_rate": 2.885770691869866e-06, - "loss": 1.0101, - "num_input_tokens_seen": 65846785, - "step": 3105 - }, - { - "epoch": 0.37347441832501654, - "flos": 17083940413680.0, - "grad_norm": 2.623102410496191, - "learning_rate": 2.8850722156606207e-06, - "loss": 0.9691, - "num_input_tokens_seen": 65864895, - "step": 3106 - }, - { - "epoch": 0.3735946612156556, - "flos": 13986682811160.0, - "grad_norm": 2.08087114431391, - "learning_rate": 2.8843736051866252e-06, - "loss": 0.8911, - "num_input_tokens_seen": 65883540, - "step": 3107 - }, - { - "epoch": 0.3737149041062947, - "flos": 16376673718800.0, - "grad_norm": 4.265370120304168, - "learning_rate": 2.8836748605538557e-06, - "loss": 0.9272, - "num_input_tokens_seen": 65904900, - "step": 3108 - }, - { - "epoch": 0.3738351469969338, - "flos": 24740985446040.0, - "grad_norm": 2.9833308285608555, - "learning_rate": 2.882975981868313e-06, - "loss": 0.8506, - "num_input_tokens_seen": 65925005, - "step": 3109 - }, - { - "epoch": 0.3739553898875729, - "flos": 31086879399720.0, - "grad_norm": 2.803722672552505, - "learning_rate": 2.882276969236016e-06, - "loss": 0.9031, - "num_input_tokens_seen": 65946085, - "step": 3110 - }, - { - "epoch": 0.374075632778212, - "flos": 9057775172040.0, - "grad_norm": 3.551914751782574, - "learning_rate": 2.881577822763005e-06, - "loss": 0.9792, - "num_input_tokens_seen": 65963755, - "step": 3111 - }, - { - "epoch": 0.3741958756688511, - "flos": 18520525013160.0, - "grad_norm": 2.4463419834283253, - "learning_rate": 2.880878542555338e-06, - "loss": 1.1098, - "num_input_tokens_seen": 65981240, - "step": 3112 - }, - { - "epoch": 0.37431611855949015, - "flos": 15222635100480.0, - "grad_norm": 2.514965278886352, - "learning_rate": 2.8801791287190976e-06, - "loss": 1.0391, - "num_input_tokens_seen": 65998955, - "step": 3113 - }, - { - "epoch": 0.37443636145012926, - "flos": 17215720877400.0, - "grad_norm": 2.8166389527923106, - "learning_rate": 2.8794795813603817e-06, - "loss": 1.071, - "num_input_tokens_seen": 66014140, - "step": 3114 - }, - { - "epoch": 0.3745566043407684, - "flos": 10870992063000.0, - "grad_norm": 2.478859302690534, - "learning_rate": 2.878779900585314e-06, - "loss": 1.0393, - "num_input_tokens_seen": 66031700, - "step": 3115 - }, - { - "epoch": 
0.37467684723140743, - "flos": 17503969908720.0, - "grad_norm": 2.9716323512804808, - "learning_rate": 2.8780800865000336e-06, - "loss": 0.9913, - "num_input_tokens_seen": 66052730, - "step": 3116 - }, - { - "epoch": 0.37479709012204654, - "flos": 46090295736600.0, - "grad_norm": 1.0384440481718173, - "learning_rate": 2.877380139210702e-06, - "loss": 0.8925, - "num_input_tokens_seen": 66111120, - "step": 3117 - }, - { - "epoch": 0.37491733301268565, - "flos": 16925754798720.0, - "grad_norm": 19.518991465545326, - "learning_rate": 2.876680058823501e-06, - "loss": 0.9804, - "num_input_tokens_seen": 66131240, - "step": 3118 - }, - { - "epoch": 0.3750375759033247, - "flos": 22931999850360.0, - "grad_norm": 20.674873855372052, - "learning_rate": 2.8759798454446314e-06, - "loss": 0.8881, - "num_input_tokens_seen": 66154125, - "step": 3119 - }, - { - "epoch": 0.3751578187939638, - "flos": 16612511047920.0, - "grad_norm": 2.341044377443675, - "learning_rate": 2.8752794991803173e-06, - "loss": 1.0433, - "num_input_tokens_seen": 66171530, - "step": 3120 - }, - { - "epoch": 0.37527806168460287, - "flos": 10314644193360.0, - "grad_norm": 12.613226811588934, - "learning_rate": 2.8745790201367976e-06, - "loss": 0.9836, - "num_input_tokens_seen": 66187005, - "step": 3121 - }, - { - "epoch": 0.375398304575242, - "flos": 18784392556200.0, - "grad_norm": 3.033224819324382, - "learning_rate": 2.8738784084203373e-06, - "loss": 1.0822, - "num_input_tokens_seen": 66206800, - "step": 3122 - }, - { - "epoch": 0.3755185474658811, - "flos": 15799531763400.0, - "grad_norm": 3.2945875575984647, - "learning_rate": 2.873177664137216e-06, - "loss": 1.0161, - "num_input_tokens_seen": 66227450, - "step": 3123 - }, - { - "epoch": 0.37563879035652015, - "flos": 21963471845040.0, - "grad_norm": 1.9815107685405813, - "learning_rate": 2.8724767873937384e-06, - "loss": 0.9176, - "num_input_tokens_seen": 66251290, - "step": 3124 - }, - { - "epoch": 0.37575903324715926, - "flos": 14747557009080.0, - "grad_norm": 3.1944450562702276, - "learning_rate": 2.871775778296225e-06, - "loss": 1.0996, - "num_input_tokens_seen": 66268100, - "step": 3125 - }, - { - "epoch": 0.37587927613779837, - "flos": 13256199767280.0, - "grad_norm": 3.3496294471428483, - "learning_rate": 2.8710746369510196e-06, - "loss": 1.0177, - "num_input_tokens_seen": 66285805, - "step": 3126 - }, - { - "epoch": 0.3759995190284374, - "flos": 9610229023560.0, - "grad_norm": 4.715214295194798, - "learning_rate": 2.8703733634644846e-06, - "loss": 1.0683, - "num_input_tokens_seen": 66300280, - "step": 3127 - }, - { - "epoch": 0.37611976191907653, - "flos": 14540516432880.0, - "grad_norm": 2.9328558713849024, - "learning_rate": 2.869671957943002e-06, - "loss": 1.0321, - "num_input_tokens_seen": 66319155, - "step": 3128 - }, - { - "epoch": 0.37624000480971564, - "flos": 15012681676080.0, - "grad_norm": 2.773309323209975, - "learning_rate": 2.8689704204929747e-06, - "loss": 0.9809, - "num_input_tokens_seen": 66338055, - "step": 3129 - }, - { - "epoch": 0.3763602477003547, - "flos": 16035093138480.0, - "grad_norm": 4.003832649857075, - "learning_rate": 2.8682687512208253e-06, - "loss": 1.0302, - "num_input_tokens_seen": 66356785, - "step": 3130 - }, - { - "epoch": 0.3764804905909938, - "flos": 19601511151320.0, - "grad_norm": 2.873859345096832, - "learning_rate": 2.8675669502329972e-06, - "loss": 1.0396, - "num_input_tokens_seen": 66378035, - "step": 3131 - }, - { - "epoch": 0.3766007334816329, - "flos": 16008963941280.0, - "grad_norm": 3.401638023508815, - "learning_rate": 
2.866865017635952e-06, - "loss": 1.0619, - "num_input_tokens_seen": 66395575, - "step": 3132 - }, - { - "epoch": 0.376720976372272, - "flos": 18473295114600.0, - "grad_norm": 2.228078867901528, - "learning_rate": 2.866162953536174e-06, - "loss": 1.0255, - "num_input_tokens_seen": 66416265, - "step": 3133 - }, - { - "epoch": 0.3768412192629111, - "flos": 12784402462800.0, - "grad_norm": 2.63121296546226, - "learning_rate": 2.8654607580401634e-06, - "loss": 0.9795, - "num_input_tokens_seen": 66435720, - "step": 3134 - }, - { - "epoch": 0.3769614621535502, - "flos": 45092541716520.0, - "grad_norm": 1.3159632177836078, - "learning_rate": 2.8647584312544446e-06, - "loss": 0.9154, - "num_input_tokens_seen": 66500645, - "step": 3135 - }, - { - "epoch": 0.37708170504418925, - "flos": 16823997550320.0, - "grad_norm": 2.764035818985795, - "learning_rate": 2.864055973285559e-06, - "loss": 1.0858, - "num_input_tokens_seen": 66522365, - "step": 3136 - }, - { - "epoch": 0.37720194793482836, - "flos": 17370625705440.0, - "grad_norm": 2.128774432379781, - "learning_rate": 2.8633533842400698e-06, - "loss": 1.0965, - "num_input_tokens_seen": 66542285, - "step": 3137 - }, - { - "epoch": 0.3773221908254674, - "flos": 14908348856640.0, - "grad_norm": 2.6881619015369056, - "learning_rate": 2.862650664224558e-06, - "loss": 1.0087, - "num_input_tokens_seen": 66560855, - "step": 3138 - }, - { - "epoch": 0.37744243371610653, - "flos": 26863460085000.0, - "grad_norm": 4.2426503527855335, - "learning_rate": 2.861947813345627e-06, - "loss": 0.9288, - "num_input_tokens_seen": 66583275, - "step": 3139 - }, - { - "epoch": 0.37756267660674564, - "flos": 18605198224560.0, - "grad_norm": 2.6605357881034135, - "learning_rate": 2.8612448317098974e-06, - "loss": 0.9457, - "num_input_tokens_seen": 66603330, - "step": 3140 - }, - { - "epoch": 0.3776829194973847, - "flos": 13781052666720.0, - "grad_norm": 2.7370435499657617, - "learning_rate": 2.8605417194240114e-06, - "loss": 1.0687, - "num_input_tokens_seen": 66621410, - "step": 3141 - }, - { - "epoch": 0.3778031623880238, - "flos": 12310612156920.0, - "grad_norm": 2.728073182547924, - "learning_rate": 2.8598384765946315e-06, - "loss": 1.0227, - "num_input_tokens_seen": 66639785, - "step": 3142 - }, - { - "epoch": 0.3779234052786629, - "flos": 19313354104680.0, - "grad_norm": 2.540532927586719, - "learning_rate": 2.8591351033284377e-06, - "loss": 0.9458, - "num_input_tokens_seen": 66659235, - "step": 3143 - }, - { - "epoch": 0.37804364816930197, - "flos": 13964049031800.0, - "grad_norm": 3.6415425073567027, - "learning_rate": 2.8584315997321325e-06, - "loss": 1.0512, - "num_input_tokens_seen": 66677960, - "step": 3144 - }, - { - "epoch": 0.3781638910599411, - "flos": 16133569599960.0, - "grad_norm": 3.1122659250508176, - "learning_rate": 2.8577279659124356e-06, - "loss": 1.0059, - "num_input_tokens_seen": 66695355, - "step": 3145 - }, - { - "epoch": 0.3782841339505802, - "flos": 10345464609240.0, - "grad_norm": 2.0443466528825534, - "learning_rate": 2.857024201976089e-06, - "loss": 1.0515, - "num_input_tokens_seen": 66712635, - "step": 3146 - }, - { - "epoch": 0.37840437684121925, - "flos": 23404441047600.0, - "grad_norm": 3.9117812401389633, - "learning_rate": 2.8563203080298516e-06, - "loss": 0.9579, - "num_input_tokens_seen": 66733130, - "step": 3147 - }, - { - "epoch": 0.37852461973185836, - "flos": 13020117145680.0, - "grad_norm": 3.5331459019688967, - "learning_rate": 2.855616284180505e-06, - "loss": 1.1122, - "num_input_tokens_seen": 66749900, - "step": 3148 - }, - { - 
"epoch": 0.37864486262249747, - "flos": 42587082295080.0, - "grad_norm": 0.9321992426972504, - "learning_rate": 2.8549121305348477e-06, - "loss": 0.9579, - "num_input_tokens_seen": 66809405, - "step": 3149 - }, - { - "epoch": 0.3787651055131365, - "flos": 16608433060440.0, - "grad_norm": 2.7623661474880707, - "learning_rate": 2.8542078471997006e-06, - "loss": 1.0472, - "num_input_tokens_seen": 66826740, - "step": 3150 - }, - { - "epoch": 0.37888534840377563, - "flos": 17498144212320.0, - "grad_norm": 8.282968479566737, - "learning_rate": 2.8535034342819013e-06, - "loss": 0.9875, - "num_input_tokens_seen": 66843870, - "step": 3151 - }, - { - "epoch": 0.37900559129441475, - "flos": 17057289969960.0, - "grad_norm": 1.8382114045329843, - "learning_rate": 2.85279889188831e-06, - "loss": 0.9513, - "num_input_tokens_seen": 66863965, - "step": 3152 - }, - { - "epoch": 0.3791258341850538, - "flos": 17530160429040.0, - "grad_norm": 4.853519751318668, - "learning_rate": 2.852094220125805e-06, - "loss": 1.0297, - "num_input_tokens_seen": 66883195, - "step": 3153 - }, - { - "epoch": 0.3792460770756929, - "flos": 12338581047720.0, - "grad_norm": 2.9427738328694066, - "learning_rate": 2.8513894191012846e-06, - "loss": 0.9344, - "num_input_tokens_seen": 66901895, - "step": 3154 - }, - { - "epoch": 0.37936631996633197, - "flos": 17215260954000.0, - "grad_norm": 2.1684523584340836, - "learning_rate": 2.8506844889216664e-06, - "loss": 1.0099, - "num_input_tokens_seen": 66921000, - "step": 3155 - }, - { - "epoch": 0.3794865628569711, - "flos": 50345823252720.0, - "grad_norm": 0.8904925130714656, - "learning_rate": 2.849979429693887e-06, - "loss": 0.9018, - "num_input_tokens_seen": 66981705, - "step": 3156 - }, - { - "epoch": 0.3796068057476102, - "flos": 11158934478720.0, - "grad_norm": 3.0404841767777446, - "learning_rate": 2.8492742415249042e-06, - "loss": 0.9761, - "num_input_tokens_seen": 66999070, - "step": 3157 - }, - { - "epoch": 0.37972704863824924, - "flos": 17923263526320.0, - "grad_norm": 2.7052375608756996, - "learning_rate": 2.848568924521694e-06, - "loss": 0.9928, - "num_input_tokens_seen": 67019570, - "step": 3158 - }, - { - "epoch": 0.37984729152888835, - "flos": 18654973032600.0, - "grad_norm": 3.3431878018359553, - "learning_rate": 2.8478634787912526e-06, - "loss": 0.9616, - "num_input_tokens_seen": 67037345, - "step": 3159 - }, - { - "epoch": 0.37996753441952746, - "flos": 18237733739520.0, - "grad_norm": 3.5314178426908494, - "learning_rate": 2.847157904440596e-06, - "loss": 1.0056, - "num_input_tokens_seen": 67056795, - "step": 3160 - }, - { - "epoch": 0.3800877773101665, - "flos": 14276955505440.0, - "grad_norm": 2.3653762989005345, - "learning_rate": 2.846452201576759e-06, - "loss": 0.9846, - "num_input_tokens_seen": 67075890, - "step": 3161 - }, - { - "epoch": 0.38020802020080563, - "flos": 45137441336520.0, - "grad_norm": 1.0065473555497693, - "learning_rate": 2.845746370306795e-06, - "loss": 0.8872, - "num_input_tokens_seen": 67140800, - "step": 3162 - }, - { - "epoch": 0.38032826309144474, - "flos": 15118148973240.0, - "grad_norm": 3.1943056343621494, - "learning_rate": 2.84504041073778e-06, - "loss": 1.0102, - "num_input_tokens_seen": 67158935, - "step": 3163 - }, - { - "epoch": 0.3804485059820838, - "flos": 13440637225680.0, - "grad_norm": 3.616306667312431, - "learning_rate": 2.844334322976806e-06, - "loss": 1.0374, - "num_input_tokens_seen": 67178870, - "step": 3164 - }, - { - "epoch": 0.3805687488727229, - "flos": 15509810977200.0, - "grad_norm": 2.787007063315714, - 
"learning_rate": 2.8436281071309866e-06, - "loss": 1.0586, - "num_input_tokens_seen": 67197130, - "step": 3165 - }, - { - "epoch": 0.380688991763362, - "flos": 41900333731920.0, - "grad_norm": 0.743117863626439, - "learning_rate": 2.842921763307455e-06, - "loss": 0.7867, - "num_input_tokens_seen": 67259660, - "step": 3166 - }, - { - "epoch": 0.38080923465400107, - "flos": 16921952765280.0, - "grad_norm": 2.742000475643459, - "learning_rate": 2.842215291613361e-06, - "loss": 1.0615, - "num_input_tokens_seen": 67277760, - "step": 3167 - }, - { - "epoch": 0.3809294775446402, - "flos": 39329983353360.0, - "grad_norm": 0.8281461964899511, - "learning_rate": 2.8415086921558774e-06, - "loss": 0.8687, - "num_input_tokens_seen": 67340905, - "step": 3168 - }, - { - "epoch": 0.38104972043527924, - "flos": 17529332566920.0, - "grad_norm": 3.458281658353382, - "learning_rate": 2.840801965042194e-06, - "loss": 1.0129, - "num_input_tokens_seen": 67360085, - "step": 3169 - }, - { - "epoch": 0.38116996332591835, - "flos": 16245230532240.0, - "grad_norm": 3.5606374054247865, - "learning_rate": 2.840095110379521e-06, - "loss": 1.0645, - "num_input_tokens_seen": 67379325, - "step": 3170 - }, - { - "epoch": 0.38129020621655746, - "flos": 43544909415960.0, - "grad_norm": 0.7422664399579698, - "learning_rate": 2.8393881282750884e-06, - "loss": 0.7842, - "num_input_tokens_seen": 67441875, - "step": 3171 - }, - { - "epoch": 0.3814104491071965, - "flos": 15375945527400.0, - "grad_norm": 2.9194704657339776, - "learning_rate": 2.838681018836144e-06, - "loss": 1.0143, - "num_input_tokens_seen": 67458915, - "step": 3172 - }, - { - "epoch": 0.3815306919978356, - "flos": 13544694091080.0, - "grad_norm": 8.088124326872896, - "learning_rate": 2.837973782169955e-06, - "loss": 1.0056, - "num_input_tokens_seen": 67477010, - "step": 3173 - }, - { - "epoch": 0.38165093488847474, - "flos": 48022568543880.0, - "grad_norm": 0.8934591273505136, - "learning_rate": 2.8372664183838096e-06, - "loss": 0.8563, - "num_input_tokens_seen": 67539750, - "step": 3174 - }, - { - "epoch": 0.3817711777791138, - "flos": 15873964013760.0, - "grad_norm": 8.622709108263056, - "learning_rate": 2.836558927585015e-06, - "loss": 0.9114, - "num_input_tokens_seen": 67556440, - "step": 3175 - }, - { - "epoch": 0.3818914206697529, - "flos": 16219285304400.0, - "grad_norm": 2.483845108430588, - "learning_rate": 2.8358513098808957e-06, - "loss": 1.0538, - "num_input_tokens_seen": 67576475, - "step": 3176 - }, - { - "epoch": 0.382011663560392, - "flos": 17343883277040.0, - "grad_norm": 3.9708849353512883, - "learning_rate": 2.835143565378798e-06, - "loss": 0.9943, - "num_input_tokens_seen": 67596660, - "step": 3177 - }, - { - "epoch": 0.38213190645103107, - "flos": 15616290105840.0, - "grad_norm": 4.380791890223641, - "learning_rate": 2.8344356941860847e-06, - "loss": 1.0098, - "num_input_tokens_seen": 67616010, - "step": 3178 - }, - { - "epoch": 0.3822521493416702, - "flos": 25343796675240.0, - "grad_norm": 6.828438967681527, - "learning_rate": 2.8337276964101403e-06, - "loss": 0.8946, - "num_input_tokens_seen": 67636170, - "step": 3179 - }, - { - "epoch": 0.3823723922323093, - "flos": 14961159159120.0, - "grad_norm": 7.228734101501111, - "learning_rate": 2.833019572158367e-06, - "loss": 0.9843, - "num_input_tokens_seen": 67654325, - "step": 3180 - }, - { - "epoch": 0.38249263512294834, - "flos": 14040198329520.0, - "grad_norm": 2.3978473546152093, - "learning_rate": 2.8323113215381872e-06, - "loss": 1.0431, - "num_input_tokens_seen": 67672390, - "step": 3181 
- }, - { - "epoch": 0.38261287801358745, - "flos": 15222665762040.0, - "grad_norm": 2.747516203980915, - "learning_rate": 2.831602944657042e-06, - "loss": 0.9793, - "num_input_tokens_seen": 67690190, - "step": 3182 - }, - { - "epoch": 0.38273312090422656, - "flos": 15616290105840.0, - "grad_norm": 4.0776212534601495, - "learning_rate": 2.830894441622391e-06, - "loss": 0.9752, - "num_input_tokens_seen": 67706560, - "step": 3183 - }, - { - "epoch": 0.3828533637948656, - "flos": 17635290449040.0, - "grad_norm": 2.635275755320416, - "learning_rate": 2.8301858125417134e-06, - "loss": 1.0177, - "num_input_tokens_seen": 67726120, - "step": 3184 - }, - { - "epoch": 0.38297360668550473, - "flos": 15956797531560.0, - "grad_norm": 3.731129021656195, - "learning_rate": 2.8294770575225082e-06, - "loss": 0.9748, - "num_input_tokens_seen": 67745970, - "step": 3185 - }, - { - "epoch": 0.3830938495761438, - "flos": 17711010484920.0, - "grad_norm": 2.812492031509979, - "learning_rate": 2.828768176672293e-06, - "loss": 1.0711, - "num_input_tokens_seen": 67764805, - "step": 3186 - }, - { - "epoch": 0.3832140924667829, - "flos": 23561001599880.0, - "grad_norm": 3.297498020405786, - "learning_rate": 2.8280591700986044e-06, - "loss": 0.9514, - "num_input_tokens_seen": 67786390, - "step": 3187 - }, - { - "epoch": 0.383334335357422, - "flos": 22746887837640.0, - "grad_norm": 2.8669897525791828, - "learning_rate": 2.827350037908999e-06, - "loss": 0.9789, - "num_input_tokens_seen": 67805550, - "step": 3188 - }, - { - "epoch": 0.38345457824806106, - "flos": 14042559269640.0, - "grad_norm": 4.488153972099276, - "learning_rate": 2.8266407802110496e-06, - "loss": 1.0231, - "num_input_tokens_seen": 67823525, - "step": 3189 - }, - { - "epoch": 0.3835748211387002, - "flos": 15930944288400.0, - "grad_norm": 2.879123053287101, - "learning_rate": 2.8259313971123515e-06, - "loss": 0.986, - "num_input_tokens_seen": 67844365, - "step": 3190 - }, - { - "epoch": 0.3836950640293393, - "flos": 17870575870080.0, - "grad_norm": 3.7445290393373143, - "learning_rate": 2.8252218887205166e-06, - "loss": 1.0061, - "num_input_tokens_seen": 67864775, - "step": 3191 - }, - { - "epoch": 0.38381530691997834, - "flos": 15485245519560.0, - "grad_norm": 3.954640335531241, - "learning_rate": 2.824512255143178e-06, - "loss": 1.036, - "num_input_tokens_seen": 67883730, - "step": 3192 - }, - { - "epoch": 0.38393554981061745, - "flos": 15092571684120.0, - "grad_norm": 2.8368984307336116, - "learning_rate": 2.8238024964879855e-06, - "loss": 1.0193, - "num_input_tokens_seen": 67904345, - "step": 3193 - }, - { - "epoch": 0.38405579270125656, - "flos": 12050025400800.0, - "grad_norm": 5.557541769799961, - "learning_rate": 2.8230926128626095e-06, - "loss": 0.989, - "num_input_tokens_seen": 67922560, - "step": 3194 - }, - { - "epoch": 0.3841760355918956, - "flos": 15511190747400.0, - "grad_norm": 3.5169195207808515, - "learning_rate": 2.822382604374738e-06, - "loss": 1.0211, - "num_input_tokens_seen": 67941205, - "step": 3195 - }, - { - "epoch": 0.3842962784825347, - "flos": 18442321390920.0, - "grad_norm": 9.306292417082439, - "learning_rate": 2.8216724711320793e-06, - "loss": 0.8846, - "num_input_tokens_seen": 67960050, - "step": 3196 - }, - { - "epoch": 0.38441652137317384, - "flos": 18028025607600.0, - "grad_norm": 2.6045748479746926, - "learning_rate": 2.820962213242361e-06, - "loss": 1.028, - "num_input_tokens_seen": 67979100, - "step": 3197 - }, - { - "epoch": 0.3845367642638129, - "flos": 12836292918480.0, - "grad_norm": 3.4561683741721034, - 
"learning_rate": 2.8202518308133264e-06, - "loss": 1.0818, - "num_input_tokens_seen": 67996095, - "step": 3198 - }, - { - "epoch": 0.384657007154452, - "flos": 17949668677560.0, - "grad_norm": 3.0952588650072954, - "learning_rate": 2.8195413239527426e-06, - "loss": 0.9716, - "num_input_tokens_seen": 68015555, - "step": 3199 - }, - { - "epoch": 0.38477725004509106, - "flos": 14095829495520.0, - "grad_norm": 2.267787348699904, - "learning_rate": 2.8188306927683906e-06, - "loss": 1.0369, - "num_input_tokens_seen": 68034745, - "step": 3200 - }, - { - "epoch": 0.38489749293573017, - "flos": 12941146984440.0, - "grad_norm": 3.8340347449967487, - "learning_rate": 2.818119937368074e-06, - "loss": 0.9697, - "num_input_tokens_seen": 68053100, - "step": 3201 - }, - { - "epoch": 0.3850177358263693, - "flos": 17346520171200.0, - "grad_norm": 3.4676058145122073, - "learning_rate": 2.817409057859613e-06, - "loss": 0.8814, - "num_input_tokens_seen": 68071810, - "step": 3202 - }, - { - "epoch": 0.38513797871700833, - "flos": 12518112656520.0, - "grad_norm": 2.569962312444296, - "learning_rate": 2.8166980543508482e-06, - "loss": 1.0097, - "num_input_tokens_seen": 68087420, - "step": 3203 - }, - { - "epoch": 0.38525822160764744, - "flos": 18317102501040.0, - "grad_norm": 3.324896020625853, - "learning_rate": 2.815986926949638e-06, - "loss": 1.0116, - "num_input_tokens_seen": 68105640, - "step": 3204 - }, - { - "epoch": 0.38537846449828655, - "flos": 14331329547480.0, - "grad_norm": 3.4457398579906338, - "learning_rate": 2.8152756757638597e-06, - "loss": 1.0275, - "num_input_tokens_seen": 68123860, - "step": 3205 - }, - { - "epoch": 0.3854987073889256, - "flos": 16376459087880.0, - "grad_norm": 2.8807565858251376, - "learning_rate": 2.8145643009014093e-06, - "loss": 1.0641, - "num_input_tokens_seen": 68142075, - "step": 3206 - }, - { - "epoch": 0.3856189502795647, - "flos": 14328753976440.0, - "grad_norm": 2.6322230915958973, - "learning_rate": 2.813852802470202e-06, - "loss": 1.019, - "num_input_tokens_seen": 68159690, - "step": 3207 - }, - { - "epoch": 0.38573919317020383, - "flos": 18290482718880.0, - "grad_norm": 16.904130672384856, - "learning_rate": 2.8131411805781717e-06, - "loss": 0.9493, - "num_input_tokens_seen": 68179535, - "step": 3208 - }, - { - "epoch": 0.3858594360608429, - "flos": 21250011515040.0, - "grad_norm": 4.27919547353902, - "learning_rate": 2.8124294353332707e-06, - "loss": 0.8627, - "num_input_tokens_seen": 68197930, - "step": 3209 - }, - { - "epoch": 0.385979678951482, - "flos": 17634891848760.0, - "grad_norm": 6.383627046977474, - "learning_rate": 2.8117175668434713e-06, - "loss": 0.9972, - "num_input_tokens_seen": 68217310, - "step": 3210 - }, - { - "epoch": 0.3860999218421211, - "flos": 15117689049840.0, - "grad_norm": 3.953481183195558, - "learning_rate": 2.811005575216762e-06, - "loss": 0.9052, - "num_input_tokens_seen": 68235745, - "step": 3211 - }, - { - "epoch": 0.38622016473276016, - "flos": 17452784668920.0, - "grad_norm": 2.4717289781422114, - "learning_rate": 2.8102934605611513e-06, - "loss": 1.0139, - "num_input_tokens_seen": 68257100, - "step": 3212 - }, - { - "epoch": 0.3863404076233993, - "flos": 14593296073800.0, - "grad_norm": 3.47535063806447, - "learning_rate": 2.8095812229846665e-06, - "loss": 0.8938, - "num_input_tokens_seen": 68276780, - "step": 3213 - }, - { - "epoch": 0.3864606505140384, - "flos": 15878379278400.0, - "grad_norm": 4.750320825282776, - "learning_rate": 2.808868862595355e-06, - "loss": 0.9252, - "num_input_tokens_seen": 68296745, - "step": 3214 
- }, - { - "epoch": 0.38658089340467744, - "flos": 18291310581000.0, - "grad_norm": 4.198748156770584, - "learning_rate": 2.8081563795012795e-06, - "loss": 1.0212, - "num_input_tokens_seen": 68316090, - "step": 3215 - }, - { - "epoch": 0.38670113629531655, - "flos": 24111738404040.0, - "grad_norm": 2.9025860732768565, - "learning_rate": 2.807443773810524e-06, - "loss": 0.9632, - "num_input_tokens_seen": 68337070, - "step": 3216 - }, - { - "epoch": 0.3868213791859556, - "flos": 16586473835400.0, - "grad_norm": 3.039266152900885, - "learning_rate": 2.80673104563119e-06, - "loss": 1.1199, - "num_input_tokens_seen": 68357415, - "step": 3217 - }, - { - "epoch": 0.3869416220765947, - "flos": 13072007601360.0, - "grad_norm": 4.1194522426877525, - "learning_rate": 2.8060181950713976e-06, - "loss": 1.0123, - "num_input_tokens_seen": 68373925, - "step": 3218 - }, - { - "epoch": 0.3870618649672338, - "flos": 11053681812480.0, - "grad_norm": 8.179726207973122, - "learning_rate": 2.805305222239286e-06, - "loss": 1.0353, - "num_input_tokens_seen": 68390900, - "step": 3219 - }, - { - "epoch": 0.3871821078578729, - "flos": 16717457098560.0, - "grad_norm": 3.072863734592224, - "learning_rate": 2.8045921272430118e-06, - "loss": 0.9656, - "num_input_tokens_seen": 68410300, - "step": 3220 - }, - { - "epoch": 0.387302350748512, - "flos": 12594445923600.0, - "grad_norm": 4.268500731578173, - "learning_rate": 2.803878910190753e-06, - "loss": 0.9994, - "num_input_tokens_seen": 68426940, - "step": 3221 - }, - { - "epoch": 0.3874225936391511, - "flos": 8084923886760.0, - "grad_norm": 3.7058237277753476, - "learning_rate": 2.8031655711907017e-06, - "loss": 1.0469, - "num_input_tokens_seen": 68440365, - "step": 3222 - }, - { - "epoch": 0.38754283652979016, - "flos": 15590130247080.0, - "grad_norm": 6.059661830115714, - "learning_rate": 2.8024521103510723e-06, - "loss": 1.0317, - "num_input_tokens_seen": 68456855, - "step": 3223 - }, - { - "epoch": 0.38766307942042927, - "flos": 15036940518120.0, - "grad_norm": 2.2183327080973894, - "learning_rate": 2.8017385277800952e-06, - "loss": 0.9856, - "num_input_tokens_seen": 68474930, - "step": 3224 - }, - { - "epoch": 0.3877833223110684, - "flos": 19522755621000.0, - "grad_norm": 3.6492788255565336, - "learning_rate": 2.8010248235860213e-06, - "loss": 0.9652, - "num_input_tokens_seen": 68494765, - "step": 3225 - }, - { - "epoch": 0.38790356520170743, - "flos": 46180120090080.0, - "grad_norm": 0.8466530413983067, - "learning_rate": 2.8003109978771192e-06, - "loss": 0.8979, - "num_input_tokens_seen": 68555650, - "step": 3226 - }, - { - "epoch": 0.38802380809234654, - "flos": 15877643400960.0, - "grad_norm": 3.3625176922073905, - "learning_rate": 2.799597050761674e-06, - "loss": 1.0163, - "num_input_tokens_seen": 68571575, - "step": 3227 - }, - { - "epoch": 0.38814405098298566, - "flos": 17972885026560.0, - "grad_norm": 4.17432871464329, - "learning_rate": 2.7988829823479924e-06, - "loss": 1.0191, - "num_input_tokens_seen": 68589685, - "step": 3228 - }, - { - "epoch": 0.3882642938736247, - "flos": 13358999508720.0, - "grad_norm": 2.417070409876901, - "learning_rate": 2.7981687927443976e-06, - "loss": 0.8706, - "num_input_tokens_seen": 68606205, - "step": 3229 - }, - { - "epoch": 0.3883845367642638, - "flos": 15379962191760.0, - "grad_norm": 2.9097023424325754, - "learning_rate": 2.797454482059231e-06, - "loss": 1.0818, - "num_input_tokens_seen": 68626080, - "step": 3230 - }, - { - "epoch": 0.3885047796549029, - "flos": 14592713504160.0, - "grad_norm": 3.0738245942821094, - 
"learning_rate": 2.7967400504008537e-06, - "loss": 1.0732, - "num_input_tokens_seen": 68645100, - "step": 3231 - }, - { - "epoch": 0.388625022545542, - "flos": 46050828760800.0, - "grad_norm": 0.8462603277367893, - "learning_rate": 2.7960254978776456e-06, - "loss": 0.8563, - "num_input_tokens_seen": 68706910, - "step": 3232 - }, - { - "epoch": 0.3887452654361811, - "flos": 12836630195640.0, - "grad_norm": 2.545354752163551, - "learning_rate": 2.7953108245980006e-06, - "loss": 1.0274, - "num_input_tokens_seen": 68725145, - "step": 3233 - }, - { - "epoch": 0.38886550832682015, - "flos": 17768052082680.0, - "grad_norm": 2.7509197951023023, - "learning_rate": 2.7945960306703365e-06, - "loss": 0.9707, - "num_input_tokens_seen": 68747850, - "step": 3234 - }, - { - "epoch": 0.38898575121745926, - "flos": 19366379038080.0, - "grad_norm": 5.935666734866635, - "learning_rate": 2.7938811162030865e-06, - "loss": 0.8891, - "num_input_tokens_seen": 68767835, - "step": 3235 - }, - { - "epoch": 0.3891059941080984, - "flos": 20489934517680.0, - "grad_norm": 2.2283478656181757, - "learning_rate": 2.793166081304702e-06, - "loss": 1.0554, - "num_input_tokens_seen": 68788050, - "step": 3236 - }, - { - "epoch": 0.38922623699873743, - "flos": 16271605021920.0, - "grad_norm": 2.589266291202551, - "learning_rate": 2.7924509260836543e-06, - "loss": 1.0391, - "num_input_tokens_seen": 68806895, - "step": 3237 - }, - { - "epoch": 0.38934647988937654, - "flos": 14042988531480.0, - "grad_norm": 2.2671127170293737, - "learning_rate": 2.791735650648431e-06, - "loss": 0.9131, - "num_input_tokens_seen": 68825735, - "step": 3238 - }, - { - "epoch": 0.38946672278001565, - "flos": 13617838555920.0, - "grad_norm": 6.6098332528047035, - "learning_rate": 2.791020255107538e-06, - "loss": 0.9644, - "num_input_tokens_seen": 68842825, - "step": 3239 - }, - { - "epoch": 0.3895869656706547, - "flos": 17739868560960.0, - "grad_norm": 12.06006277495031, - "learning_rate": 2.7903047395695023e-06, - "loss": 1.0315, - "num_input_tokens_seen": 68862445, - "step": 3240 - }, - { - "epoch": 0.3897072085612938, - "flos": 17162849251800.0, - "grad_norm": 3.407051482093533, - "learning_rate": 2.789589104142865e-06, - "loss": 1.1322, - "num_input_tokens_seen": 68879790, - "step": 3241 - }, - { - "epoch": 0.3898274514519329, - "flos": 12155799313560.0, - "grad_norm": 2.051419636501614, - "learning_rate": 2.7888733489361895e-06, - "loss": 0.997, - "num_input_tokens_seen": 68897925, - "step": 3242 - }, - { - "epoch": 0.389947694342572, - "flos": 47307759105240.0, - "grad_norm": 0.7821336707728443, - "learning_rate": 2.788157474058054e-06, - "loss": 0.8802, - "num_input_tokens_seen": 68959920, - "step": 3243 - }, - { - "epoch": 0.3900679372332111, - "flos": 18318236978760.0, - "grad_norm": 2.4639873679444153, - "learning_rate": 2.7874414796170555e-06, - "loss": 0.9297, - "num_input_tokens_seen": 68981130, - "step": 3244 - }, - { - "epoch": 0.3901881801238502, - "flos": 8300089776360.0, - "grad_norm": 4.216893218416533, - "learning_rate": 2.7867253657218113e-06, - "loss": 1.0609, - "num_input_tokens_seen": 68994740, - "step": 3245 - }, - { - "epoch": 0.39030842301448926, - "flos": 19445042583720.0, - "grad_norm": 3.3515735972827643, - "learning_rate": 2.7860091324809544e-06, - "loss": 0.9524, - "num_input_tokens_seen": 69015520, - "step": 3246 - }, - { - "epoch": 0.39042866590512837, - "flos": 19340433810240.0, - "grad_norm": 2.4046592904085182, - "learning_rate": 2.7852927800031377e-06, - "loss": 1.0386, - "num_input_tokens_seen": 69035405, - "step": 
3247 - }, - { - "epoch": 0.3905489087957674, - "flos": 21174904710360.0, - "grad_norm": 2.302199957699842, - "learning_rate": 2.7845763083970298e-06, - "loss": 1.0628, - "num_input_tokens_seen": 69055525, - "step": 3248 - }, - { - "epoch": 0.39066915168640653, - "flos": 17425183716840.0, - "grad_norm": 2.320114795212484, - "learning_rate": 2.7838597177713205e-06, - "loss": 1.038, - "num_input_tokens_seen": 69076335, - "step": 3249 - }, - { - "epoch": 0.39078939457704565, - "flos": 14593602689400.0, - "grad_norm": 2.8688236734076225, - "learning_rate": 2.7831430082347143e-06, - "loss": 0.9652, - "num_input_tokens_seen": 69095260, - "step": 3250 - }, - { - "epoch": 0.3909096374676847, - "flos": 16192450891320.0, - "grad_norm": 3.912416031884003, - "learning_rate": 2.7824261798959373e-06, - "loss": 1.0519, - "num_input_tokens_seen": 69113160, - "step": 3251 - }, - { - "epoch": 0.3910298803583238, - "flos": 16350452536920.0, - "grad_norm": 3.259298228221947, - "learning_rate": 2.78170923286373e-06, - "loss": 1.0221, - "num_input_tokens_seen": 69132480, - "step": 3252 - }, - { - "epoch": 0.3911501232489629, - "flos": 17294139130560.0, - "grad_norm": 6.591952833803193, - "learning_rate": 2.780992167246854e-06, - "loss": 1.0713, - "num_input_tokens_seen": 69149725, - "step": 3253 - }, - { - "epoch": 0.391270366139602, - "flos": 43568064441840.0, - "grad_norm": 0.9977188317752916, - "learning_rate": 2.7802749831540883e-06, - "loss": 1.0114, - "num_input_tokens_seen": 69208345, - "step": 3254 - }, - { - "epoch": 0.3913906090302411, - "flos": 15301421292360.0, - "grad_norm": 4.57201490729197, - "learning_rate": 2.7795576806942268e-06, - "loss": 1.0482, - "num_input_tokens_seen": 69226870, - "step": 3255 - }, - { - "epoch": 0.3915108519208802, - "flos": 35640253015440.0, - "grad_norm": 0.7991871295241051, - "learning_rate": 2.778840259976085e-06, - "loss": 0.8052, - "num_input_tokens_seen": 69281820, - "step": 3256 - }, - { - "epoch": 0.39163109481151925, - "flos": 11681119822440.0, - "grad_norm": 3.140960827233108, - "learning_rate": 2.778122721108495e-06, - "loss": 0.9957, - "num_input_tokens_seen": 69299770, - "step": 3257 - }, - { - "epoch": 0.39175133770215836, - "flos": 18552694537680.0, - "grad_norm": 2.9039326935211456, - "learning_rate": 2.7774050642003076e-06, - "loss": 1.1134, - "num_input_tokens_seen": 69320300, - "step": 3258 - }, - { - "epoch": 0.3918715805927975, - "flos": 15537595898640.0, - "grad_norm": 8.44442622980306, - "learning_rate": 2.7766872893603896e-06, - "loss": 1.1677, - "num_input_tokens_seen": 69339995, - "step": 3259 - }, - { - "epoch": 0.39199182348343653, - "flos": 14462527441560.0, - "grad_norm": 2.3489902153022464, - "learning_rate": 2.7759693966976275e-06, - "loss": 0.9602, - "num_input_tokens_seen": 69358220, - "step": 3260 - }, - { - "epoch": 0.39211206637407564, - "flos": 15401277524040.0, - "grad_norm": 2.8101011430450447, - "learning_rate": 2.7752513863209242e-06, - "loss": 1.0682, - "num_input_tokens_seen": 69376520, - "step": 3261 - }, - { - "epoch": 0.39223230926471475, - "flos": 14958062341560.0, - "grad_norm": 2.277453568756785, - "learning_rate": 2.774533258339203e-06, - "loss": 1.0684, - "num_input_tokens_seen": 69393700, - "step": 3262 - }, - { - "epoch": 0.3923525521553538, - "flos": 12469472326200.0, - "grad_norm": 14.590738447299369, - "learning_rate": 2.7738150128614014e-06, - "loss": 1.0158, - "num_input_tokens_seen": 69410825, - "step": 3263 - }, - { - "epoch": 0.3924727950459929, - "flos": 14593357396920.0, - "grad_norm": 3.5923481736457283, - 
"learning_rate": 2.7730966499964777e-06, - "loss": 1.1237, - "num_input_tokens_seen": 69427495, - "step": 3264 - }, - { - "epoch": 0.39259303793663197, - "flos": 11472668814480.0, - "grad_norm": 3.896461001065545, - "learning_rate": 2.772378169853408e-06, - "loss": 1.0277, - "num_input_tokens_seen": 69444785, - "step": 3265 - }, - { - "epoch": 0.3927132808272711, - "flos": 11889754799760.0, - "grad_norm": 3.3375023202495155, - "learning_rate": 2.771659572541183e-06, - "loss": 0.9722, - "num_input_tokens_seen": 69462435, - "step": 3266 - }, - { - "epoch": 0.3928335237179102, - "flos": 14384293157760.0, - "grad_norm": 3.118214679406348, - "learning_rate": 2.7709408581688143e-06, - "loss": 1.1004, - "num_input_tokens_seen": 69482140, - "step": 3267 - }, - { - "epoch": 0.39295376660854925, - "flos": 17766335035320.0, - "grad_norm": 2.3348149182568303, - "learning_rate": 2.7702220268453307e-06, - "loss": 1.112, - "num_input_tokens_seen": 69502220, - "step": 3268 - }, - { - "epoch": 0.39307400949918836, - "flos": 13256291751960.0, - "grad_norm": 2.6566431988677155, - "learning_rate": 2.7695030786797785e-06, - "loss": 1.0665, - "num_input_tokens_seen": 69517835, - "step": 3269 - }, - { - "epoch": 0.39319425238982747, - "flos": 15927602178360.0, - "grad_norm": 2.6503804599508, - "learning_rate": 2.7687840137812206e-06, - "loss": 0.9751, - "num_input_tokens_seen": 69535640, - "step": 3270 - }, - { - "epoch": 0.3933144952804665, - "flos": 47393842748400.0, - "grad_norm": 0.8176767784558143, - "learning_rate": 2.7680648322587395e-06, - "loss": 0.8636, - "num_input_tokens_seen": 69600235, - "step": 3271 - }, - { - "epoch": 0.39343473817110564, - "flos": 10948889069640.0, - "grad_norm": 4.373610575618626, - "learning_rate": 2.7673455342214334e-06, - "loss": 1.0423, - "num_input_tokens_seen": 69616945, - "step": 3272 - }, - { - "epoch": 0.39355498106174475, - "flos": 15144370155120.0, - "grad_norm": 3.4859938346383688, - "learning_rate": 2.7666261197784198e-06, - "loss": 0.9843, - "num_input_tokens_seen": 69635480, - "step": 3273 - }, - { - "epoch": 0.3936752239523838, - "flos": 9374177063520.0, - "grad_norm": 3.5797150256573484, - "learning_rate": 2.7659065890388336e-06, - "loss": 0.9973, - "num_input_tokens_seen": 69651200, - "step": 3274 - }, - { - "epoch": 0.3937954668430229, - "flos": 11892453017040.0, - "grad_norm": 3.120881685732077, - "learning_rate": 2.7651869421118266e-06, - "loss": 1.0634, - "num_input_tokens_seen": 69667530, - "step": 3275 - }, - { - "epoch": 0.393915709733662, - "flos": 14956314632640.0, - "grad_norm": 2.2581008674183054, - "learning_rate": 2.76446717910657e-06, - "loss": 1.0534, - "num_input_tokens_seen": 69687955, - "step": 3276 - }, - { - "epoch": 0.3940359526243011, - "flos": 12154879466760.0, - "grad_norm": 7.919744381873619, - "learning_rate": 2.763747300132249e-06, - "loss": 0.9898, - "num_input_tokens_seen": 69705115, - "step": 3277 - }, - { - "epoch": 0.3941561955149402, - "flos": 14907796948560.0, - "grad_norm": 2.309061122516485, - "learning_rate": 2.7630273052980704e-06, - "loss": 1.0948, - "num_input_tokens_seen": 69725425, - "step": 3278 - }, - { - "epoch": 0.39427643840557924, - "flos": 13360900525440.0, - "grad_norm": 2.999626613268852, - "learning_rate": 2.7623071947132554e-06, - "loss": 0.9041, - "num_input_tokens_seen": 69742175, - "step": 3279 - }, - { - "epoch": 0.39439668129621835, - "flos": 16534123456320.0, - "grad_norm": 3.888135856805804, - "learning_rate": 2.7615869684870458e-06, - "loss": 1.0164, - "num_input_tokens_seen": 69761205, - "step": 
3280 - }, - { - "epoch": 0.39451692418685746, - "flos": 18973122633000.0, - "grad_norm": 2.2554846778574134, - "learning_rate": 2.7608666267286986e-06, - "loss": 1.0746, - "num_input_tokens_seen": 69781155, - "step": 3281 - }, - { - "epoch": 0.3946371670774965, - "flos": 12940564414800.0, - "grad_norm": 3.3673228792028147, - "learning_rate": 2.760146169547489e-06, - "loss": 1.0836, - "num_input_tokens_seen": 69797640, - "step": 3282 - }, - { - "epoch": 0.39475740996813563, - "flos": 17214893015280.0, - "grad_norm": 1.621911782378661, - "learning_rate": 2.75942559705271e-06, - "loss": 0.9923, - "num_input_tokens_seen": 69817095, - "step": 3283 - }, - { - "epoch": 0.39487765285877474, - "flos": 13701898536120.0, - "grad_norm": 3.8308932513223786, - "learning_rate": 2.7587049093536713e-06, - "loss": 1.1141, - "num_input_tokens_seen": 69833145, - "step": 3284 - }, - { - "epoch": 0.3949978957494138, - "flos": 12260224117680.0, - "grad_norm": 5.385308250452575, - "learning_rate": 2.757984106559701e-06, - "loss": 1.035, - "num_input_tokens_seen": 69851850, - "step": 3285 - }, - { - "epoch": 0.3951181386400529, - "flos": 25919160260160.0, - "grad_norm": 3.0553346226513507, - "learning_rate": 2.7572631887801446e-06, - "loss": 0.9434, - "num_input_tokens_seen": 69873195, - "step": 3286 - }, - { - "epoch": 0.395238381530692, - "flos": 16427215065840.0, - "grad_norm": 2.361841895261855, - "learning_rate": 2.7565421561243654e-06, - "loss": 0.9858, - "num_input_tokens_seen": 69891080, - "step": 3287 - }, - { - "epoch": 0.3953586244213311, - "flos": 17315975709360.0, - "grad_norm": 3.4089921185600147, - "learning_rate": 2.7558210087017413e-06, - "loss": 1.0462, - "num_input_tokens_seen": 69910735, - "step": 3288 - }, - { - "epoch": 0.3954788673119702, - "flos": 16664370842040.0, - "grad_norm": 2.7750368476689475, - "learning_rate": 2.7550997466216724e-06, - "loss": 0.9703, - "num_input_tokens_seen": 69928250, - "step": 3289 - }, - { - "epoch": 0.3955991102026093, - "flos": 12391238042400.0, - "grad_norm": 4.499409530895025, - "learning_rate": 2.7543783699935714e-06, - "loss": 1.0383, - "num_input_tokens_seen": 69946000, - "step": 3290 - }, - { - "epoch": 0.39571935309324835, - "flos": 12913576693920.0, - "grad_norm": 3.4891674799827452, - "learning_rate": 2.753656878926872e-06, - "loss": 1.0852, - "num_input_tokens_seen": 69961600, - "step": 3291 - }, - { - "epoch": 0.39583959598388746, - "flos": 12574234407480.0, - "grad_norm": 2.504505674886186, - "learning_rate": 2.752935273531023e-06, - "loss": 0.9711, - "num_input_tokens_seen": 69979470, - "step": 3292 - }, - { - "epoch": 0.39595983887452657, - "flos": 13726433332200.0, - "grad_norm": 2.4159074665058573, - "learning_rate": 2.752213553915492e-06, - "loss": 1.0135, - "num_input_tokens_seen": 69997545, - "step": 3293 - }, - { - "epoch": 0.3960800817651656, - "flos": 43433248483680.0, - "grad_norm": 0.7989433597187768, - "learning_rate": 2.751491720189762e-06, - "loss": 0.9127, - "num_input_tokens_seen": 70055375, - "step": 3294 - }, - { - "epoch": 0.39620032465580474, - "flos": 11918735522040.0, - "grad_norm": 4.4352628414535165, - "learning_rate": 2.7507697724633364e-06, - "loss": 1.1364, - "num_input_tokens_seen": 70071855, - "step": 3295 - }, - { - "epoch": 0.3963205675464438, - "flos": 49462035330000.0, - "grad_norm": 0.8217755667292692, - "learning_rate": 2.7500477108457327e-06, - "loss": 0.7946, - "num_input_tokens_seen": 70123585, - "step": 3296 - }, - { - "epoch": 0.3964408104370829, - "flos": 18265365353160.0, - "grad_norm": 3.068001457333374, 
- "learning_rate": 2.7493255354464877e-06, - "loss": 1.0416, - "num_input_tokens_seen": 70141115, - "step": 3297 - }, - { - "epoch": 0.396561053327722, - "flos": 17265955608840.0, - "grad_norm": 2.7481796134986003, - "learning_rate": 2.748603246375156e-06, - "loss": 0.9919, - "num_input_tokens_seen": 70158850, - "step": 3298 - }, - { - "epoch": 0.39668129621836107, - "flos": 14567565476880.0, - "grad_norm": 3.212978187921538, - "learning_rate": 2.7478808437413055e-06, - "loss": 0.9085, - "num_input_tokens_seen": 70177980, - "step": 3299 - }, - { - "epoch": 0.3968015391090002, - "flos": 19260666448440.0, - "grad_norm": 2.8019103005079025, - "learning_rate": 2.7471583276545263e-06, - "loss": 0.8839, - "num_input_tokens_seen": 70198360, - "step": 3300 - }, - { - "epoch": 0.3969217819996393, - "flos": 8824942675800.0, - "grad_norm": 4.266813790248444, - "learning_rate": 2.7464356982244224e-06, - "loss": 0.9246, - "num_input_tokens_seen": 70216080, - "step": 3301 - }, - { - "epoch": 0.39704202489027834, - "flos": 47430274229760.0, - "grad_norm": 0.9261786658119744, - "learning_rate": 2.745712955560617e-06, - "loss": 0.876, - "num_input_tokens_seen": 70272005, - "step": 3302 - }, - { - "epoch": 0.39716226778091746, - "flos": 12023896203600.0, - "grad_norm": 4.162850323389181, - "learning_rate": 2.7449900997727496e-06, - "loss": 1.0065, - "num_input_tokens_seen": 70289835, - "step": 3303 - }, - { - "epoch": 0.39728251067155657, - "flos": 16691082608880.0, - "grad_norm": 3.2244450823626014, - "learning_rate": 2.744267130970476e-06, - "loss": 1.0711, - "num_input_tokens_seen": 70309280, - "step": 3304 - }, - { - "epoch": 0.3974027535621956, - "flos": 14698426093800.0, - "grad_norm": 3.5979813585206646, - "learning_rate": 2.7435440492634697e-06, - "loss": 0.9936, - "num_input_tokens_seen": 70328325, - "step": 3305 - }, - { - "epoch": 0.39752299645283473, - "flos": 15301237323000.0, - "grad_norm": 34.40445471020068, - "learning_rate": 2.7428208547614228e-06, - "loss": 0.896, - "num_input_tokens_seen": 70347540, - "step": 3306 - }, - { - "epoch": 0.39764323934347384, - "flos": 13623143005800.0, - "grad_norm": 2.973316896190741, - "learning_rate": 2.742097547574043e-06, - "loss": 0.9862, - "num_input_tokens_seen": 70365485, - "step": 3307 - }, - { - "epoch": 0.3977634822341129, - "flos": 14803157513520.0, - "grad_norm": 3.9974675506553496, - "learning_rate": 2.7413741278110544e-06, - "loss": 1.0011, - "num_input_tokens_seen": 70383895, - "step": 3308 - }, - { - "epoch": 0.397883725124752, - "flos": 28411950909240.0, - "grad_norm": 4.666794684334465, - "learning_rate": 2.7406505955822016e-06, - "loss": 0.9005, - "num_input_tokens_seen": 70404640, - "step": 3309 - }, - { - "epoch": 0.39800396801539106, - "flos": 12308220555240.0, - "grad_norm": 3.867395630447232, - "learning_rate": 2.7399269509972415e-06, - "loss": 0.8907, - "num_input_tokens_seen": 70418515, - "step": 3310 - }, - { - "epoch": 0.3981242109060302, - "flos": 13618421125560.0, - "grad_norm": 3.624395122705741, - "learning_rate": 2.7392031941659514e-06, - "loss": 1.0647, - "num_input_tokens_seen": 70436080, - "step": 3311 - }, - { - "epoch": 0.3982444537966693, - "flos": 17472106999800.0, - "grad_norm": 7.095974611170269, - "learning_rate": 2.7384793251981244e-06, - "loss": 1.0864, - "num_input_tokens_seen": 70454785, - "step": 3312 - }, - { - "epoch": 0.39836469668730834, - "flos": 18657947203920.0, - "grad_norm": 2.678395752368371, - "learning_rate": 2.737755344203571e-06, - "loss": 1.033, - "num_input_tokens_seen": 70474455, - "step": 
3313 - }, - { - "epoch": 0.39848493957794745, - "flos": 19680021389160.0, - "grad_norm": 2.70624816048716, - "learning_rate": 2.7370312512921186e-06, - "loss": 1.0273, - "num_input_tokens_seen": 70495955, - "step": 3314 - }, - { - "epoch": 0.39860518246858656, - "flos": 8614651974240.0, - "grad_norm": 4.712955580325792, - "learning_rate": 2.736307046573611e-06, - "loss": 0.9853, - "num_input_tokens_seen": 70511545, - "step": 3315 - }, - { - "epoch": 0.3987254253592256, - "flos": 15901503642720.0, - "grad_norm": 2.657006728146914, - "learning_rate": 2.73558273015791e-06, - "loss": 1.0559, - "num_input_tokens_seen": 70531095, - "step": 3316 - }, - { - "epoch": 0.3988456682498647, - "flos": 16717641067920.0, - "grad_norm": 3.438073959984178, - "learning_rate": 2.734858302154894e-06, - "loss": 0.9293, - "num_input_tokens_seen": 70552315, - "step": 3317 - }, - { - "epoch": 0.39896591114050384, - "flos": 13623541606080.0, - "grad_norm": 3.223367138685011, - "learning_rate": 2.734133762674457e-06, - "loss": 0.9765, - "num_input_tokens_seen": 70571625, - "step": 3318 - }, - { - "epoch": 0.3990861540311429, - "flos": 20229562392480.0, - "grad_norm": 2.9290453397784795, - "learning_rate": 2.7334091118265124e-06, - "loss": 0.93, - "num_input_tokens_seen": 70593240, - "step": 3319 - }, - { - "epoch": 0.399206396921782, - "flos": 44209208169120.0, - "grad_norm": 0.6863263255547875, - "learning_rate": 2.732684349720989e-06, - "loss": 0.8196, - "num_input_tokens_seen": 70660920, - "step": 3320 - }, - { - "epoch": 0.3993266398124211, - "flos": 19995196818240.0, - "grad_norm": 2.8749824191675435, - "learning_rate": 2.7319594764678318e-06, - "loss": 0.9714, - "num_input_tokens_seen": 70682740, - "step": 3321 - }, - { - "epoch": 0.39944688270306017, - "flos": 16510385860800.0, - "grad_norm": 2.423859896516941, - "learning_rate": 2.7312344921770044e-06, - "loss": 1.0535, - "num_input_tokens_seen": 70704160, - "step": 3322 - }, - { - "epoch": 0.3995671255936993, - "flos": 13754003622720.0, - "grad_norm": 3.6295057772642854, - "learning_rate": 2.7305093969584857e-06, - "loss": 1.0114, - "num_input_tokens_seen": 70722705, - "step": 3323 - }, - { - "epoch": 0.3996873684843384, - "flos": 16953846335760.0, - "grad_norm": 4.076268734624227, - "learning_rate": 2.729784190922272e-06, - "loss": 1.0203, - "num_input_tokens_seen": 70743860, - "step": 3324 - }, - { - "epoch": 0.39980761137497745, - "flos": 47672335855560.0, - "grad_norm": 0.7749980249687378, - "learning_rate": 2.729058874178378e-06, - "loss": 0.8214, - "num_input_tokens_seen": 70814260, - "step": 3325 - }, - { - "epoch": 0.39992785426561656, - "flos": 20336317475160.0, - "grad_norm": 4.585187964460519, - "learning_rate": 2.7283334468368315e-06, - "loss": 0.9249, - "num_input_tokens_seen": 70835260, - "step": 3326 - }, - { - "epoch": 0.4000480971562556, - "flos": 10634296210200.0, - "grad_norm": 4.310521210843401, - "learning_rate": 2.72760790900768e-06, - "loss": 0.9572, - "num_input_tokens_seen": 70851565, - "step": 3327 - }, - { - "epoch": 0.4001683400468947, - "flos": 17006288699520.0, - "grad_norm": 3.2413000891625834, - "learning_rate": 2.7268822608009875e-06, - "loss": 1.0239, - "num_input_tokens_seen": 70870660, - "step": 3328 - }, - { - "epoch": 0.40028858293753383, - "flos": 17320298989320.0, - "grad_norm": 3.1954151687356585, - "learning_rate": 2.726156502326834e-06, - "loss": 1.0017, - "num_input_tokens_seen": 70891680, - "step": 3329 - }, - { - "epoch": 0.4004088258281729, - "flos": 47822181526200.0, - "grad_norm": 0.7063861032172605, - 
"learning_rate": 2.725430633695316e-06, - "loss": 0.861, - "num_input_tokens_seen": 70954480, - "step": 3330 - }, - { - "epoch": 0.400529068718812, - "flos": 41935023052440.0, - "grad_norm": 0.9835518619747203, - "learning_rate": 2.7247046550165485e-06, - "loss": 0.8538, - "num_input_tokens_seen": 71006325, - "step": 3331 - }, - { - "epoch": 0.4006493116094511, - "flos": 18057220960800.0, - "grad_norm": 2.4373235394109254, - "learning_rate": 2.7239785664006606e-06, - "loss": 0.9927, - "num_input_tokens_seen": 71029585, - "step": 3332 - }, - { - "epoch": 0.40076955450009016, - "flos": 43144631513640.0, - "grad_norm": 0.8317765038443709, - "learning_rate": 2.7232523679578002e-06, - "loss": 0.8985, - "num_input_tokens_seen": 71092385, - "step": 3333 - }, - { - "epoch": 0.4008897973907293, - "flos": 11760948507360.0, - "grad_norm": 9.417229919692497, - "learning_rate": 2.7225260597981295e-06, - "loss": 1.0272, - "num_input_tokens_seen": 71109810, - "step": 3334 - }, - { - "epoch": 0.4010100402813684, - "flos": 10869918908400.0, - "grad_norm": 3.610327392695334, - "learning_rate": 2.721799642031831e-06, - "loss": 1.0144, - "num_input_tokens_seen": 71125700, - "step": 3335 - }, - { - "epoch": 0.40113028317200744, - "flos": 9376292711160.0, - "grad_norm": 4.605790118134421, - "learning_rate": 2.721073114769101e-06, - "loss": 0.9988, - "num_input_tokens_seen": 71143095, - "step": 3336 - }, - { - "epoch": 0.40125052606264655, - "flos": 14672266235040.0, - "grad_norm": 3.471599611198508, - "learning_rate": 2.7203464781201523e-06, - "loss": 0.9862, - "num_input_tokens_seen": 71162130, - "step": 3337 - }, - { - "epoch": 0.40137076895328566, - "flos": 17503509985320.0, - "grad_norm": 9.148528817728486, - "learning_rate": 2.719619732195215e-06, - "loss": 1.0038, - "num_input_tokens_seen": 71183490, - "step": 3338 - }, - { - "epoch": 0.4014910118439247, - "flos": 17215291615560.0, - "grad_norm": 2.679035429956263, - "learning_rate": 2.7188928771045377e-06, - "loss": 0.9616, - "num_input_tokens_seen": 71204530, - "step": 3339 - }, - { - "epoch": 0.4016112547345638, - "flos": 19025411688960.0, - "grad_norm": 3.3262980286557986, - "learning_rate": 2.7181659129583815e-06, - "loss": 1.0334, - "num_input_tokens_seen": 71223840, - "step": 3340 - }, - { - "epoch": 0.4017314976252029, - "flos": 15117811696080.0, - "grad_norm": 6.136394256618153, - "learning_rate": 2.7174388398670276e-06, - "loss": 0.9918, - "num_input_tokens_seen": 71242740, - "step": 3341 - }, - { - "epoch": 0.401851740515842, - "flos": 18133983489720.0, - "grad_norm": 3.102963071288046, - "learning_rate": 2.716711657940773e-06, - "loss": 1.1452, - "num_input_tokens_seen": 71263470, - "step": 3342 - }, - { - "epoch": 0.4019719834064811, - "flos": 39634089959280.0, - "grad_norm": 0.8401371997662047, - "learning_rate": 2.7159843672899284e-06, - "loss": 0.8318, - "num_input_tokens_seen": 71327390, - "step": 3343 - }, - { - "epoch": 0.40209222629712016, - "flos": 12883308186120.0, - "grad_norm": 3.2393027099221197, - "learning_rate": 2.715256968024825e-06, - "loss": 1.0415, - "num_input_tokens_seen": 71344185, - "step": 3344 - }, - { - "epoch": 0.40221246918775927, - "flos": 18476575901520.0, - "grad_norm": 2.6190183697755773, - "learning_rate": 2.7145294602558083e-06, - "loss": 1.06, - "num_input_tokens_seen": 71364615, - "step": 3345 - }, - { - "epoch": 0.4023327120783984, - "flos": 24137867601240.0, - "grad_norm": 4.113531179343088, - "learning_rate": 2.713801844093241e-06, - "loss": 0.9264, - "num_input_tokens_seen": 71385485, - "step": 3346 - 
}, - { - "epoch": 0.40245295496903744, - "flos": 19150323963240.0, - "grad_norm": 3.883535521214046, - "learning_rate": 2.7130741196475014e-06, - "loss": 1.1062, - "num_input_tokens_seen": 71403335, - "step": 3347 - }, - { - "epoch": 0.40257319785967655, - "flos": 26339649678600.0, - "grad_norm": 3.1672764408550464, - "learning_rate": 2.7123462870289848e-06, - "loss": 1.0218, - "num_input_tokens_seen": 71423105, - "step": 3348 - }, - { - "epoch": 0.40269344075031566, - "flos": 17320728251160.0, - "grad_norm": 2.4716847034224094, - "learning_rate": 2.711618346348102e-06, - "loss": 1.0314, - "num_input_tokens_seen": 71443350, - "step": 3349 - }, - { - "epoch": 0.4028136836409547, - "flos": 10159739365320.0, - "grad_norm": 7.858028956398026, - "learning_rate": 2.7108902977152825e-06, - "loss": 0.8587, - "num_input_tokens_seen": 71460970, - "step": 3350 - }, - { - "epoch": 0.4029339265315938, - "flos": 18601365529560.0, - "grad_norm": 5.828181944049896, - "learning_rate": 2.7101621412409704e-06, - "loss": 0.9788, - "num_input_tokens_seen": 71480175, - "step": 3351 - }, - { - "epoch": 0.40305416942223293, - "flos": 16532130454920.0, - "grad_norm": 4.860703069121984, - "learning_rate": 2.7094338770356256e-06, - "loss": 1.0868, - "num_input_tokens_seen": 71498980, - "step": 3352 - }, - { - "epoch": 0.403174412312872, - "flos": 19287378215280.0, - "grad_norm": 4.09190175506995, - "learning_rate": 2.708705505209726e-06, - "loss": 0.872, - "num_input_tokens_seen": 71519475, - "step": 3353 - }, - { - "epoch": 0.4032946552035111, - "flos": 15453566580000.0, - "grad_norm": 3.3138309899688796, - "learning_rate": 2.7079770258737646e-06, - "loss": 1.1353, - "num_input_tokens_seen": 71537105, - "step": 3354 - }, - { - "epoch": 0.4034148980941502, - "flos": 12282796573920.0, - "grad_norm": 7.1596311885531865, - "learning_rate": 2.707248439138251e-06, - "loss": 0.9692, - "num_input_tokens_seen": 71553060, - "step": 3355 - }, - { - "epoch": 0.40353514098478926, - "flos": 15642327318360.0, - "grad_norm": 3.7105987715472826, - "learning_rate": 2.7065197451137114e-06, - "loss": 0.8763, - "num_input_tokens_seen": 71574160, - "step": 3356 - }, - { - "epoch": 0.4036553838754284, - "flos": 10057184916360.0, - "grad_norm": 3.3103613517337855, - "learning_rate": 2.7057909439106894e-06, - "loss": 0.9041, - "num_input_tokens_seen": 71591735, - "step": 3357 - }, - { - "epoch": 0.40377562676606743, - "flos": 17635075818120.0, - "grad_norm": 3.0187254468341864, - "learning_rate": 2.7050620356397417e-06, - "loss": 1.0108, - "num_input_tokens_seen": 71610405, - "step": 3358 - }, - { - "epoch": 0.40389586965670654, - "flos": 17110897473000.0, - "grad_norm": 3.270349325017375, - "learning_rate": 2.7043330204114437e-06, - "loss": 0.9447, - "num_input_tokens_seen": 71628835, - "step": 3359 - }, - { - "epoch": 0.40401611254734565, - "flos": 11604694570680.0, - "grad_norm": 3.9799022028040185, - "learning_rate": 2.7036038983363862e-06, - "loss": 1.0795, - "num_input_tokens_seen": 71645160, - "step": 3360 - }, - { - "epoch": 0.4041363554379847, - "flos": 17058424447680.0, - "grad_norm": 2.8207822286990507, - "learning_rate": 2.702874669525177e-06, - "loss": 1.0674, - "num_input_tokens_seen": 71663360, - "step": 3361 - }, - { - "epoch": 0.4042565983286238, - "flos": 20230359593040.0, - "grad_norm": 3.2391674269497113, - "learning_rate": 2.7021453340884394e-06, - "loss": 0.9226, - "num_input_tokens_seen": 71680805, - "step": 3362 - }, - { - "epoch": 0.40437684121926293, - "flos": 12546848086320.0, - "grad_norm": 3.226673188977966, - 
"learning_rate": 2.7014158921368125e-06, - "loss": 0.9454, - "num_input_tokens_seen": 71698850, - "step": 3363 - }, - { - "epoch": 0.404497084109902, - "flos": 17079739779960.0, - "grad_norm": 3.253239411056219, - "learning_rate": 2.700686343780953e-06, - "loss": 1.0914, - "num_input_tokens_seen": 71718440, - "step": 3364 - }, - { - "epoch": 0.4046173270005411, - "flos": 16297764880680.0, - "grad_norm": 3.867754407232068, - "learning_rate": 2.699956689131532e-06, - "loss": 1.0982, - "num_input_tokens_seen": 71738145, - "step": 3365 - }, - { - "epoch": 0.4047375698911802, - "flos": 14672358219720.0, - "grad_norm": 3.715267056997326, - "learning_rate": 2.699226928299238e-06, - "loss": 1.0688, - "num_input_tokens_seen": 71755885, - "step": 3366 - }, - { - "epoch": 0.40485781278181926, - "flos": 20597548124040.0, - "grad_norm": 9.363914344210071, - "learning_rate": 2.698497061394774e-06, - "loss": 1.0047, - "num_input_tokens_seen": 71774090, - "step": 3367 - }, - { - "epoch": 0.40497805567245837, - "flos": 16454754694800.0, - "grad_norm": 2.4449453840025988, - "learning_rate": 2.6977670885288627e-06, - "loss": 1.0349, - "num_input_tokens_seen": 71795210, - "step": 3368 - }, - { - "epoch": 0.4050982985630975, - "flos": 11525387132280.0, - "grad_norm": 3.2249330558038727, - "learning_rate": 2.6970370098122378e-06, - "loss": 0.9774, - "num_input_tokens_seen": 71811915, - "step": 3369 - }, - { - "epoch": 0.40521854145373654, - "flos": 24583903647240.0, - "grad_norm": 2.741210676573064, - "learning_rate": 2.6963068253556535e-06, - "loss": 1.0874, - "num_input_tokens_seen": 71833020, - "step": 3370 - }, - { - "epoch": 0.40533878434437565, - "flos": 18022567849920.0, - "grad_norm": 3.5465878273750895, - "learning_rate": 2.6955765352698763e-06, - "loss": 1.0785, - "num_input_tokens_seen": 71852885, - "step": 3371 - }, - { - "epoch": 0.40545902723501476, - "flos": 10607339150880.0, - "grad_norm": 4.736259664453677, - "learning_rate": 2.6948461396656923e-06, - "loss": 0.9557, - "num_input_tokens_seen": 71870015, - "step": 3372 - }, - { - "epoch": 0.4055792701256538, - "flos": 18160143348480.0, - "grad_norm": 3.684419670837193, - "learning_rate": 2.6941156386539013e-06, - "loss": 0.9725, - "num_input_tokens_seen": 71889685, - "step": 3373 - }, - { - "epoch": 0.4056995130162929, - "flos": 13886581287000.0, - "grad_norm": 3.641949457730177, - "learning_rate": 2.6933850323453203e-06, - "loss": 1.0163, - "num_input_tokens_seen": 71907850, - "step": 3374 - }, - { - "epoch": 0.405819755906932, - "flos": 10896109428720.0, - "grad_norm": 6.8372928343915955, - "learning_rate": 2.6926543208507806e-06, - "loss": 0.9688, - "num_input_tokens_seen": 71926250, - "step": 3375 - }, - { - "epoch": 0.4059399987975711, - "flos": 15222665762040.0, - "grad_norm": 3.2702132850174155, - "learning_rate": 2.6919235042811316e-06, - "loss": 1.0106, - "num_input_tokens_seen": 71944755, - "step": 3376 - }, - { - "epoch": 0.4060602416882102, - "flos": 18185812622280.0, - "grad_norm": 2.7810787191048565, - "learning_rate": 2.691192582747237e-06, - "loss": 0.9795, - "num_input_tokens_seen": 71964105, - "step": 3377 - }, - { - "epoch": 0.40618048457884925, - "flos": 16898920385640.0, - "grad_norm": 3.257510237542297, - "learning_rate": 2.6904615563599765e-06, - "loss": 0.9692, - "num_input_tokens_seen": 71983625, - "step": 3378 - }, - { - "epoch": 0.40630072746948837, - "flos": 12495846815880.0, - "grad_norm": 2.9592006289582256, - "learning_rate": 2.6897304252302477e-06, - "loss": 1.0546, - "num_input_tokens_seen": 72000665, - "step": 
3379 - }, - { - "epoch": 0.4064209703601275, - "flos": 43544817431280.0, - "grad_norm": 0.9746254898604321, - "learning_rate": 2.688999189468962e-06, - "loss": 0.8004, - "num_input_tokens_seen": 72056815, - "step": 3380 - }, - { - "epoch": 0.40654121325076653, - "flos": 17084339013960.0, - "grad_norm": 5.586058373910738, - "learning_rate": 2.6882678491870464e-06, - "loss": 0.9778, - "num_input_tokens_seen": 72076970, - "step": 3381 - }, - { - "epoch": 0.40666145614140564, - "flos": 19470251934120.0, - "grad_norm": 2.694566706432149, - "learning_rate": 2.6875364044954453e-06, - "loss": 0.9368, - "num_input_tokens_seen": 72096920, - "step": 3382 - }, - { - "epoch": 0.40678169903204475, - "flos": 18631174113960.0, - "grad_norm": 3.4536191836975045, - "learning_rate": 2.6868048555051185e-06, - "loss": 1.0465, - "num_input_tokens_seen": 72118170, - "step": 3383 - }, - { - "epoch": 0.4069019419226838, - "flos": 20389005131400.0, - "grad_norm": 15.338848504288059, - "learning_rate": 2.686073202327041e-06, - "loss": 1.0775, - "num_input_tokens_seen": 72136890, - "step": 3384 - }, - { - "epoch": 0.4070221848133229, - "flos": 17950189924080.0, - "grad_norm": 3.2872099019554804, - "learning_rate": 2.6853414450722043e-06, - "loss": 0.9596, - "num_input_tokens_seen": 72156275, - "step": 3385 - }, - { - "epoch": 0.40714242770396203, - "flos": 13045939727280.0, - "grad_norm": 2.4939264909056593, - "learning_rate": 2.684609583851616e-06, - "loss": 1.0737, - "num_input_tokens_seen": 72174170, - "step": 3386 - }, - { - "epoch": 0.4072626705946011, - "flos": 21543718304040.0, - "grad_norm": 2.491589896083646, - "learning_rate": 2.683877618776297e-06, - "loss": 1.0315, - "num_input_tokens_seen": 72196145, - "step": 3387 - }, - { - "epoch": 0.4073829134852402, - "flos": 15510485531520.0, - "grad_norm": 4.344670040477196, - "learning_rate": 2.6831455499572876e-06, - "loss": 0.9686, - "num_input_tokens_seen": 72213800, - "step": 3388 - }, - { - "epoch": 0.40750315637587925, - "flos": 17971903856640.0, - "grad_norm": 3.4476699202988526, - "learning_rate": 2.682413377505641e-06, - "loss": 1.0007, - "num_input_tokens_seen": 72232325, - "step": 3389 - }, - { - "epoch": 0.40762339926651836, - "flos": 13984843117560.0, - "grad_norm": 4.735432421748542, - "learning_rate": 2.6816811015324284e-06, - "loss": 0.9908, - "num_input_tokens_seen": 72250095, - "step": 3390 - }, - { - "epoch": 0.40774364215715747, - "flos": 51171808586760.0, - "grad_norm": 0.7835987642380695, - "learning_rate": 2.6809487221487343e-06, - "loss": 0.8489, - "num_input_tokens_seen": 72309300, - "step": 3391 - }, - { - "epoch": 0.4078638850477965, - "flos": 10659014975640.0, - "grad_norm": 5.85530555299179, - "learning_rate": 2.6802162394656605e-06, - "loss": 1.0387, - "num_input_tokens_seen": 72325730, - "step": 3392 - }, - { - "epoch": 0.40798412793843564, - "flos": 16954060966680.0, - "grad_norm": 2.736586368635737, - "learning_rate": 2.679483653594324e-06, - "loss": 0.9376, - "num_input_tokens_seen": 72347220, - "step": 3393 - }, - { - "epoch": 0.40810437082907475, - "flos": 14957633079720.0, - "grad_norm": 3.378832160269721, - "learning_rate": 2.678750964645857e-06, - "loss": 0.9721, - "num_input_tokens_seen": 72366020, - "step": 3394 - }, - { - "epoch": 0.4082246137197138, - "flos": 7954155254520.0, - "grad_norm": 4.255436281458887, - "learning_rate": 2.6780181727314094e-06, - "loss": 1.0765, - "num_input_tokens_seen": 72380645, - "step": 3395 - }, - { - "epoch": 0.4083448566103529, - "flos": 13518442247640.0, - "grad_norm": 3.174923338677949, 
- "learning_rate": 2.6772852779621435e-06, - "loss": 1.0079, - "num_input_tokens_seen": 72398225, - "step": 3396 - }, - { - "epoch": 0.408465099500992, - "flos": 16743647618880.0, - "grad_norm": 3.0287081098946143, - "learning_rate": 2.676552280449239e-06, - "loss": 1.0724, - "num_input_tokens_seen": 72417830, - "step": 3397 - }, - { - "epoch": 0.4085853423916311, - "flos": 8981656535880.0, - "grad_norm": 5.978830503080875, - "learning_rate": 2.6758191803038917e-06, - "loss": 0.9723, - "num_input_tokens_seen": 72436045, - "step": 3398 - }, - { - "epoch": 0.4087055852822702, - "flos": 17320636266480.0, - "grad_norm": 2.4879748097337377, - "learning_rate": 2.6750859776373125e-06, - "loss": 1.0575, - "num_input_tokens_seen": 72455220, - "step": 3399 - }, - { - "epoch": 0.4088258281729093, - "flos": 47535680203800.0, - "grad_norm": 0.7928853484393604, - "learning_rate": 2.674352672560727e-06, - "loss": 0.8435, - "num_input_tokens_seen": 72516385, - "step": 3400 - }, - { - "epoch": 0.40894607106354836, - "flos": 14515061790000.0, - "grad_norm": 2.719124691454403, - "learning_rate": 2.673619265185377e-06, - "loss": 0.9957, - "num_input_tokens_seen": 72535945, - "step": 3401 - }, - { - "epoch": 0.40906631395418747, - "flos": 19261555633680.0, - "grad_norm": 2.8106585090103815, - "learning_rate": 2.672885755622521e-06, - "loss": 0.9995, - "num_input_tokens_seen": 72558080, - "step": 3402 - }, - { - "epoch": 0.4091865568448266, - "flos": 18133523566320.0, - "grad_norm": 3.327115474411118, - "learning_rate": 2.67215214398343e-06, - "loss": 0.9239, - "num_input_tokens_seen": 72577815, - "step": 3403 - }, - { - "epoch": 0.40930679973546563, - "flos": 20414337128040.0, - "grad_norm": 3.9381969985470806, - "learning_rate": 2.671418430379393e-06, - "loss": 0.9946, - "num_input_tokens_seen": 72596220, - "step": 3404 - }, - { - "epoch": 0.40942704262610474, - "flos": 14829041418240.0, - "grad_norm": 2.7728545100016473, - "learning_rate": 2.670684614921715e-06, - "loss": 1.0394, - "num_input_tokens_seen": 72614915, - "step": 3405 - }, - { - "epoch": 0.4095472855167438, - "flos": 15355243426320.0, - "grad_norm": 4.68884746258009, - "learning_rate": 2.6699506977217128e-06, - "loss": 0.9078, - "num_input_tokens_seen": 72634810, - "step": 3406 - }, - { - "epoch": 0.4096675284073829, - "flos": 19888165781520.0, - "grad_norm": 2.663199455547191, - "learning_rate": 2.6692166788907233e-06, - "loss": 0.9164, - "num_input_tokens_seen": 72654725, - "step": 3407 - }, - { - "epoch": 0.409787771298022, - "flos": 13621701912480.0, - "grad_norm": 12.752250323204295, - "learning_rate": 2.6684825585400957e-06, - "loss": 0.9946, - "num_input_tokens_seen": 72673390, - "step": 3408 - }, - { - "epoch": 0.4099080141886611, - "flos": 42419054319360.0, - "grad_norm": 0.9413110835302364, - "learning_rate": 2.6677483367811947e-06, - "loss": 0.9517, - "num_input_tokens_seen": 72733150, - "step": 3409 - }, - { - "epoch": 0.4100282570793002, - "flos": 15562008048480.0, - "grad_norm": 9.73879400036488, - "learning_rate": 2.6670140137254028e-06, - "loss": 0.9858, - "num_input_tokens_seen": 72752345, - "step": 3410 - }, - { - "epoch": 0.4101484999699393, - "flos": 13151192393520.0, - "grad_norm": 5.074474912642498, - "learning_rate": 2.666279589484115e-06, - "loss": 1.0968, - "num_input_tokens_seen": 72769965, - "step": 3411 - }, - { - "epoch": 0.41026874286057835, - "flos": 13540800072960.0, - "grad_norm": 3.5353641006317362, - "learning_rate": 2.6655450641687435e-06, - "loss": 1.0259, - "num_input_tokens_seen": 72787250, - "step": 3412 - 
}, - { - "epoch": 0.41038898575121746, - "flos": 22247888181360.0, - "grad_norm": 3.39127733837544, - "learning_rate": 2.664810437890715e-06, - "loss": 0.9159, - "num_input_tokens_seen": 72808640, - "step": 3413 - }, - { - "epoch": 0.41050922864185657, - "flos": 10135817800440.0, - "grad_norm": 4.192231195757414, - "learning_rate": 2.6640757107614714e-06, - "loss": 1.0263, - "num_input_tokens_seen": 72826455, - "step": 3414 - }, - { - "epoch": 0.4106294715324956, - "flos": 22066148940240.0, - "grad_norm": 5.595098237103341, - "learning_rate": 2.6633408828924697e-06, - "loss": 0.9378, - "num_input_tokens_seen": 72845040, - "step": 3415 - }, - { - "epoch": 0.41074971442313474, - "flos": 17394853885920.0, - "grad_norm": 3.019759748136001, - "learning_rate": 2.662605954395185e-06, - "loss": 0.9279, - "num_input_tokens_seen": 72864720, - "step": 3416 - }, - { - "epoch": 0.41086995731377385, - "flos": 15300992030520.0, - "grad_norm": 2.864413132668626, - "learning_rate": 2.6618709253811027e-06, - "loss": 1.0705, - "num_input_tokens_seen": 72883895, - "step": 3417 - }, - { - "epoch": 0.4109902002044129, - "flos": 14696985000480.0, - "grad_norm": 2.559801308848734, - "learning_rate": 2.6611357959617277e-06, - "loss": 1.1079, - "num_input_tokens_seen": 72903235, - "step": 3418 - }, - { - "epoch": 0.411110443095052, - "flos": 12883216201440.0, - "grad_norm": 3.815244593547542, - "learning_rate": 2.660400566248578e-06, - "loss": 1.1243, - "num_input_tokens_seen": 72921080, - "step": 3419 - }, - { - "epoch": 0.41123068598569107, - "flos": 10293543492000.0, - "grad_norm": 3.4676419799604576, - "learning_rate": 2.6596652363531876e-06, - "loss": 0.8962, - "num_input_tokens_seen": 72936675, - "step": 3420 - }, - { - "epoch": 0.4113509288763302, - "flos": 15038657565480.0, - "grad_norm": 3.0997022916523775, - "learning_rate": 2.6589298063871055e-06, - "loss": 1.0112, - "num_input_tokens_seen": 72956570, - "step": 3421 - }, - { - "epoch": 0.4114711717669693, - "flos": 13072590171000.0, - "grad_norm": 3.8583488799098458, - "learning_rate": 2.658194276461895e-06, - "loss": 0.9281, - "num_input_tokens_seen": 72974215, - "step": 3422 - }, - { - "epoch": 0.41159141465760835, - "flos": 19390392587640.0, - "grad_norm": 2.886213741082997, - "learning_rate": 2.6574586466891368e-06, - "loss": 0.9061, - "num_input_tokens_seen": 72994410, - "step": 3423 - }, - { - "epoch": 0.41171165754824746, - "flos": 14196513589320.0, - "grad_norm": 3.0492664830906304, - "learning_rate": 2.6567229171804247e-06, - "loss": 0.8642, - "num_input_tokens_seen": 73012015, - "step": 3424 - }, - { - "epoch": 0.41183190043888657, - "flos": 12757874665320.0, - "grad_norm": 3.206483447999525, - "learning_rate": 2.655987088047368e-06, - "loss": 1.0931, - "num_input_tokens_seen": 73030080, - "step": 3425 - }, - { - "epoch": 0.4119521433295256, - "flos": 19340464471800.0, - "grad_norm": 3.787202799501984, - "learning_rate": 2.6552511594015912e-06, - "loss": 1.0048, - "num_input_tokens_seen": 73050190, - "step": 3426 - }, - { - "epoch": 0.41207238622016473, - "flos": 10686431958360.0, - "grad_norm": 3.6205335263945657, - "learning_rate": 2.654515131354735e-06, - "loss": 1.0916, - "num_input_tokens_seen": 73068175, - "step": 3427 - }, - { - "epoch": 0.41219262911080384, - "flos": 19260911740920.0, - "grad_norm": 4.186814892649602, - "learning_rate": 2.653779004018453e-06, - "loss": 1.0847, - "num_input_tokens_seen": 73088460, - "step": 3428 - }, - { - "epoch": 0.4123128720014429, - "flos": 17560827537120.0, - "grad_norm": 2.5361495459955274, - 
"learning_rate": 2.653042777504417e-06, - "loss": 1.0507, - "num_input_tokens_seen": 73110770, - "step": 3429 - }, - { - "epoch": 0.412433114892082, - "flos": 18679415844000.0, - "grad_norm": 4.368654062857673, - "learning_rate": 2.6523064519243105e-06, - "loss": 1.0256, - "num_input_tokens_seen": 73130060, - "step": 3430 - }, - { - "epoch": 0.4125533577827211, - "flos": 15170284721400.0, - "grad_norm": 3.4213465452387584, - "learning_rate": 2.6515700273898333e-06, - "loss": 1.0143, - "num_input_tokens_seen": 73147655, - "step": 3431 - }, - { - "epoch": 0.4126736006733602, - "flos": 18552755860800.0, - "grad_norm": 3.398864999826904, - "learning_rate": 2.6508335040127018e-06, - "loss": 0.9216, - "num_input_tokens_seen": 73167070, - "step": 3432 - }, - { - "epoch": 0.4127938435639993, - "flos": 18264261537000.0, - "grad_norm": 2.6113089515820973, - "learning_rate": 2.6500968819046446e-06, - "loss": 0.9971, - "num_input_tokens_seen": 73187090, - "step": 3433 - }, - { - "epoch": 0.4129140864546384, - "flos": 12750699860280.0, - "grad_norm": 3.382092123878268, - "learning_rate": 2.649360161177408e-06, - "loss": 0.811, - "num_input_tokens_seen": 73201870, - "step": 3434 - }, - { - "epoch": 0.41303432934527745, - "flos": 16874937497640.0, - "grad_norm": 4.633413549227401, - "learning_rate": 2.6486233419427504e-06, - "loss": 0.9534, - "num_input_tokens_seen": 73221405, - "step": 3435 - }, - { - "epoch": 0.41315457223591656, - "flos": 14016215441520.0, - "grad_norm": 13.826443775799747, - "learning_rate": 2.6478864243124484e-06, - "loss": 0.9826, - "num_input_tokens_seen": 73240790, - "step": 3436 - }, - { - "epoch": 0.4132748151265556, - "flos": 14855722523520.0, - "grad_norm": 2.7386465658820205, - "learning_rate": 2.6471494083982903e-06, - "loss": 1.0779, - "num_input_tokens_seen": 73259895, - "step": 3437 - }, - { - "epoch": 0.4133950580171947, - "flos": 22983246413280.0, - "grad_norm": 2.869576189336797, - "learning_rate": 2.6464122943120818e-06, - "loss": 0.9788, - "num_input_tokens_seen": 73279840, - "step": 3438 - }, - { - "epoch": 0.41351530090783384, - "flos": 16558075682760.0, - "grad_norm": 5.135060554006208, - "learning_rate": 2.645675082165642e-06, - "loss": 1.0436, - "num_input_tokens_seen": 73295770, - "step": 3439 - }, - { - "epoch": 0.4136355437984729, - "flos": 18212156450400.0, - "grad_norm": 3.5526530385995896, - "learning_rate": 2.644937772070806e-06, - "loss": 0.9889, - "num_input_tokens_seen": 73313935, - "step": 3440 - }, - { - "epoch": 0.413755786689112, - "flos": 14068964420880.0, - "grad_norm": 5.758398805164193, - "learning_rate": 2.6442003641394225e-06, - "loss": 1.0623, - "num_input_tokens_seen": 73331250, - "step": 3441 - }, - { - "epoch": 0.4138760295797511, - "flos": 19129805831520.0, - "grad_norm": 2.4261504796807083, - "learning_rate": 2.643462858483356e-06, - "loss": 1.066, - "num_input_tokens_seen": 73351255, - "step": 3442 - }, - { - "epoch": 0.41399627247039017, - "flos": 11604387955080.0, - "grad_norm": 3.7388198767444982, - "learning_rate": 2.6427252552144856e-06, - "loss": 0.9696, - "num_input_tokens_seen": 73369625, - "step": 3443 - }, - { - "epoch": 0.4141165153610293, - "flos": 16298071496280.0, - "grad_norm": 2.6024130957144083, - "learning_rate": 2.6419875544447044e-06, - "loss": 0.9795, - "num_input_tokens_seen": 73390745, - "step": 3444 - }, - { - "epoch": 0.4142367582516684, - "flos": 17923999403760.0, - "grad_norm": 4.60305383734718, - "learning_rate": 2.6412497562859218e-06, - "loss": 0.9412, - "num_input_tokens_seen": 73411745, - "step": 3445 - 
}, - { - "epoch": 0.41435700114230745, - "flos": 15407195205120.0, - "grad_norm": 4.225410233845598, - "learning_rate": 2.6405118608500617e-06, - "loss": 0.9891, - "num_input_tokens_seen": 73430290, - "step": 3446 - }, - { - "epoch": 0.41447724403294656, - "flos": 18500804082000.0, - "grad_norm": 2.675838185839281, - "learning_rate": 2.6397738682490613e-06, - "loss": 1.0356, - "num_input_tokens_seen": 73450910, - "step": 3447 - }, - { - "epoch": 0.41459748692358567, - "flos": 12941392276920.0, - "grad_norm": 3.01233299710527, - "learning_rate": 2.6390357785948734e-06, - "loss": 0.9837, - "num_input_tokens_seen": 73467745, - "step": 3448 - }, - { - "epoch": 0.4147177298142247, - "flos": 17187660001920.0, - "grad_norm": 2.6545944440045623, - "learning_rate": 2.6382975919994667e-06, - "loss": 1.0259, - "num_input_tokens_seen": 73488040, - "step": 3449 - }, - { - "epoch": 0.41483797270486383, - "flos": 14253064602120.0, - "grad_norm": 2.288329544156858, - "learning_rate": 2.637559308574822e-06, - "loss": 0.9502, - "num_input_tokens_seen": 73507505, - "step": 3450 - }, - { - "epoch": 0.4149582155955029, - "flos": 21437177852280.0, - "grad_norm": 3.088148189566468, - "learning_rate": 2.6368209284329376e-06, - "loss": 0.9415, - "num_input_tokens_seen": 73527855, - "step": 3451 - }, - { - "epoch": 0.415078458486142, - "flos": 11866845066360.0, - "grad_norm": 3.9539543156471355, - "learning_rate": 2.636082451685825e-06, - "loss": 0.9736, - "num_input_tokens_seen": 73545775, - "step": 3452 - }, - { - "epoch": 0.4151987013767811, - "flos": 18528772972800.0, - "grad_norm": 2.6808148117619313, - "learning_rate": 2.6353438784455094e-06, - "loss": 1.0821, - "num_input_tokens_seen": 73568780, - "step": 3453 - }, - { - "epoch": 0.41531894426742016, - "flos": 17503785939360.0, - "grad_norm": 3.282567658842006, - "learning_rate": 2.6346052088240326e-06, - "loss": 0.9326, - "num_input_tokens_seen": 73588020, - "step": 3454 - }, - { - "epoch": 0.4154391871580593, - "flos": 10581301938360.0, - "grad_norm": 6.275430441147419, - "learning_rate": 2.63386644293345e-06, - "loss": 1.0048, - "num_input_tokens_seen": 73604085, - "step": 3455 - }, - { - "epoch": 0.4155594300486984, - "flos": 10345096670520.0, - "grad_norm": 3.3571703396631754, - "learning_rate": 2.633127580885833e-06, - "loss": 1.0626, - "num_input_tokens_seen": 73618305, - "step": 3456 - }, - { - "epoch": 0.41567967293933744, - "flos": 21017638942200.0, - "grad_norm": 5.295975600321824, - "learning_rate": 2.632388622793265e-06, - "loss": 0.8692, - "num_input_tokens_seen": 73637180, - "step": 3457 - }, - { - "epoch": 0.41579991582997655, - "flos": 13643906430000.0, - "grad_norm": 3.74947366199636, - "learning_rate": 2.6316495687678457e-06, - "loss": 0.9223, - "num_input_tokens_seen": 73655550, - "step": 3458 - }, - { - "epoch": 0.41592015872061566, - "flos": 17398901211840.0, - "grad_norm": 3.839169634207977, - "learning_rate": 2.6309104189216887e-06, - "loss": 0.9814, - "num_input_tokens_seen": 73672835, - "step": 3459 - }, - { - "epoch": 0.4160404016112547, - "flos": 14749366041120.0, - "grad_norm": 4.746313772870689, - "learning_rate": 2.630171173366923e-06, - "loss": 0.9736, - "num_input_tokens_seen": 73688355, - "step": 3460 - }, - { - "epoch": 0.41616064450189383, - "flos": 9819814509240.0, - "grad_norm": 3.8326955485861194, - "learning_rate": 2.629431832215691e-06, - "loss": 0.9707, - "num_input_tokens_seen": 73702880, - "step": 3461 - }, - { - "epoch": 0.41628088739253294, - "flos": 14199886360920.0, - "grad_norm": 5.488682633787762, - 
"learning_rate": 2.628692395580151e-06, - "loss": 1.0873, - "num_input_tokens_seen": 73722690, - "step": 3462 - }, - { - "epoch": 0.416401130283172, - "flos": 20781556320600.0, - "grad_norm": 4.606971243263287, - "learning_rate": 2.6279528635724747e-06, - "loss": 1.0202, - "num_input_tokens_seen": 73742565, - "step": 3463 - }, - { - "epoch": 0.4165213731738111, - "flos": 11493953485200.0, - "grad_norm": 4.658513485467989, - "learning_rate": 2.627213236304848e-06, - "loss": 1.0144, - "num_input_tokens_seen": 73759085, - "step": 3464 - }, - { - "epoch": 0.4166416160644502, - "flos": 24084995975640.0, - "grad_norm": 3.4889597473184186, - "learning_rate": 2.626473513889472e-06, - "loss": 0.937, - "num_input_tokens_seen": 73781185, - "step": 3465 - }, - { - "epoch": 0.41676185895508927, - "flos": 14850755350800.0, - "grad_norm": 2.8464917390784907, - "learning_rate": 2.625733696438562e-06, - "loss": 1.0535, - "num_input_tokens_seen": 73798410, - "step": 3466 - }, - { - "epoch": 0.4168821018457284, - "flos": 13046706266280.0, - "grad_norm": 4.23902665716106, - "learning_rate": 2.6249937840643476e-06, - "loss": 0.9833, - "num_input_tokens_seen": 73816435, - "step": 3467 - }, - { - "epoch": 0.41700234473636744, - "flos": 13256812998480.0, - "grad_norm": 3.460807849530874, - "learning_rate": 2.6242537768790733e-06, - "loss": 0.9037, - "num_input_tokens_seen": 73835310, - "step": 3468 - }, - { - "epoch": 0.41712258762700655, - "flos": 22122025398720.0, - "grad_norm": 4.090965197399984, - "learning_rate": 2.6235136749949975e-06, - "loss": 0.9194, - "num_input_tokens_seen": 73858480, - "step": 3469 - }, - { - "epoch": 0.41724283051764566, - "flos": 25629224843040.0, - "grad_norm": 4.846835427334827, - "learning_rate": 2.6227734785243924e-06, - "loss": 0.8341, - "num_input_tokens_seen": 73878160, - "step": 3470 - }, - { - "epoch": 0.4173630734082847, - "flos": 18024744820680.0, - "grad_norm": 2.7478143502815615, - "learning_rate": 2.6220331875795466e-06, - "loss": 1.0243, - "num_input_tokens_seen": 73897230, - "step": 3471 - }, - { - "epoch": 0.4174833162989238, - "flos": 18996461628240.0, - "grad_norm": 3.2458310898922833, - "learning_rate": 2.62129280227276e-06, - "loss": 0.9862, - "num_input_tokens_seen": 73916950, - "step": 3472 - }, - { - "epoch": 0.41760355918956293, - "flos": 53715973993080.0, - "grad_norm": 8.885188741917203, - "learning_rate": 2.62055232271635e-06, - "loss": 0.9038, - "num_input_tokens_seen": 73943855, - "step": 3473 - }, - { - "epoch": 0.417723802080202, - "flos": 10108002217440.0, - "grad_norm": 3.7038545171620547, - "learning_rate": 2.619811749022646e-06, - "loss": 1.1139, - "num_input_tokens_seen": 73958885, - "step": 3474 - }, - { - "epoch": 0.4178440449708411, - "flos": 10342214483880.0, - "grad_norm": 3.7740585416327854, - "learning_rate": 2.6190710813039917e-06, - "loss": 0.9391, - "num_input_tokens_seen": 73971730, - "step": 3475 - }, - { - "epoch": 0.4179642878614802, - "flos": 15275445402960.0, - "grad_norm": 4.205063837136084, - "learning_rate": 2.618330319672747e-06, - "loss": 1.0456, - "num_input_tokens_seen": 73990870, - "step": 3476 - }, - { - "epoch": 0.41808453075211927, - "flos": 13072406201640.0, - "grad_norm": 5.408237334556845, - "learning_rate": 2.617589464241284e-06, - "loss": 1.149, - "num_input_tokens_seen": 74004990, - "step": 3477 - }, - { - "epoch": 0.4182047736427584, - "flos": 14408214722640.0, - "grad_norm": 4.009181214642314, - "learning_rate": 2.6168485151219914e-06, - "loss": 0.9617, - "num_input_tokens_seen": 74024330, - "step": 3478 - }, - 
{ - "epoch": 0.4183250165333975, - "flos": 13384331505360.0, - "grad_norm": 5.583325096617739, - "learning_rate": 2.616107472427269e-06, - "loss": 0.9364, - "num_input_tokens_seen": 74038745, - "step": 3479 - }, - { - "epoch": 0.41844525942403654, - "flos": 12566752986840.0, - "grad_norm": 4.5970687814993525, - "learning_rate": 2.615366336269533e-06, - "loss": 0.9949, - "num_input_tokens_seen": 74052130, - "step": 3480 - }, - { - "epoch": 0.41856550231467565, - "flos": 13013432925600.0, - "grad_norm": 4.204509974899373, - "learning_rate": 2.6146251067612126e-06, - "loss": 1.0162, - "num_input_tokens_seen": 74067325, - "step": 3481 - }, - { - "epoch": 0.41868574520531476, - "flos": 16191071121120.0, - "grad_norm": 3.4440010325466828, - "learning_rate": 2.6138837840147525e-06, - "loss": 1.0477, - "num_input_tokens_seen": 74086080, - "step": 3482 - }, - { - "epoch": 0.4188059880959538, - "flos": 9663867188160.0, - "grad_norm": 2.916304066234912, - "learning_rate": 2.6131423681426103e-06, - "loss": 0.9997, - "num_input_tokens_seen": 74101715, - "step": 3483 - }, - { - "epoch": 0.41892623098659293, - "flos": 26998092073800.0, - "grad_norm": 19.609253610182044, - "learning_rate": 2.6124008592572587e-06, - "loss": 0.9569, - "num_input_tokens_seen": 74125420, - "step": 3484 - }, - { - "epoch": 0.419046473877232, - "flos": 16534307425680.0, - "grad_norm": 3.895302285840123, - "learning_rate": 2.6116592574711835e-06, - "loss": 1.0276, - "num_input_tokens_seen": 74143440, - "step": 3485 - }, - { - "epoch": 0.4191667167678711, - "flos": 14724739260360.0, - "grad_norm": 5.067925830445863, - "learning_rate": 2.6109175628968853e-06, - "loss": 1.059, - "num_input_tokens_seen": 74162925, - "step": 3486 - }, - { - "epoch": 0.4192869596585102, - "flos": 16769592846720.0, - "grad_norm": 4.271052558048575, - "learning_rate": 2.610175775646878e-06, - "loss": 1.0575, - "num_input_tokens_seen": 74181225, - "step": 3487 - }, - { - "epoch": 0.41940720254914926, - "flos": 17843772118560.0, - "grad_norm": 8.363476468996666, - "learning_rate": 2.6094338958336907e-06, - "loss": 0.9666, - "num_input_tokens_seen": 74199615, - "step": 3488 - }, - { - "epoch": 0.41952744543978837, - "flos": 10996210952880.0, - "grad_norm": 12.680248273951992, - "learning_rate": 2.608691923569867e-06, - "loss": 1.0435, - "num_input_tokens_seen": 74216210, - "step": 3489 - }, - { - "epoch": 0.4196476883304275, - "flos": 17530252413720.0, - "grad_norm": 2.7468596647873893, - "learning_rate": 2.6079498589679616e-06, - "loss": 0.9852, - "num_input_tokens_seen": 74237020, - "step": 3490 - }, - { - "epoch": 0.41976793122106654, - "flos": 17448185434920.0, - "grad_norm": 3.6781090592627965, - "learning_rate": 2.6072077021405465e-06, - "loss": 0.98, - "num_input_tokens_seen": 74255575, - "step": 3491 - }, - { - "epoch": 0.41988817411170565, - "flos": 15036357948480.0, - "grad_norm": 6.451631608751313, - "learning_rate": 2.6064654532002054e-06, - "loss": 0.9232, - "num_input_tokens_seen": 74274305, - "step": 3492 - }, - { - "epoch": 0.42000841700234476, - "flos": 22564259411280.0, - "grad_norm": 2.6097025408726484, - "learning_rate": 2.6057231122595375e-06, - "loss": 0.9873, - "num_input_tokens_seen": 74295335, - "step": 3493 - }, - { - "epoch": 0.4201286598929838, - "flos": 15112261953720.0, - "grad_norm": 3.8662357276532204, - "learning_rate": 2.604980679431154e-06, - "loss": 0.9652, - "num_input_tokens_seen": 74313295, - "step": 3494 - }, - { - "epoch": 0.4202489027836229, - "flos": 13146869113560.0, - "grad_norm": 3.307743039706022, - 
"learning_rate": 2.604238154827684e-06, - "loss": 0.9849, - "num_input_tokens_seen": 74329640, - "step": 3495 - }, - { - "epoch": 0.42036914567426203, - "flos": 13701714566760.0, - "grad_norm": 4.093898158995955, - "learning_rate": 2.6034955385617656e-06, - "loss": 0.9482, - "num_input_tokens_seen": 74347690, - "step": 3496 - }, - { - "epoch": 0.4204893885649011, - "flos": 48580756107120.0, - "grad_norm": 0.7795716377358284, - "learning_rate": 2.6027528307460544e-06, - "loss": 0.8925, - "num_input_tokens_seen": 74411415, - "step": 3497 - }, - { - "epoch": 0.4206096314555402, - "flos": 15564001049880.0, - "grad_norm": 6.495521763377876, - "learning_rate": 2.602010031493217e-06, - "loss": 1.0829, - "num_input_tokens_seen": 74429365, - "step": 3498 - }, - { - "epoch": 0.42072987434617926, - "flos": 20860158543120.0, - "grad_norm": 4.120993546989737, - "learning_rate": 2.6012671409159367e-06, - "loss": 1.0854, - "num_input_tokens_seen": 74450420, - "step": 3499 - }, - { - "epoch": 0.42085011723681837, - "flos": 19654720054080.0, - "grad_norm": 4.75088633248722, - "learning_rate": 2.6005241591269097e-06, - "loss": 1.0621, - "num_input_tokens_seen": 74469510, - "step": 3500 - }, - { - "epoch": 0.4209703601274575, - "flos": 19811188621680.0, - "grad_norm": 3.0895125528683414, - "learning_rate": 2.5997810862388454e-06, - "loss": 1.0327, - "num_input_tokens_seen": 74489070, - "step": 3501 - }, - { - "epoch": 0.42109060301809653, - "flos": 19600376673600.0, - "grad_norm": 5.376930954537929, - "learning_rate": 2.599037922364467e-06, - "loss": 0.9867, - "num_input_tokens_seen": 74507690, - "step": 3502 - }, - { - "epoch": 0.42121084590873564, - "flos": 20886042447840.0, - "grad_norm": 3.8496003327440103, - "learning_rate": 2.5982946676165112e-06, - "loss": 0.9847, - "num_input_tokens_seen": 74527180, - "step": 3503 - }, - { - "epoch": 0.42133108879937475, - "flos": 48260736151560.0, - "grad_norm": 0.7688089844319618, - "learning_rate": 2.5975513221077313e-06, - "loss": 0.8303, - "num_input_tokens_seen": 74590870, - "step": 3504 - }, - { - "epoch": 0.4214513316900138, - "flos": 16423535678640.0, - "grad_norm": 3.9831137661825613, - "learning_rate": 2.5968078859508897e-06, - "loss": 1.1082, - "num_input_tokens_seen": 74607790, - "step": 3505 - }, - { - "epoch": 0.4215715745806529, - "flos": 10839711723720.0, - "grad_norm": 4.702665895885047, - "learning_rate": 2.5960643592587673e-06, - "loss": 1.0169, - "num_input_tokens_seen": 74624920, - "step": 3506 - }, - { - "epoch": 0.42169181747129203, - "flos": 15720960202440.0, - "grad_norm": 3.053888787243818, - "learning_rate": 2.5953207421441553e-06, - "loss": 1.0547, - "num_input_tokens_seen": 74643240, - "step": 3507 - }, - { - "epoch": 0.4218120603619311, - "flos": 16081801790520.0, - "grad_norm": 4.032409501216121, - "learning_rate": 2.5945770347198603e-06, - "loss": 0.9735, - "num_input_tokens_seen": 74661115, - "step": 3508 - }, - { - "epoch": 0.4219323032525702, - "flos": 13964202339600.0, - "grad_norm": 3.7286010540566688, - "learning_rate": 2.593833237098701e-06, - "loss": 1.0626, - "num_input_tokens_seen": 74678435, - "step": 3509 - }, - { - "epoch": 0.4220525461432093, - "flos": 21516056028840.0, - "grad_norm": 8.00929387974673, - "learning_rate": 2.593089349393512e-06, - "loss": 0.854, - "num_input_tokens_seen": 74698645, - "step": 3510 - }, - { - "epoch": 0.42217278903384836, - "flos": 17293863176520.0, - "grad_norm": 3.2533524168997374, - "learning_rate": 2.592345371717141e-06, - "loss": 1.0618, - "num_input_tokens_seen": 74717895, - "step": 
3511 - }, - { - "epoch": 0.42229303192448747, - "flos": 12102590410800.0, - "grad_norm": 3.6860891973142267, - "learning_rate": 2.591601304182448e-06, - "loss": 0.9326, - "num_input_tokens_seen": 74735585, - "step": 3512 - }, - { - "epoch": 0.4224132748151266, - "flos": 16193401399680.0, - "grad_norm": 3.597396062498673, - "learning_rate": 2.5908571469023067e-06, - "loss": 1.0208, - "num_input_tokens_seen": 74754790, - "step": 3513 - }, - { - "epoch": 0.42253351770576564, - "flos": 12624377154240.0, - "grad_norm": 3.487176701043312, - "learning_rate": 2.5901128999896067e-06, - "loss": 0.9835, - "num_input_tokens_seen": 74769940, - "step": 3514 - }, - { - "epoch": 0.42265376059640475, - "flos": 20309851000800.0, - "grad_norm": 2.4780440366586767, - "learning_rate": 2.5893685635572487e-06, - "loss": 0.9126, - "num_input_tokens_seen": 74790510, - "step": 3515 - }, - { - "epoch": 0.4227740034870438, - "flos": 11499533889120.0, - "grad_norm": 6.137755832662045, - "learning_rate": 2.5886241377181483e-06, - "loss": 0.916, - "num_input_tokens_seen": 74809100, - "step": 3516 - }, - { - "epoch": 0.4228942463776829, - "flos": 17997266514840.0, - "grad_norm": 2.913605927808954, - "learning_rate": 2.587879622585234e-06, - "loss": 1.032, - "num_input_tokens_seen": 74827420, - "step": 3517 - }, - { - "epoch": 0.423014489268322, - "flos": 18788623851480.0, - "grad_norm": 3.2494286254989375, - "learning_rate": 2.5871350182714486e-06, - "loss": 0.9793, - "num_input_tokens_seen": 74848020, - "step": 3518 - }, - { - "epoch": 0.4231347321589611, - "flos": 12232929781200.0, - "grad_norm": 4.313343794828185, - "learning_rate": 2.586390324889748e-06, - "loss": 1.0271, - "num_input_tokens_seen": 74863640, - "step": 3519 - }, - { - "epoch": 0.4232549750496002, - "flos": 16347478365600.0, - "grad_norm": 3.48829669671307, - "learning_rate": 2.5856455425531003e-06, - "loss": 0.8968, - "num_input_tokens_seen": 74884835, - "step": 3520 - }, - { - "epoch": 0.4233752179402393, - "flos": 15088861635360.0, - "grad_norm": 5.2479420215962875, - "learning_rate": 2.5849006713744902e-06, - "loss": 1.0358, - "num_input_tokens_seen": 74903350, - "step": 3521 - }, - { - "epoch": 0.42349546083087836, - "flos": 14698303447560.0, - "grad_norm": 4.362793160018533, - "learning_rate": 2.5841557114669135e-06, - "loss": 0.9545, - "num_input_tokens_seen": 74919930, - "step": 3522 - }, - { - "epoch": 0.42361570372151747, - "flos": 13174838004360.0, - "grad_norm": 12.991877439174496, - "learning_rate": 2.58341066294338e-06, - "loss": 0.8825, - "num_input_tokens_seen": 74936315, - "step": 3523 - }, - { - "epoch": 0.4237359466121566, - "flos": 14881667751360.0, - "grad_norm": 8.366802701819369, - "learning_rate": 2.5826655259169124e-06, - "loss": 1.0817, - "num_input_tokens_seen": 74954690, - "step": 3524 - }, - { - "epoch": 0.42385618950279563, - "flos": 12781857553320.0, - "grad_norm": 3.375498416973156, - "learning_rate": 2.5819203005005475e-06, - "loss": 1.1256, - "num_input_tokens_seen": 74971745, - "step": 3525 - }, - { - "epoch": 0.42397643239343474, - "flos": 16900944048600.0, - "grad_norm": 4.276877739311878, - "learning_rate": 2.581174986807336e-06, - "loss": 1.0172, - "num_input_tokens_seen": 74991700, - "step": 3526 - }, - { - "epoch": 0.42409667528407385, - "flos": 11709180697920.0, - "grad_norm": 6.70086674563987, - "learning_rate": 2.580429584950341e-06, - "loss": 1.1427, - "num_input_tokens_seen": 75007170, - "step": 3527 - }, - { - "epoch": 0.4242169181747129, - "flos": 11341992166920.0, - "grad_norm": 5.758504466243309, - 
"learning_rate": 2.5796840950426397e-06, - "loss": 0.8754, - "num_input_tokens_seen": 75023975, - "step": 3528 - }, - { - "epoch": 0.424337161065352, - "flos": 14252512694040.0, - "grad_norm": 3.9160906320746776, - "learning_rate": 2.578938517197322e-06, - "loss": 0.8908, - "num_input_tokens_seen": 75041790, - "step": 3529 - }, - { - "epoch": 0.4244574039559911, - "flos": 16979423624880.0, - "grad_norm": 6.075370733306663, - "learning_rate": 2.5781928515274916e-06, - "loss": 0.8465, - "num_input_tokens_seen": 75060230, - "step": 3530 - }, - { - "epoch": 0.4245776468466302, - "flos": 12442729897800.0, - "grad_norm": 3.36977957596101, - "learning_rate": 2.577447098146265e-06, - "loss": 0.9042, - "num_input_tokens_seen": 75077125, - "step": 3531 - }, - { - "epoch": 0.4246978897372693, - "flos": 19779938943960.0, - "grad_norm": 7.4590626223311345, - "learning_rate": 2.5767012571667724e-06, - "loss": 1.0123, - "num_input_tokens_seen": 75096325, - "step": 3532 - }, - { - "epoch": 0.42481813262790835, - "flos": 11027552615280.0, - "grad_norm": 4.385206687170969, - "learning_rate": 2.5759553287021587e-06, - "loss": 0.9049, - "num_input_tokens_seen": 75114375, - "step": 3533 - }, - { - "epoch": 0.42493837551854746, - "flos": 17031651357720.0, - "grad_norm": 4.266381500569122, - "learning_rate": 2.5752093128655786e-06, - "loss": 0.9959, - "num_input_tokens_seen": 75132340, - "step": 3534 - }, - { - "epoch": 0.4250586184091866, - "flos": 14776721700720.0, - "grad_norm": 3.8434170214322, - "learning_rate": 2.574463209770204e-06, - "loss": 0.9605, - "num_input_tokens_seen": 75151375, - "step": 3535 - }, - { - "epoch": 0.42517886129982563, - "flos": 21645322244640.0, - "grad_norm": 2.7146047802592905, - "learning_rate": 2.5737170195292165e-06, - "loss": 1.0202, - "num_input_tokens_seen": 75174430, - "step": 3536 - }, - { - "epoch": 0.42529910419046474, - "flos": 14249630507400.0, - "grad_norm": 4.248287080134107, - "learning_rate": 2.572970742255814e-06, - "loss": 0.9995, - "num_input_tokens_seen": 75192640, - "step": 3537 - }, - { - "epoch": 0.42541934708110385, - "flos": 16083273545400.0, - "grad_norm": 2.948128161352847, - "learning_rate": 2.5722243780632046e-06, - "loss": 1.0494, - "num_input_tokens_seen": 75210625, - "step": 3538 - }, - { - "epoch": 0.4255395899717429, - "flos": 47400434983800.0, - "grad_norm": 0.8249875139967755, - "learning_rate": 2.5714779270646125e-06, - "loss": 0.8877, - "num_input_tokens_seen": 75271115, - "step": 3539 - }, - { - "epoch": 0.425659832862382, - "flos": 12705830901840.0, - "grad_norm": 5.889483191933432, - "learning_rate": 2.5707313893732735e-06, - "loss": 0.982, - "num_input_tokens_seen": 75289375, - "step": 3540 - }, - { - "epoch": 0.4257800757530211, - "flos": 17082805935960.0, - "grad_norm": 5.833842482372219, - "learning_rate": 2.5699847651024364e-06, - "loss": 0.9905, - "num_input_tokens_seen": 75309735, - "step": 3541 - }, - { - "epoch": 0.4259003186436602, - "flos": 16848379038600.0, - "grad_norm": 5.616191834132221, - "learning_rate": 2.5692380543653627e-06, - "loss": 0.9991, - "num_input_tokens_seen": 75327610, - "step": 3542 - }, - { - "epoch": 0.4260205615342993, - "flos": 10784325850200.0, - "grad_norm": 5.283357098322387, - "learning_rate": 2.5684912572753293e-06, - "loss": 0.918, - "num_input_tokens_seen": 75343005, - "step": 3543 - }, - { - "epoch": 0.4261408044249384, - "flos": 21857146024200.0, - "grad_norm": 2.8331783102672086, - "learning_rate": 2.5677443739456245e-06, - "loss": 1.0772, - "num_input_tokens_seen": 75364385, - "step": 3544 - }, 
- { - "epoch": 0.42626104731557746, - "flos": 16533479563560.0, - "grad_norm": 5.910349235737167, - "learning_rate": 2.5669974044895495e-06, - "loss": 1.0143, - "num_input_tokens_seen": 75380500, - "step": 3545 - }, - { - "epoch": 0.42638129020621657, - "flos": 18421343335800.0, - "grad_norm": 4.625923476942346, - "learning_rate": 2.5662503490204187e-06, - "loss": 1.0213, - "num_input_tokens_seen": 75400385, - "step": 3546 - }, - { - "epoch": 0.4265015330968556, - "flos": 18864834472320.0, - "grad_norm": 4.235572110820099, - "learning_rate": 2.5655032076515603e-06, - "loss": 0.9895, - "num_input_tokens_seen": 75419430, - "step": 3547 - }, - { - "epoch": 0.42662177598749473, - "flos": 17346826786800.0, - "grad_norm": 4.323017287711053, - "learning_rate": 2.5647559804963155e-06, - "loss": 1.0363, - "num_input_tokens_seen": 75439080, - "step": 3548 - }, - { - "epoch": 0.42674201887813384, - "flos": 16454969325720.0, - "grad_norm": 6.790506681209351, - "learning_rate": 2.5640086676680364e-06, - "loss": 1.0155, - "num_input_tokens_seen": 75460295, - "step": 3549 - }, - { - "epoch": 0.4268622617687729, - "flos": 15406459327680.0, - "grad_norm": 5.597855920418256, - "learning_rate": 2.5632612692800923e-06, - "loss": 1.024, - "num_input_tokens_seen": 75479080, - "step": 3550 - }, - { - "epoch": 0.426982504659412, - "flos": 16664432165160.0, - "grad_norm": 8.430240475085716, - "learning_rate": 2.5625137854458603e-06, - "loss": 0.9726, - "num_input_tokens_seen": 75497815, - "step": 3551 - }, - { - "epoch": 0.4271027475500511, - "flos": 13413496197000.0, - "grad_norm": 2.9955239282530073, - "learning_rate": 2.561766216278735e-06, - "loss": 1.0212, - "num_input_tokens_seen": 75515130, - "step": 3552 - }, - { - "epoch": 0.4272229904406902, - "flos": 19129897816200.0, - "grad_norm": 10.029911700665393, - "learning_rate": 2.561018561892121e-06, - "loss": 1.0333, - "num_input_tokens_seen": 75533990, - "step": 3553 - }, - { - "epoch": 0.4273432333313293, - "flos": 17031896650200.0, - "grad_norm": 2.342688459959725, - "learning_rate": 2.5602708223994363e-06, - "loss": 0.9925, - "num_input_tokens_seen": 75555575, - "step": 3554 - }, - { - "epoch": 0.4274634762219684, - "flos": 21070173290640.0, - "grad_norm": 6.538622332015547, - "learning_rate": 2.559522997914115e-06, - "loss": 0.8967, - "num_input_tokens_seen": 75574875, - "step": 3555 - }, - { - "epoch": 0.42758371911260745, - "flos": 15222849731400.0, - "grad_norm": 3.849348503575318, - "learning_rate": 2.558775088549599e-06, - "loss": 1.0862, - "num_input_tokens_seen": 75594175, - "step": 3556 - }, - { - "epoch": 0.42770396200324656, - "flos": 10420510090800.0, - "grad_norm": 4.081616306661454, - "learning_rate": 2.5580270944193467e-06, - "loss": 0.8745, - "num_input_tokens_seen": 75610715, - "step": 3557 - }, - { - "epoch": 0.4278242048938857, - "flos": 50601841436400.0, - "grad_norm": 0.7439467210022724, - "learning_rate": 2.557279015636827e-06, - "loss": 0.8038, - "num_input_tokens_seen": 75670845, - "step": 3558 - }, - { - "epoch": 0.42794444778452473, - "flos": 49674227048280.0, - "grad_norm": 0.8207492573920941, - "learning_rate": 2.5565308523155245e-06, - "loss": 0.8896, - "num_input_tokens_seen": 75730165, - "step": 3559 - }, - { - "epoch": 0.42806469067516384, - "flos": 12908456213400.0, - "grad_norm": 4.338046494061993, - "learning_rate": 2.5557826045689336e-06, - "loss": 1.0417, - "num_input_tokens_seen": 75746125, - "step": 3560 - }, - { - "epoch": 0.4281849335658029, - "flos": 39018548634600.0, - "grad_norm": 0.9680377433764235, - 
"learning_rate": 2.5550342725105643e-06, - "loss": 0.8386, - "num_input_tokens_seen": 75804010, - "step": 3561 - }, - { - "epoch": 0.428305176456442, - "flos": 12233634997080.0, - "grad_norm": 5.263205133527278, - "learning_rate": 2.554285856253937e-06, - "loss": 1.0411, - "num_input_tokens_seen": 75822565, - "step": 3562 - }, - { - "epoch": 0.4284254193470811, - "flos": 18762586638960.0, - "grad_norm": 3.4390956989715438, - "learning_rate": 2.5535373559125855e-06, - "loss": 0.9874, - "num_input_tokens_seen": 75842650, - "step": 3563 - }, - { - "epoch": 0.42854566223772017, - "flos": 21173310309240.0, - "grad_norm": 2.355838676998023, - "learning_rate": 2.552788771600057e-06, - "loss": 1.0506, - "num_input_tokens_seen": 75862680, - "step": 3564 - }, - { - "epoch": 0.4286659051283593, - "flos": 15642603272400.0, - "grad_norm": 3.779372324424517, - "learning_rate": 2.5520401034299118e-06, - "loss": 1.0484, - "num_input_tokens_seen": 75880160, - "step": 3565 - }, - { - "epoch": 0.4287861480189984, - "flos": 9401747354040.0, - "grad_norm": 3.5848339102988733, - "learning_rate": 2.551291351515722e-06, - "loss": 1.0957, - "num_input_tokens_seen": 75896895, - "step": 3566 - }, - { - "epoch": 0.42890639090963745, - "flos": 18972478740240.0, - "grad_norm": 8.35804969050234, - "learning_rate": 2.5505425159710726e-06, - "loss": 1.0877, - "num_input_tokens_seen": 75916425, - "step": 3567 - }, - { - "epoch": 0.42902663380027656, - "flos": 17106236915880.0, - "grad_norm": 2.9374735404912884, - "learning_rate": 2.549793596909561e-06, - "loss": 1.061, - "num_input_tokens_seen": 75934765, - "step": 3568 - }, - { - "epoch": 0.42914687669091567, - "flos": 11053313873760.0, - "grad_norm": 5.208058444080482, - "learning_rate": 2.5490445944447976e-06, - "loss": 0.8838, - "num_input_tokens_seen": 75952980, - "step": 3569 - }, - { - "epoch": 0.4292671195815547, - "flos": 22433858717760.0, - "grad_norm": 3.0186189800759884, - "learning_rate": 2.548295508690406e-06, - "loss": 0.8807, - "num_input_tokens_seen": 75973995, - "step": 3570 - }, - { - "epoch": 0.42938736247219383, - "flos": 21563408573640.0, - "grad_norm": 2.5640691279407593, - "learning_rate": 2.5475463397600217e-06, - "loss": 0.9932, - "num_input_tokens_seen": 75993795, - "step": 3571 - }, - { - "epoch": 0.42950760536283294, - "flos": 20911343782920.0, - "grad_norm": 5.082920286980588, - "learning_rate": 2.546797087767293e-06, - "loss": 1.0114, - "num_input_tokens_seen": 76013640, - "step": 3572 - }, - { - "epoch": 0.429627848253472, - "flos": 19129376569680.0, - "grad_norm": 4.037935612566212, - "learning_rate": 2.546047752825881e-06, - "loss": 1.0943, - "num_input_tokens_seen": 76033965, - "step": 3573 - }, - { - "epoch": 0.4297480911441111, - "flos": 9794697143520.0, - "grad_norm": 6.055887922834731, - "learning_rate": 2.5452983350494595e-06, - "loss": 1.1622, - "num_input_tokens_seen": 76049240, - "step": 3574 - }, - { - "epoch": 0.4298683340347502, - "flos": 14724831245040.0, - "grad_norm": 3.9332625582087246, - "learning_rate": 2.544548834551713e-06, - "loss": 0.8844, - "num_input_tokens_seen": 76067965, - "step": 3575 - }, - { - "epoch": 0.4299885769253893, - "flos": 14825484677280.0, - "grad_norm": 7.3907238427421795, - "learning_rate": 2.5437992514463424e-06, - "loss": 1.1502, - "num_input_tokens_seen": 76081010, - "step": 3576 - }, - { - "epoch": 0.4301088198160284, - "flos": 18133922166600.0, - "grad_norm": 3.370105019356474, - "learning_rate": 2.5430495858470565e-06, - "loss": 1.1105, - "num_input_tokens_seen": 76100200, - "step": 3577 - }, 
- { - "epoch": 0.43022906270666744, - "flos": 12941177646000.0, - "grad_norm": 5.469622733096609, - "learning_rate": 2.54229983786758e-06, - "loss": 1.0002, - "num_input_tokens_seen": 76117865, - "step": 3578 - }, - { - "epoch": 0.43034930559730655, - "flos": 16634684903880.0, - "grad_norm": 3.566544094534551, - "learning_rate": 2.541550007621651e-06, - "loss": 1.0643, - "num_input_tokens_seen": 76136075, - "step": 3579 - }, - { - "epoch": 0.43046954848794566, - "flos": 20074166979480.0, - "grad_norm": 5.183498631792614, - "learning_rate": 2.5408000952230156e-06, - "loss": 1.0368, - "num_input_tokens_seen": 76154585, - "step": 3580 - }, - { - "epoch": 0.4305897913785847, - "flos": 20358123392400.0, - "grad_norm": 3.961826081835239, - "learning_rate": 2.5400501007854357e-06, - "loss": 1.1216, - "num_input_tokens_seen": 76173750, - "step": 3581 - }, - { - "epoch": 0.43071003426922383, - "flos": 14514601866600.0, - "grad_norm": 3.84318922960994, - "learning_rate": 2.539300024422685e-06, - "loss": 0.9807, - "num_input_tokens_seen": 76191415, - "step": 3582 - }, - { - "epoch": 0.43083027715986294, - "flos": 37191901980360.0, - "grad_norm": 0.8121972253502647, - "learning_rate": 2.538549866248549e-06, - "loss": 0.8699, - "num_input_tokens_seen": 76246115, - "step": 3583 - }, - { - "epoch": 0.430950520050502, - "flos": 11813942779200.0, - "grad_norm": 8.179523805220924, - "learning_rate": 2.5377996263768274e-06, - "loss": 1.0352, - "num_input_tokens_seen": 76263915, - "step": 3584 - }, - { - "epoch": 0.4310707629411411, - "flos": 17504153878080.0, - "grad_norm": 3.4520448402065096, - "learning_rate": 2.5370493049213293e-06, - "loss": 0.9079, - "num_input_tokens_seen": 76283280, - "step": 3585 - }, - { - "epoch": 0.4311910058317802, - "flos": 18815458264560.0, - "grad_norm": 6.206994682574003, - "learning_rate": 2.536298901995878e-06, - "loss": 1.0242, - "num_input_tokens_seen": 76302210, - "step": 3586 - }, - { - "epoch": 0.43131124872241927, - "flos": 17897624914080.0, - "grad_norm": 4.133591461325203, - "learning_rate": 2.535548417714311e-06, - "loss": 1.0279, - "num_input_tokens_seen": 76321230, - "step": 3587 - }, - { - "epoch": 0.4314314916130584, - "flos": 15352453224360.0, - "grad_norm": 3.462426409989429, - "learning_rate": 2.534797852190474e-06, - "loss": 1.098, - "num_input_tokens_seen": 76341130, - "step": 3588 - }, - { - "epoch": 0.4315517345036975, - "flos": 13670250258120.0, - "grad_norm": 3.837341548866701, - "learning_rate": 2.5340472055382283e-06, - "loss": 1.0298, - "num_input_tokens_seen": 76356880, - "step": 3589 - }, - { - "epoch": 0.43167197739433655, - "flos": 17262858791280.0, - "grad_norm": 6.539868376920989, - "learning_rate": 2.5332964778714468e-06, - "loss": 1.0413, - "num_input_tokens_seen": 76373785, - "step": 3590 - }, - { - "epoch": 0.43179222028497566, - "flos": 11940357469920.0, - "grad_norm": 3.346508848818381, - "learning_rate": 2.5325456693040123e-06, - "loss": 0.8942, - "num_input_tokens_seen": 76390700, - "step": 3591 - }, - { - "epoch": 0.43191246317561477, - "flos": 12495448215600.0, - "grad_norm": 3.8783744147769292, - "learning_rate": 2.531794779949824e-06, - "loss": 0.9816, - "num_input_tokens_seen": 76408320, - "step": 3592 - }, - { - "epoch": 0.4320327060662538, - "flos": 16979791563600.0, - "grad_norm": 5.921975489747647, - "learning_rate": 2.5310438099227903e-06, - "loss": 1.1058, - "num_input_tokens_seen": 76425305, - "step": 3593 - }, - { - "epoch": 0.43215294895689293, - "flos": 47541628546440.0, - "grad_norm": 2.243114851202955, - 
"learning_rate": 2.530292759336833e-06, - "loss": 0.7914, - "num_input_tokens_seen": 76485760, - "step": 3594 - }, - { - "epoch": 0.432273191847532, - "flos": 14619149316960.0, - "grad_norm": 30.98668557005069, - "learning_rate": 2.5295416283058855e-06, - "loss": 0.9349, - "num_input_tokens_seen": 76504345, - "step": 3595 - }, - { - "epoch": 0.4323934347381711, - "flos": 13676321247000.0, - "grad_norm": 3.1291352948996343, - "learning_rate": 2.5287904169438943e-06, - "loss": 0.895, - "num_input_tokens_seen": 76523270, - "step": 3596 - }, - { - "epoch": 0.4325136776288102, - "flos": 15432864478920.0, - "grad_norm": 4.723810577044538, - "learning_rate": 2.528039125364817e-06, - "loss": 0.8562, - "num_input_tokens_seen": 76541795, - "step": 3597 - }, - { - "epoch": 0.43263392051944927, - "flos": 15873657398160.0, - "grad_norm": 4.6048642603845975, - "learning_rate": 2.5272877536826246e-06, - "loss": 0.9731, - "num_input_tokens_seen": 76560310, - "step": 3598 - }, - { - "epoch": 0.4327541634100884, - "flos": 20781219043440.0, - "grad_norm": 5.763425660828843, - "learning_rate": 2.5265363020112986e-06, - "loss": 0.9147, - "num_input_tokens_seen": 76580350, - "step": 3599 - }, - { - "epoch": 0.4328744063007275, - "flos": 18552663876120.0, - "grad_norm": 3.6858179167608287, - "learning_rate": 2.5257847704648344e-06, - "loss": 1.0737, - "num_input_tokens_seen": 76601300, - "step": 3600 - }, - { - "epoch": 0.43299464919136654, - "flos": 11735217910440.0, - "grad_norm": 4.553390396751042, - "learning_rate": 2.525033159157239e-06, - "loss": 0.9937, - "num_input_tokens_seen": 76617335, - "step": 3601 - }, - { - "epoch": 0.43311489208200565, - "flos": 11394526515360.0, - "grad_norm": 3.312485029078787, - "learning_rate": 2.52428146820253e-06, - "loss": 0.998, - "num_input_tokens_seen": 76635310, - "step": 3602 - }, - { - "epoch": 0.43323513497264476, - "flos": 16298071496280.0, - "grad_norm": 4.026595393006898, - "learning_rate": 2.52352969771474e-06, - "loss": 1.0523, - "num_input_tokens_seen": 76654255, - "step": 3603 - }, - { - "epoch": 0.4333553778632838, - "flos": 18000118039920.0, - "grad_norm": 2.409176619658819, - "learning_rate": 2.5227778478079106e-06, - "loss": 1.114, - "num_input_tokens_seen": 76673385, - "step": 3604 - }, - { - "epoch": 0.43347562075392293, - "flos": 13751213420760.0, - "grad_norm": 5.054385994316324, - "learning_rate": 2.522025918596098e-06, - "loss": 0.998, - "num_input_tokens_seen": 76691405, - "step": 3605 - }, - { - "epoch": 0.43359586364456204, - "flos": 18738879705000.0, - "grad_norm": 3.348875164385526, - "learning_rate": 2.521273910193368e-06, - "loss": 0.8813, - "num_input_tokens_seen": 76714305, - "step": 3606 - }, - { - "epoch": 0.4337161065352011, - "flos": 11308565518440.0, - "grad_norm": 5.000315465512018, - "learning_rate": 2.5205218227138006e-06, - "loss": 1.0893, - "num_input_tokens_seen": 76726980, - "step": 3607 - }, - { - "epoch": 0.4338363494258402, - "flos": 14352920833800.0, - "grad_norm": 16.06858431397952, - "learning_rate": 2.519769656271486e-06, - "loss": 1.017, - "num_input_tokens_seen": 76744120, - "step": 3608 - }, - { - "epoch": 0.43395659231647926, - "flos": 14252420709360.0, - "grad_norm": 5.006488785781729, - "learning_rate": 2.5190174109805285e-06, - "loss": 0.9007, - "num_input_tokens_seen": 76763665, - "step": 3609 - }, - { - "epoch": 0.43407683520711837, - "flos": 14121590754000.0, - "grad_norm": 3.796632484257598, - "learning_rate": 2.518265086955042e-06, - "loss": 0.8594, - "num_input_tokens_seen": 76781105, - "step": 3610 - }, - { 
- "epoch": 0.4341970780977575, - "flos": 16425835295640.0, - "grad_norm": 5.049709032193286, - "learning_rate": 2.5175126843091534e-06, - "loss": 1.0521, - "num_input_tokens_seen": 76800195, - "step": 3611 - }, - { - "epoch": 0.43431732098839654, - "flos": 26702269637160.0, - "grad_norm": 3.4164844712471547, - "learning_rate": 2.5167602031570034e-06, - "loss": 0.9719, - "num_input_tokens_seen": 76820100, - "step": 3612 - }, - { - "epoch": 0.43443756387903565, - "flos": 22721494517880.0, - "grad_norm": 6.772365392823693, - "learning_rate": 2.51600764361274e-06, - "loss": 0.9672, - "num_input_tokens_seen": 76841345, - "step": 3613 - }, - { - "epoch": 0.43455780676967476, - "flos": 16691082608880.0, - "grad_norm": 8.064743794334802, - "learning_rate": 2.5152550057905283e-06, - "loss": 1.0179, - "num_input_tokens_seen": 76860955, - "step": 3614 - }, - { - "epoch": 0.4346780496603138, - "flos": 17215690215840.0, - "grad_norm": 5.971270762905144, - "learning_rate": 2.5145022898045415e-06, - "loss": 0.9758, - "num_input_tokens_seen": 76879860, - "step": 3615 - }, - { - "epoch": 0.4347982925509529, - "flos": 12102498426120.0, - "grad_norm": 3.6628331316056144, - "learning_rate": 2.5137494957689664e-06, - "loss": 1.1253, - "num_input_tokens_seen": 76895190, - "step": 3616 - }, - { - "epoch": 0.43491853544159204, - "flos": 43623511638480.0, - "grad_norm": 0.7528974672144942, - "learning_rate": 2.5129966237980016e-06, - "loss": 0.8295, - "num_input_tokens_seen": 76957905, - "step": 3617 - }, - { - "epoch": 0.4350387783322311, - "flos": 15589700985240.0, - "grad_norm": 5.464111802639844, - "learning_rate": 2.512243674005857e-06, - "loss": 1.0056, - "num_input_tokens_seen": 76976990, - "step": 3618 - }, - { - "epoch": 0.4351590212228702, - "flos": 17844998580960.0, - "grad_norm": 5.545621660503034, - "learning_rate": 2.5114906465067537e-06, - "loss": 1.0886, - "num_input_tokens_seen": 76997695, - "step": 3619 - }, - { - "epoch": 0.4352792641135093, - "flos": 15274801510200.0, - "grad_norm": 3.651262451693921, - "learning_rate": 2.5107375414149264e-06, - "loss": 0.9618, - "num_input_tokens_seen": 77016660, - "step": 3620 - }, - { - "epoch": 0.43539950700414837, - "flos": 11499625873800.0, - "grad_norm": 4.150864403105545, - "learning_rate": 2.5099843588446197e-06, - "loss": 0.9258, - "num_input_tokens_seen": 77034700, - "step": 3621 - }, - { - "epoch": 0.4355197498947875, - "flos": 11814739979760.0, - "grad_norm": 2.792985737355464, - "learning_rate": 2.509231098910091e-06, - "loss": 0.8449, - "num_input_tokens_seen": 77054290, - "step": 3622 - }, - { - "epoch": 0.4356399927854266, - "flos": 11578136111640.0, - "grad_norm": 4.9909035907682835, - "learning_rate": 2.508477761725611e-06, - "loss": 0.962, - "num_input_tokens_seen": 77072285, - "step": 3623 - }, - { - "epoch": 0.43576023567606564, - "flos": 12181345941120.0, - "grad_norm": 3.528284093278672, - "learning_rate": 2.507724347405458e-06, - "loss": 1.0295, - "num_input_tokens_seen": 77089955, - "step": 3624 - }, - { - "epoch": 0.43588047856670475, - "flos": 11257288293960.0, - "grad_norm": 4.757078618418191, - "learning_rate": 2.5069708560639243e-06, - "loss": 1.0488, - "num_input_tokens_seen": 77107585, - "step": 3625 - }, - { - "epoch": 0.4360007214573438, - "flos": 16822188518280.0, - "grad_norm": 4.589808427390677, - "learning_rate": 2.5062172878153158e-06, - "loss": 0.8413, - "num_input_tokens_seen": 77126580, - "step": 3626 - }, - { - "epoch": 0.4361209643479829, - "flos": 15613898504160.0, - "grad_norm": 6.687057337861233, - 
"learning_rate": 2.505463642773947e-06, - "loss": 1.1017, - "num_input_tokens_seen": 77146265, - "step": 3627 - }, - { - "epoch": 0.43624120723862203, - "flos": 12337998478080.0, - "grad_norm": 3.9434679610637797, - "learning_rate": 2.504709921054146e-06, - "loss": 0.9754, - "num_input_tokens_seen": 77162800, - "step": 3628 - }, - { - "epoch": 0.4363614501292611, - "flos": 12679579058400.0, - "grad_norm": 3.2835923326712977, - "learning_rate": 2.50395612277025e-06, - "loss": 1.0658, - "num_input_tokens_seen": 77178375, - "step": 3629 - }, - { - "epoch": 0.4364816930199002, - "flos": 14409809123760.0, - "grad_norm": 5.644076403602899, - "learning_rate": 2.503202248036612e-06, - "loss": 0.9623, - "num_input_tokens_seen": 77196950, - "step": 3630 - }, - { - "epoch": 0.4366019359105393, - "flos": 17110989457680.0, - "grad_norm": 3.6450043235303147, - "learning_rate": 2.5024482969675927e-06, - "loss": 0.9545, - "num_input_tokens_seen": 77216625, - "step": 3631 - }, - { - "epoch": 0.43672217880117836, - "flos": 15451665563280.0, - "grad_norm": 4.884891354339539, - "learning_rate": 2.501694269677566e-06, - "loss": 1.0649, - "num_input_tokens_seen": 77234115, - "step": 3632 - }, - { - "epoch": 0.4368424216918175, - "flos": 12779312643840.0, - "grad_norm": 4.9384166011598625, - "learning_rate": 2.500940166280918e-06, - "loss": 1.0242, - "num_input_tokens_seen": 77252265, - "step": 3633 - }, - { - "epoch": 0.4369626645824566, - "flos": 18106842461040.0, - "grad_norm": 2.90278106721715, - "learning_rate": 2.500185986892045e-06, - "loss": 1.0136, - "num_input_tokens_seen": 77271470, - "step": 3634 - }, - { - "epoch": 0.43708290747309564, - "flos": 18003245519040.0, - "grad_norm": 2.8254207095265533, - "learning_rate": 2.499431731625355e-06, - "loss": 1.0121, - "num_input_tokens_seen": 77290215, - "step": 3635 - }, - { - "epoch": 0.43720315036373475, - "flos": 22510682569800.0, - "grad_norm": 11.05478515581866, - "learning_rate": 2.4986774005952686e-06, - "loss": 1.0275, - "num_input_tokens_seen": 77312310, - "step": 3636 - }, - { - "epoch": 0.43732339325437386, - "flos": 16428625497600.0, - "grad_norm": 4.982132075191147, - "learning_rate": 2.4979229939162166e-06, - "loss": 1.0688, - "num_input_tokens_seen": 77330810, - "step": 3637 - }, - { - "epoch": 0.4374436361450129, - "flos": 19759267504440.0, - "grad_norm": 3.3768006219179636, - "learning_rate": 2.4971685117026433e-06, - "loss": 1.0253, - "num_input_tokens_seen": 77350295, - "step": 3638 - }, - { - "epoch": 0.437563879035652, - "flos": 17191002111960.0, - "grad_norm": 4.606235896468084, - "learning_rate": 2.4964139540690018e-06, - "loss": 0.9926, - "num_input_tokens_seen": 77373350, - "step": 3639 - }, - { - "epoch": 0.4376841219262911, - "flos": 16691143932000.0, - "grad_norm": 4.695492656888471, - "learning_rate": 2.495659321129758e-06, - "loss": 0.9526, - "num_input_tokens_seen": 77390815, - "step": 3640 - }, - { - "epoch": 0.4378043648169302, - "flos": 18107425030680.0, - "grad_norm": 3.4197513487700464, - "learning_rate": 2.494904612999389e-06, - "loss": 0.9836, - "num_input_tokens_seen": 77409245, - "step": 3641 - }, - { - "epoch": 0.4379246077075693, - "flos": 38571500757120.0, - "grad_norm": 0.7931933531229224, - "learning_rate": 2.4941498297923843e-06, - "loss": 0.8375, - "num_input_tokens_seen": 77469535, - "step": 3642 - }, - { - "epoch": 0.43804485059820836, - "flos": 14614059498000.0, - "grad_norm": 3.8716448132777708, - "learning_rate": 2.4933949716232424e-06, - "loss": 0.9359, - "num_input_tokens_seen": 77486780, - "step": 3643 - 
}, - { - "epoch": 0.43816509348884747, - "flos": 16977583931280.0, - "grad_norm": 3.329987627069438, - "learning_rate": 2.492640038606476e-06, - "loss": 0.954, - "num_input_tokens_seen": 77504865, - "step": 3644 - }, - { - "epoch": 0.4382853363794866, - "flos": 10371992406720.0, - "grad_norm": 4.909903751214621, - "learning_rate": 2.491885030856608e-06, - "loss": 1.0148, - "num_input_tokens_seen": 77522680, - "step": 3645 - }, - { - "epoch": 0.43840557927012563, - "flos": 12154910128320.0, - "grad_norm": 6.784096397326814, - "learning_rate": 2.4911299484881713e-06, - "loss": 1.0491, - "num_input_tokens_seen": 77539930, - "step": 3646 - }, - { - "epoch": 0.43852582216076474, - "flos": 13754218253640.0, - "grad_norm": 4.058690559179867, - "learning_rate": 2.490374791615712e-06, - "loss": 1.0425, - "num_input_tokens_seen": 77559675, - "step": 3647 - }, - { - "epoch": 0.43864606505140386, - "flos": 12808477335480.0, - "grad_norm": 6.491085101199616, - "learning_rate": 2.4896195603537867e-06, - "loss": 0.9995, - "num_input_tokens_seen": 77574005, - "step": 3648 - }, - { - "epoch": 0.4387663079420429, - "flos": 13936754695320.0, - "grad_norm": 3.5069821060894566, - "learning_rate": 2.488864254816964e-06, - "loss": 0.9659, - "num_input_tokens_seen": 77592415, - "step": 3649 - }, - { - "epoch": 0.438886550832682, - "flos": 13990454183040.0, - "grad_norm": 6.507612026098864, - "learning_rate": 2.4881088751198218e-06, - "loss": 0.899, - "num_input_tokens_seen": 77610295, - "step": 3650 - }, - { - "epoch": 0.43900679372332113, - "flos": 10265543939640.0, - "grad_norm": 7.519497514128869, - "learning_rate": 2.4873534213769517e-06, - "loss": 0.8504, - "num_input_tokens_seen": 77625245, - "step": 3651 - }, - { - "epoch": 0.4391270366139602, - "flos": 17106635516160.0, - "grad_norm": 4.129100264481431, - "learning_rate": 2.4865978937029547e-06, - "loss": 0.9428, - "num_input_tokens_seen": 77643945, - "step": 3652 - }, - { - "epoch": 0.4392472795045993, - "flos": 22484369403240.0, - "grad_norm": 3.784773390926662, - "learning_rate": 2.485842292212445e-06, - "loss": 0.8923, - "num_input_tokens_seen": 77664880, - "step": 3653 - }, - { - "epoch": 0.4393675223952384, - "flos": 10502607731160.0, - "grad_norm": 3.953149003871972, - "learning_rate": 2.485086617020045e-06, - "loss": 1.0196, - "num_input_tokens_seen": 77683095, - "step": 3654 - }, - { - "epoch": 0.43948776528587746, - "flos": 10473044439240.0, - "grad_norm": 4.1499655113165925, - "learning_rate": 2.4843308682403903e-06, - "loss": 1.0459, - "num_input_tokens_seen": 77699730, - "step": 3655 - }, - { - "epoch": 0.4396080081765166, - "flos": 9818158785000.0, - "grad_norm": 4.608028098696233, - "learning_rate": 2.4835750459881294e-06, - "loss": 1.0529, - "num_input_tokens_seen": 77716075, - "step": 3656 - }, - { - "epoch": 0.43972825106715563, - "flos": 12914987125680.0, - "grad_norm": 3.418261845692242, - "learning_rate": 2.4828191503779177e-06, - "loss": 1.0389, - "num_input_tokens_seen": 77733895, - "step": 3657 - }, - { - "epoch": 0.43984849395779474, - "flos": 11944987365480.0, - "grad_norm": 3.9378454874238917, - "learning_rate": 2.482063181524425e-06, - "loss": 1.1204, - "num_input_tokens_seen": 77749515, - "step": 3658 - }, - { - "epoch": 0.43996873684843385, - "flos": 13250925978960.0, - "grad_norm": 5.436228285531441, - "learning_rate": 2.4813071395423307e-06, - "loss": 1.0315, - "num_input_tokens_seen": 77766800, - "step": 3659 - }, - { - "epoch": 0.4400889797390729, - "flos": 16816976053080.0, - "grad_norm": 5.0586438975554895, - 
"learning_rate": 2.4805510245463263e-06, - "loss": 0.884, - "num_input_tokens_seen": 77786675, - "step": 3660 - }, - { - "epoch": 0.440209222629712, - "flos": 16455459910680.0, - "grad_norm": 9.021060462071604, - "learning_rate": 2.4797948366511137e-06, - "loss": 0.8105, - "num_input_tokens_seen": 77806105, - "step": 3661 - }, - { - "epoch": 0.4403294655203511, - "flos": 17658108197760.0, - "grad_norm": 18.015386998373693, - "learning_rate": 2.4790385759714055e-06, - "loss": 0.9775, - "num_input_tokens_seen": 77824890, - "step": 3662 - }, - { - "epoch": 0.4404497084109902, - "flos": 16035614385000.0, - "grad_norm": 3.6205705349060637, - "learning_rate": 2.478282242621926e-06, - "loss": 0.9349, - "num_input_tokens_seen": 77845070, - "step": 3663 - }, - { - "epoch": 0.4405699513016293, - "flos": 46514801819400.0, - "grad_norm": 0.919978048954189, - "learning_rate": 2.477525836717411e-06, - "loss": 0.8646, - "num_input_tokens_seen": 77912555, - "step": 3664 - }, - { - "epoch": 0.4406901941922684, - "flos": 25447884202200.0, - "grad_norm": 6.515060583317463, - "learning_rate": 2.476769358372606e-06, - "loss": 1.0166, - "num_input_tokens_seen": 77933925, - "step": 3665 - }, - { - "epoch": 0.44081043708290746, - "flos": 12784065185640.0, - "grad_norm": 4.592654973521367, - "learning_rate": 2.4760128077022683e-06, - "loss": 0.9654, - "num_input_tokens_seen": 77951780, - "step": 3666 - }, - { - "epoch": 0.44093067997354657, - "flos": 21489190954200.0, - "grad_norm": 5.78228321637188, - "learning_rate": 2.4752561848211672e-06, - "loss": 0.9116, - "num_input_tokens_seen": 77973900, - "step": 3667 - }, - { - "epoch": 0.4410509228641857, - "flos": 16531057300320.0, - "grad_norm": 4.294516398123309, - "learning_rate": 2.4744994898440797e-06, - "loss": 0.9317, - "num_input_tokens_seen": 77992410, - "step": 3668 - }, - { - "epoch": 0.44117116575482473, - "flos": 13833249738000.0, - "grad_norm": 9.3383712341676, - "learning_rate": 2.473742722885797e-06, - "loss": 1.0611, - "num_input_tokens_seen": 78011150, - "step": 3669 - }, - { - "epoch": 0.44129140864546385, - "flos": 19261126371840.0, - "grad_norm": 5.907975525058267, - "learning_rate": 2.4729858840611197e-06, - "loss": 0.8827, - "num_input_tokens_seen": 78029780, - "step": 3670 - }, - { - "epoch": 0.4414116515361029, - "flos": 18577321318440.0, - "grad_norm": 4.806612451029782, - "learning_rate": 2.4722289734848605e-06, - "loss": 0.9531, - "num_input_tokens_seen": 78049965, - "step": 3671 - }, - { - "epoch": 0.441531894426742, - "flos": 15561701432880.0, - "grad_norm": 5.234056332125803, - "learning_rate": 2.471471991271841e-06, - "loss": 1.0079, - "num_input_tokens_seen": 78066810, - "step": 3672 - }, - { - "epoch": 0.4416521373173811, - "flos": 16661825932560.0, - "grad_norm": 3.2785704798241437, - "learning_rate": 2.470714937536896e-06, - "loss": 1.0247, - "num_input_tokens_seen": 78085255, - "step": 3673 - }, - { - "epoch": 0.4417723802080202, - "flos": 14431737687240.0, - "grad_norm": 4.37595248072067, - "learning_rate": 2.469957812394868e-06, - "loss": 0.9389, - "num_input_tokens_seen": 78103785, - "step": 3674 - }, - { - "epoch": 0.4418926230986593, - "flos": 13387796261640.0, - "grad_norm": 7.2712328199000344, - "learning_rate": 2.4692006159606148e-06, - "loss": 0.984, - "num_input_tokens_seen": 78121035, - "step": 3675 - }, - { - "epoch": 0.4420128659892984, - "flos": 13807304510160.0, - "grad_norm": 7.2233306751442905, - "learning_rate": 2.468443348349e-06, - "loss": 1.0074, - "num_input_tokens_seen": 78138630, - "step": 3676 - }, - { - 
"epoch": 0.44213310887993745, - "flos": 12678781857840.0, - "grad_norm": 5.241563467072178, - "learning_rate": 2.467686009674902e-06, - "loss": 1.0519, - "num_input_tokens_seen": 78152800, - "step": 3677 - }, - { - "epoch": 0.44225335177057656, - "flos": 13620046188240.0, - "grad_norm": 4.665406102272459, - "learning_rate": 2.466928600053209e-06, - "loss": 1.0783, - "num_input_tokens_seen": 78167825, - "step": 3678 - }, - { - "epoch": 0.4423735946612157, - "flos": 16686115436160.0, - "grad_norm": 3.073857946683874, - "learning_rate": 2.466171119598818e-06, - "loss": 0.9354, - "num_input_tokens_seen": 78187515, - "step": 3679 - }, - { - "epoch": 0.44249383755185473, - "flos": 18996461628240.0, - "grad_norm": 4.408439362854705, - "learning_rate": 2.465413568426639e-06, - "loss": 0.9902, - "num_input_tokens_seen": 78208185, - "step": 3680 - }, - { - "epoch": 0.44261408044249384, - "flos": 16454141463600.0, - "grad_norm": 3.399492613014469, - "learning_rate": 2.464655946651591e-06, - "loss": 1.0524, - "num_input_tokens_seen": 78226910, - "step": 3681 - }, - { - "epoch": 0.44273432333313295, - "flos": 17399330473680.0, - "grad_norm": 5.171549161575361, - "learning_rate": 2.4638982543886065e-06, - "loss": 1.0285, - "num_input_tokens_seen": 78246670, - "step": 3682 - }, - { - "epoch": 0.442854566223772, - "flos": 12416079454080.0, - "grad_norm": 8.60808818082774, - "learning_rate": 2.4631404917526254e-06, - "loss": 1.0866, - "num_input_tokens_seen": 78263345, - "step": 3683 - }, - { - "epoch": 0.4429748091144111, - "flos": 17710489238400.0, - "grad_norm": 5.279693374111208, - "learning_rate": 2.4623826588586e-06, - "loss": 1.0128, - "num_input_tokens_seen": 78283335, - "step": 3684 - }, - { - "epoch": 0.4430950520050502, - "flos": 15352361239680.0, - "grad_norm": 2.8001381363172952, - "learning_rate": 2.461624755821492e-06, - "loss": 1.0571, - "num_input_tokens_seen": 78302535, - "step": 3685 - }, - { - "epoch": 0.4432152948956893, - "flos": 17478208650240.0, - "grad_norm": 5.6056526605827655, - "learning_rate": 2.4608667827562763e-06, - "loss": 1.0054, - "num_input_tokens_seen": 78321585, - "step": 3686 - }, - { - "epoch": 0.4433355377863284, - "flos": 15458625737400.0, - "grad_norm": 3.873327324928793, - "learning_rate": 2.460108739777936e-06, - "loss": 1.1182, - "num_input_tokens_seen": 78340440, - "step": 3687 - }, - { - "epoch": 0.44345578067696745, - "flos": 14252911294320.0, - "grad_norm": 3.3400688943210493, - "learning_rate": 2.4593506270014656e-06, - "loss": 0.989, - "num_input_tokens_seen": 78359130, - "step": 3688 - }, - { - "epoch": 0.44357602356760656, - "flos": 17188549187160.0, - "grad_norm": 3.4767303769158646, - "learning_rate": 2.45859244454187e-06, - "loss": 1.0471, - "num_input_tokens_seen": 78378640, - "step": 3689 - }, - { - "epoch": 0.44369626645824567, - "flos": 16137678249000.0, - "grad_norm": 3.2507870528085223, - "learning_rate": 2.4578341925141655e-06, - "loss": 0.8873, - "num_input_tokens_seen": 78397575, - "step": 3690 - }, - { - "epoch": 0.4438165093488847, - "flos": 27149470822440.0, - "grad_norm": 5.284753664279373, - "learning_rate": 2.457075871033378e-06, - "loss": 0.9403, - "num_input_tokens_seen": 78419170, - "step": 3691 - }, - { - "epoch": 0.44393675223952384, - "flos": 10974742312800.0, - "grad_norm": 4.1457333704464165, - "learning_rate": 2.4563174802145445e-06, - "loss": 1.1085, - "num_input_tokens_seen": 78436140, - "step": 3692 - }, - { - "epoch": 0.44405699513016295, - "flos": 46234187516520.0, - "grad_norm": 0.6598539167989903, - "learning_rate": 
2.455559020172712e-06, - "loss": 0.7284, - "num_input_tokens_seen": 78503215, - "step": 3693 - }, - { - "epoch": 0.444177238020802, - "flos": 17057719231800.0, - "grad_norm": 5.658094242458583, - "learning_rate": 2.4548004910229385e-06, - "loss": 1.11, - "num_input_tokens_seen": 78520510, - "step": 3694 - }, - { - "epoch": 0.4442974809114411, - "flos": 16033897337640.0, - "grad_norm": 5.375549110929836, - "learning_rate": 2.4540418928802913e-06, - "loss": 1.0895, - "num_input_tokens_seen": 78538965, - "step": 3695 - }, - { - "epoch": 0.4444177238020802, - "flos": 12521669397480.0, - "grad_norm": 7.952049077671202, - "learning_rate": 2.4532832258598506e-06, - "loss": 0.8808, - "num_input_tokens_seen": 78556515, - "step": 3696 - }, - { - "epoch": 0.4445379666927193, - "flos": 20388299915520.0, - "grad_norm": 3.99185949721222, - "learning_rate": 2.4525244900767047e-06, - "loss": 1.0349, - "num_input_tokens_seen": 78577050, - "step": 3697 - }, - { - "epoch": 0.4446582095833584, - "flos": 50483613382200.0, - "grad_norm": 0.8431870383816356, - "learning_rate": 2.4517656856459536e-06, - "loss": 0.8633, - "num_input_tokens_seen": 78642615, - "step": 3698 - }, - { - "epoch": 0.4447784524739975, - "flos": 18867256735560.0, - "grad_norm": 3.2304330291064685, - "learning_rate": 2.4510068126827073e-06, - "loss": 0.9114, - "num_input_tokens_seen": 78663335, - "step": 3699 - }, - { - "epoch": 0.44489869536463655, - "flos": 8195664972240.0, - "grad_norm": 6.02296281618981, - "learning_rate": 2.450247871302086e-06, - "loss": 1.0484, - "num_input_tokens_seen": 78680830, - "step": 3700 - }, - { - "epoch": 0.44501893825527566, - "flos": 14514387235680.0, - "grad_norm": 15.44578521421909, - "learning_rate": 2.44948886161922e-06, - "loss": 1.0567, - "num_input_tokens_seen": 78699565, - "step": 3701 - }, - { - "epoch": 0.4451391811459148, - "flos": 12942588077760.0, - "grad_norm": 7.273862308016049, - "learning_rate": 2.4487297837492524e-06, - "loss": 1.0758, - "num_input_tokens_seen": 78718450, - "step": 3702 - }, - { - "epoch": 0.44525942403655383, - "flos": 11971576486080.0, - "grad_norm": 6.910156526021051, - "learning_rate": 2.4479706378073323e-06, - "loss": 0.8327, - "num_input_tokens_seen": 78736710, - "step": 3703 - }, - { - "epoch": 0.44537966692719294, - "flos": 16534276764120.0, - "grad_norm": 3.245385488645327, - "learning_rate": 2.447211423908623e-06, - "loss": 1.0651, - "num_input_tokens_seen": 78756475, - "step": 3704 - }, - { - "epoch": 0.445499909817832, - "flos": 15431024785320.0, - "grad_norm": 3.673320599330239, - "learning_rate": 2.4464521421682966e-06, - "loss": 0.9665, - "num_input_tokens_seen": 78773785, - "step": 3705 - }, - { - "epoch": 0.4456201527084711, - "flos": 17058209816760.0, - "grad_norm": 3.0861544375883856, - "learning_rate": 2.4456927927015345e-06, - "loss": 1.105, - "num_input_tokens_seen": 78794545, - "step": 3706 - }, - { - "epoch": 0.4457403955991102, - "flos": 13334801989800.0, - "grad_norm": 9.55257643852011, - "learning_rate": 2.4449333756235307e-06, - "loss": 0.9768, - "num_input_tokens_seen": 78810980, - "step": 3707 - }, - { - "epoch": 0.4458606384897493, - "flos": 13622683082400.0, - "grad_norm": 5.006346870466205, - "learning_rate": 2.4441738910494876e-06, - "loss": 1.024, - "num_input_tokens_seen": 78825435, - "step": 3708 - }, - { - "epoch": 0.4459808813803884, - "flos": 15170499352320.0, - "grad_norm": 5.350818977774352, - "learning_rate": 2.4434143390946176e-06, - "loss": 1.0511, - "num_input_tokens_seen": 78843965, - "step": 3709 - }, - { - "epoch": 
0.4461011242710275, - "flos": 16555070849880.0, - "grad_norm": 4.368621972736235, - "learning_rate": 2.4426547198741457e-06, - "loss": 1.0838, - "num_input_tokens_seen": 78861890, - "step": 3710 - }, - { - "epoch": 0.44622136716166655, - "flos": 14330777639400.0, - "grad_norm": 3.7323424102771856, - "learning_rate": 2.441895033503305e-06, - "loss": 0.9714, - "num_input_tokens_seen": 78879530, - "step": 3711 - }, - { - "epoch": 0.44634161005230566, - "flos": 15113549739240.0, - "grad_norm": 7.1374098469774365, - "learning_rate": 2.4411352800973375e-06, - "loss": 1.0586, - "num_input_tokens_seen": 78897685, - "step": 3712 - }, - { - "epoch": 0.44646185294294477, - "flos": 16297335618840.0, - "grad_norm": 4.4787775388143025, - "learning_rate": 2.4403754597715005e-06, - "loss": 0.9596, - "num_input_tokens_seen": 78916850, - "step": 3713 - }, - { - "epoch": 0.4465820958335838, - "flos": 16087750133160.0, - "grad_norm": 5.603985846153754, - "learning_rate": 2.4396155726410553e-06, - "loss": 1.151, - "num_input_tokens_seen": 78935180, - "step": 3714 - }, - { - "epoch": 0.44670233872422294, - "flos": 16112560883280.0, - "grad_norm": 8.421817022063884, - "learning_rate": 2.438855618821278e-06, - "loss": 1.1443, - "num_input_tokens_seen": 78950700, - "step": 3715 - }, - { - "epoch": 0.44682258161486205, - "flos": 16765974782640.0, - "grad_norm": 9.87874445362266, - "learning_rate": 2.4380955984274517e-06, - "loss": 0.8967, - "num_input_tokens_seen": 78969075, - "step": 3716 - }, - { - "epoch": 0.4469428245055011, - "flos": 18863975948640.0, - "grad_norm": 5.113825828047859, - "learning_rate": 2.4373355115748716e-06, - "loss": 0.9823, - "num_input_tokens_seen": 78989625, - "step": 3717 - }, - { - "epoch": 0.4470630673961402, - "flos": 15273421740000.0, - "grad_norm": 7.393083546543477, - "learning_rate": 2.436575358378842e-06, - "loss": 0.9535, - "num_input_tokens_seen": 79008835, - "step": 3718 - }, - { - "epoch": 0.44718331028677927, - "flos": 11441633767680.0, - "grad_norm": 6.955743395239644, - "learning_rate": 2.4358151389546782e-06, - "loss": 1.0537, - "num_input_tokens_seen": 79025240, - "step": 3719 - }, - { - "epoch": 0.4473035531774184, - "flos": 13962853230960.0, - "grad_norm": 4.277765568527259, - "learning_rate": 2.4350548534177035e-06, - "loss": 0.9746, - "num_input_tokens_seen": 79041790, - "step": 3720 - }, - { - "epoch": 0.4474237960680575, - "flos": 29591658801360.0, - "grad_norm": 6.081404637157426, - "learning_rate": 2.434294501883254e-06, - "loss": 0.9021, - "num_input_tokens_seen": 79064605, - "step": 3721 - }, - { - "epoch": 0.44754403895869654, - "flos": 16269795989880.0, - "grad_norm": 2.41778534176678, - "learning_rate": 2.433534084466674e-06, - "loss": 0.8866, - "num_input_tokens_seen": 79083545, - "step": 3722 - }, - { - "epoch": 0.44766428184933565, - "flos": 18238561601640.0, - "grad_norm": 3.055460098008557, - "learning_rate": 2.4327736012833178e-06, - "loss": 0.9439, - "num_input_tokens_seen": 79104985, - "step": 3723 - }, - { - "epoch": 0.44778452473997477, - "flos": 14514816497520.0, - "grad_norm": 7.66826648882285, - "learning_rate": 2.4320130524485506e-06, - "loss": 0.9793, - "num_input_tokens_seen": 79123500, - "step": 3724 - }, - { - "epoch": 0.4479047676306138, - "flos": 15611077640640.0, - "grad_norm": 3.0359050754890826, - "learning_rate": 2.431252438077746e-06, - "loss": 1.0257, - "num_input_tokens_seen": 79142720, - "step": 3725 - }, - { - "epoch": 0.44802501052125293, - "flos": 15246158065080.0, - "grad_norm": 3.459074530780895, - "learning_rate": 
2.4304917582862906e-06, - "loss": 0.9854, - "num_input_tokens_seen": 79161620, - "step": 3726 - }, - { - "epoch": 0.44814525341189204, - "flos": 15720714909960.0, - "grad_norm": 3.398667040559856, - "learning_rate": 2.4297310131895774e-06, - "loss": 1.1081, - "num_input_tokens_seen": 79179885, - "step": 3727 - }, - { - "epoch": 0.4482654963025311, - "flos": 11787322997040.0, - "grad_norm": 2.8867719384688924, - "learning_rate": 2.4289702029030113e-06, - "loss": 0.9747, - "num_input_tokens_seen": 79197075, - "step": 3728 - }, - { - "epoch": 0.4483857391931702, - "flos": 13359060831840.0, - "grad_norm": 6.481876342290714, - "learning_rate": 2.4282093275420057e-06, - "loss": 1.0561, - "num_input_tokens_seen": 79215825, - "step": 3729 - }, - { - "epoch": 0.4485059820838093, - "flos": 14459461285560.0, - "grad_norm": 3.2803687955959506, - "learning_rate": 2.4274483872219863e-06, - "loss": 0.9313, - "num_input_tokens_seen": 79232905, - "step": 3730 - }, - { - "epoch": 0.4486262249744484, - "flos": 14226475481520.0, - "grad_norm": 2.8383498007415557, - "learning_rate": 2.426687382058386e-06, - "loss": 1.1568, - "num_input_tokens_seen": 79250905, - "step": 3731 - }, - { - "epoch": 0.4487464678650875, - "flos": 46247009596680.0, - "grad_norm": 1.1081373268470258, - "learning_rate": 2.425926312166649e-06, - "loss": 0.8478, - "num_input_tokens_seen": 79303500, - "step": 3732 - }, - { - "epoch": 0.4488667107557266, - "flos": 14744368206840.0, - "grad_norm": 8.468328373960162, - "learning_rate": 2.42516517766223e-06, - "loss": 0.9455, - "num_input_tokens_seen": 79321300, - "step": 3733 - }, - { - "epoch": 0.44898695364636565, - "flos": 17032203265800.0, - "grad_norm": 3.8889884724052575, - "learning_rate": 2.4244039786605907e-06, - "loss": 0.9094, - "num_input_tokens_seen": 79342025, - "step": 3734 - }, - { - "epoch": 0.44910719653700476, - "flos": 13203450787920.0, - "grad_norm": 3.9530562443841664, - "learning_rate": 2.4236427152772055e-06, - "loss": 1.0501, - "num_input_tokens_seen": 79360150, - "step": 3735 - }, - { - "epoch": 0.4492274394276438, - "flos": 40810854341640.0, - "grad_norm": 0.9056559292689477, - "learning_rate": 2.422881387627557e-06, - "loss": 0.8382, - "num_input_tokens_seen": 79412320, - "step": 3736 - }, - { - "epoch": 0.4493476823182829, - "flos": 16530903992520.0, - "grad_norm": 2.9358960906514624, - "learning_rate": 2.422119995827139e-06, - "loss": 0.9994, - "num_input_tokens_seen": 79432165, - "step": 3737 - }, - { - "epoch": 0.44946792520892204, - "flos": 11184542429400.0, - "grad_norm": 4.09685439418147, - "learning_rate": 2.4213585399914528e-06, - "loss": 0.9738, - "num_input_tokens_seen": 79449090, - "step": 3738 - }, - { - "epoch": 0.4495881680995611, - "flos": 13911943945200.0, - "grad_norm": 2.9102412883443662, - "learning_rate": 2.4205970202360113e-06, - "loss": 1.076, - "num_input_tokens_seen": 79468375, - "step": 3739 - }, - { - "epoch": 0.4497084109902002, - "flos": 18526871956080.0, - "grad_norm": 5.172413593379926, - "learning_rate": 2.4198354366763354e-06, - "loss": 1.01, - "num_input_tokens_seen": 79486735, - "step": 3740 - }, - { - "epoch": 0.4498286538808393, - "flos": 10450471983000.0, - "grad_norm": 13.701263057652751, - "learning_rate": 2.4190737894279587e-06, - "loss": 1.0093, - "num_input_tokens_seen": 79503825, - "step": 3741 - }, - { - "epoch": 0.44994889677147837, - "flos": 10922238625920.0, - "grad_norm": 7.225405576733462, - "learning_rate": 2.4183120786064203e-06, - "loss": 1.0256, - "num_input_tokens_seen": 79520420, - "step": 3742 - }, - { - 
"epoch": 0.4500691396621175, - "flos": 15484478980560.0, - "grad_norm": 5.18723342394406, - "learning_rate": 2.417550304327273e-06, - "loss": 1.0821, - "num_input_tokens_seen": 79538180, - "step": 3743 - }, - { - "epoch": 0.4501893825527566, - "flos": 23091166635240.0, - "grad_norm": 2.739771808672874, - "learning_rate": 2.4167884667060763e-06, - "loss": 0.9871, - "num_input_tokens_seen": 79560610, - "step": 3744 - }, - { - "epoch": 0.45030962544339564, - "flos": 11709058051680.0, - "grad_norm": 3.956307679248526, - "learning_rate": 2.4160265658584e-06, - "loss": 1.108, - "num_input_tokens_seen": 79575220, - "step": 3745 - }, - { - "epoch": 0.45042986833403476, - "flos": 13885262839920.0, - "grad_norm": 3.4553553770278356, - "learning_rate": 2.4152646018998253e-06, - "loss": 0.8986, - "num_input_tokens_seen": 79593890, - "step": 3746 - }, - { - "epoch": 0.45055011122467387, - "flos": 16429024097880.0, - "grad_norm": 2.825488131051153, - "learning_rate": 2.4145025749459403e-06, - "loss": 0.9403, - "num_input_tokens_seen": 79614635, - "step": 3747 - }, - { - "epoch": 0.4506703541153129, - "flos": 14144684456760.0, - "grad_norm": 3.066735277928726, - "learning_rate": 2.413740485112344e-06, - "loss": 0.9384, - "num_input_tokens_seen": 79632695, - "step": 3748 - }, - { - "epoch": 0.45079059700595203, - "flos": 13832360552760.0, - "grad_norm": 4.423730252090618, - "learning_rate": 2.412978332514646e-06, - "loss": 1.0454, - "num_input_tokens_seen": 79651195, - "step": 3749 - }, - { - "epoch": 0.4509108398965911, - "flos": 19680174696960.0, - "grad_norm": 3.649454305956194, - "learning_rate": 2.4122161172684623e-06, - "loss": 0.9368, - "num_input_tokens_seen": 79671710, - "step": 3750 - }, - { - "epoch": 0.4510310827872302, - "flos": 14907674302320.0, - "grad_norm": 4.768208588229875, - "learning_rate": 2.4114538394894216e-06, - "loss": 1.0482, - "num_input_tokens_seen": 79689070, - "step": 3751 - }, - { - "epoch": 0.4511513256778693, - "flos": 11473159399440.0, - "grad_norm": 5.118676913791133, - "learning_rate": 2.410691499293161e-06, - "loss": 1.0583, - "num_input_tokens_seen": 79706945, - "step": 3752 - }, - { - "epoch": 0.45127156856850836, - "flos": 17919001569480.0, - "grad_norm": 2.5771752660004656, - "learning_rate": 2.409929096795326e-06, - "loss": 0.9616, - "num_input_tokens_seen": 79727035, - "step": 3753 - }, - { - "epoch": 0.4513918114591475, - "flos": 14488625977200.0, - "grad_norm": 5.770601931269382, - "learning_rate": 2.409166632111573e-06, - "loss": 1.0128, - "num_input_tokens_seen": 79744890, - "step": 3754 - }, - { - "epoch": 0.4515120543497866, - "flos": 18969872507640.0, - "grad_norm": 3.9784060778211097, - "learning_rate": 2.4084041053575674e-06, - "loss": 1.011, - "num_input_tokens_seen": 79764030, - "step": 3755 - }, - { - "epoch": 0.45163229724042564, - "flos": 14619854532840.0, - "grad_norm": 3.663313604087104, - "learning_rate": 2.4076415166489834e-06, - "loss": 0.9535, - "num_input_tokens_seen": 79783160, - "step": 3756 - }, - { - "epoch": 0.45175254013106475, - "flos": 15405938081160.0, - "grad_norm": 2.458113536599454, - "learning_rate": 2.406878866101506e-06, - "loss": 1.0274, - "num_input_tokens_seen": 79801845, - "step": 3757 - }, - { - "epoch": 0.45187278302170386, - "flos": 13386232522080.0, - "grad_norm": 3.3276260790021577, - "learning_rate": 2.4061161538308273e-06, - "loss": 1.0071, - "num_input_tokens_seen": 79818410, - "step": 3758 - }, - { - "epoch": 0.4519930259123429, - "flos": 13172630372040.0, - "grad_norm": 2.9238848785824527, - "learning_rate": 
2.4053533799526523e-06, - "loss": 1.1211, - "num_input_tokens_seen": 79833850, - "step": 3759 - }, - { - "epoch": 0.452113268802982, - "flos": 17923846095960.0, - "grad_norm": 2.7944801349353585, - "learning_rate": 2.404590544582691e-06, - "loss": 1.0809, - "num_input_tokens_seen": 79851805, - "step": 3760 - }, - { - "epoch": 0.45223351169362114, - "flos": 28118581397400.0, - "grad_norm": 7.705801663192541, - "learning_rate": 2.403827647836666e-06, - "loss": 1.0363, - "num_input_tokens_seen": 79872080, - "step": 3761 - }, - { - "epoch": 0.4523537545842602, - "flos": 15329512829400.0, - "grad_norm": 4.101584588854211, - "learning_rate": 2.4030646898303075e-06, - "loss": 0.9154, - "num_input_tokens_seen": 79893290, - "step": 3762 - }, - { - "epoch": 0.4524739974748993, - "flos": 20257500621720.0, - "grad_norm": 4.241808907772555, - "learning_rate": 2.4023016706793566e-06, - "loss": 1.0435, - "num_input_tokens_seen": 79912805, - "step": 3763 - }, - { - "epoch": 0.4525942403655384, - "flos": 44061330386400.0, - "grad_norm": 0.8107644765680667, - "learning_rate": 2.401538590499561e-06, - "loss": 0.832, - "num_input_tokens_seen": 79972980, - "step": 3764 - }, - { - "epoch": 0.45271448325617747, - "flos": 19604056060800.0, - "grad_norm": 3.497930732473084, - "learning_rate": 2.400775449406682e-06, - "loss": 0.9253, - "num_input_tokens_seen": 79995895, - "step": 3765 - }, - { - "epoch": 0.4528347261468166, - "flos": 15954375268320.0, - "grad_norm": 3.085721783401826, - "learning_rate": 2.400012247516485e-06, - "loss": 0.9586, - "num_input_tokens_seen": 80016180, - "step": 3766 - }, - { - "epoch": 0.45295496903745563, - "flos": 14985234031800.0, - "grad_norm": 2.8648650614598825, - "learning_rate": 2.3992489849447484e-06, - "loss": 1.1203, - "num_input_tokens_seen": 80034355, - "step": 3767 - }, - { - "epoch": 0.45307521192809475, - "flos": 16507043750760.0, - "grad_norm": 3.5937932550895506, - "learning_rate": 2.3984856618072584e-06, - "loss": 1.0197, - "num_input_tokens_seen": 80054110, - "step": 3768 - }, - { - "epoch": 0.45319545481873386, - "flos": 11001300771840.0, - "grad_norm": 5.871444275443536, - "learning_rate": 2.3977222782198098e-06, - "loss": 0.9604, - "num_input_tokens_seen": 80072465, - "step": 3769 - }, - { - "epoch": 0.4533156977093729, - "flos": 15589792969920.0, - "grad_norm": 4.130491589345428, - "learning_rate": 2.3969588342982077e-06, - "loss": 0.9769, - "num_input_tokens_seen": 80091560, - "step": 3770 - }, - { - "epoch": 0.453435940600012, - "flos": 17240746258440.0, - "grad_norm": 10.699432671738736, - "learning_rate": 2.396195330158267e-06, - "loss": 0.9517, - "num_input_tokens_seen": 80111170, - "step": 3771 - }, - { - "epoch": 0.45355618349065113, - "flos": 16664616134520.0, - "grad_norm": 4.6037550982473725, - "learning_rate": 2.3954317659158094e-06, - "loss": 1.0161, - "num_input_tokens_seen": 80131225, - "step": 3772 - }, - { - "epoch": 0.4536764263812902, - "flos": 47906548122000.0, - "grad_norm": 0.9244201247296369, - "learning_rate": 2.394668141686667e-06, - "loss": 0.8528, - "num_input_tokens_seen": 80192910, - "step": 3773 - }, - { - "epoch": 0.4537966692719293, - "flos": 30534517532880.0, - "grad_norm": 3.9733359206487777, - "learning_rate": 2.3939044575866813e-06, - "loss": 0.9085, - "num_input_tokens_seen": 80215380, - "step": 3774 - }, - { - "epoch": 0.4539169121625684, - "flos": 25366645085520.0, - "grad_norm": 3.121358657308338, - "learning_rate": 2.3931407137317024e-06, - "loss": 0.9802, - "num_input_tokens_seen": 80235255, - "step": 3775 - }, - { - 
"epoch": 0.45403715505320746, - "flos": 13124633934480.0, - "grad_norm": 4.213973668809631, - "learning_rate": 2.3923769102375907e-06, - "loss": 1.0761, - "num_input_tokens_seen": 80253840, - "step": 3776 - }, - { - "epoch": 0.4541573979438466, - "flos": 17818440121920.0, - "grad_norm": 3.0833281095993192, - "learning_rate": 2.391613047220213e-06, - "loss": 1.0144, - "num_input_tokens_seen": 80273460, - "step": 3777 - }, - { - "epoch": 0.4542776408344857, - "flos": 12993620009760.0, - "grad_norm": 6.107542733953673, - "learning_rate": 2.390849124795447e-06, - "loss": 1.0202, - "num_input_tokens_seen": 80289180, - "step": 3778 - }, - { - "epoch": 0.45439788372512474, - "flos": 14695635891840.0, - "grad_norm": 4.838127958954455, - "learning_rate": 2.3900851430791804e-06, - "loss": 1.0645, - "num_input_tokens_seen": 80306920, - "step": 3779 - }, - { - "epoch": 0.45451812661576385, - "flos": 15851851480920.0, - "grad_norm": 3.735960938855988, - "learning_rate": 2.389321102187307e-06, - "loss": 1.0673, - "num_input_tokens_seen": 80325420, - "step": 3780 - }, - { - "epoch": 0.4546383695064029, - "flos": 15459514922640.0, - "grad_norm": 3.71827015064909, - "learning_rate": 2.3885570022357326e-06, - "loss": 1.0482, - "num_input_tokens_seen": 80344270, - "step": 3781 - }, - { - "epoch": 0.454758612397042, - "flos": 45994977415800.0, - "grad_norm": 0.8347001945761361, - "learning_rate": 2.38779284334037e-06, - "loss": 0.8582, - "num_input_tokens_seen": 80408965, - "step": 3782 - }, - { - "epoch": 0.4548788552876811, - "flos": 19440964596240.0, - "grad_norm": 3.298892303438762, - "learning_rate": 2.387028625617141e-06, - "loss": 1.0032, - "num_input_tokens_seen": 80427900, - "step": 3783 - }, - { - "epoch": 0.4549990981783202, - "flos": 16245537147840.0, - "grad_norm": 9.331735701290006, - "learning_rate": 2.3862643491819766e-06, - "loss": 1.0796, - "num_input_tokens_seen": 80446185, - "step": 3784 - }, - { - "epoch": 0.4551193410689593, - "flos": 16534062133200.0, - "grad_norm": 4.0453059273405225, - "learning_rate": 2.3855000141508186e-06, - "loss": 1.0582, - "num_input_tokens_seen": 80466060, - "step": 3785 - }, - { - "epoch": 0.4552395839595984, - "flos": 14750960442240.0, - "grad_norm": 6.24977263990756, - "learning_rate": 2.3847356206396143e-06, - "loss": 1.06, - "num_input_tokens_seen": 80483090, - "step": 3786 - }, - { - "epoch": 0.45535982685023746, - "flos": 16533142286400.0, - "grad_norm": 4.963153686517395, - "learning_rate": 2.3839711687643227e-06, - "loss": 1.0161, - "num_input_tokens_seen": 80504035, - "step": 3787 - }, - { - "epoch": 0.45548006974087657, - "flos": 13937797188360.0, - "grad_norm": 3.9486582251664686, - "learning_rate": 2.38320665864091e-06, - "loss": 0.9529, - "num_input_tokens_seen": 80523105, - "step": 3788 - }, - { - "epoch": 0.4556003126315157, - "flos": 14226506143080.0, - "grad_norm": 4.053342151339459, - "learning_rate": 2.3824420903853516e-06, - "loss": 1.0501, - "num_input_tokens_seen": 80541290, - "step": 3789 - }, - { - "epoch": 0.45572055552215474, - "flos": 16318344335520.0, - "grad_norm": 4.501623708460453, - "learning_rate": 2.3816774641136324e-06, - "loss": 1.0318, - "num_input_tokens_seen": 80558265, - "step": 3790 - }, - { - "epoch": 0.45584079841279385, - "flos": 23614057194840.0, - "grad_norm": 3.6406160935714555, - "learning_rate": 2.380912779941745e-06, - "loss": 0.943, - "num_input_tokens_seen": 80581105, - "step": 3791 - }, - { - "epoch": 0.45596104130343296, - "flos": 19418576109360.0, - "grad_norm": 3.0234229903156358, - "learning_rate": 
2.3801480379856918e-06, - "loss": 1.0356, - "num_input_tokens_seen": 80602535, - "step": 3792 - }, - { - "epoch": 0.456081284194072, - "flos": 15327489166440.0, - "grad_norm": 4.879106601483278, - "learning_rate": 2.379383238361484e-06, - "loss": 1.0642, - "num_input_tokens_seen": 80621615, - "step": 3793 - }, - { - "epoch": 0.4562015270847111, - "flos": 25632689599320.0, - "grad_norm": 4.363710059641812, - "learning_rate": 2.3786183811851407e-06, - "loss": 1.0188, - "num_input_tokens_seen": 80642040, - "step": 3794 - }, - { - "epoch": 0.45632176997535023, - "flos": 9585264965640.0, - "grad_norm": 5.65172197550907, - "learning_rate": 2.3778534665726892e-06, - "loss": 1.0205, - "num_input_tokens_seen": 80658590, - "step": 3795 - }, - { - "epoch": 0.4564420128659893, - "flos": 23272997861040.0, - "grad_norm": 3.241695210413718, - "learning_rate": 2.377088494640168e-06, - "loss": 0.9507, - "num_input_tokens_seen": 80680060, - "step": 3796 - }, - { - "epoch": 0.4565622557566284, - "flos": 14463815227080.0, - "grad_norm": 6.30678427997197, - "learning_rate": 2.3763234655036216e-06, - "loss": 1.0082, - "num_input_tokens_seen": 80698980, - "step": 3797 - }, - { - "epoch": 0.45668249864726745, - "flos": 18054584066640.0, - "grad_norm": 3.9656787913757197, - "learning_rate": 2.3755583792791046e-06, - "loss": 1.0878, - "num_input_tokens_seen": 80718570, - "step": 3798 - }, - { - "epoch": 0.45680274153790656, - "flos": 11001055479360.0, - "grad_norm": 3.221669326862698, - "learning_rate": 2.3747932360826803e-06, - "loss": 0.9757, - "num_input_tokens_seen": 80735220, - "step": 3799 - }, - { - "epoch": 0.4569229844285457, - "flos": 14043387131760.0, - "grad_norm": 4.734853185293003, - "learning_rate": 2.3740280360304205e-06, - "loss": 1.0389, - "num_input_tokens_seen": 80752665, - "step": 3800 - }, - { - "epoch": 0.45704322731918473, - "flos": 17133899191080.0, - "grad_norm": 3.950104897953216, - "learning_rate": 2.3732627792384038e-06, - "loss": 0.9123, - "num_input_tokens_seen": 80773455, - "step": 3801 - }, - { - "epoch": 0.45716347020982384, - "flos": 22325509233960.0, - "grad_norm": 14.249163151311564, - "learning_rate": 2.3724974658227207e-06, - "loss": 0.9759, - "num_input_tokens_seen": 80793965, - "step": 3802 - }, - { - "epoch": 0.45728371310046295, - "flos": 18864006610200.0, - "grad_norm": 4.442683241826614, - "learning_rate": 2.3717320958994687e-06, - "loss": 0.9332, - "num_input_tokens_seen": 80811245, - "step": 3803 - }, - { - "epoch": 0.457403955991102, - "flos": 12703776577320.0, - "grad_norm": 3.8675477755948653, - "learning_rate": 2.3709666695847534e-06, - "loss": 0.9292, - "num_input_tokens_seen": 80829145, - "step": 3804 - }, - { - "epoch": 0.4575241988817411, - "flos": 30168862079880.0, - "grad_norm": 8.310256540536594, - "learning_rate": 2.370201186994689e-06, - "loss": 0.9345, - "num_input_tokens_seen": 80852550, - "step": 3805 - }, - { - "epoch": 0.45764444177238023, - "flos": 21463215064800.0, - "grad_norm": 4.007036695441318, - "learning_rate": 2.369435648245399e-06, - "loss": 0.917, - "num_input_tokens_seen": 80872485, - "step": 3806 - }, - { - "epoch": 0.4577646846630193, - "flos": 17110100272440.0, - "grad_norm": 3.5255203350012496, - "learning_rate": 2.368670053453015e-06, - "loss": 1.0808, - "num_input_tokens_seen": 80893895, - "step": 3807 - }, - { - "epoch": 0.4578849275536584, - "flos": 12336649369440.0, - "grad_norm": 3.669737238195659, - "learning_rate": 2.3679044027336757e-06, - "loss": 0.9553, - "num_input_tokens_seen": 80909505, - "step": 3808 - }, - { - "epoch": 
0.4580051704442975, - "flos": 9527150213280.0, - "grad_norm": 6.190997435734211, - "learning_rate": 2.3671386962035326e-06, - "loss": 0.9024, - "num_input_tokens_seen": 80926695, - "step": 3809 - }, - { - "epoch": 0.45812541333493656, - "flos": 12781366968360.0, - "grad_norm": 3.9080392630643863, - "learning_rate": 2.3663729339787405e-06, - "loss": 0.9074, - "num_input_tokens_seen": 80943350, - "step": 3810 - }, - { - "epoch": 0.45824565622557567, - "flos": 14350774524600.0, - "grad_norm": 4.864303003548108, - "learning_rate": 2.365607116175466e-06, - "loss": 0.9404, - "num_input_tokens_seen": 80958495, - "step": 3811 - }, - { - "epoch": 0.4583658991162148, - "flos": 14095001633400.0, - "grad_norm": 3.59882056731202, - "learning_rate": 2.3648412429098825e-06, - "loss": 0.8744, - "num_input_tokens_seen": 80976565, - "step": 3812 - }, - { - "epoch": 0.45848614200685384, - "flos": 14931963805920.0, - "grad_norm": 4.9261181176939575, - "learning_rate": 2.364075314298172e-06, - "loss": 1.0336, - "num_input_tokens_seen": 80993740, - "step": 3813 - }, - { - "epoch": 0.45860638489749295, - "flos": 14960637912600.0, - "grad_norm": 4.756722962853409, - "learning_rate": 2.3633093304565267e-06, - "loss": 0.9216, - "num_input_tokens_seen": 81012515, - "step": 3814 - }, - { - "epoch": 0.458726627788132, - "flos": 19103523326520.0, - "grad_norm": 4.953593042444178, - "learning_rate": 2.3625432915011443e-06, - "loss": 0.849, - "num_input_tokens_seen": 81034145, - "step": 3815 - }, - { - "epoch": 0.4588468706787711, - "flos": 17137302624240.0, - "grad_norm": 9.938161000274789, - "learning_rate": 2.3617771975482334e-06, - "loss": 0.8718, - "num_input_tokens_seen": 81052695, - "step": 3816 - }, - { - "epoch": 0.4589671135694102, - "flos": 12674489239440.0, - "grad_norm": 9.123936003223497, - "learning_rate": 2.3610110487140083e-06, - "loss": 0.9699, - "num_input_tokens_seen": 81070850, - "step": 3817 - }, - { - "epoch": 0.4590873564600493, - "flos": 18235740738120.0, - "grad_norm": 4.078149554299284, - "learning_rate": 2.360244845114695e-06, - "loss": 1.0379, - "num_input_tokens_seen": 81090190, - "step": 3818 - }, - { - "epoch": 0.4592075993506884, - "flos": 13124695257600.0, - "grad_norm": 3.6140122782462227, - "learning_rate": 2.3594785868665245e-06, - "loss": 0.9167, - "num_input_tokens_seen": 81106215, - "step": 3819 - }, - { - "epoch": 0.4593278422413275, - "flos": 14645677114440.0, - "grad_norm": 6.702935205008984, - "learning_rate": 2.3587122740857386e-06, - "loss": 1.0398, - "num_input_tokens_seen": 81123035, - "step": 3820 - }, - { - "epoch": 0.45944808513196655, - "flos": 15168138412200.0, - "grad_norm": 2.2603532164028772, - "learning_rate": 2.357945906888586e-06, - "loss": 1.009, - "num_input_tokens_seen": 81142195, - "step": 3821 - }, - { - "epoch": 0.45956832802260567, - "flos": 15217422635280.0, - "grad_norm": 10.4936522415088, - "learning_rate": 2.357179485391324e-06, - "loss": 1.0126, - "num_input_tokens_seen": 81159770, - "step": 3822 - }, - { - "epoch": 0.4596885709132448, - "flos": 15904508475600.0, - "grad_norm": 6.083702842603311, - "learning_rate": 2.3564130097102173e-06, - "loss": 1.0773, - "num_input_tokens_seen": 81179145, - "step": 3823 - }, - { - "epoch": 0.45980881380388383, - "flos": 20646679039320.0, - "grad_norm": 2.9846924617942965, - "learning_rate": 2.355646479961541e-06, - "loss": 0.9683, - "num_input_tokens_seen": 81198175, - "step": 3824 - }, - { - "epoch": 0.45992905669452294, - "flos": 23819380723680.0, - "grad_norm": 4.01680824272983, - "learning_rate": 
2.354879896261576e-06, - "loss": 0.9438, - "num_input_tokens_seen": 81218105, - "step": 3825 - }, - { - "epoch": 0.46004929958516205, - "flos": 25919436214200.0, - "grad_norm": 3.78308767005682, - "learning_rate": 2.3541132587266133e-06, - "loss": 0.8038, - "num_input_tokens_seen": 81240545, - "step": 3826 - }, - { - "epoch": 0.4601695424758011, - "flos": 12207505799880.0, - "grad_norm": 3.0502816742452374, - "learning_rate": 2.3533465674729515e-06, - "loss": 0.925, - "num_input_tokens_seen": 81257495, - "step": 3827 - }, - { - "epoch": 0.4602897853664402, - "flos": 11237260747200.0, - "grad_norm": 4.295904986391064, - "learning_rate": 2.352579822616895e-06, - "loss": 0.9576, - "num_input_tokens_seen": 81274650, - "step": 3828 - }, - { - "epoch": 0.4604100282570793, - "flos": 18082307664960.0, - "grad_norm": 3.0300765764990563, - "learning_rate": 2.351813024274761e-06, - "loss": 1.0067, - "num_input_tokens_seen": 81295725, - "step": 3829 - }, - { - "epoch": 0.4605302711477184, - "flos": 19675391493600.0, - "grad_norm": 3.4105378207290906, - "learning_rate": 2.3510461725628693e-06, - "loss": 0.9609, - "num_input_tokens_seen": 81315910, - "step": 3830 - }, - { - "epoch": 0.4606505140383575, - "flos": 16951301426280.0, - "grad_norm": 3.6042822376457146, - "learning_rate": 2.350279267597554e-06, - "loss": 0.9322, - "num_input_tokens_seen": 81336270, - "step": 3831 - }, - { - "epoch": 0.46077075692899655, - "flos": 11394311884440.0, - "grad_norm": 4.781676429232425, - "learning_rate": 2.3495123094951515e-06, - "loss": 1.0535, - "num_input_tokens_seen": 81354335, - "step": 3832 - }, - { - "epoch": 0.46089099981963566, - "flos": 34888797464520.0, - "grad_norm": 3.4391228746368725, - "learning_rate": 2.34874529837201e-06, - "loss": 0.9765, - "num_input_tokens_seen": 81377600, - "step": 3833 - }, - { - "epoch": 0.46101124271027477, - "flos": 13544816737320.0, - "grad_norm": 3.817884676175242, - "learning_rate": 2.347978234344483e-06, - "loss": 1.0282, - "num_input_tokens_seen": 81393525, - "step": 3834 - }, - { - "epoch": 0.4611314856009138, - "flos": 28097051434200.0, - "grad_norm": 10.255310915693345, - "learning_rate": 2.347211117528935e-06, - "loss": 0.9271, - "num_input_tokens_seen": 81415545, - "step": 3835 - }, - { - "epoch": 0.46125172849155294, - "flos": 14774422083720.0, - "grad_norm": 4.337502576843727, - "learning_rate": 2.3464439480417374e-06, - "loss": 0.9427, - "num_input_tokens_seen": 81434785, - "step": 3836 - }, - { - "epoch": 0.46137197138219205, - "flos": 12704972378160.0, - "grad_norm": 4.224584317496769, - "learning_rate": 2.3456767259992676e-06, - "loss": 0.9763, - "num_input_tokens_seen": 81452150, - "step": 3837 - }, - { - "epoch": 0.4614922142728311, - "flos": 11918643537360.0, - "grad_norm": 4.026427230368764, - "learning_rate": 2.3449094515179135e-06, - "loss": 1.093, - "num_input_tokens_seen": 81469330, - "step": 3838 - }, - { - "epoch": 0.4616124571634702, - "flos": 18946134912120.0, - "grad_norm": 2.674554473995416, - "learning_rate": 2.34414212471407e-06, - "loss": 1.0463, - "num_input_tokens_seen": 81488845, - "step": 3839 - }, - { - "epoch": 0.4617327000541093, - "flos": 14436674198400.0, - "grad_norm": 4.571714528540551, - "learning_rate": 2.3433747457041394e-06, - "loss": 0.9644, - "num_input_tokens_seen": 81507270, - "step": 3840 - }, - { - "epoch": 0.4618529429447484, - "flos": 21070510567800.0, - "grad_norm": 3.382795021064832, - "learning_rate": 2.342607314604533e-06, - "loss": 1.0662, - "num_input_tokens_seen": 81526740, - "step": 3841 - }, - { - "epoch": 
0.4619731858353875, - "flos": 14038389297480.0, - "grad_norm": 2.872123440196312, - "learning_rate": 2.3418398315316694e-06, - "loss": 1.0666, - "num_input_tokens_seen": 81544280, - "step": 3842 - }, - { - "epoch": 0.4620934287260266, - "flos": 13438092316200.0, - "grad_norm": 5.013434100930718, - "learning_rate": 2.3410722966019755e-06, - "loss": 1.006, - "num_input_tokens_seen": 81559115, - "step": 3843 - }, - { - "epoch": 0.46221367161666566, - "flos": 26655254369520.0, - "grad_norm": 3.0378415516159842, - "learning_rate": 2.3403047099318848e-06, - "loss": 0.8846, - "num_input_tokens_seen": 81582905, - "step": 3844 - }, - { - "epoch": 0.46233391450730477, - "flos": 10188014871720.0, - "grad_norm": 8.338045491871085, - "learning_rate": 2.3395370716378405e-06, - "loss": 0.9676, - "num_input_tokens_seen": 81600070, - "step": 3845 - }, - { - "epoch": 0.4624541573979438, - "flos": 15983969221800.0, - "grad_norm": 18.413028317373712, - "learning_rate": 2.338769381836292e-06, - "loss": 0.9555, - "num_input_tokens_seen": 81619400, - "step": 3846 - }, - { - "epoch": 0.46257440028858293, - "flos": 10214266715160.0, - "grad_norm": 8.89767010656134, - "learning_rate": 2.3380016406436984e-06, - "loss": 0.9464, - "num_input_tokens_seen": 81636600, - "step": 3847 - }, - { - "epoch": 0.46269464317922204, - "flos": 16587056405040.0, - "grad_norm": 4.001092523147169, - "learning_rate": 2.337233848176524e-06, - "loss": 1.0449, - "num_input_tokens_seen": 81654090, - "step": 3848 - }, - { - "epoch": 0.4628148860698611, - "flos": 13151560332240.0, - "grad_norm": 4.120842659338313, - "learning_rate": 2.3364660045512435e-06, - "loss": 1.0474, - "num_input_tokens_seen": 81672570, - "step": 3849 - }, - { - "epoch": 0.4629351289605002, - "flos": 50612818274880.0, - "grad_norm": 0.783343980256901, - "learning_rate": 2.335698109884337e-06, - "loss": 0.8569, - "num_input_tokens_seen": 81737495, - "step": 3850 - }, - { - "epoch": 0.4630553718511393, - "flos": 42721683622320.0, - "grad_norm": 0.8229342390349125, - "learning_rate": 2.334930164292294e-06, - "loss": 0.8849, - "num_input_tokens_seen": 81799765, - "step": 3851 - }, - { - "epoch": 0.4631756147417784, - "flos": 11289028556640.0, - "grad_norm": 4.563887502729799, - "learning_rate": 2.334162167891612e-06, - "loss": 1.0145, - "num_input_tokens_seen": 81816750, - "step": 3852 - }, - { - "epoch": 0.4632958576324175, - "flos": 11657412888480.0, - "grad_norm": 4.912164900903865, - "learning_rate": 2.333394120798795e-06, - "loss": 0.9633, - "num_input_tokens_seen": 81835205, - "step": 3853 - }, - { - "epoch": 0.4634161005230566, - "flos": 15878563247760.0, - "grad_norm": 2.904115610937253, - "learning_rate": 2.3326260231303545e-06, - "loss": 0.9558, - "num_input_tokens_seen": 81853525, - "step": 3854 - }, - { - "epoch": 0.46353634341369565, - "flos": 11132161388760.0, - "grad_norm": 3.04712820712699, - "learning_rate": 2.331857875002811e-06, - "loss": 1.0995, - "num_input_tokens_seen": 81871845, - "step": 3855 - }, - { - "epoch": 0.46365658630433476, - "flos": 20178622445160.0, - "grad_norm": 7.459083846138912, - "learning_rate": 2.3310896765326916e-06, - "loss": 0.9866, - "num_input_tokens_seen": 81892565, - "step": 3856 - }, - { - "epoch": 0.46377682919497387, - "flos": 17503847262480.0, - "grad_norm": 2.8700281358462876, - "learning_rate": 2.330321427836531e-06, - "loss": 1.0673, - "num_input_tokens_seen": 81914155, - "step": 3857 - }, - { - "epoch": 0.4638970720856129, - "flos": 13517062477440.0, - "grad_norm": 3.479802300110103, - "learning_rate": 
2.3295531290308733e-06, - "loss": 1.0582, - "num_input_tokens_seen": 81932025, - "step": 3858 - }, - { - "epoch": 0.46401731497625204, - "flos": 13092249779040.0, - "grad_norm": 3.870553695931105, - "learning_rate": 2.3287847802322678e-06, - "loss": 0.9567, - "num_input_tokens_seen": 81947315, - "step": 3859 - }, - { - "epoch": 0.4641375578668911, - "flos": 18552326598960.0, - "grad_norm": 4.549065445719962, - "learning_rate": 2.3280163815572723e-06, - "loss": 1.0586, - "num_input_tokens_seen": 81967630, - "step": 3860 - }, - { - "epoch": 0.4642578007575302, - "flos": 13882993884480.0, - "grad_norm": 3.9697504659873815, - "learning_rate": 2.3272479331224522e-06, - "loss": 0.99, - "num_input_tokens_seen": 81984780, - "step": 3861 - }, - { - "epoch": 0.4643780436481693, - "flos": 20075761380600.0, - "grad_norm": 3.25736253590349, - "learning_rate": 2.3264794350443817e-06, - "loss": 1.0009, - "num_input_tokens_seen": 82006595, - "step": 3862 - }, - { - "epoch": 0.46449828653880837, - "flos": 18055350605640.0, - "grad_norm": 3.5595811631779064, - "learning_rate": 2.3257108874396396e-06, - "loss": 1.0173, - "num_input_tokens_seen": 82027410, - "step": 3863 - }, - { - "epoch": 0.4646185294294475, - "flos": 11342114813160.0, - "grad_norm": 4.747835368785117, - "learning_rate": 2.3249422904248152e-06, - "loss": 0.9664, - "num_input_tokens_seen": 82045565, - "step": 3864 - }, - { - "epoch": 0.4647387723200866, - "flos": 18765652794960.0, - "grad_norm": 2.0818648567055678, - "learning_rate": 2.324173644116504e-06, - "loss": 1.1018, - "num_input_tokens_seen": 82068135, - "step": 3865 - }, - { - "epoch": 0.46485901521072565, - "flos": 19337766254520.0, - "grad_norm": 3.5218965328340186, - "learning_rate": 2.3234049486313087e-06, - "loss": 1.0442, - "num_input_tokens_seen": 82089305, - "step": 3866 - }, - { - "epoch": 0.46497925810136476, - "flos": 17267979271800.0, - "grad_norm": 6.8184004962416545, - "learning_rate": 2.322636204085839e-06, - "loss": 0.9979, - "num_input_tokens_seen": 82109095, - "step": 3867 - }, - { - "epoch": 0.46509950099200387, - "flos": 11499349919760.0, - "grad_norm": 5.297945617684683, - "learning_rate": 2.3218674105967143e-06, - "loss": 0.9873, - "num_input_tokens_seen": 82127080, - "step": 3868 - }, - { - "epoch": 0.4652197438826429, - "flos": 16666241197200.0, - "grad_norm": 2.6754424611596628, - "learning_rate": 2.3210985682805593e-06, - "loss": 1.0581, - "num_input_tokens_seen": 82148580, - "step": 3869 - }, - { - "epoch": 0.46533998677328203, - "flos": 11472914106960.0, - "grad_norm": 4.113229546579724, - "learning_rate": 2.320329677254007e-06, - "loss": 0.8891, - "num_input_tokens_seen": 82165630, - "step": 3870 - }, - { - "epoch": 0.46546022966392114, - "flos": 15012742999200.0, - "grad_norm": 3.2538850011967515, - "learning_rate": 2.319560737633697e-06, - "loss": 0.945, - "num_input_tokens_seen": 82184070, - "step": 3871 - }, - { - "epoch": 0.4655804725545602, - "flos": 29407895897280.0, - "grad_norm": 3.7991692789454654, - "learning_rate": 2.3187917495362775e-06, - "loss": 0.9169, - "num_input_tokens_seen": 82208200, - "step": 3872 - }, - { - "epoch": 0.4657007154451993, - "flos": 13883484469440.0, - "grad_norm": 2.973459463594657, - "learning_rate": 2.318022713078403e-06, - "loss": 0.9849, - "num_input_tokens_seen": 82222500, - "step": 3873 - }, - { - "epoch": 0.4658209583358384, - "flos": 10970143078800.0, - "grad_norm": 4.4068957998596145, - "learning_rate": 2.3172536283767354e-06, - "loss": 1.0704, - "num_input_tokens_seen": 82235980, - "step": 3874 - }, - { - 
"epoch": 0.4659412012264775, - "flos": 10529166190200.0, - "grad_norm": 3.685916366613589, - "learning_rate": 2.3164844955479447e-06, - "loss": 1.0266, - "num_input_tokens_seen": 82251510, - "step": 3875 - }, - { - "epoch": 0.4660614441171166, - "flos": 17372710691520.0, - "grad_norm": 3.6664734413790443, - "learning_rate": 2.3157153147087082e-06, - "loss": 0.9267, - "num_input_tokens_seen": 82273120, - "step": 3876 - }, - { - "epoch": 0.46618168700775564, - "flos": 15695014974600.0, - "grad_norm": 4.8732153731515, - "learning_rate": 2.314946085975709e-06, - "loss": 1.0692, - "num_input_tokens_seen": 82291820, - "step": 3877 - }, - { - "epoch": 0.46630192989839475, - "flos": 18631388744880.0, - "grad_norm": 4.569264870580335, - "learning_rate": 2.3141768094656393e-06, - "loss": 1.0477, - "num_input_tokens_seen": 82310115, - "step": 3878 - }, - { - "epoch": 0.46642217278903386, - "flos": 8090228336640.0, - "grad_norm": 4.612680885469087, - "learning_rate": 2.3134074852951966e-06, - "loss": 1.0558, - "num_input_tokens_seen": 82326425, - "step": 3879 - }, - { - "epoch": 0.4665424156796729, - "flos": 23036823254760.0, - "grad_norm": 3.5965750826917895, - "learning_rate": 2.312638113581088e-06, - "loss": 1.0107, - "num_input_tokens_seen": 82346630, - "step": 3880 - }, - { - "epoch": 0.46666265857031203, - "flos": 13067439028920.0, - "grad_norm": 6.796748157940011, - "learning_rate": 2.311868694440027e-06, - "loss": 1.0082, - "num_input_tokens_seen": 82360770, - "step": 3881 - }, - { - "epoch": 0.46678290146095114, - "flos": 49010781270720.0, - "grad_norm": 0.723228846341179, - "learning_rate": 2.3110992279887323e-06, - "loss": 0.8672, - "num_input_tokens_seen": 82432415, - "step": 3882 - }, - { - "epoch": 0.4669031443515902, - "flos": 12548166533400.0, - "grad_norm": 3.969570765892666, - "learning_rate": 2.310329714343932e-06, - "loss": 1.0757, - "num_input_tokens_seen": 82448285, - "step": 3883 - }, - { - "epoch": 0.4670233872422293, - "flos": 17028799832640.0, - "grad_norm": 3.080582620333479, - "learning_rate": 2.309560153622361e-06, - "loss": 1.0382, - "num_input_tokens_seen": 82464915, - "step": 3884 - }, - { - "epoch": 0.4671436301328684, - "flos": 20021601969480.0, - "grad_norm": 5.920077491459281, - "learning_rate": 2.3087905459407602e-06, - "loss": 0.9656, - "num_input_tokens_seen": 82483575, - "step": 3885 - }, - { - "epoch": 0.46726387302350747, - "flos": 49677231881160.0, - "grad_norm": 0.8503544489149026, - "learning_rate": 2.3080208914158795e-06, - "loss": 0.9121, - "num_input_tokens_seen": 82546295, - "step": 3886 - }, - { - "epoch": 0.4673841159141466, - "flos": 18158916886080.0, - "grad_norm": 4.103354565720174, - "learning_rate": 2.3072511901644753e-06, - "loss": 0.9336, - "num_input_tokens_seen": 82565085, - "step": 3887 - }, - { - "epoch": 0.4675043588047857, - "flos": 17425735624920.0, - "grad_norm": 4.685325016240869, - "learning_rate": 2.306481442303309e-06, - "loss": 1.0306, - "num_input_tokens_seen": 82584380, - "step": 3888 - }, - { - "epoch": 0.46762460169542475, - "flos": 14882771567520.0, - "grad_norm": 3.836700177172008, - "learning_rate": 2.3057116479491515e-06, - "loss": 0.9537, - "num_input_tokens_seen": 82603510, - "step": 3889 - }, - { - "epoch": 0.46774484458606386, - "flos": 13596799177680.0, - "grad_norm": 4.279285286383213, - "learning_rate": 2.30494180721878e-06, - "loss": 0.9899, - "num_input_tokens_seen": 82620570, - "step": 3890 - }, - { - "epoch": 0.4678650874767029, - "flos": 12731530837200.0, - "grad_norm": 3.7231491098844796, - "learning_rate": 
2.3041719202289794e-06, - "loss": 1.1274, - "num_input_tokens_seen": 82636465, - "step": 3891 - }, - { - "epoch": 0.467985330367342, - "flos": 15170100752040.0, - "grad_norm": 2.561472244710999, - "learning_rate": 2.30340198709654e-06, - "loss": 1.0313, - "num_input_tokens_seen": 82656020, - "step": 3892 - }, - { - "epoch": 0.46810557325798113, - "flos": 14567166876600.0, - "grad_norm": 5.267407033685598, - "learning_rate": 2.3026320079382605e-06, - "loss": 0.9787, - "num_input_tokens_seen": 82672675, - "step": 3893 - }, - { - "epoch": 0.4682258161486202, - "flos": 21463674988200.0, - "grad_norm": 6.224291302274996, - "learning_rate": 2.3018619828709454e-06, - "loss": 0.9787, - "num_input_tokens_seen": 82693935, - "step": 3894 - }, - { - "epoch": 0.4683460590392593, - "flos": 17995273513440.0, - "grad_norm": 4.756069722311818, - "learning_rate": 2.3010919120114084e-06, - "loss": 1.0472, - "num_input_tokens_seen": 82710185, - "step": 3895 - }, - { - "epoch": 0.4684663019298984, - "flos": 10862222856840.0, - "grad_norm": 3.6793532817051204, - "learning_rate": 2.3003217954764672e-06, - "loss": 0.8669, - "num_input_tokens_seen": 82724610, - "step": 3896 - }, - { - "epoch": 0.46858654482053747, - "flos": 19782146576280.0, - "grad_norm": 4.054488748666288, - "learning_rate": 2.299551633382949e-06, - "loss": 1.032, - "num_input_tokens_seen": 82744640, - "step": 3897 - }, - { - "epoch": 0.4687067877111766, - "flos": 12783942539400.0, - "grad_norm": 5.829701555573045, - "learning_rate": 2.2987814258476854e-06, - "loss": 1.0708, - "num_input_tokens_seen": 82762160, - "step": 3898 - }, - { - "epoch": 0.4688270306018157, - "flos": 12019113000240.0, - "grad_norm": 3.7829749846512426, - "learning_rate": 2.2980111729875177e-06, - "loss": 0.9005, - "num_input_tokens_seen": 82778130, - "step": 3899 - }, - { - "epoch": 0.46894727349245474, - "flos": 12626554125000.0, - "grad_norm": 5.604169465056032, - "learning_rate": 2.2972408749192917e-06, - "loss": 1.0455, - "num_input_tokens_seen": 82795580, - "step": 3900 - }, - { - "epoch": 0.46906751638309385, - "flos": 15249684144480.0, - "grad_norm": 8.50908624030921, - "learning_rate": 2.296470531759861e-06, - "loss": 0.8972, - "num_input_tokens_seen": 82813400, - "step": 3901 - }, - { - "epoch": 0.46918775927373296, - "flos": 14434711858560.0, - "grad_norm": 4.752629004287267, - "learning_rate": 2.2957001436260866e-06, - "loss": 1.0161, - "num_input_tokens_seen": 82830180, - "step": 3902 - }, - { - "epoch": 0.469308002164372, - "flos": 13044253341480.0, - "grad_norm": 4.202694101474825, - "learning_rate": 2.294929710634836e-06, - "loss": 0.9537, - "num_input_tokens_seen": 82847990, - "step": 3903 - }, - { - "epoch": 0.46942824505501113, - "flos": 27101137107720.0, - "grad_norm": 2.7547456368442003, - "learning_rate": 2.2941592329029823e-06, - "loss": 0.8312, - "num_input_tokens_seen": 82868815, - "step": 3904 - }, - { - "epoch": 0.46954848794565024, - "flos": 15537963837360.0, - "grad_norm": 6.252365580154474, - "learning_rate": 2.2933887105474067e-06, - "loss": 1.0186, - "num_input_tokens_seen": 82886710, - "step": 3905 - }, - { - "epoch": 0.4696687308362893, - "flos": 15641223502200.0, - "grad_norm": 2.9362613420198374, - "learning_rate": 2.2926181436849974e-06, - "loss": 1.0395, - "num_input_tokens_seen": 82905785, - "step": 3906 - }, - { - "epoch": 0.4697889737269284, - "flos": 15351472054440.0, - "grad_norm": 3.4143549534172344, - "learning_rate": 2.2918475324326478e-06, - "loss": 0.9546, - "num_input_tokens_seen": 82925225, - "step": 3907 - }, - { - 
"epoch": 0.46990921661756746, - "flos": 17949607354440.0, - "grad_norm": 3.9302617709506067, - "learning_rate": 2.2910768769072603e-06, - "loss": 1.1322, - "num_input_tokens_seen": 82943725, - "step": 3908 - }, - { - "epoch": 0.47002945950820657, - "flos": 9767832068880.0, - "grad_norm": 4.104913995285578, - "learning_rate": 2.2903061772257417e-06, - "loss": 0.9763, - "num_input_tokens_seen": 82961430, - "step": 3909 - }, - { - "epoch": 0.4701497023988457, - "flos": 18681316860720.0, - "grad_norm": 3.5001140303428446, - "learning_rate": 2.289535433505007e-06, - "loss": 1.0131, - "num_input_tokens_seen": 82982505, - "step": 3910 - }, - { - "epoch": 0.47026994528948474, - "flos": 18237917708880.0, - "grad_norm": 2.9242144516085995, - "learning_rate": 2.2887646458619767e-06, - "loss": 0.863, - "num_input_tokens_seen": 83003590, - "step": 3911 - }, - { - "epoch": 0.47039018818012385, - "flos": 14590045948440.0, - "grad_norm": 2.864176569871501, - "learning_rate": 2.2879938144135797e-06, - "loss": 0.994, - "num_input_tokens_seen": 83019415, - "step": 3912 - }, - { - "epoch": 0.47051043107076296, - "flos": 15325312195680.0, - "grad_norm": 3.0658065695465853, - "learning_rate": 2.2872229392767496e-06, - "loss": 0.9898, - "num_input_tokens_seen": 83039240, - "step": 3913 - }, - { - "epoch": 0.470630673961402, - "flos": 13439840025120.0, - "grad_norm": 3.1048222474651417, - "learning_rate": 2.286452020568428e-06, - "loss": 0.9776, - "num_input_tokens_seen": 83057035, - "step": 3914 - }, - { - "epoch": 0.4707509168520411, - "flos": 14148394505520.0, - "grad_norm": 3.9253220930180426, - "learning_rate": 2.2856810584055637e-06, - "loss": 0.971, - "num_input_tokens_seen": 83074290, - "step": 3915 - }, - { - "epoch": 0.47087115974268023, - "flos": 28651651594920.0, - "grad_norm": 2.595877177285081, - "learning_rate": 2.2849100529051085e-06, - "loss": 0.9016, - "num_input_tokens_seen": 83100945, - "step": 3916 - }, - { - "epoch": 0.4709914026333193, - "flos": 9558553198800.0, - "grad_norm": 5.897384529357856, - "learning_rate": 2.284139004184026e-06, - "loss": 1.0175, - "num_input_tokens_seen": 83117895, - "step": 3917 - }, - { - "epoch": 0.4711116455239584, - "flos": 14173910471520.0, - "grad_norm": 3.7462788238791194, - "learning_rate": 2.2833679123592814e-06, - "loss": 0.967, - "num_input_tokens_seen": 83134875, - "step": 3918 - }, - { - "epoch": 0.4712318884145975, - "flos": 22906085284080.0, - "grad_norm": 3.3760108846696872, - "learning_rate": 2.2825967775478508e-06, - "loss": 0.8533, - "num_input_tokens_seen": 83155695, - "step": 3919 - }, - { - "epoch": 0.47135213130523657, - "flos": 14226567466200.0, - "grad_norm": 3.2260586619994407, - "learning_rate": 2.2818255998667135e-06, - "loss": 1.0585, - "num_input_tokens_seen": 83173925, - "step": 3920 - }, - { - "epoch": 0.4714723741958757, - "flos": 13780286127720.0, - "grad_norm": 4.171996611347816, - "learning_rate": 2.2810543794328566e-06, - "loss": 1.0176, - "num_input_tokens_seen": 83192680, - "step": 3921 - }, - { - "epoch": 0.4715926170865148, - "flos": 14460565101720.0, - "grad_norm": 3.6264532399020792, - "learning_rate": 2.2802831163632735e-06, - "loss": 1.0439, - "num_input_tokens_seen": 83211120, - "step": 3922 - }, - { - "epoch": 0.47171285997715384, - "flos": 16112867498880.0, - "grad_norm": 7.02026132428696, - "learning_rate": 2.279511810774965e-06, - "loss": 0.9624, - "num_input_tokens_seen": 83232370, - "step": 3923 - }, - { - "epoch": 0.47183310286779295, - "flos": 14986552478880.0, - "grad_norm": 5.831897813275897, - 
"learning_rate": 2.2787404627849364e-06, - "loss": 0.9408, - "num_input_tokens_seen": 83251300, - "step": 3924 - }, - { - "epoch": 0.471953345758432, - "flos": 15433079109840.0, - "grad_norm": 6.523019085103089, - "learning_rate": 2.277969072510202e-06, - "loss": 1.0195, - "num_input_tokens_seen": 83270000, - "step": 3925 - }, - { - "epoch": 0.4720735886490711, - "flos": 14091751508040.0, - "grad_norm": 3.011852691878081, - "learning_rate": 2.2771976400677803e-06, - "loss": 1.0453, - "num_input_tokens_seen": 83288550, - "step": 3926 - }, - { - "epoch": 0.47219383153971023, - "flos": 13596921823920.0, - "grad_norm": 3.932083643111909, - "learning_rate": 2.2764261655746965e-06, - "loss": 1.0069, - "num_input_tokens_seen": 83305765, - "step": 3927 - }, - { - "epoch": 0.4723140744303493, - "flos": 16509466014000.0, - "grad_norm": 4.355426271513708, - "learning_rate": 2.2756546491479832e-06, - "loss": 0.9872, - "num_input_tokens_seen": 83326400, - "step": 3928 - }, - { - "epoch": 0.4724343173209884, - "flos": 12915232418160.0, - "grad_norm": 7.707808303135893, - "learning_rate": 2.274883090904679e-06, - "loss": 1.0305, - "num_input_tokens_seen": 83343885, - "step": 3929 - }, - { - "epoch": 0.4725545602116275, - "flos": 15091161252360.0, - "grad_norm": 77.39433005213463, - "learning_rate": 2.2741114909618283e-06, - "loss": 0.9008, - "num_input_tokens_seen": 83359500, - "step": 3930 - }, - { - "epoch": 0.47267480310226656, - "flos": 15222727085160.0, - "grad_norm": 2.9937246174224073, - "learning_rate": 2.2733398494364828e-06, - "loss": 0.949, - "num_input_tokens_seen": 83378465, - "step": 3931 - }, - { - "epoch": 0.47279504599290567, - "flos": 13308519484800.0, - "grad_norm": 6.43067911504028, - "learning_rate": 2.272568166445699e-06, - "loss": 1.0706, - "num_input_tokens_seen": 83396750, - "step": 3932 - }, - { - "epoch": 0.4729152888835448, - "flos": 14986828432920.0, - "grad_norm": 4.931259267433267, - "learning_rate": 2.271796442106541e-06, - "loss": 0.8656, - "num_input_tokens_seen": 83415825, - "step": 3933 - }, - { - "epoch": 0.47303553177418384, - "flos": 50274370721760.0, - "grad_norm": 0.7885517695866114, - "learning_rate": 2.271024676536079e-06, - "loss": 0.8408, - "num_input_tokens_seen": 83475805, - "step": 3934 - }, - { - "epoch": 0.47315577466482295, - "flos": 15956521577520.0, - "grad_norm": 3.435255809292921, - "learning_rate": 2.2702528698513894e-06, - "loss": 0.9572, - "num_input_tokens_seen": 83496650, - "step": 3935 - }, - { - "epoch": 0.47327601755546206, - "flos": 17320452297120.0, - "grad_norm": 2.860394405566391, - "learning_rate": 2.269481022169554e-06, - "loss": 1.0082, - "num_input_tokens_seen": 83514965, - "step": 3936 - }, - { - "epoch": 0.4733962604461011, - "flos": 16295127986520.0, - "grad_norm": 3.1360815785852103, - "learning_rate": 2.2687091336076614e-06, - "loss": 1.0314, - "num_input_tokens_seen": 83534025, - "step": 3937 - }, - { - "epoch": 0.4735165033367402, - "flos": 12989327391360.0, - "grad_norm": 4.253530270273369, - "learning_rate": 2.267937204282807e-06, - "loss": 1.0226, - "num_input_tokens_seen": 83550885, - "step": 3938 - }, - { - "epoch": 0.4736367462273793, - "flos": 16374466086480.0, - "grad_norm": 2.86318998724782, - "learning_rate": 2.2671652343120926e-06, - "loss": 0.9997, - "num_input_tokens_seen": 83571080, - "step": 3939 - }, - { - "epoch": 0.4737569891180184, - "flos": 18054553405080.0, - "grad_norm": 3.1728943938994987, - "learning_rate": 2.2663932238126236e-06, - "loss": 1.0269, - "num_input_tokens_seen": 83589360, - "step": 3940 - }, 
- { - "epoch": 0.4738772320086575, - "flos": 18396011339160.0, - "grad_norm": 4.677827748937585, - "learning_rate": 2.265621172901515e-06, - "loss": 1.0213, - "num_input_tokens_seen": 83612195, - "step": 3941 - }, - { - "epoch": 0.47399747489929656, - "flos": 19628652180000.0, - "grad_norm": 5.4808766219394025, - "learning_rate": 2.2648490816958854e-06, - "loss": 0.9433, - "num_input_tokens_seen": 83632910, - "step": 3942 - }, - { - "epoch": 0.47411771778993567, - "flos": 17687548843440.0, - "grad_norm": 5.804648726527636, - "learning_rate": 2.264076950312861e-06, - "loss": 0.942, - "num_input_tokens_seen": 83651440, - "step": 3943 - }, - { - "epoch": 0.4742379606805748, - "flos": 16167149556240.0, - "grad_norm": 3.284987147057877, - "learning_rate": 2.2633047788695727e-06, - "loss": 1.0466, - "num_input_tokens_seen": 83671465, - "step": 3944 - }, - { - "epoch": 0.47435820357121383, - "flos": 13962761246280.0, - "grad_norm": 3.6767195538763144, - "learning_rate": 2.262532567483159e-06, - "loss": 0.8688, - "num_input_tokens_seen": 83689745, - "step": 3945 - }, - { - "epoch": 0.47447844646185294, - "flos": 17949944631600.0, - "grad_norm": 4.4329247327008625, - "learning_rate": 2.2617603162707635e-06, - "loss": 1.0264, - "num_input_tokens_seen": 83709875, - "step": 3946 - }, - { - "epoch": 0.47459868935249205, - "flos": 17476798218480.0, - "grad_norm": 3.458854237741084, - "learning_rate": 2.2609880253495363e-06, - "loss": 1.049, - "num_input_tokens_seen": 83729230, - "step": 3947 - }, - { - "epoch": 0.4747189322431311, - "flos": 14593541366280.0, - "grad_norm": 4.01987573687812, - "learning_rate": 2.260215694836633e-06, - "loss": 1.0834, - "num_input_tokens_seen": 83748125, - "step": 3948 - }, - { - "epoch": 0.4748391751337702, - "flos": 18500865405120.0, - "grad_norm": 3.2764590209279763, - "learning_rate": 2.2594433248492157e-06, - "loss": 0.8704, - "num_input_tokens_seen": 83766820, - "step": 3949 - }, - { - "epoch": 0.47495941802440933, - "flos": 16219653243120.0, - "grad_norm": 4.2034092916895345, - "learning_rate": 2.2586709155044527e-06, - "loss": 1.0258, - "num_input_tokens_seen": 83787140, - "step": 3950 - }, - { - "epoch": 0.4750796609150484, - "flos": 19863784293240.0, - "grad_norm": 3.816920360035178, - "learning_rate": 2.2578984669195167e-06, - "loss": 0.9894, - "num_input_tokens_seen": 83807825, - "step": 3951 - }, - { - "epoch": 0.4751999038056875, - "flos": 25446289801080.0, - "grad_norm": 4.178669058529044, - "learning_rate": 2.2571259792115887e-06, - "loss": 0.9004, - "num_input_tokens_seen": 83828765, - "step": 3952 - }, - { - "epoch": 0.4753201466963266, - "flos": 15694831005240.0, - "grad_norm": 5.076458560704961, - "learning_rate": 2.2563534524978544e-06, - "loss": 1.0182, - "num_input_tokens_seen": 83845955, - "step": 3953 - }, - { - "epoch": 0.47544038958696566, - "flos": 21516148013520.0, - "grad_norm": 4.222503800511838, - "learning_rate": 2.2555808868955052e-06, - "loss": 0.9528, - "num_input_tokens_seen": 83867805, - "step": 3954 - }, - { - "epoch": 0.47556063247760477, - "flos": 16686881975160.0, - "grad_norm": 5.0323145906998406, - "learning_rate": 2.254808282521738e-06, - "loss": 0.9521, - "num_input_tokens_seen": 83886275, - "step": 3955 - }, - { - "epoch": 0.4756808753682438, - "flos": 17897042344440.0, - "grad_norm": 4.8317821688277665, - "learning_rate": 2.2540356394937573e-06, - "loss": 1.039, - "num_input_tokens_seen": 83904695, - "step": 3956 - }, - { - "epoch": 0.47580111825888294, - "flos": 11080117625280.0, - "grad_norm": 4.637970752934749, - 
"learning_rate": 2.253262957928772e-06, - "loss": 1.0421, - "num_input_tokens_seen": 83921300, - "step": 3957 - }, - { - "epoch": 0.47592136114952205, - "flos": 12493179260160.0, - "grad_norm": 2.526790788347906, - "learning_rate": 2.2524902379439976e-06, - "loss": 0.9442, - "num_input_tokens_seen": 83939690, - "step": 3958 - }, - { - "epoch": 0.4760416040401611, - "flos": 43962087385920.0, - "grad_norm": 0.8181142143896581, - "learning_rate": 2.251717479656655e-06, - "loss": 0.8862, - "num_input_tokens_seen": 84004205, - "step": 3959 - }, - { - "epoch": 0.4761618469308002, - "flos": 13046491635360.0, - "grad_norm": 9.62845064211154, - "learning_rate": 2.2509446831839704e-06, - "loss": 0.9808, - "num_input_tokens_seen": 84023365, - "step": 3960 - }, - { - "epoch": 0.4762820898214393, - "flos": 12784187831880.0, - "grad_norm": 10.60496337136108, - "learning_rate": 2.250171848643177e-06, - "loss": 1.0336, - "num_input_tokens_seen": 84040375, - "step": 3961 - }, - { - "epoch": 0.4764023327120784, - "flos": 13702235813280.0, - "grad_norm": 3.1879450169492114, - "learning_rate": 2.249398976151513e-06, - "loss": 1.0962, - "num_input_tokens_seen": 84057645, - "step": 3962 - }, - { - "epoch": 0.4765225756027175, - "flos": 16166505663480.0, - "grad_norm": 5.972578009522687, - "learning_rate": 2.248626065826223e-06, - "loss": 1.0184, - "num_input_tokens_seen": 84075570, - "step": 3963 - }, - { - "epoch": 0.4766428184933566, - "flos": 47206737734280.0, - "grad_norm": 0.7698872706021787, - "learning_rate": 2.2478531177845564e-06, - "loss": 0.8713, - "num_input_tokens_seen": 84136285, - "step": 3964 - }, - { - "epoch": 0.47676306138399566, - "flos": 17165302176600.0, - "grad_norm": 3.450674024336261, - "learning_rate": 2.247080132143769e-06, - "loss": 1.0745, - "num_input_tokens_seen": 84158360, - "step": 3965 - }, - { - "epoch": 0.47688330427463477, - "flos": 8876986439280.0, - "grad_norm": 5.69654713501737, - "learning_rate": 2.246307109021121e-06, - "loss": 0.9083, - "num_input_tokens_seen": 84175485, - "step": 3966 - }, - { - "epoch": 0.4770035471652739, - "flos": 15190588222200.0, - "grad_norm": 2.975401644481613, - "learning_rate": 2.2455340485338817e-06, - "loss": 1.0505, - "num_input_tokens_seen": 84192840, - "step": 3967 - }, - { - "epoch": 0.47712379005591293, - "flos": 17897931529680.0, - "grad_norm": 3.431401478354268, - "learning_rate": 2.244760950799322e-06, - "loss": 0.9047, - "num_input_tokens_seen": 84210830, - "step": 3968 - }, - { - "epoch": 0.47724403294655204, - "flos": 15668548500240.0, - "grad_norm": 3.174024933801798, - "learning_rate": 2.2439878159347203e-06, - "loss": 0.9526, - "num_input_tokens_seen": 84229975, - "step": 3969 - }, - { - "epoch": 0.4773642758371911, - "flos": 50297857476720.0, - "grad_norm": 0.816523274334671, - "learning_rate": 2.2432146440573616e-06, - "loss": 0.8514, - "num_input_tokens_seen": 84295655, - "step": 3970 - }, - { - "epoch": 0.4774845187278302, - "flos": 16742237187120.0, - "grad_norm": 9.343127788125043, - "learning_rate": 2.242441435284534e-06, - "loss": 0.882, - "num_input_tokens_seen": 84314250, - "step": 3971 - }, - { - "epoch": 0.4776047616184693, - "flos": 16402281669480.0, - "grad_norm": 6.857207701013145, - "learning_rate": 2.2416681897335337e-06, - "loss": 1.0732, - "num_input_tokens_seen": 84332120, - "step": 3972 - }, - { - "epoch": 0.4777250045091084, - "flos": 22742104634280.0, - "grad_norm": 26.084191971398717, - "learning_rate": 2.240894907521661e-06, - "loss": 0.8917, - "num_input_tokens_seen": 84350920, - "step": 3973 - }, - { 
- "epoch": 0.4778452473997475, - "flos": 17266538178480.0, - "grad_norm": 4.698154598506708, - "learning_rate": 2.240121588766223e-06, - "loss": 0.8664, - "num_input_tokens_seen": 84370690, - "step": 3974 - }, - { - "epoch": 0.4779654902903866, - "flos": 22510621246680.0, - "grad_norm": 3.2983505056845623, - "learning_rate": 2.239348233584531e-06, - "loss": 0.9344, - "num_input_tokens_seen": 84391265, - "step": 3975 - }, - { - "epoch": 0.47808573318102565, - "flos": 13833127091760.0, - "grad_norm": 5.02745073822638, - "learning_rate": 2.2385748420939013e-06, - "loss": 1.0312, - "num_input_tokens_seen": 84410180, - "step": 3976 - }, - { - "epoch": 0.47820597607166476, - "flos": 16061467628160.0, - "grad_norm": 2.829900363475666, - "learning_rate": 2.2378014144116583e-06, - "loss": 0.9542, - "num_input_tokens_seen": 84428710, - "step": 3977 - }, - { - "epoch": 0.4783262189623039, - "flos": 16350360552240.0, - "grad_norm": 5.000000762939395, - "learning_rate": 2.23702795065513e-06, - "loss": 1.0245, - "num_input_tokens_seen": 84448010, - "step": 3978 - }, - { - "epoch": 0.47844646185294293, - "flos": 35592777824400.0, - "grad_norm": 0.9993068856030248, - "learning_rate": 2.2362544509416493e-06, - "loss": 0.9335, - "num_input_tokens_seen": 84499845, - "step": 3979 - }, - { - "epoch": 0.47856670474358204, - "flos": 14357060144400.0, - "grad_norm": 4.249624572328396, - "learning_rate": 2.2354809153885572e-06, - "loss": 1.0425, - "num_input_tokens_seen": 84516635, - "step": 3980 - }, - { - "epoch": 0.47868694763422115, - "flos": 14903596314840.0, - "grad_norm": 11.837564992273077, - "learning_rate": 2.234707344113197e-06, - "loss": 1.0672, - "num_input_tokens_seen": 84534450, - "step": 3981 - }, - { - "epoch": 0.4788071905248602, - "flos": 13492619666040.0, - "grad_norm": 3.098224833043733, - "learning_rate": 2.233933737232919e-06, - "loss": 0.9996, - "num_input_tokens_seen": 84551950, - "step": 3982 - }, - { - "epoch": 0.4789274334154993, - "flos": 16349685997920.0, - "grad_norm": 2.9130500896113958, - "learning_rate": 2.2331600948650793e-06, - "loss": 1.0002, - "num_input_tokens_seen": 84571815, - "step": 3983 - }, - { - "epoch": 0.4790476763061384, - "flos": 16402465638840.0, - "grad_norm": 2.6217153070896586, - "learning_rate": 2.2323864171270386e-06, - "loss": 1.0252, - "num_input_tokens_seen": 84592805, - "step": 3984 - }, - { - "epoch": 0.4791679191967775, - "flos": 15039761381640.0, - "grad_norm": 4.827218276129679, - "learning_rate": 2.231612704136164e-06, - "loss": 0.9492, - "num_input_tokens_seen": 84612895, - "step": 3985 - }, - { - "epoch": 0.4792881620874166, - "flos": 15845075276160.0, - "grad_norm": 5.248415935048451, - "learning_rate": 2.2308389560098253e-06, - "loss": 0.9544, - "num_input_tokens_seen": 84628990, - "step": 3986 - }, - { - "epoch": 0.47940840497805565, - "flos": 12338611709280.0, - "grad_norm": 4.747219878294661, - "learning_rate": 2.2300651728654008e-06, - "loss": 0.9877, - "num_input_tokens_seen": 84643970, - "step": 3987 - }, - { - "epoch": 0.47952864786869476, - "flos": 46794496275480.0, - "grad_norm": 0.7471039409174558, - "learning_rate": 2.229291354820272e-06, - "loss": 0.8644, - "num_input_tokens_seen": 84704700, - "step": 3988 - }, - { - "epoch": 0.47964889075933387, - "flos": 11892146401440.0, - "grad_norm": 4.02120097691634, - "learning_rate": 2.228517501991828e-06, - "loss": 0.9888, - "num_input_tokens_seen": 84723220, - "step": 3989 - }, - { - "epoch": 0.4797691336499729, - "flos": 50189385346680.0, - "grad_norm": 0.8517733671556458, - 
"learning_rate": 2.22774361449746e-06, - "loss": 0.8704, - "num_input_tokens_seen": 84779420, - "step": 3990 - }, - { - "epoch": 0.47988937654061203, - "flos": 13439962671360.0, - "grad_norm": 8.431316907658214, - "learning_rate": 2.2269696924545668e-06, - "loss": 0.9289, - "num_input_tokens_seen": 84796970, - "step": 3991 - }, - { - "epoch": 0.48000961943125114, - "flos": 10211629821000.0, - "grad_norm": 3.8582449428466132, - "learning_rate": 2.2261957359805523e-06, - "loss": 1.0068, - "num_input_tokens_seen": 84813925, - "step": 3992 - }, - { - "epoch": 0.4801298623218902, - "flos": 19259439986040.0, - "grad_norm": 21.35791332096329, - "learning_rate": 2.225421745192823e-06, - "loss": 0.979, - "num_input_tokens_seen": 84833800, - "step": 3993 - }, - { - "epoch": 0.4802501052125293, - "flos": 18758937913320.0, - "grad_norm": 4.699589512532314, - "learning_rate": 2.2246477202087955e-06, - "loss": 1.0004, - "num_input_tokens_seen": 84854200, - "step": 3994 - }, - { - "epoch": 0.4803703481031684, - "flos": 14906141224320.0, - "grad_norm": 2.9118907700582604, - "learning_rate": 2.223873661145887e-06, - "loss": 1.0637, - "num_input_tokens_seen": 84873975, - "step": 3995 - }, - { - "epoch": 0.4804905909938075, - "flos": 14697812862600.0, - "grad_norm": 3.4652184732102587, - "learning_rate": 2.2230995681215226e-06, - "loss": 0.9418, - "num_input_tokens_seen": 84893220, - "step": 3996 - }, - { - "epoch": 0.4806108338844466, - "flos": 11788089536040.0, - "grad_norm": 4.84306852407457, - "learning_rate": 2.2223254412531305e-06, - "loss": 1.0038, - "num_input_tokens_seen": 84910310, - "step": 3997 - }, - { - "epoch": 0.4807310767750857, - "flos": 14200315622760.0, - "grad_norm": 4.896379978693642, - "learning_rate": 2.221551280658146e-06, - "loss": 1.0424, - "num_input_tokens_seen": 84929090, - "step": 3998 - }, - { - "epoch": 0.48085131966572475, - "flos": 16481313153840.0, - "grad_norm": 4.998804712475453, - "learning_rate": 2.2207770864540085e-06, - "loss": 0.9746, - "num_input_tokens_seen": 84947785, - "step": 3999 - }, - { - "epoch": 0.48097156255636386, - "flos": 14593664012520.0, - "grad_norm": 3.435050924220738, - "learning_rate": 2.220002858758162e-06, - "loss": 0.9469, - "num_input_tokens_seen": 84965495, - "step": 4000 - }, - { - "epoch": 0.481091805447003, - "flos": 50499378972120.0, - "grad_norm": 1.310134163239846, - "learning_rate": 2.2192285976880573e-06, - "loss": 0.8577, - "num_input_tokens_seen": 85029470, - "step": 4001 - }, - { - "epoch": 0.48121204833764203, - "flos": 25998590344800.0, - "grad_norm": 2.938029140133141, - "learning_rate": 2.2184543033611485e-06, - "loss": 1.0314, - "num_input_tokens_seen": 85050270, - "step": 4002 - }, - { - "epoch": 0.48133229122828114, - "flos": 19575197984760.0, - "grad_norm": 6.619484674962285, - "learning_rate": 2.2176799758948957e-06, - "loss": 1.044, - "num_input_tokens_seen": 85070150, - "step": 4003 - }, - { - "epoch": 0.4814525341189202, - "flos": 30774494172600.0, - "grad_norm": 3.882578299691439, - "learning_rate": 2.2169056154067635e-06, - "loss": 0.9534, - "num_input_tokens_seen": 85093790, - "step": 4004 - }, - { - "epoch": 0.4815727770095593, - "flos": 17236239009120.0, - "grad_norm": 2.7526916855703445, - "learning_rate": 2.216131222014222e-06, - "loss": 1.0524, - "num_input_tokens_seen": 85111585, - "step": 4005 - }, - { - "epoch": 0.4816930199001984, - "flos": 12836415564720.0, - "grad_norm": 4.23652442347189, - "learning_rate": 2.2153567958347455e-06, - "loss": 1.044, - "num_input_tokens_seen": 85127515, - "step": 4006 - }, - 
{ - "epoch": 0.48181326279083747, - "flos": 12233543012400.0, - "grad_norm": 4.281676111108192, - "learning_rate": 2.214582336985815e-06, - "loss": 1.0252, - "num_input_tokens_seen": 85145135, - "step": 4007 - }, - { - "epoch": 0.4819335056814766, - "flos": 10529227513320.0, - "grad_norm": 5.21559769684426, - "learning_rate": 2.2138078455849142e-06, - "loss": 0.8743, - "num_input_tokens_seen": 85162850, - "step": 4008 - }, - { - "epoch": 0.4820537485721157, - "flos": 13649456172360.0, - "grad_norm": 11.77692979682961, - "learning_rate": 2.2130333217495334e-06, - "loss": 1.0114, - "num_input_tokens_seen": 85181740, - "step": 4009 - }, - { - "epoch": 0.48217399146275475, - "flos": 11341654889760.0, - "grad_norm": 3.6740190455358896, - "learning_rate": 2.2122587655971665e-06, - "loss": 0.894, - "num_input_tokens_seen": 85196405, - "step": 4010 - }, - { - "epoch": 0.48229423435339386, - "flos": 17163339836760.0, - "grad_norm": 2.7087821197365782, - "learning_rate": 2.211484177245314e-06, - "loss": 0.8648, - "num_input_tokens_seen": 85215715, - "step": 4011 - }, - { - "epoch": 0.48241447724403297, - "flos": 16927011922680.0, - "grad_norm": 3.1571545201821594, - "learning_rate": 2.21070955681148e-06, - "loss": 0.9522, - "num_input_tokens_seen": 85234540, - "step": 4012 - }, - { - "epoch": 0.482534720134672, - "flos": 16427184404280.0, - "grad_norm": 3.382374653661166, - "learning_rate": 2.209934904413174e-06, - "loss": 1.0028, - "num_input_tokens_seen": 85255865, - "step": 4013 - }, - { - "epoch": 0.48265496302531113, - "flos": 14856151785360.0, - "grad_norm": 5.409618408791451, - "learning_rate": 2.2091602201679095e-06, - "loss": 0.9397, - "num_input_tokens_seen": 85275195, - "step": 4014 - }, - { - "epoch": 0.48277520591595025, - "flos": 10764819549960.0, - "grad_norm": 4.16583317367412, - "learning_rate": 2.208385504193206e-06, - "loss": 1.0439, - "num_input_tokens_seen": 85292415, - "step": 4015 - }, - { - "epoch": 0.4828954488065893, - "flos": 12653235230280.0, - "grad_norm": 4.807265370118974, - "learning_rate": 2.2076107566065873e-06, - "loss": 1.0366, - "num_input_tokens_seen": 85309920, - "step": 4016 - }, - { - "epoch": 0.4830156916972284, - "flos": 22879066901640.0, - "grad_norm": 8.795006670491182, - "learning_rate": 2.2068359775255816e-06, - "loss": 0.9815, - "num_input_tokens_seen": 85327950, - "step": 4017 - }, - { - "epoch": 0.48313593458786747, - "flos": 15537289283040.0, - "grad_norm": 3.9828113554410014, - "learning_rate": 2.206061167067723e-06, - "loss": 0.9907, - "num_input_tokens_seen": 85345780, - "step": 4018 - }, - { - "epoch": 0.4832561774785066, - "flos": 16061528951280.0, - "grad_norm": 4.463134224752186, - "learning_rate": 2.205286325350549e-06, - "loss": 1.017, - "num_input_tokens_seen": 85364565, - "step": 4019 - }, - { - "epoch": 0.4833764203691457, - "flos": 9474830495760.0, - "grad_norm": 4.132548505864186, - "learning_rate": 2.204511452491603e-06, - "loss": 0.963, - "num_input_tokens_seen": 85380910, - "step": 4020 - }, - { - "epoch": 0.48349666325978474, - "flos": 31532486183880.0, - "grad_norm": 4.277096480795152, - "learning_rate": 2.2037365486084316e-06, - "loss": 0.9743, - "num_input_tokens_seen": 85403870, - "step": 4021 - }, - { - "epoch": 0.48361690615042385, - "flos": 18524235061920.0, - "grad_norm": 3.8703357558770293, - "learning_rate": 2.2029616138185886e-06, - "loss": 1.0051, - "num_input_tokens_seen": 85422590, - "step": 4022 - }, - { - "epoch": 0.48373714904106296, - "flos": 15825936914640.0, - "grad_norm": 3.771525591679883, - "learning_rate": 
2.202186648239629e-06, - "loss": 1.0559, - "num_input_tokens_seen": 85442245, - "step": 4023 - }, - { - "epoch": 0.483857391931702, - "flos": 20151512078040.0, - "grad_norm": 6.325126785394769, - "learning_rate": 2.201411651989117e-06, - "loss": 0.9459, - "num_input_tokens_seen": 85463945, - "step": 4024 - }, - { - "epoch": 0.48397763482234113, - "flos": 19523399513760.0, - "grad_norm": 3.9566022333449387, - "learning_rate": 2.2006366251846167e-06, - "loss": 1.0069, - "num_input_tokens_seen": 85484305, - "step": 4025 - }, - { - "epoch": 0.48409787771298024, - "flos": 11889969430680.0, - "grad_norm": 3.4064115433489732, - "learning_rate": 2.1998615679436997e-06, - "loss": 0.9808, - "num_input_tokens_seen": 85501565, - "step": 4026 - }, - { - "epoch": 0.4842181206036193, - "flos": 17845673135280.0, - "grad_norm": 5.440746697341624, - "learning_rate": 2.199086480383942e-06, - "loss": 0.9912, - "num_input_tokens_seen": 85520660, - "step": 4027 - }, - { - "epoch": 0.4843383634942584, - "flos": 21646426060800.0, - "grad_norm": 8.492023540131783, - "learning_rate": 2.1983113626229234e-06, - "loss": 0.8824, - "num_input_tokens_seen": 85539630, - "step": 4028 - }, - { - "epoch": 0.4844586063848975, - "flos": 14489515162440.0, - "grad_norm": 6.01452753534213, - "learning_rate": 2.1975362147782293e-06, - "loss": 1.0089, - "num_input_tokens_seen": 85558545, - "step": 4029 - }, - { - "epoch": 0.48457884927553657, - "flos": 50351464979760.0, - "grad_norm": 0.9137789946833823, - "learning_rate": 2.196761036967448e-06, - "loss": 0.799, - "num_input_tokens_seen": 85626230, - "step": 4030 - }, - { - "epoch": 0.4846990921661757, - "flos": 14144592472080.0, - "grad_norm": 3.066681789969043, - "learning_rate": 2.1959858293081743e-06, - "loss": 1.0051, - "num_input_tokens_seen": 85645085, - "step": 4031 - }, - { - "epoch": 0.4848193350568148, - "flos": 16402772254440.0, - "grad_norm": 5.957053983050885, - "learning_rate": 2.1952105919180056e-06, - "loss": 0.9907, - "num_input_tokens_seen": 85664060, - "step": 4032 - }, - { - "epoch": 0.48493957794745385, - "flos": 15956828193120.0, - "grad_norm": 4.771260316001607, - "learning_rate": 2.1944353249145456e-06, - "loss": 0.9009, - "num_input_tokens_seen": 85682890, - "step": 4033 - }, - { - "epoch": 0.48505982083809296, - "flos": 17818685414400.0, - "grad_norm": 5.443084479906178, - "learning_rate": 2.193660028415401e-06, - "loss": 0.9748, - "num_input_tokens_seen": 85703390, - "step": 4034 - }, - { - "epoch": 0.485180063728732, - "flos": 19051479563040.0, - "grad_norm": 2.85976503140574, - "learning_rate": 2.1928847025381852e-06, - "loss": 1.0492, - "num_input_tokens_seen": 85723715, - "step": 4035 - }, - { - "epoch": 0.4853003066193711, - "flos": 17109119102520.0, - "grad_norm": 3.0062822685175337, - "learning_rate": 2.192109347400512e-06, - "loss": 1.0662, - "num_input_tokens_seen": 85743650, - "step": 4036 - }, - { - "epoch": 0.48542054951001024, - "flos": 16403140193160.0, - "grad_norm": 4.663595256053024, - "learning_rate": 2.191333963120004e-06, - "loss": 1.02, - "num_input_tokens_seen": 85762350, - "step": 4037 - }, - { - "epoch": 0.4855407924006493, - "flos": 18263188382400.0, - "grad_norm": 5.0932366048072835, - "learning_rate": 2.190558549814286e-06, - "loss": 0.9235, - "num_input_tokens_seen": 85782230, - "step": 4038 - }, - { - "epoch": 0.4856610352912884, - "flos": 17058179155200.0, - "grad_norm": 5.019675452391207, - "learning_rate": 2.1897831076009872e-06, - "loss": 1.0154, - "num_input_tokens_seen": 85801590, - "step": 4039 - }, - { - "epoch": 
0.4857812781819275, - "flos": 17136352115880.0, - "grad_norm": 4.656129771478941, - "learning_rate": 2.1890076365977426e-06, - "loss": 1.0241, - "num_input_tokens_seen": 85821135, - "step": 4040 - }, - { - "epoch": 0.48590152107256657, - "flos": 40259688275640.0, - "grad_norm": 0.9302338028881428, - "learning_rate": 2.188232136922189e-06, - "loss": 0.7964, - "num_input_tokens_seen": 85878975, - "step": 4041 - }, - { - "epoch": 0.4860217639632057, - "flos": 14225831588760.0, - "grad_norm": 3.7823751998049597, - "learning_rate": 2.187456608691971e-06, - "loss": 0.9798, - "num_input_tokens_seen": 85897570, - "step": 4042 - }, - { - "epoch": 0.4861420068538448, - "flos": 12626983386840.0, - "grad_norm": 4.0233403166357675, - "learning_rate": 2.1866810520247334e-06, - "loss": 1.1091, - "num_input_tokens_seen": 85916160, - "step": 4043 - }, - { - "epoch": 0.48626224974448384, - "flos": 18683984416440.0, - "grad_norm": 4.741310201876684, - "learning_rate": 2.185905467038129e-06, - "loss": 0.8714, - "num_input_tokens_seen": 85934785, - "step": 4044 - }, - { - "epoch": 0.48638249263512295, - "flos": 15668701808040.0, - "grad_norm": 4.756545726590412, - "learning_rate": 2.1851298538498127e-06, - "loss": 0.9979, - "num_input_tokens_seen": 85954220, - "step": 4045 - }, - { - "epoch": 0.48650273552576206, - "flos": 17871863655600.0, - "grad_norm": 3.4221776541027547, - "learning_rate": 2.184354212577446e-06, - "loss": 1.0273, - "num_input_tokens_seen": 85974245, - "step": 4046 - }, - { - "epoch": 0.4866229784164011, - "flos": 12364464952440.0, - "grad_norm": 7.20618352272094, - "learning_rate": 2.1835785433386907e-06, - "loss": 0.8418, - "num_input_tokens_seen": 85992780, - "step": 4047 - }, - { - "epoch": 0.48674322130704023, - "flos": 16586136558240.0, - "grad_norm": 4.06459531301626, - "learning_rate": 2.182802846251216e-06, - "loss": 0.8758, - "num_input_tokens_seen": 86012770, - "step": 4048 - }, - { - "epoch": 0.4868634641976793, - "flos": 20519681778960.0, - "grad_norm": 3.0758588940483262, - "learning_rate": 2.182027121432696e-06, - "loss": 0.9481, - "num_input_tokens_seen": 86033115, - "step": 4049 - }, - { - "epoch": 0.4869837070883184, - "flos": 13492129081080.0, - "grad_norm": 4.041371023370059, - "learning_rate": 2.1812513690008054e-06, - "loss": 1.048, - "num_input_tokens_seen": 86051955, - "step": 4050 - }, - { - "epoch": 0.4871039499789575, - "flos": 10685910711840.0, - "grad_norm": 5.827457665903751, - "learning_rate": 2.180475589073227e-06, - "loss": 1.019, - "num_input_tokens_seen": 86069375, - "step": 4051 - }, - { - "epoch": 0.48722419286959656, - "flos": 18628905158520.0, - "grad_norm": 2.6185394894424974, - "learning_rate": 2.1796997817676456e-06, - "loss": 0.9676, - "num_input_tokens_seen": 86090105, - "step": 4052 - }, - { - "epoch": 0.4873444357602357, - "flos": 17085872091960.0, - "grad_norm": 3.156555312091562, - "learning_rate": 2.1789239472017494e-06, - "loss": 0.9043, - "num_input_tokens_seen": 86111475, - "step": 4053 - }, - { - "epoch": 0.4874646786508748, - "flos": 16219131996600.0, - "grad_norm": 6.684321138183678, - "learning_rate": 2.1781480854932326e-06, - "loss": 0.9557, - "num_input_tokens_seen": 86130960, - "step": 4054 - }, - { - "epoch": 0.48758492154151384, - "flos": 15117443757360.0, - "grad_norm": 3.8566289236408378, - "learning_rate": 2.1773721967597933e-06, - "loss": 1.0249, - "num_input_tokens_seen": 86149130, - "step": 4055 - }, - { - "epoch": 0.48770516443215295, - "flos": 44554749638760.0, - "grad_norm": 0.9052837087301617, - "learning_rate": 
2.1765962811191322e-06, - "loss": 0.8608, - "num_input_tokens_seen": 86203315, - "step": 4056 - }, - { - "epoch": 0.48782540732279206, - "flos": 47352898469640.0, - "grad_norm": 0.9103278133467556, - "learning_rate": 2.1758203386889566e-06, - "loss": 0.911, - "num_input_tokens_seen": 86265805, - "step": 4057 - }, - { - "epoch": 0.4879456502134311, - "flos": 10316514548520.0, - "grad_norm": 5.721479587861553, - "learning_rate": 2.1750443695869746e-06, - "loss": 1.0635, - "num_input_tokens_seen": 86281095, - "step": 4058 - }, - { - "epoch": 0.4880658931040702, - "flos": 13833157753320.0, - "grad_norm": 3.9564355550584374, - "learning_rate": 2.174268373930901e-06, - "loss": 1.0765, - "num_input_tokens_seen": 86298330, - "step": 4059 - }, - { - "epoch": 0.48818613599470934, - "flos": 11836484573880.0, - "grad_norm": 5.002045975746469, - "learning_rate": 2.1734923518384537e-06, - "loss": 1.0238, - "num_input_tokens_seen": 86314655, - "step": 4060 - }, - { - "epoch": 0.4883063788853484, - "flos": 19047677529600.0, - "grad_norm": 4.820204079767315, - "learning_rate": 2.1727163034273547e-06, - "loss": 1.05, - "num_input_tokens_seen": 86332540, - "step": 4061 - }, - { - "epoch": 0.4884266217759875, - "flos": 11865986542680.0, - "grad_norm": 4.561353983719628, - "learning_rate": 2.17194022881533e-06, - "loss": 1.0084, - "num_input_tokens_seen": 86348350, - "step": 4062 - }, - { - "epoch": 0.4885468646666266, - "flos": 17215751538960.0, - "grad_norm": 3.9761895565078667, - "learning_rate": 2.1711641281201092e-06, - "loss": 0.8959, - "num_input_tokens_seen": 86368000, - "step": 4063 - }, - { - "epoch": 0.48866710755726567, - "flos": 10318967473320.0, - "grad_norm": 5.312470559431292, - "learning_rate": 2.1703880014594264e-06, - "loss": 1.0084, - "num_input_tokens_seen": 86385310, - "step": 4064 - }, - { - "epoch": 0.4887873504479048, - "flos": 20519773763640.0, - "grad_norm": 3.56793106757492, - "learning_rate": 2.1696118489510182e-06, - "loss": 0.953, - "num_input_tokens_seen": 86405960, - "step": 4065 - }, - { - "epoch": 0.48890759333854383, - "flos": 16193278753440.0, - "grad_norm": 2.9630616955290066, - "learning_rate": 2.1688356707126286e-06, - "loss": 0.939, - "num_input_tokens_seen": 86425300, - "step": 4066 - }, - { - "epoch": 0.48902783622918294, - "flos": 12601130143680.0, - "grad_norm": 6.289235670655451, - "learning_rate": 2.168059466862001e-06, - "loss": 0.9291, - "num_input_tokens_seen": 86443170, - "step": 4067 - }, - { - "epoch": 0.48914807911982205, - "flos": 15852587358360.0, - "grad_norm": 7.8782492095585654, - "learning_rate": 2.167283237516887e-06, - "loss": 1.0429, - "num_input_tokens_seen": 86461165, - "step": 4068 - }, - { - "epoch": 0.4892683220104611, - "flos": 11578381404120.0, - "grad_norm": 3.285355388398061, - "learning_rate": 2.1665069827950383e-06, - "loss": 0.9848, - "num_input_tokens_seen": 86478170, - "step": 4069 - }, - { - "epoch": 0.4893885649011002, - "flos": 11129524494600.0, - "grad_norm": 4.201951327478234, - "learning_rate": 2.1657307028142126e-06, - "loss": 1.0945, - "num_input_tokens_seen": 86495430, - "step": 4070 - }, - { - "epoch": 0.48950880779173933, - "flos": 20359411177920.0, - "grad_norm": 3.4554151302751137, - "learning_rate": 2.164954397692171e-06, - "loss": 0.9017, - "num_input_tokens_seen": 86514575, - "step": 4071 - }, - { - "epoch": 0.4896290506823784, - "flos": 47388170359800.0, - "grad_norm": 1.2518252874322446, - "learning_rate": 2.164178067546678e-06, - "loss": 1.0575, - "num_input_tokens_seen": 86573460, - "step": 4072 - }, - { - "epoch": 
0.4897492935730175, - "flos": 8824298783040.0, - "grad_norm": 14.595063699738143, - "learning_rate": 2.163401712495504e-06, - "loss": 1.1344, - "num_input_tokens_seen": 86590875, - "step": 4073 - }, - { - "epoch": 0.4898695364636566, - "flos": 16690806654840.0, - "grad_norm": 2.4592946215125164, - "learning_rate": 2.1626253326564194e-06, - "loss": 1.0179, - "num_input_tokens_seen": 86609545, - "step": 4074 - }, - { - "epoch": 0.48998977935429566, - "flos": 19337766254520.0, - "grad_norm": 6.1673199419288505, - "learning_rate": 2.161848928147201e-06, - "loss": 0.9954, - "num_input_tokens_seen": 86629535, - "step": 4075 - }, - { - "epoch": 0.4901100222449348, - "flos": 14436214275000.0, - "grad_norm": 2.99745022182795, - "learning_rate": 2.161072499085629e-06, - "loss": 1.0185, - "num_input_tokens_seen": 86648250, - "step": 4076 - }, - { - "epoch": 0.4902302651355739, - "flos": 21699788271360.0, - "grad_norm": 3.949914887875429, - "learning_rate": 2.160296045589487e-06, - "loss": 1.059, - "num_input_tokens_seen": 86671430, - "step": 4077 - }, - { - "epoch": 0.49035050802621294, - "flos": 13598945486880.0, - "grad_norm": 6.156712858353461, - "learning_rate": 2.159519567776562e-06, - "loss": 0.9157, - "num_input_tokens_seen": 86690800, - "step": 4078 - }, - { - "epoch": 0.49047075091685205, - "flos": 15792602250840.0, - "grad_norm": 4.145939708987172, - "learning_rate": 2.1587430657646463e-06, - "loss": 0.9283, - "num_input_tokens_seen": 86703955, - "step": 4079 - }, - { - "epoch": 0.4905909938074911, - "flos": 14304556457520.0, - "grad_norm": 4.180165714465232, - "learning_rate": 2.157966539671533e-06, - "loss": 1.0, - "num_input_tokens_seen": 86723315, - "step": 4080 - }, - { - "epoch": 0.4907112366981302, - "flos": 12181499248920.0, - "grad_norm": 3.6570235518170953, - "learning_rate": 2.157189989615021e-06, - "loss": 0.8847, - "num_input_tokens_seen": 86741625, - "step": 4081 - }, - { - "epoch": 0.4908314795887693, - "flos": 15405907419600.0, - "grad_norm": 3.413514675995594, - "learning_rate": 2.156413415712913e-06, - "loss": 0.9699, - "num_input_tokens_seen": 86763395, - "step": 4082 - }, - { - "epoch": 0.4909517224794084, - "flos": 18657793896120.0, - "grad_norm": 2.98220634556028, - "learning_rate": 2.155636818083014e-06, - "loss": 1.0049, - "num_input_tokens_seen": 86784485, - "step": 4083 - }, - { - "epoch": 0.4910719653700475, - "flos": 16454754694800.0, - "grad_norm": 4.3714061690183, - "learning_rate": 2.154860196843134e-06, - "loss": 1.0633, - "num_input_tokens_seen": 86803400, - "step": 4084 - }, - { - "epoch": 0.4911922082606866, - "flos": 16586749789440.0, - "grad_norm": 6.6353552160207245, - "learning_rate": 2.154083552111085e-06, - "loss": 0.9938, - "num_input_tokens_seen": 86822290, - "step": 4085 - }, - { - "epoch": 0.49131245115132566, - "flos": 20806029793560.0, - "grad_norm": 4.057067995512873, - "learning_rate": 2.1533068840046834e-06, - "loss": 1.0414, - "num_input_tokens_seen": 86842275, - "step": 4086 - }, - { - "epoch": 0.49143269404196477, - "flos": 14296829744400.0, - "grad_norm": 4.875782439212321, - "learning_rate": 2.152530192641749e-06, - "loss": 0.8317, - "num_input_tokens_seen": 86856905, - "step": 4087 - }, - { - "epoch": 0.4915529369326039, - "flos": 17347072079280.0, - "grad_norm": 3.2504355798947024, - "learning_rate": 2.1517534781401068e-06, - "loss": 0.9508, - "num_input_tokens_seen": 86874505, - "step": 4088 - }, - { - "epoch": 0.49167317982324293, - "flos": 7382072456520.0, - "grad_norm": 5.03303562488071, - "learning_rate": 2.150976740617581e-06, - 
"loss": 0.9263, - "num_input_tokens_seen": 86890785, - "step": 4089 - }, - { - "epoch": 0.49179342271388204, - "flos": 18212033804160.0, - "grad_norm": 5.668967770869597, - "learning_rate": 2.150199980192006e-06, - "loss": 0.9516, - "num_input_tokens_seen": 86909625, - "step": 4090 - }, - { - "epoch": 0.49191366560452116, - "flos": 14983670292240.0, - "grad_norm": 3.643150665040972, - "learning_rate": 2.1494231969812114e-06, - "loss": 1.0362, - "num_input_tokens_seen": 86928335, - "step": 4091 - }, - { - "epoch": 0.4920339084951602, - "flos": 18552725199240.0, - "grad_norm": 4.404049154618829, - "learning_rate": 2.1486463911030372e-06, - "loss": 1.0203, - "num_input_tokens_seen": 86948705, - "step": 4092 - }, - { - "epoch": 0.4921541513857993, - "flos": 17843772118560.0, - "grad_norm": 3.3673149491903804, - "learning_rate": 2.147869562675324e-06, - "loss": 0.977, - "num_input_tokens_seen": 86967395, - "step": 4093 - }, - { - "epoch": 0.49227439427643843, - "flos": 17347072079280.0, - "grad_norm": 7.458601309226163, - "learning_rate": 2.147092711815915e-06, - "loss": 0.9422, - "num_input_tokens_seen": 86986465, - "step": 4094 - }, - { - "epoch": 0.4923946371670775, - "flos": 7985404932240.0, - "grad_norm": 6.714826138902152, - "learning_rate": 2.1463158386426593e-06, - "loss": 1.0924, - "num_input_tokens_seen": 87003995, - "step": 4095 - }, - { - "epoch": 0.4925148800577166, - "flos": 21699450994200.0, - "grad_norm": 3.6785130013364062, - "learning_rate": 2.145538943273407e-06, - "loss": 0.9983, - "num_input_tokens_seen": 87023990, - "step": 4096 - }, - { - "epoch": 0.49263512294835565, - "flos": 14803372144440.0, - "grad_norm": 3.4371024161943704, - "learning_rate": 2.144762025826013e-06, - "loss": 0.9481, - "num_input_tokens_seen": 87042800, - "step": 4097 - }, - { - "epoch": 0.49275536583899476, - "flos": 16899533616840.0, - "grad_norm": 3.456399323202269, - "learning_rate": 2.143985086418334e-06, - "loss": 1.0893, - "num_input_tokens_seen": 87057700, - "step": 4098 - }, - { - "epoch": 0.4928756087296339, - "flos": 15826120884000.0, - "grad_norm": 4.39869052301486, - "learning_rate": 2.1432081251682324e-06, - "loss": 1.0, - "num_input_tokens_seen": 87077790, - "step": 4099 - }, - { - "epoch": 0.49299585162027293, - "flos": 13937245280280.0, - "grad_norm": 3.2501609468955683, - "learning_rate": 2.142431142193572e-06, - "loss": 1.0913, - "num_input_tokens_seen": 87095290, - "step": 4100 - }, - { - "epoch": 0.49311609451091204, - "flos": 27729954887880.0, - "grad_norm": 3.4204636541698656, - "learning_rate": 2.1416541376122207e-06, - "loss": 0.9508, - "num_input_tokens_seen": 87115190, - "step": 4101 - }, - { - "epoch": 0.49323633740155115, - "flos": 20178009213960.0, - "grad_norm": 6.986709511797817, - "learning_rate": 2.1408771115420496e-06, - "loss": 0.9592, - "num_input_tokens_seen": 87134770, - "step": 4102 - }, - { - "epoch": 0.4933565802921902, - "flos": 15007377226200.0, - "grad_norm": 3.8815793203307765, - "learning_rate": 2.140100064100932e-06, - "loss": 0.8831, - "num_input_tokens_seen": 87150465, - "step": 4103 - }, - { - "epoch": 0.4934768231828293, - "flos": 12782562769200.0, - "grad_norm": 3.9045118203088642, - "learning_rate": 2.139322995406746e-06, - "loss": 0.9881, - "num_input_tokens_seen": 87167820, - "step": 4104 - }, - { - "epoch": 0.4935970660734684, - "flos": 16684950296880.0, - "grad_norm": 3.418991755388288, - "learning_rate": 2.1385459055773727e-06, - "loss": 1.0291, - "num_input_tokens_seen": 87185730, - "step": 4105 - }, - { - "epoch": 0.4937173089641075, - 
"flos": 46157466745320.0, - "grad_norm": 4.395521372833726, - "learning_rate": 2.137768794730696e-06, - "loss": 0.9647, - "num_input_tokens_seen": 87208900, - "step": 4106 - }, - { - "epoch": 0.4938375518547466, - "flos": 15878164647480.0, - "grad_norm": 6.994709467931566, - "learning_rate": 2.1369916629846026e-06, - "loss": 1.0298, - "num_input_tokens_seen": 87228370, - "step": 4107 - }, - { - "epoch": 0.4939577947453857, - "flos": 12653265891840.0, - "grad_norm": 6.734906877016923, - "learning_rate": 2.136214510456983e-06, - "loss": 0.9706, - "num_input_tokens_seen": 87246545, - "step": 4108 - }, - { - "epoch": 0.49407803763602476, - "flos": 50178715123800.0, - "grad_norm": 0.9495092504732716, - "learning_rate": 2.1354373372657296e-06, - "loss": 0.9356, - "num_input_tokens_seen": 87304705, - "step": 4109 - }, - { - "epoch": 0.49419828052666387, - "flos": 17295181623600.0, - "grad_norm": 2.110761511728246, - "learning_rate": 2.1346601435287404e-06, - "loss": 0.9359, - "num_input_tokens_seen": 87326695, - "step": 4110 - }, - { - "epoch": 0.494318523417303, - "flos": 20937472980120.0, - "grad_norm": 3.2104983435523895, - "learning_rate": 2.1338829293639144e-06, - "loss": 1.0292, - "num_input_tokens_seen": 87346775, - "step": 4111 - }, - { - "epoch": 0.49443876630794203, - "flos": 10791592639920.0, - "grad_norm": 7.654689333919968, - "learning_rate": 2.1331056948891547e-06, - "loss": 1.0631, - "num_input_tokens_seen": 87363595, - "step": 4112 - }, - { - "epoch": 0.49455900919858115, - "flos": 8692181042160.0, - "grad_norm": 3.5504301965661114, - "learning_rate": 2.1323284402223666e-06, - "loss": 0.9931, - "num_input_tokens_seen": 87379305, - "step": 4113 - }, - { - "epoch": 0.4946792520892202, - "flos": 16189200765960.0, - "grad_norm": 4.109831461919788, - "learning_rate": 2.1315511654814597e-06, - "loss": 1.1103, - "num_input_tokens_seen": 87397435, - "step": 4114 - }, - { - "epoch": 0.4947994949798593, - "flos": 16454540063880.0, - "grad_norm": 5.369090713152421, - "learning_rate": 2.1307738707843456e-06, - "loss": 0.9999, - "num_input_tokens_seen": 87416820, - "step": 4115 - }, - { - "epoch": 0.4949197378704984, - "flos": 16822556457000.0, - "grad_norm": 4.456455417515771, - "learning_rate": 2.1299965562489385e-06, - "loss": 0.9081, - "num_input_tokens_seen": 87436345, - "step": 4116 - }, - { - "epoch": 0.4950399807611375, - "flos": 19159921031520.0, - "grad_norm": 3.4635786393948056, - "learning_rate": 2.129219221993158e-06, - "loss": 1.0206, - "num_input_tokens_seen": 87460850, - "step": 4117 - }, - { - "epoch": 0.4951602236517766, - "flos": 48199248627600.0, - "grad_norm": 0.9187246903027917, - "learning_rate": 2.128441868134924e-06, - "loss": 0.873, - "num_input_tokens_seen": 87522505, - "step": 4118 - }, - { - "epoch": 0.4952804665424157, - "flos": 14121284138400.0, - "grad_norm": 3.4273803180302864, - "learning_rate": 2.1276644947921606e-06, - "loss": 1.0567, - "num_input_tokens_seen": 87541140, - "step": 4119 - }, - { - "epoch": 0.49540070943305475, - "flos": 13334587358880.0, - "grad_norm": 5.23017172507784, - "learning_rate": 2.126887102082795e-06, - "loss": 1.0522, - "num_input_tokens_seen": 87560885, - "step": 4120 - }, - { - "epoch": 0.49552095232369386, - "flos": 17737906221120.0, - "grad_norm": 6.072832563896447, - "learning_rate": 2.126109690124757e-06, - "loss": 0.9264, - "num_input_tokens_seen": 87581420, - "step": 4121 - }, - { - "epoch": 0.495641195214333, - "flos": 16245629132520.0, - "grad_norm": 2.642466157174995, - "learning_rate": 2.1253322590359786e-06, - "loss": 
0.9344, - "num_input_tokens_seen": 87600475, - "step": 4122 - }, - { - "epoch": 0.49576143810497203, - "flos": 18337927248360.0, - "grad_norm": 5.6294344906328, - "learning_rate": 2.124554808934397e-06, - "loss": 0.9511, - "num_input_tokens_seen": 87620775, - "step": 4123 - }, - { - "epoch": 0.49588168099561114, - "flos": 15722309311080.0, - "grad_norm": 6.524095038436874, - "learning_rate": 2.1237773399379496e-06, - "loss": 0.9515, - "num_input_tokens_seen": 87641460, - "step": 4124 - }, - { - "epoch": 0.49600192388625025, - "flos": 17344956431640.0, - "grad_norm": 3.0794602030074585, - "learning_rate": 2.122999852164578e-06, - "loss": 1.0938, - "num_input_tokens_seen": 87661800, - "step": 4125 - }, - { - "epoch": 0.4961221667768893, - "flos": 16245782440320.0, - "grad_norm": 5.224187702025655, - "learning_rate": 2.122222345732227e-06, - "loss": 0.8051, - "num_input_tokens_seen": 87681435, - "step": 4126 - }, - { - "epoch": 0.4962424096675284, - "flos": 12652744645320.0, - "grad_norm": 3.338473425934948, - "learning_rate": 2.121444820758843e-06, - "loss": 1.0603, - "num_input_tokens_seen": 87699795, - "step": 4127 - }, - { - "epoch": 0.49636265255816747, - "flos": 15480462316200.0, - "grad_norm": 6.0420880751695, - "learning_rate": 2.120667277362376e-06, - "loss": 0.9965, - "num_input_tokens_seen": 87718230, - "step": 4128 - }, - { - "epoch": 0.4964828954488066, - "flos": 11574610032240.0, - "grad_norm": 3.08303456963959, - "learning_rate": 2.1198897156607796e-06, - "loss": 1.0719, - "num_input_tokens_seen": 87735305, - "step": 4129 - }, - { - "epoch": 0.4966031383394457, - "flos": 17577022388880.0, - "grad_norm": 2.7123826981596153, - "learning_rate": 2.1191121357720085e-06, - "loss": 0.9704, - "num_input_tokens_seen": 87753085, - "step": 4130 - }, - { - "epoch": 0.49672338123008475, - "flos": 16298408773440.0, - "grad_norm": 3.473639264340719, - "learning_rate": 2.1183345378140206e-06, - "loss": 0.9758, - "num_input_tokens_seen": 87772550, - "step": 4131 - }, - { - "epoch": 0.49684362412072386, - "flos": 47238815274120.0, - "grad_norm": 1.0361210195893154, - "learning_rate": 2.1175569219047783e-06, - "loss": 0.8911, - "num_input_tokens_seen": 87833710, - "step": 4132 - }, - { - "epoch": 0.49696386701136297, - "flos": 14171058946440.0, - "grad_norm": 3.6958215213717334, - "learning_rate": 2.1167792881622437e-06, - "loss": 0.9489, - "num_input_tokens_seen": 87852450, - "step": 4133 - }, - { - "epoch": 0.497084109902002, - "flos": 17606003111160.0, - "grad_norm": 2.5030064625003186, - "learning_rate": 2.116001636704384e-06, - "loss": 1.0387, - "num_input_tokens_seen": 87872555, - "step": 4134 - }, - { - "epoch": 0.49720435279264114, - "flos": 15537381267720.0, - "grad_norm": 5.098848564465277, - "learning_rate": 2.1152239676491685e-06, - "loss": 1.0277, - "num_input_tokens_seen": 87890380, - "step": 4135 - }, - { - "epoch": 0.49732459568328025, - "flos": 16927318538280.0, - "grad_norm": 3.1386700519207524, - "learning_rate": 2.114446281114569e-06, - "loss": 0.9674, - "num_input_tokens_seen": 87909120, - "step": 4136 - }, - { - "epoch": 0.4974448385739193, - "flos": 14226475481520.0, - "grad_norm": 3.100381083291746, - "learning_rate": 2.1136685772185587e-06, - "loss": 0.9835, - "num_input_tokens_seen": 87927865, - "step": 4137 - }, - { - "epoch": 0.4975650814645584, - "flos": 17656360488840.0, - "grad_norm": 2.849345644067289, - "learning_rate": 2.1128908560791163e-06, - "loss": 1.0119, - "num_input_tokens_seen": 87947415, - "step": 4138 - }, - { - "epoch": 0.4976853243551975, - "flos": 
14069700298320.0, - "grad_norm": 6.025349789671187, - "learning_rate": 2.1121131178142203e-06, - "loss": 1.0003, - "num_input_tokens_seen": 87966500, - "step": 4139 - }, - { - "epoch": 0.4978055672458366, - "flos": 16450155460800.0, - "grad_norm": 3.204579985854962, - "learning_rate": 2.1113353625418544e-06, - "loss": 1.0516, - "num_input_tokens_seen": 87984770, - "step": 4140 - }, - { - "epoch": 0.4979258101364757, - "flos": 11001147464040.0, - "grad_norm": 2.50571931847494, - "learning_rate": 2.1105575903800017e-06, - "loss": 1.0202, - "num_input_tokens_seen": 88003210, - "step": 4141 - }, - { - "epoch": 0.4980460530271148, - "flos": 18759735113880.0, - "grad_norm": 2.2535103858520524, - "learning_rate": 2.1097798014466502e-06, - "loss": 1.075, - "num_input_tokens_seen": 88022530, - "step": 4142 - }, - { - "epoch": 0.49816629591775385, - "flos": 12233389704600.0, - "grad_norm": 5.697374428490545, - "learning_rate": 2.109001995859791e-06, - "loss": 0.7871, - "num_input_tokens_seen": 88041150, - "step": 4143 - }, - { - "epoch": 0.49828653880839296, - "flos": 46488887253120.0, - "grad_norm": 1.1775011935703845, - "learning_rate": 2.108224173737415e-06, - "loss": 0.8622, - "num_input_tokens_seen": 88104170, - "step": 4144 - }, - { - "epoch": 0.498406781699032, - "flos": 19570537427640.0, - "grad_norm": 3.213689436933523, - "learning_rate": 2.1074463351975183e-06, - "loss": 0.9808, - "num_input_tokens_seen": 88122775, - "step": 4145 - }, - { - "epoch": 0.49852702458967113, - "flos": 22456308527760.0, - "grad_norm": 2.4163094837585, - "learning_rate": 2.106668480358098e-06, - "loss": 0.9344, - "num_input_tokens_seen": 88142720, - "step": 4146 - }, - { - "epoch": 0.49864726748031024, - "flos": 16241888422200.0, - "grad_norm": 2.1628159881991382, - "learning_rate": 2.105890609337154e-06, - "loss": 0.9335, - "num_input_tokens_seen": 88160955, - "step": 4147 - }, - { - "epoch": 0.4987675103709493, - "flos": 50423750920920.0, - "grad_norm": 0.6898221765343931, - "learning_rate": 2.1051127222526883e-06, - "loss": 0.8754, - "num_input_tokens_seen": 88232790, - "step": 4148 - }, - { - "epoch": 0.4988877532615884, - "flos": 20493215304600.0, - "grad_norm": 3.53708093000883, - "learning_rate": 2.1043348192227067e-06, - "loss": 1.0348, - "num_input_tokens_seen": 88252880, - "step": 4149 - }, - { - "epoch": 0.4990079961522275, - "flos": 11944527442080.0, - "grad_norm": 4.153873997782447, - "learning_rate": 2.1035569003652156e-06, - "loss": 0.8536, - "num_input_tokens_seen": 88271230, - "step": 4150 - }, - { - "epoch": 0.4991282390428666, - "flos": 9369639152640.0, - "grad_norm": 3.5319942264012876, - "learning_rate": 2.1027789657982255e-06, - "loss": 1.038, - "num_input_tokens_seen": 88285165, - "step": 4151 - }, - { - "epoch": 0.4992484819335057, - "flos": 15296300811840.0, - "grad_norm": 3.5103452922383216, - "learning_rate": 2.1020010156397482e-06, - "loss": 1.0078, - "num_input_tokens_seen": 88302105, - "step": 4152 - }, - { - "epoch": 0.4993687248241448, - "flos": 17684513349000.0, - "grad_norm": 4.863789702650234, - "learning_rate": 2.101223050007797e-06, - "loss": 1.0071, - "num_input_tokens_seen": 88320375, - "step": 4153 - }, - { - "epoch": 0.49948896771478385, - "flos": 45056600820120.0, - "grad_norm": 0.8783385753535488, - "learning_rate": 2.1004450690203904e-06, - "loss": 0.8122, - "num_input_tokens_seen": 88376175, - "step": 4154 - }, - { - "epoch": 0.49960921060542296, - "flos": 48898353799440.0, - "grad_norm": 0.9900278933285069, - "learning_rate": 2.099667072795546e-06, - "loss": 0.9424, - 
"num_input_tokens_seen": 88438015, - "step": 4155 - }, - { - "epoch": 0.49972945349606207, - "flos": 16822004548920.0, - "grad_norm": 3.514194723163065, - "learning_rate": 2.0988890614512864e-06, - "loss": 1.0141, - "num_input_tokens_seen": 88457625, - "step": 4156 - }, - { - "epoch": 0.4998496963867011, - "flos": 14016675364920.0, - "grad_norm": 4.4620699806214255, - "learning_rate": 2.098111035105635e-06, - "loss": 1.0619, - "num_input_tokens_seen": 88475770, - "step": 4157 - }, - { - "epoch": 0.49996993927734024, - "flos": 15819467325480.0, - "grad_norm": 6.177614271773555, - "learning_rate": 2.0973329938766176e-06, - "loss": 0.9702, - "num_input_tokens_seen": 88492920, - "step": 4158 - }, - { - "epoch": 0.5000901821679793, - "flos": 16582794448200.0, - "grad_norm": 4.424359489679936, - "learning_rate": 2.0965549378822618e-06, - "loss": 1.0167, - "num_input_tokens_seen": 88513930, - "step": 4159 - }, - { - "epoch": 0.5002104250586185, - "flos": 14435723690040.0, - "grad_norm": 3.3929951202330506, - "learning_rate": 2.095776867240599e-06, - "loss": 1.0675, - "num_input_tokens_seen": 88530640, - "step": 4160 - }, - { - "epoch": 0.5003306679492575, - "flos": 9873912597240.0, - "grad_norm": 4.3846684026665885, - "learning_rate": 2.094998782069661e-06, - "loss": 1.0392, - "num_input_tokens_seen": 88548065, - "step": 4161 - }, - { - "epoch": 0.5004509108398966, - "flos": 19706365217280.0, - "grad_norm": 2.8341603661541632, - "learning_rate": 2.0942206824874845e-06, - "loss": 0.9846, - "num_input_tokens_seen": 88570560, - "step": 4162 - }, - { - "epoch": 0.5005711537305357, - "flos": 10581639215520.0, - "grad_norm": 4.255052592471363, - "learning_rate": 2.093442568612105e-06, - "loss": 1.022, - "num_input_tokens_seen": 88588085, - "step": 4163 - }, - { - "epoch": 0.5006913966211748, - "flos": 18865601011320.0, - "grad_norm": 2.628800139191636, - "learning_rate": 2.0926644405615613e-06, - "loss": 1.0725, - "num_input_tokens_seen": 88608705, - "step": 4164 - }, - { - "epoch": 0.5008116395118138, - "flos": 14515092451560.0, - "grad_norm": 7.691225467260235, - "learning_rate": 2.091886298453897e-06, - "loss": 1.0452, - "num_input_tokens_seen": 88626610, - "step": 4165 - }, - { - "epoch": 0.500931882402453, - "flos": 15327550489560.0, - "grad_norm": 2.6215341485109596, - "learning_rate": 2.091108142407153e-06, - "loss": 0.9462, - "num_input_tokens_seen": 88645070, - "step": 4166 - }, - { - "epoch": 0.5010521252930921, - "flos": 41353950869280.0, - "grad_norm": 0.8955347354548334, - "learning_rate": 2.090329972539377e-06, - "loss": 0.9173, - "num_input_tokens_seen": 88703355, - "step": 4167 - }, - { - "epoch": 0.5011723681837311, - "flos": 13204431957840.0, - "grad_norm": 4.3941293218974105, - "learning_rate": 2.089551788968616e-06, - "loss": 0.9086, - "num_input_tokens_seen": 88721040, - "step": 4168 - }, - { - "epoch": 0.5012926110743702, - "flos": 39986494070280.0, - "grad_norm": 0.875004529941277, - "learning_rate": 2.08877359181292e-06, - "loss": 0.8669, - "num_input_tokens_seen": 88777325, - "step": 4169 - }, - { - "epoch": 0.5014128539650093, - "flos": 17237404148400.0, - "grad_norm": 2.8733484251270633, - "learning_rate": 2.0879953811903396e-06, - "loss": 1.0673, - "num_input_tokens_seen": 88791930, - "step": 4170 - }, - { - "epoch": 0.5015330968556484, - "flos": 19602032397840.0, - "grad_norm": 3.019676688616179, - "learning_rate": 2.08721715721893e-06, - "loss": 1.0069, - "num_input_tokens_seen": 88810975, - "step": 4171 - }, - { - "epoch": 0.5016533397462875, - "flos": 16923056581440.0, - 
"grad_norm": 2.811940116255541, - "learning_rate": 2.0864389200167477e-06, - "loss": 1.001, - "num_input_tokens_seen": 88828950, - "step": 4172 - }, - { - "epoch": 0.5017735826369266, - "flos": 17996683945200.0, - "grad_norm": 5.764673500840187, - "learning_rate": 2.0856606697018504e-06, - "loss": 1.0215, - "num_input_tokens_seen": 88846680, - "step": 4173 - }, - { - "epoch": 0.5018938255275657, - "flos": 11944956703920.0, - "grad_norm": 4.181543241290729, - "learning_rate": 2.084882406392297e-06, - "loss": 0.9473, - "num_input_tokens_seen": 88864360, - "step": 4174 - }, - { - "epoch": 0.5020140684182047, - "flos": 18155298822000.0, - "grad_norm": 22.222646980464756, - "learning_rate": 2.0841041302061496e-06, - "loss": 0.9219, - "num_input_tokens_seen": 88883540, - "step": 4175 - }, - { - "epoch": 0.5021343113088439, - "flos": 16821943225800.0, - "grad_norm": 7.89480208319761, - "learning_rate": 2.083325841261473e-06, - "loss": 0.9775, - "num_input_tokens_seen": 88902320, - "step": 4176 - }, - { - "epoch": 0.502254554199483, - "flos": 17451036960000.0, - "grad_norm": 14.668497722652212, - "learning_rate": 2.0825475396763322e-06, - "loss": 0.8915, - "num_input_tokens_seen": 88922690, - "step": 4177 - }, - { - "epoch": 0.502374797090122, - "flos": 24426331263480.0, - "grad_norm": 2.4254048864824203, - "learning_rate": 2.081769225568796e-06, - "loss": 0.882, - "num_input_tokens_seen": 88944860, - "step": 4178 - }, - { - "epoch": 0.5024950399807612, - "flos": 18526565340480.0, - "grad_norm": 3.1121983577302537, - "learning_rate": 2.0809908990569327e-06, - "loss": 0.9808, - "num_input_tokens_seen": 88966360, - "step": 4179 - }, - { - "epoch": 0.5026152828714002, - "flos": 15092357053200.0, - "grad_norm": 8.50499533678361, - "learning_rate": 2.0802125602588146e-06, - "loss": 1.0214, - "num_input_tokens_seen": 88985345, - "step": 4180 - }, - { - "epoch": 0.5027355257620393, - "flos": 22065995632440.0, - "grad_norm": 4.031272000984658, - "learning_rate": 2.0794342092925146e-06, - "loss": 0.8945, - "num_input_tokens_seen": 89006200, - "step": 4181 - }, - { - "epoch": 0.5028557686526784, - "flos": 17635505079960.0, - "grad_norm": 4.729219254300759, - "learning_rate": 2.078655846276108e-06, - "loss": 0.9081, - "num_input_tokens_seen": 89026250, - "step": 4182 - }, - { - "epoch": 0.5029760115433175, - "flos": 16324537970640.0, - "grad_norm": 3.34060107116408, - "learning_rate": 2.0778774713276727e-06, - "loss": 0.9125, - "num_input_tokens_seen": 89045445, - "step": 4183 - }, - { - "epoch": 0.5030962544339566, - "flos": 10818151098960.0, - "grad_norm": 14.50103966504428, - "learning_rate": 2.077099084565287e-06, - "loss": 0.8844, - "num_input_tokens_seen": 89062570, - "step": 4184 - }, - { - "epoch": 0.5032164973245957, - "flos": 17421964253040.0, - "grad_norm": 5.8328350762605545, - "learning_rate": 2.0763206861070313e-06, - "loss": 0.867, - "num_input_tokens_seen": 89081350, - "step": 4185 - }, - { - "epoch": 0.5033367402152348, - "flos": 11470399859040.0, - "grad_norm": 3.921578445469029, - "learning_rate": 2.0755422760709876e-06, - "loss": 0.9824, - "num_input_tokens_seen": 89098470, - "step": 4186 - }, - { - "epoch": 0.5034569831058738, - "flos": 15191262776520.0, - "grad_norm": 3.1923754935007107, - "learning_rate": 2.0747638545752417e-06, - "loss": 1.0001, - "num_input_tokens_seen": 89116750, - "step": 4187 - }, - { - "epoch": 0.503577225996513, - "flos": 14593755997200.0, - "grad_norm": 3.0086486446867293, - "learning_rate": 2.073985421737878e-06, - "loss": 1.0605, - "num_input_tokens_seen": 
89133780, - "step": 4188 - }, - { - "epoch": 0.5036974688871521, - "flos": 19386774523560.0, - "grad_norm": 5.161851464423544, - "learning_rate": 2.0732069776769844e-06, - "loss": 0.9803, - "num_input_tokens_seen": 89150910, - "step": 4189 - }, - { - "epoch": 0.5038177117777911, - "flos": 14488319361600.0, - "grad_norm": 4.452521032233365, - "learning_rate": 2.072428522510651e-06, - "loss": 0.9535, - "num_input_tokens_seen": 89167195, - "step": 4190 - }, - { - "epoch": 0.5039379546684303, - "flos": 15563326495560.0, - "grad_norm": 6.386111878659644, - "learning_rate": 2.071650056356968e-06, - "loss": 0.9808, - "num_input_tokens_seen": 89184455, - "step": 4191 - }, - { - "epoch": 0.5040581975590693, - "flos": 14199794376240.0, - "grad_norm": 3.8059506052713856, - "learning_rate": 2.070871579334028e-06, - "loss": 1.0152, - "num_input_tokens_seen": 89203285, - "step": 4192 - }, - { - "epoch": 0.5041784404497084, - "flos": 14881974366960.0, - "grad_norm": 3.0221933569963313, - "learning_rate": 2.0700930915599264e-06, - "loss": 0.9459, - "num_input_tokens_seen": 89222735, - "step": 4193 - }, - { - "epoch": 0.5042986833403476, - "flos": 8798506863000.0, - "grad_norm": 4.505317936558158, - "learning_rate": 2.0693145931527583e-06, - "loss": 1.0052, - "num_input_tokens_seen": 89238935, - "step": 4194 - }, - { - "epoch": 0.5044189262309866, - "flos": 20805232593000.0, - "grad_norm": 3.517017953210237, - "learning_rate": 2.068536084230622e-06, - "loss": 1.0097, - "num_input_tokens_seen": 89260520, - "step": 4195 - }, - { - "epoch": 0.5045391691216257, - "flos": 16975008360240.0, - "grad_norm": 3.888935626218388, - "learning_rate": 2.067757564911616e-06, - "loss": 1.1056, - "num_input_tokens_seen": 89278815, - "step": 4196 - }, - { - "epoch": 0.5046594120122648, - "flos": 17530436383080.0, - "grad_norm": 4.030480599723325, - "learning_rate": 2.0669790353138407e-06, - "loss": 1.1413, - "num_input_tokens_seen": 89297500, - "step": 4197 - }, - { - "epoch": 0.5047796549029039, - "flos": 16608279752640.0, - "grad_norm": 3.6238386990605806, - "learning_rate": 2.0662004955553995e-06, - "loss": 0.9558, - "num_input_tokens_seen": 89316920, - "step": 4198 - }, - { - "epoch": 0.5048998977935429, - "flos": 12253693205400.0, - "grad_norm": 3.3733175817380703, - "learning_rate": 2.065421945754395e-06, - "loss": 0.9922, - "num_input_tokens_seen": 89334370, - "step": 4199 - }, - { - "epoch": 0.505020140684182, - "flos": 24869055861000.0, - "grad_norm": 3.4016887678157, - "learning_rate": 2.0646433860289344e-06, - "loss": 1.014, - "num_input_tokens_seen": 89353015, - "step": 4200 - }, - { - "epoch": 0.5051403835748212, - "flos": 17687824797480.0, - "grad_norm": 4.386004531614482, - "learning_rate": 2.0638648164971233e-06, - "loss": 1.0487, - "num_input_tokens_seen": 89371200, - "step": 4201 - }, - { - "epoch": 0.5052606264654602, - "flos": 14881851720720.0, - "grad_norm": 4.393669186278939, - "learning_rate": 2.06308623727707e-06, - "loss": 1.1157, - "num_input_tokens_seen": 89391020, - "step": 4202 - }, - { - "epoch": 0.5053808693560993, - "flos": 13934608386120.0, - "grad_norm": 4.0041418090006395, - "learning_rate": 2.0623076484868846e-06, - "loss": 0.9789, - "num_input_tokens_seen": 89408195, - "step": 4203 - }, - { - "epoch": 0.5055011122467384, - "flos": 48336517510560.0, - "grad_norm": 0.9231522327659939, - "learning_rate": 2.061529050244679e-06, - "loss": 0.9174, - "num_input_tokens_seen": 89467660, - "step": 4204 - }, - { - "epoch": 0.5056213551373775, - "flos": 11944742073000.0, - "grad_norm": 
4.518338031904722, - "learning_rate": 2.060750442668565e-06, - "loss": 0.9688, - "num_input_tokens_seen": 89485135, - "step": 4205 - }, - { - "epoch": 0.5057415980280165, - "flos": 10837994676360.0, - "grad_norm": 6.495802771313091, - "learning_rate": 2.059971825876657e-06, - "loss": 0.8734, - "num_input_tokens_seen": 89499365, - "step": 4206 - }, - { - "epoch": 0.5058618409186557, - "flos": 13492129081080.0, - "grad_norm": 10.623980304929564, - "learning_rate": 2.0591931999870713e-06, - "loss": 0.9916, - "num_input_tokens_seen": 89518010, - "step": 4207 - }, - { - "epoch": 0.5059820838092948, - "flos": 45424310597640.0, - "grad_norm": 1.018015469161879, - "learning_rate": 2.0584145651179234e-06, - "loss": 0.8622, - "num_input_tokens_seen": 89573440, - "step": 4208 - }, - { - "epoch": 0.5061023266999338, - "flos": 10915094482440.0, - "grad_norm": 4.693961610862199, - "learning_rate": 2.0576359213873327e-06, - "loss": 1.0103, - "num_input_tokens_seen": 89588310, - "step": 4209 - }, - { - "epoch": 0.506222569590573, - "flos": 15953056821240.0, - "grad_norm": 3.0344925673992944, - "learning_rate": 2.056857268913419e-06, - "loss": 0.9105, - "num_input_tokens_seen": 89608080, - "step": 4210 - }, - { - "epoch": 0.506342812481212, - "flos": 12436536262680.0, - "grad_norm": 3.422854627316745, - "learning_rate": 2.056078607814303e-06, - "loss": 1.0691, - "num_input_tokens_seen": 89623585, - "step": 4211 - }, - { - "epoch": 0.5064630553718511, - "flos": 16636831213080.0, - "grad_norm": 7.068755763529646, - "learning_rate": 2.055299938208106e-06, - "loss": 1.0121, - "num_input_tokens_seen": 89644295, - "step": 4212 - }, - { - "epoch": 0.5065832982624903, - "flos": 17057412616200.0, - "grad_norm": 18.5931640532675, - "learning_rate": 2.0545212602129526e-06, - "loss": 1.093, - "num_input_tokens_seen": 89663870, - "step": 4213 - }, - { - "epoch": 0.5067035411531293, - "flos": 15271827338880.0, - "grad_norm": 18.036924001653006, - "learning_rate": 2.0537425739469673e-06, - "loss": 0.8687, - "num_input_tokens_seen": 89682525, - "step": 4214 - }, - { - "epoch": 0.5068237840437684, - "flos": 47207596257960.0, - "grad_norm": 0.9105687005018279, - "learning_rate": 2.052963879528276e-06, - "loss": 0.8688, - "num_input_tokens_seen": 89742115, - "step": 4215 - }, - { - "epoch": 0.5069440269344075, - "flos": 19411769243040.0, - "grad_norm": 6.369691620792496, - "learning_rate": 2.052185177075007e-06, - "loss": 0.9894, - "num_input_tokens_seen": 89761405, - "step": 4216 - }, - { - "epoch": 0.5070642698250466, - "flos": 16611253923960.0, - "grad_norm": 3.070715491950911, - "learning_rate": 2.051406466705288e-06, - "loss": 1.053, - "num_input_tokens_seen": 89780665, - "step": 4217 - }, - { - "epoch": 0.5071845127156857, - "flos": 14436674198400.0, - "grad_norm": 4.266094475366656, - "learning_rate": 2.0506277485372486e-06, - "loss": 1.037, - "num_input_tokens_seen": 89799210, - "step": 4218 - }, - { - "epoch": 0.5073047556063248, - "flos": 8509399308000.0, - "grad_norm": 11.41141658457866, - "learning_rate": 2.04984902268902e-06, - "loss": 0.8893, - "num_input_tokens_seen": 89816130, - "step": 4219 - }, - { - "epoch": 0.5074249984969639, - "flos": 13964233001160.0, - "grad_norm": 5.939301227607977, - "learning_rate": 2.0490702892787345e-06, - "loss": 0.9623, - "num_input_tokens_seen": 89834910, - "step": 4220 - }, - { - "epoch": 0.5075452413876029, - "flos": 20491620903480.0, - "grad_norm": 2.385058835661429, - "learning_rate": 2.0482915484245246e-06, - "loss": 0.8545, - "num_input_tokens_seen": 89856250, - "step": 
4221 - }, - { - "epoch": 0.5076654842782421, - "flos": 14436091628760.0, - "grad_norm": 9.360181159198138, - "learning_rate": 2.047512800244526e-06, - "loss": 1.0596, - "num_input_tokens_seen": 89871235, - "step": 4222 - }, - { - "epoch": 0.5077857271688812, - "flos": 18762709285200.0, - "grad_norm": 8.60512785511244, - "learning_rate": 2.046734044856873e-06, - "loss": 1.0107, - "num_input_tokens_seen": 89890365, - "step": 4223 - }, - { - "epoch": 0.5079059700595202, - "flos": 15485030888640.0, - "grad_norm": 3.278639854241884, - "learning_rate": 2.045955282379702e-06, - "loss": 1.0319, - "num_input_tokens_seen": 89908745, - "step": 4224 - }, - { - "epoch": 0.5080262129501594, - "flos": 9296402703120.0, - "grad_norm": 4.1308383661552055, - "learning_rate": 2.045176512931152e-06, - "loss": 0.9776, - "num_input_tokens_seen": 89923095, - "step": 4225 - }, - { - "epoch": 0.5081464558407984, - "flos": 18002203026000.0, - "grad_norm": 2.846817374457516, - "learning_rate": 2.0443977366293604e-06, - "loss": 0.9808, - "num_input_tokens_seen": 89940855, - "step": 4226 - }, - { - "epoch": 0.5082666987314375, - "flos": 22061825660280.0, - "grad_norm": 4.115537714630008, - "learning_rate": 2.043618953592468e-06, - "loss": 1.0003, - "num_input_tokens_seen": 89963030, - "step": 4227 - }, - { - "epoch": 0.5083869416220766, - "flos": 13963926385560.0, - "grad_norm": 2.556404866421681, - "learning_rate": 2.0428401639386144e-06, - "loss": 1.0383, - "num_input_tokens_seen": 89983315, - "step": 4228 - }, - { - "epoch": 0.5085071845127157, - "flos": 51436902592200.0, - "grad_norm": 0.9063500316265272, - "learning_rate": 2.042061367785943e-06, - "loss": 0.8763, - "num_input_tokens_seen": 90036175, - "step": 4229 - }, - { - "epoch": 0.5086274274033548, - "flos": 25655752640520.0, - "grad_norm": 3.081671051908034, - "learning_rate": 2.041282565252594e-06, - "loss": 0.9736, - "num_input_tokens_seen": 90056060, - "step": 4230 - }, - { - "epoch": 0.5087476702939938, - "flos": 16717395775440.0, - "grad_norm": 3.6430784152919107, - "learning_rate": 2.040503756456714e-06, - "loss": 0.9878, - "num_input_tokens_seen": 90074990, - "step": 4231 - }, - { - "epoch": 0.508867913184633, - "flos": 10686493281480.0, - "grad_norm": 6.384824774121858, - "learning_rate": 2.0397249415164456e-06, - "loss": 1.0257, - "num_input_tokens_seen": 90092065, - "step": 4232 - }, - { - "epoch": 0.508988156075272, - "flos": 18421803259200.0, - "grad_norm": 2.4159281906799244, - "learning_rate": 2.0389461205499354e-06, - "loss": 1.0277, - "num_input_tokens_seen": 90110920, - "step": 4233 - }, - { - "epoch": 0.5091083989659111, - "flos": 9768537284760.0, - "grad_norm": 6.524728832237284, - "learning_rate": 2.03816729367533e-06, - "loss": 0.9598, - "num_input_tokens_seen": 90128795, - "step": 4234 - }, - { - "epoch": 0.5092286418565503, - "flos": 14986092555480.0, - "grad_norm": 3.5161923099128876, - "learning_rate": 2.0373884610107765e-06, - "loss": 0.9234, - "num_input_tokens_seen": 90148830, - "step": 4235 - }, - { - "epoch": 0.5093488847471893, - "flos": 13200844555320.0, - "grad_norm": 7.411481981023046, - "learning_rate": 2.0366096226744225e-06, - "loss": 0.9057, - "num_input_tokens_seen": 90163745, - "step": 4236 - }, - { - "epoch": 0.5094691276378284, - "flos": 16925601490920.0, - "grad_norm": 8.89211288900784, - "learning_rate": 2.035830778784418e-06, - "loss": 0.9909, - "num_input_tokens_seen": 90184140, - "step": 4237 - }, - { - "epoch": 0.5095893705284675, - "flos": 12338213109000.0, - "grad_norm": 3.0425623963978325, - "learning_rate": 
2.0350519294589134e-06, - "loss": 1.0303, - "num_input_tokens_seen": 90201900, - "step": 4238 - }, - { - "epoch": 0.5097096134191066, - "flos": 18396042000720.0, - "grad_norm": 3.3190280718787157, - "learning_rate": 2.0342730748160588e-06, - "loss": 1.0562, - "num_input_tokens_seen": 90222085, - "step": 4239 - }, - { - "epoch": 0.5098298563097456, - "flos": 19758286334520.0, - "grad_norm": 5.193333849892613, - "learning_rate": 2.033494214974006e-06, - "loss": 0.9147, - "num_input_tokens_seen": 90242950, - "step": 4240 - }, - { - "epoch": 0.5099500992003848, - "flos": 15168475689360.0, - "grad_norm": 3.3920158662604885, - "learning_rate": 2.0327153500509067e-06, - "loss": 1.0614, - "num_input_tokens_seen": 90261695, - "step": 4241 - }, - { - "epoch": 0.5100703420910239, - "flos": 14095952141760.0, - "grad_norm": 3.927738866893565, - "learning_rate": 2.031936480164916e-06, - "loss": 1.085, - "num_input_tokens_seen": 90279155, - "step": 4242 - }, - { - "epoch": 0.5101905849816629, - "flos": 17533686508440.0, - "grad_norm": 5.417725131959792, - "learning_rate": 2.0311576054341857e-06, - "loss": 1.0267, - "num_input_tokens_seen": 90299490, - "step": 4243 - }, - { - "epoch": 0.5103108278723021, - "flos": 16297948850040.0, - "grad_norm": 3.190106840586677, - "learning_rate": 2.0303787259768715e-06, - "loss": 0.8601, - "num_input_tokens_seen": 90320110, - "step": 4244 - }, - { - "epoch": 0.5104310707629411, - "flos": 15274862833320.0, - "grad_norm": 4.990000644293917, - "learning_rate": 2.0295998419111294e-06, - "loss": 0.9007, - "num_input_tokens_seen": 90337120, - "step": 4245 - }, - { - "epoch": 0.5105513136535802, - "flos": 10529166190200.0, - "grad_norm": 9.329290422644107, - "learning_rate": 2.028820953355115e-06, - "loss": 0.948, - "num_input_tokens_seen": 90354940, - "step": 4246 - }, - { - "epoch": 0.5106715565442194, - "flos": 16062387474960.0, - "grad_norm": 7.61801853439468, - "learning_rate": 2.0280420604269834e-06, - "loss": 1.0072, - "num_input_tokens_seen": 90374200, - "step": 4247 - }, - { - "epoch": 0.5107917994348584, - "flos": 50870952106200.0, - "grad_norm": 0.823206166557463, - "learning_rate": 2.027263163244895e-06, - "loss": 0.8732, - "num_input_tokens_seen": 90443045, - "step": 4248 - }, - { - "epoch": 0.5109120423254975, - "flos": 17661542292480.0, - "grad_norm": 2.3912352736678177, - "learning_rate": 2.026484261927005e-06, - "loss": 0.9707, - "num_input_tokens_seen": 90462260, - "step": 4249 - }, - { - "epoch": 0.5110322852161366, - "flos": 15088279065720.0, - "grad_norm": 112.40674954712462, - "learning_rate": 2.025705356591475e-06, - "loss": 0.9607, - "num_input_tokens_seen": 90479670, - "step": 4250 - }, - { - "epoch": 0.5111525281067757, - "flos": 47584872442200.0, - "grad_norm": 0.8338245176103065, - "learning_rate": 2.024926447356462e-06, - "loss": 0.853, - "num_input_tokens_seen": 90541675, - "step": 4251 - }, - { - "epoch": 0.5112727709974147, - "flos": 10502975669880.0, - "grad_norm": 3.030959282531404, - "learning_rate": 2.024147534340127e-06, - "loss": 1.0113, - "num_input_tokens_seen": 90559255, - "step": 4252 - }, - { - "epoch": 0.5113930138880539, - "flos": 15038320288320.0, - "grad_norm": 12.478556436044157, - "learning_rate": 2.02336861766063e-06, - "loss": 1.0222, - "num_input_tokens_seen": 90578035, - "step": 4253 - }, - { - "epoch": 0.511513256778693, - "flos": 14485713129000.0, - "grad_norm": 2.9193309785679933, - "learning_rate": 2.0225896974361327e-06, - "loss": 1.0098, - "num_input_tokens_seen": 90597860, - "step": 4254 - }, - { - "epoch": 
0.511633499669332, - "flos": 50043899165640.0, - "grad_norm": 0.9410621679061848, - "learning_rate": 2.0218107737847962e-06, - "loss": 0.8841, - "num_input_tokens_seen": 90659625, - "step": 4255 - }, - { - "epoch": 0.5117537425599712, - "flos": 17137241301120.0, - "grad_norm": 3.5854489195173, - "learning_rate": 2.0210318468247826e-06, - "loss": 0.9866, - "num_input_tokens_seen": 90678315, - "step": 4256 - }, - { - "epoch": 0.5118739854506102, - "flos": 14881729074480.0, - "grad_norm": 3.0875363614184455, - "learning_rate": 2.020252916674255e-06, - "loss": 1.0418, - "num_input_tokens_seen": 90697150, - "step": 4257 - }, - { - "epoch": 0.5119942283412493, - "flos": 12364802229600.0, - "grad_norm": 2.502631519074207, - "learning_rate": 2.019473983451375e-06, - "loss": 1.0295, - "num_input_tokens_seen": 90715290, - "step": 4258 - }, - { - "epoch": 0.5121144712318885, - "flos": 14958460941840.0, - "grad_norm": 2.9471691967740945, - "learning_rate": 2.0186950472743076e-06, - "loss": 0.9462, - "num_input_tokens_seen": 90734915, - "step": 4259 - }, - { - "epoch": 0.5122347141225275, - "flos": 14091506215560.0, - "grad_norm": 3.729015127047492, - "learning_rate": 2.0179161082612162e-06, - "loss": 0.9738, - "num_input_tokens_seen": 90754025, - "step": 4260 - }, - { - "epoch": 0.5123549570131666, - "flos": 16009301218440.0, - "grad_norm": 3.9884689540948317, - "learning_rate": 2.017137166530266e-06, - "loss": 0.9481, - "num_input_tokens_seen": 90773280, - "step": 4261 - }, - { - "epoch": 0.5124751999038056, - "flos": 14430909825120.0, - "grad_norm": 2.7809363198998525, - "learning_rate": 2.0163582221996213e-06, - "loss": 1.0331, - "num_input_tokens_seen": 90791375, - "step": 4262 - }, - { - "epoch": 0.5125954427944448, - "flos": 28411889586120.0, - "grad_norm": 2.3099608012585042, - "learning_rate": 2.015579275387446e-06, - "loss": 0.9095, - "num_input_tokens_seen": 90816415, - "step": 4263 - }, - { - "epoch": 0.5127156856850839, - "flos": 20807869487160.0, - "grad_norm": 4.650248711610378, - "learning_rate": 2.0148003262119085e-06, - "loss": 0.9087, - "num_input_tokens_seen": 90837105, - "step": 4264 - }, - { - "epoch": 0.5128359285757229, - "flos": 9559381060920.0, - "grad_norm": 2.8047415640462967, - "learning_rate": 2.0140213747911728e-06, - "loss": 0.9831, - "num_input_tokens_seen": 90855360, - "step": 4265 - }, - { - "epoch": 0.5129561714663621, - "flos": 17923600803480.0, - "grad_norm": 4.151484230920319, - "learning_rate": 2.013242421243406e-06, - "loss": 1.0309, - "num_input_tokens_seen": 90873985, - "step": 4266 - }, - { - "epoch": 0.5130764143570011, - "flos": 12862728731280.0, - "grad_norm": 2.8756225575314622, - "learning_rate": 2.012463465686774e-06, - "loss": 1.0178, - "num_input_tokens_seen": 90893455, - "step": 4267 - }, - { - "epoch": 0.5131966572476402, - "flos": 42798844751520.0, - "grad_norm": 0.9373762049002762, - "learning_rate": 2.0116845082394446e-06, - "loss": 0.8298, - "num_input_tokens_seen": 90958010, - "step": 4268 - }, - { - "epoch": 0.5133169001382794, - "flos": 13125063196320.0, - "grad_norm": 4.257050220253314, - "learning_rate": 2.0109055490195836e-06, - "loss": 1.0152, - "num_input_tokens_seen": 90976185, - "step": 4269 - }, - { - "epoch": 0.5134371430289184, - "flos": 10975018266840.0, - "grad_norm": 124.44369569583466, - "learning_rate": 2.0101265881453605e-06, - "loss": 0.8697, - "num_input_tokens_seen": 90994380, - "step": 4270 - }, - { - "epoch": 0.5135573859195575, - "flos": 15222543115800.0, - "grad_norm": 3.2832970909274235, - "learning_rate": 
2.009347625734941e-06, - "loss": 1.0164, - "num_input_tokens_seen": 91014010, - "step": 4271 - }, - { - "epoch": 0.5136776288101966, - "flos": 12548289179640.0, - "grad_norm": 3.849850782995435, - "learning_rate": 2.0085686619064954e-06, - "loss": 0.9793, - "num_input_tokens_seen": 91030170, - "step": 4272 - }, - { - "epoch": 0.5137978717008357, - "flos": 11735493864480.0, - "grad_norm": 5.326843823640783, - "learning_rate": 2.00778969677819e-06, - "loss": 1.0623, - "num_input_tokens_seen": 91046925, - "step": 4273 - }, - { - "epoch": 0.5139181145914747, - "flos": 14672450204400.0, - "grad_norm": 2.573719213522985, - "learning_rate": 2.0070107304681934e-06, - "loss": 0.8758, - "num_input_tokens_seen": 91065600, - "step": 4274 - }, - { - "epoch": 0.5140383574821139, - "flos": 23482522023600.0, - "grad_norm": 3.3476327474652674, - "learning_rate": 2.006231763094675e-06, - "loss": 1.0211, - "num_input_tokens_seen": 91086340, - "step": 4275 - }, - { - "epoch": 0.514158600372753, - "flos": 13859501581440.0, - "grad_norm": 3.0536708066698695, - "learning_rate": 2.0054527947758027e-06, - "loss": 1.1096, - "num_input_tokens_seen": 91104860, - "step": 4276 - }, - { - "epoch": 0.514278843263392, - "flos": 44759086449600.0, - "grad_norm": 0.7658148549631395, - "learning_rate": 2.004673825629746e-06, - "loss": 0.8211, - "num_input_tokens_seen": 91165360, - "step": 4277 - }, - { - "epoch": 0.5143990861540312, - "flos": 18419258349720.0, - "grad_norm": 2.4559925594776284, - "learning_rate": 2.0038948557746744e-06, - "loss": 0.9589, - "num_input_tokens_seen": 91186935, - "step": 4278 - }, - { - "epoch": 0.5145193290446702, - "flos": 16585737957960.0, - "grad_norm": 4.036471039126508, - "learning_rate": 2.0031158853287558e-06, - "loss": 0.9689, - "num_input_tokens_seen": 91207090, - "step": 4279 - }, - { - "epoch": 0.5146395719353093, - "flos": 16245077224440.0, - "grad_norm": 4.178619761763843, - "learning_rate": 2.0023369144101593e-06, - "loss": 0.9392, - "num_input_tokens_seen": 91224980, - "step": 4280 - }, - { - "epoch": 0.5147598148259485, - "flos": 18785036448960.0, - "grad_norm": 2.3395965394632174, - "learning_rate": 2.0015579431370555e-06, - "loss": 0.9885, - "num_input_tokens_seen": 91246380, - "step": 4281 - }, - { - "epoch": 0.5148800577165875, - "flos": 21353485810800.0, - "grad_norm": 6.423568613639217, - "learning_rate": 2.000778971627612e-06, - "loss": 0.935, - "num_input_tokens_seen": 91265565, - "step": 4282 - }, - { - "epoch": 0.5150003006072266, - "flos": 12705462963120.0, - "grad_norm": 3.729355507159286, - "learning_rate": 2e-06, - "loss": 1.1296, - "num_input_tokens_seen": 91282880, - "step": 4283 - }, - { - "epoch": 0.5151205434978657, - "flos": 13098842014440.0, - "grad_norm": 3.628758290932146, - "learning_rate": 1.9992210283723878e-06, - "loss": 1.0845, - "num_input_tokens_seen": 91299840, - "step": 4284 - }, - { - "epoch": 0.5152407863885048, - "flos": 18031245071400.0, - "grad_norm": 2.0693883596695875, - "learning_rate": 1.9984420568629448e-06, - "loss": 1.01, - "num_input_tokens_seen": 91322325, - "step": 4285 - }, - { - "epoch": 0.5153610292791438, - "flos": 12991351054320.0, - "grad_norm": 4.248479458937362, - "learning_rate": 1.9976630855898405e-06, - "loss": 1.0054, - "num_input_tokens_seen": 91339800, - "step": 4286 - }, - { - "epoch": 0.515481272169783, - "flos": 21673628412600.0, - "grad_norm": 4.141027582138902, - "learning_rate": 1.9968841146712445e-06, - "loss": 0.9797, - "num_input_tokens_seen": 91359135, - "step": 4287 - }, - { - "epoch": 0.5156015150604221, - 
"flos": 16374711378960.0, - "grad_norm": 8.803203467851162, - "learning_rate": 1.996105144225326e-06, - "loss": 0.9412, - "num_input_tokens_seen": 91379090, - "step": 4288 - }, - { - "epoch": 0.5157217579510611, - "flos": 12653143245600.0, - "grad_norm": 5.360193840621177, - "learning_rate": 1.995326174370254e-06, - "loss": 1.0184, - "num_input_tokens_seen": 91397910, - "step": 4289 - }, - { - "epoch": 0.5158420008417003, - "flos": 13596645869880.0, - "grad_norm": 2.1495054607725734, - "learning_rate": 1.994547205224197e-06, - "loss": 0.9564, - "num_input_tokens_seen": 91416535, - "step": 4290 - }, - { - "epoch": 0.5159622437323393, - "flos": 15931066934640.0, - "grad_norm": 6.02065219628674, - "learning_rate": 1.993768236905325e-06, - "loss": 0.8991, - "num_input_tokens_seen": 91436925, - "step": 4291 - }, - { - "epoch": 0.5160824866229784, - "flos": 17499523982520.0, - "grad_norm": 4.161270894908841, - "learning_rate": 1.992989269531807e-06, - "loss": 0.8812, - "num_input_tokens_seen": 91455455, - "step": 4292 - }, - { - "epoch": 0.5162027295136175, - "flos": 12756525556680.0, - "grad_norm": 19.017568396481284, - "learning_rate": 1.99221030322181e-06, - "loss": 0.8897, - "num_input_tokens_seen": 91471980, - "step": 4293 - }, - { - "epoch": 0.5163229724042566, - "flos": 19470558549720.0, - "grad_norm": 2.635928339283689, - "learning_rate": 1.991431338093505e-06, - "loss": 1.0345, - "num_input_tokens_seen": 91494055, - "step": 4294 - }, - { - "epoch": 0.5164432152948957, - "flos": 15459054999240.0, - "grad_norm": 3.0510785181475364, - "learning_rate": 1.9906523742650587e-06, - "loss": 1.0156, - "num_input_tokens_seen": 91512635, - "step": 4295 - }, - { - "epoch": 0.5165634581855347, - "flos": 18180170895240.0, - "grad_norm": 2.3597558326814805, - "learning_rate": 1.9898734118546397e-06, - "loss": 0.9837, - "num_input_tokens_seen": 91532305, - "step": 4296 - }, - { - "epoch": 0.5166837010761739, - "flos": 14121069507480.0, - "grad_norm": 4.917483935346869, - "learning_rate": 1.989094450980416e-06, - "loss": 1.0354, - "num_input_tokens_seen": 91552125, - "step": 4297 - }, - { - "epoch": 0.516803943966813, - "flos": 18967818183120.0, - "grad_norm": 2.794579442957471, - "learning_rate": 1.9883154917605556e-06, - "loss": 0.9804, - "num_input_tokens_seen": 91571055, - "step": 4298 - }, - { - "epoch": 0.516924186857452, - "flos": 13964233001160.0, - "grad_norm": 3.5831776222877605, - "learning_rate": 1.9875365343132262e-06, - "loss": 1.0507, - "num_input_tokens_seen": 91587895, - "step": 4299 - }, - { - "epoch": 0.5170444297480912, - "flos": 11285594461920.0, - "grad_norm": 3.23034165857733, - "learning_rate": 1.9867575787565946e-06, - "loss": 1.0746, - "num_input_tokens_seen": 91602275, - "step": 4300 - }, - { - "epoch": 0.5171646726387302, - "flos": 10005325122240.0, - "grad_norm": 4.116362342448266, - "learning_rate": 1.9859786252088275e-06, - "loss": 1.0869, - "num_input_tokens_seen": 91619175, - "step": 4301 - }, - { - "epoch": 0.5172849155293693, - "flos": 16762877965080.0, - "grad_norm": 3.7531662607585075, - "learning_rate": 1.9851996737880914e-06, - "loss": 0.9041, - "num_input_tokens_seen": 91634080, - "step": 4302 - }, - { - "epoch": 0.5174051584200084, - "flos": 10083927344760.0, - "grad_norm": 3.782555346944411, - "learning_rate": 1.9844207246125537e-06, - "loss": 0.9771, - "num_input_tokens_seen": 91650380, - "step": 4303 - }, - { - "epoch": 0.5175254013106475, - "flos": 27048940036440.0, - "grad_norm": 3.370425621118596, - "learning_rate": 1.983641777800379e-06, - "loss": 0.9145, - 
"num_input_tokens_seen": 91672745, - "step": 4304 - }, - { - "epoch": 0.5176456442012866, - "flos": 49090548632520.0, - "grad_norm": 0.7903766076033745, - "learning_rate": 1.9828628334697343e-06, - "loss": 0.8564, - "num_input_tokens_seen": 91739675, - "step": 4305 - }, - { - "epoch": 0.5177658870919257, - "flos": 45879269157600.0, - "grad_norm": 0.9741695841334123, - "learning_rate": 1.982083891738784e-06, - "loss": 0.82, - "num_input_tokens_seen": 91800265, - "step": 4306 - }, - { - "epoch": 0.5178861299825648, - "flos": 18972233447760.0, - "grad_norm": 5.458510398419455, - "learning_rate": 1.9813049527256923e-06, - "loss": 1.0606, - "num_input_tokens_seen": 91820380, - "step": 4307 - }, - { - "epoch": 0.5180063728732038, - "flos": 12705892224960.0, - "grad_norm": 4.365082972416344, - "learning_rate": 1.9805260165486252e-06, - "loss": 1.0408, - "num_input_tokens_seen": 91839470, - "step": 4308 - }, - { - "epoch": 0.518126615763843, - "flos": 13833525692040.0, - "grad_norm": 3.632238986777697, - "learning_rate": 1.9797470833257457e-06, - "loss": 1.098, - "num_input_tokens_seen": 91858890, - "step": 4309 - }, - { - "epoch": 0.5182468586544821, - "flos": 14698119478200.0, - "grad_norm": 3.960421376206302, - "learning_rate": 1.9789681531752177e-06, - "loss": 1.0095, - "num_input_tokens_seen": 91878830, - "step": 4310 - }, - { - "epoch": 0.5183671015451211, - "flos": 16428717482280.0, - "grad_norm": 2.575093099151818, - "learning_rate": 1.978189226215204e-06, - "loss": 0.9553, - "num_input_tokens_seen": 91899095, - "step": 4311 - }, - { - "epoch": 0.5184873444357603, - "flos": 12464413168800.0, - "grad_norm": 6.28875890911262, - "learning_rate": 1.9774103025638675e-06, - "loss": 0.9943, - "num_input_tokens_seen": 91916940, - "step": 4312 - }, - { - "epoch": 0.5186075873263993, - "flos": 17740144515000.0, - "grad_norm": 2.790032995295512, - "learning_rate": 1.9766313823393696e-06, - "loss": 0.9933, - "num_input_tokens_seen": 91937525, - "step": 4313 - }, - { - "epoch": 0.5187278302170384, - "flos": 10734704349960.0, - "grad_norm": 3.3654840324724686, - "learning_rate": 1.975852465659873e-06, - "loss": 0.9166, - "num_input_tokens_seen": 91953225, - "step": 4314 - }, - { - "epoch": 0.5188480731076776, - "flos": 17792280263160.0, - "grad_norm": 3.138433802274983, - "learning_rate": 1.9750735526435377e-06, - "loss": 0.9252, - "num_input_tokens_seen": 91969890, - "step": 4315 - }, - { - "epoch": 0.5189683159983166, - "flos": 17634646556280.0, - "grad_norm": 5.7728015100269925, - "learning_rate": 1.974294643408525e-06, - "loss": 1.01, - "num_input_tokens_seen": 91987405, - "step": 4316 - }, - { - "epoch": 0.5190885588889557, - "flos": 17608946620920.0, - "grad_norm": 4.852917271655496, - "learning_rate": 1.9735157380729947e-06, - "loss": 0.8945, - "num_input_tokens_seen": 92007535, - "step": 4317 - }, - { - "epoch": 0.5192088017795948, - "flos": 17578340835960.0, - "grad_norm": 3.187578088140456, - "learning_rate": 1.9727368367551053e-06, - "loss": 1.0615, - "num_input_tokens_seen": 92025805, - "step": 4318 - }, - { - "epoch": 0.5193290446702339, - "flos": 19387142462280.0, - "grad_norm": 3.037229323395029, - "learning_rate": 1.9719579395730164e-06, - "loss": 0.9221, - "num_input_tokens_seen": 92044900, - "step": 4319 - }, - { - "epoch": 0.5194492875608729, - "flos": 8064191124120.0, - "grad_norm": 4.031756465353885, - "learning_rate": 1.9711790466448854e-06, - "loss": 1.1585, - "num_input_tokens_seen": 92058640, - "step": 4320 - }, - { - "epoch": 0.5195695304515121, - "flos": 14435631705360.0, - 
"grad_norm": 7.716421911369142, - "learning_rate": 1.9704001580888704e-06, - "loss": 0.9321, - "num_input_tokens_seen": 92077100, - "step": 4321 - }, - { - "epoch": 0.5196897733421512, - "flos": 14226966066480.0, - "grad_norm": 3.5573961606556637, - "learning_rate": 1.9696212740231283e-06, - "loss": 1.1063, - "num_input_tokens_seen": 92095470, - "step": 4322 - }, - { - "epoch": 0.5198100162327902, - "flos": 16927073245800.0, - "grad_norm": 5.625320764508747, - "learning_rate": 1.9688423945658146e-06, - "loss": 1.0459, - "num_input_tokens_seen": 92116055, - "step": 4323 - }, - { - "epoch": 0.5199302591234293, - "flos": 17084001736800.0, - "grad_norm": 2.922240535290302, - "learning_rate": 1.9680635198350845e-06, - "loss": 0.9506, - "num_input_tokens_seen": 92135485, - "step": 4324 - }, - { - "epoch": 0.5200505020140684, - "flos": 18762463992720.0, - "grad_norm": 5.324656501400833, - "learning_rate": 1.967284649949093e-06, - "loss": 0.9534, - "num_input_tokens_seen": 92154415, - "step": 4325 - }, - { - "epoch": 0.5201707449047075, - "flos": 28281396907920.0, - "grad_norm": 5.592395128231165, - "learning_rate": 1.966505785025994e-06, - "loss": 0.9607, - "num_input_tokens_seen": 92176040, - "step": 4326 - }, - { - "epoch": 0.5202909877953465, - "flos": 38398695126120.0, - "grad_norm": 6.419996446269826, - "learning_rate": 1.965726925183941e-06, - "loss": 0.9984, - "num_input_tokens_seen": 92198865, - "step": 4327 - }, - { - "epoch": 0.5204112306859857, - "flos": 13859440258320.0, - "grad_norm": 4.160504451023614, - "learning_rate": 1.964948070541087e-06, - "loss": 1.0718, - "num_input_tokens_seen": 92217245, - "step": 4328 - }, - { - "epoch": 0.5205314735766248, - "flos": 10817752498680.0, - "grad_norm": 7.680309871939016, - "learning_rate": 1.9641692212155816e-06, - "loss": 0.9175, - "num_input_tokens_seen": 92234730, - "step": 4329 - }, - { - "epoch": 0.5206517164672638, - "flos": 42409401476040.0, - "grad_norm": 3.3538808918084717, - "learning_rate": 1.9633903773255777e-06, - "loss": 0.9563, - "num_input_tokens_seen": 92256765, - "step": 4330 - }, - { - "epoch": 0.520771959357903, - "flos": 19130388401160.0, - "grad_norm": 3.5818462095233454, - "learning_rate": 1.9626115389892237e-06, - "loss": 0.975, - "num_input_tokens_seen": 92277275, - "step": 4331 - }, - { - "epoch": 0.520892202248542, - "flos": 19156149659640.0, - "grad_norm": 3.1514136396967625, - "learning_rate": 1.96183270632467e-06, - "loss": 1.072, - "num_input_tokens_seen": 92296845, - "step": 4332 - }, - { - "epoch": 0.5210124451391811, - "flos": 18396103323840.0, - "grad_norm": 4.009670964390443, - "learning_rate": 1.9610538794500644e-06, - "loss": 1.0107, - "num_input_tokens_seen": 92316115, - "step": 4333 - }, - { - "epoch": 0.5211326880298203, - "flos": 50530475342040.0, - "grad_norm": 0.8082641445778266, - "learning_rate": 1.9602750584835542e-06, - "loss": 0.863, - "num_input_tokens_seen": 92381770, - "step": 4334 - }, - { - "epoch": 0.5212529309204593, - "flos": 11050186394640.0, - "grad_norm": 2.688846162276155, - "learning_rate": 1.959496243543286e-06, - "loss": 1.0564, - "num_input_tokens_seen": 92399370, - "step": 4335 - }, - { - "epoch": 0.5213731738110984, - "flos": 18677606811960.0, - "grad_norm": 5.099988989724634, - "learning_rate": 1.9587174347474057e-06, - "loss": 1.0186, - "num_input_tokens_seen": 92415600, - "step": 4336 - }, - { - "epoch": 0.5214934167017375, - "flos": 13772958014880.0, - "grad_norm": 12.107130588375776, - "learning_rate": 1.9579386322140574e-06, - "loss": 1.029, - "num_input_tokens_seen": 
92431000, - "step": 4337 - }, - { - "epoch": 0.5216136595923766, - "flos": 22065719678400.0, - "grad_norm": 4.347384242951085, - "learning_rate": 1.9571598360613854e-06, - "loss": 1.032, - "num_input_tokens_seen": 92453595, - "step": 4338 - }, - { - "epoch": 0.5217339024830157, - "flos": 15590038262400.0, - "grad_norm": 8.149542260918707, - "learning_rate": 1.956381046407532e-06, - "loss": 0.9254, - "num_input_tokens_seen": 92473610, - "step": 4339 - }, - { - "epoch": 0.5218541453736548, - "flos": 14855630538840.0, - "grad_norm": 2.6522011809874364, - "learning_rate": 1.9556022633706394e-06, - "loss": 1.0856, - "num_input_tokens_seen": 92492120, - "step": 4340 - }, - { - "epoch": 0.5219743882642939, - "flos": 17032141942680.0, - "grad_norm": 3.3313146359469434, - "learning_rate": 1.954823487068848e-06, - "loss": 1.0245, - "num_input_tokens_seen": 92512050, - "step": 4341 - }, - { - "epoch": 0.5220946311549329, - "flos": 20515358499000.0, - "grad_norm": 3.6733722637165154, - "learning_rate": 1.9540447176202976e-06, - "loss": 1.0333, - "num_input_tokens_seen": 92533015, - "step": 4342 - }, - { - "epoch": 0.5222148740455721, - "flos": 48111473050560.0, - "grad_norm": 0.8555544805898422, - "learning_rate": 1.9532659551431272e-06, - "loss": 0.8819, - "num_input_tokens_seen": 92599765, - "step": 4343 - }, - { - "epoch": 0.5223351169362112, - "flos": 44272822436880.0, - "grad_norm": 2.7297736116658586, - "learning_rate": 1.9524871997554744e-06, - "loss": 0.8998, - "num_input_tokens_seen": 92627245, - "step": 4344 - }, - { - "epoch": 0.5224553598268502, - "flos": 10345280639880.0, - "grad_norm": 5.380398368125863, - "learning_rate": 1.951708451575475e-06, - "loss": 1.0099, - "num_input_tokens_seen": 92644030, - "step": 4345 - }, - { - "epoch": 0.5225756027174894, - "flos": 10473167085480.0, - "grad_norm": 4.783943763797884, - "learning_rate": 1.9509297107212657e-06, - "loss": 1.0351, - "num_input_tokens_seen": 92660520, - "step": 4346 - }, - { - "epoch": 0.5226958456081284, - "flos": 16715924020560.0, - "grad_norm": 2.4559987723505885, - "learning_rate": 1.95015097731098e-06, - "loss": 1.0199, - "num_input_tokens_seen": 92679730, - "step": 4347 - }, - { - "epoch": 0.5228160884987675, - "flos": 13518748863240.0, - "grad_norm": 6.657385214411617, - "learning_rate": 1.949372251462751e-06, - "loss": 1.0439, - "num_input_tokens_seen": 92696865, - "step": 4348 - }, - { - "epoch": 0.5229363313894067, - "flos": 14955456108960.0, - "grad_norm": 11.555539237116841, - "learning_rate": 1.9485935332947124e-06, - "loss": 1.0555, - "num_input_tokens_seen": 92714495, - "step": 4349 - }, - { - "epoch": 0.5230565742800457, - "flos": 10476877134240.0, - "grad_norm": 4.06371301001121, - "learning_rate": 1.947814822924993e-06, - "loss": 1.0532, - "num_input_tokens_seen": 92731725, - "step": 4350 - }, - { - "epoch": 0.5231768171706848, - "flos": 18369943465080.0, - "grad_norm": 5.160196169887222, - "learning_rate": 1.9470361204717236e-06, - "loss": 1.0477, - "num_input_tokens_seen": 92750585, - "step": 4351 - }, - { - "epoch": 0.5232970600613239, - "flos": 16163562153720.0, - "grad_norm": 3.349420036102828, - "learning_rate": 1.9462574260530326e-06, - "loss": 1.0431, - "num_input_tokens_seen": 92770585, - "step": 4352 - }, - { - "epoch": 0.523417302951963, - "flos": 12259334932440.0, - "grad_norm": 2.721569397125107, - "learning_rate": 1.9454787397870472e-06, - "loss": 1.0373, - "num_input_tokens_seen": 92787625, - "step": 4353 - }, - { - "epoch": 0.523537545842602, - "flos": 13151315039760.0, - "grad_norm": 
6.4651800799975305, - "learning_rate": 1.944700061791894e-06, - "loss": 0.9402, - "num_input_tokens_seen": 92805740, - "step": 4354 - }, - { - "epoch": 0.5236577887332411, - "flos": 13990392859920.0, - "grad_norm": 4.258000416939761, - "learning_rate": 1.943921392185698e-06, - "loss": 0.881, - "num_input_tokens_seen": 92824085, - "step": 4355 - }, - { - "epoch": 0.5237780316238803, - "flos": 16743616957320.0, - "grad_norm": 5.878610678304873, - "learning_rate": 1.9431427310865814e-06, - "loss": 1.0017, - "num_input_tokens_seen": 92843410, - "step": 4356 - }, - { - "epoch": 0.5238982745145193, - "flos": 15982988051880.0, - "grad_norm": 3.6094703826851724, - "learning_rate": 1.942364078612667e-06, - "loss": 1.0257, - "num_input_tokens_seen": 92861860, - "step": 4357 - }, - { - "epoch": 0.5240185174051584, - "flos": 19286764984080.0, - "grad_norm": 3.3330226912391314, - "learning_rate": 1.9415854348820765e-06, - "loss": 0.9698, - "num_input_tokens_seen": 92881430, - "step": 4358 - }, - { - "epoch": 0.5241387602957975, - "flos": 16114400576880.0, - "grad_norm": 4.66418293297454, - "learning_rate": 1.940806800012929e-06, - "loss": 0.8945, - "num_input_tokens_seen": 92901220, - "step": 4359 - }, - { - "epoch": 0.5242590031864366, - "flos": 28963638221760.0, - "grad_norm": 4.34541585251336, - "learning_rate": 1.9400281741233432e-06, - "loss": 0.87, - "num_input_tokens_seen": 92925830, - "step": 4360 - }, - { - "epoch": 0.5243792460770756, - "flos": 47742444825960.0, - "grad_norm": 0.6870821419893126, - "learning_rate": 1.939249557331435e-06, - "loss": 0.7874, - "num_input_tokens_seen": 92991365, - "step": 4361 - }, - { - "epoch": 0.5244994889677148, - "flos": 20073553748280.0, - "grad_norm": 3.5489200292516365, - "learning_rate": 1.938470949755321e-06, - "loss": 0.9555, - "num_input_tokens_seen": 93010965, - "step": 4362 - }, - { - "epoch": 0.5246197318583539, - "flos": 47221240652160.0, - "grad_norm": 0.8865690062872084, - "learning_rate": 1.937692351513115e-06, - "loss": 0.8522, - "num_input_tokens_seen": 93069680, - "step": 4363 - }, - { - "epoch": 0.5247399747489929, - "flos": 14934999300360.0, - "grad_norm": 3.4009262618405245, - "learning_rate": 1.9369137627229297e-06, - "loss": 1.0271, - "num_input_tokens_seen": 93087800, - "step": 4364 - }, - { - "epoch": 0.5248602176396321, - "flos": 13492098419520.0, - "grad_norm": 3.5036414140961023, - "learning_rate": 1.936135183502877e-06, - "loss": 1.1061, - "num_input_tokens_seen": 93104820, - "step": 4365 - }, - { - "epoch": 0.5249804605302711, - "flos": 15773279919960.0, - "grad_norm": 5.712000464837406, - "learning_rate": 1.935356613971066e-06, - "loss": 1.0305, - "num_input_tokens_seen": 93125200, - "step": 4366 - }, - { - "epoch": 0.5251007034209102, - "flos": 16927226553600.0, - "grad_norm": 5.406534352845276, - "learning_rate": 1.9345780542456047e-06, - "loss": 1.0019, - "num_input_tokens_seen": 93144295, - "step": 4367 - }, - { - "epoch": 0.5252209463115494, - "flos": 16559884714800.0, - "grad_norm": 3.2242148219778595, - "learning_rate": 1.9337995044446007e-06, - "loss": 0.9458, - "num_input_tokens_seen": 93162855, - "step": 4368 - }, - { - "epoch": 0.5253411892021884, - "flos": 14069025744000.0, - "grad_norm": 5.192686131263135, - "learning_rate": 1.9330209646861596e-06, - "loss": 1.032, - "num_input_tokens_seen": 93181725, - "step": 4369 - }, - { - "epoch": 0.5254614320928275, - "flos": 17162941236480.0, - "grad_norm": 6.2920427357361515, - "learning_rate": 1.9322424350883843e-06, - "loss": 1.0037, - "num_input_tokens_seen": 93203280, - 
"step": 4370 - }, - { - "epoch": 0.5255816749834666, - "flos": 17530405721520.0, - "grad_norm": 3.602136793708815, - "learning_rate": 1.931463915769379e-06, - "loss": 1.0119, - "num_input_tokens_seen": 93223115, - "step": 4371 - }, - { - "epoch": 0.5257019178741057, - "flos": 9978184093560.0, - "grad_norm": 5.943475547254363, - "learning_rate": 1.930685406847242e-06, - "loss": 0.9715, - "num_input_tokens_seen": 93237410, - "step": 4372 - }, - { - "epoch": 0.5258221607647448, - "flos": 16742543802720.0, - "grad_norm": 3.7271849554262215, - "learning_rate": 1.9299069084400734e-06, - "loss": 1.0545, - "num_input_tokens_seen": 93257990, - "step": 4373 - }, - { - "epoch": 0.5259424036553839, - "flos": 17760233384880.0, - "grad_norm": 5.189571840122175, - "learning_rate": 1.9291284206659717e-06, - "loss": 0.9171, - "num_input_tokens_seen": 93275895, - "step": 4374 - }, - { - "epoch": 0.526062646546023, - "flos": 20490087825480.0, - "grad_norm": 4.597930044417746, - "learning_rate": 1.928349943643032e-06, - "loss": 0.9369, - "num_input_tokens_seen": 93294715, - "step": 4375 - }, - { - "epoch": 0.526182889436662, - "flos": 16219929197160.0, - "grad_norm": 2.657172368291529, - "learning_rate": 1.9275714774893493e-06, - "loss": 1.0551, - "num_input_tokens_seen": 93313890, - "step": 4376 - }, - { - "epoch": 0.5263031323273012, - "flos": 16297427603520.0, - "grad_norm": 3.2303999647868866, - "learning_rate": 1.9267930223230154e-06, - "loss": 0.9502, - "num_input_tokens_seen": 93332085, - "step": 4377 - }, - { - "epoch": 0.5264233752179402, - "flos": 12574081099680.0, - "grad_norm": 3.699554138125681, - "learning_rate": 1.9260145782621224e-06, - "loss": 1.0006, - "num_input_tokens_seen": 93349585, - "step": 4378 - }, - { - "epoch": 0.5265436181085793, - "flos": 17369307258360.0, - "grad_norm": 4.365304284954732, - "learning_rate": 1.925236145424758e-06, - "loss": 1.1025, - "num_input_tokens_seen": 93368125, - "step": 4379 - }, - { - "epoch": 0.5266638609992185, - "flos": 49563235122240.0, - "grad_norm": 0.7219919811765078, - "learning_rate": 1.924457723929012e-06, - "loss": 0.8309, - "num_input_tokens_seen": 93438655, - "step": 4380 - }, - { - "epoch": 0.5267841038898575, - "flos": 14722746258960.0, - "grad_norm": 3.8181347090634685, - "learning_rate": 1.9236793138929685e-06, - "loss": 1.0508, - "num_input_tokens_seen": 93457645, - "step": 4381 - }, - { - "epoch": 0.5269043467804966, - "flos": 12204041043600.0, - "grad_norm": 3.3166981131693865, - "learning_rate": 1.9229009154347133e-06, - "loss": 1.0362, - "num_input_tokens_seen": 93474955, - "step": 4382 - }, - { - "epoch": 0.5270245896711357, - "flos": 12915140433480.0, - "grad_norm": 3.603747848843539, - "learning_rate": 1.922122528672327e-06, - "loss": 1.0279, - "num_input_tokens_seen": 93493340, - "step": 4383 - }, - { - "epoch": 0.5271448325617748, - "flos": 15117413095800.0, - "grad_norm": 4.606301113554179, - "learning_rate": 1.9213441537238914e-06, - "loss": 1.0089, - "num_input_tokens_seen": 93509935, - "step": 4384 - }, - { - "epoch": 0.5272650754524139, - "flos": 46892022228600.0, - "grad_norm": 1.0411974549516312, - "learning_rate": 1.920565790707485e-06, - "loss": 0.8856, - "num_input_tokens_seen": 93575045, - "step": 4385 - }, - { - "epoch": 0.527385318343053, - "flos": 13958744581920.0, - "grad_norm": 7.759602166429024, - "learning_rate": 1.9197874397411853e-06, - "loss": 0.884, - "num_input_tokens_seen": 93591395, - "step": 4386 - }, - { - "epoch": 0.5275055612336921, - "flos": 8954147568480.0, - "grad_norm": 8.555551201405043, - 
"learning_rate": 1.919009100943067e-06, - "loss": 0.8618, - "num_input_tokens_seen": 93606805, - "step": 4387 - }, - { - "epoch": 0.5276258041243311, - "flos": 12574418376840.0, - "grad_norm": 5.217213295926793, - "learning_rate": 1.9182307744312043e-06, - "loss": 0.88, - "num_input_tokens_seen": 93623630, - "step": 4388 - }, - { - "epoch": 0.5277460470149702, - "flos": 16140008527560.0, - "grad_norm": 5.879646249626924, - "learning_rate": 1.9174524603236676e-06, - "loss": 0.9794, - "num_input_tokens_seen": 93642300, - "step": 4389 - }, - { - "epoch": 0.5278662899056094, - "flos": 14122357293000.0, - "grad_norm": 3.88183090113667, - "learning_rate": 1.916674158738527e-06, - "loss": 0.9995, - "num_input_tokens_seen": 93660925, - "step": 4390 - }, - { - "epoch": 0.5279865327962484, - "flos": 12758641204320.0, - "grad_norm": 3.6775939558047965, - "learning_rate": 1.9158958697938506e-06, - "loss": 0.8232, - "num_input_tokens_seen": 93679025, - "step": 4391 - }, - { - "epoch": 0.5281067756868875, - "flos": 11263083328800.0, - "grad_norm": 5.248713562800989, - "learning_rate": 1.9151175936077032e-06, - "loss": 1.0697, - "num_input_tokens_seen": 93693715, - "step": 4392 - }, - { - "epoch": 0.5282270185775266, - "flos": 13779856865880.0, - "grad_norm": 3.053832575977211, - "learning_rate": 1.9143393302981507e-06, - "loss": 1.0213, - "num_input_tokens_seen": 93711120, - "step": 4393 - }, - { - "epoch": 0.5283472614681657, - "flos": 11604694570680.0, - "grad_norm": 5.173913467527072, - "learning_rate": 1.913561079983252e-06, - "loss": 1.0545, - "num_input_tokens_seen": 93729665, - "step": 4394 - }, - { - "epoch": 0.5284675043588047, - "flos": 19050743685600.0, - "grad_norm": 6.1914835735510705, - "learning_rate": 1.9127828427810693e-06, - "loss": 0.9679, - "num_input_tokens_seen": 93749950, - "step": 4395 - }, - { - "epoch": 0.5285877472494439, - "flos": 14119965691320.0, - "grad_norm": 4.5851376305476865, - "learning_rate": 1.9120046188096607e-06, - "loss": 1.0269, - "num_input_tokens_seen": 93767715, - "step": 4396 - }, - { - "epoch": 0.528707990140083, - "flos": 14200254299640.0, - "grad_norm": 29.567227588423385, - "learning_rate": 1.9112264081870804e-06, - "loss": 0.9776, - "num_input_tokens_seen": 93785825, - "step": 4397 - }, - { - "epoch": 0.528828233030722, - "flos": 14672204911920.0, - "grad_norm": 7.630641913653418, - "learning_rate": 1.9104482110313843e-06, - "loss": 0.9766, - "num_input_tokens_seen": 93805135, - "step": 4398 - }, - { - "epoch": 0.5289484759213612, - "flos": 17923907419080.0, - "grad_norm": 4.071730236838147, - "learning_rate": 1.909670027460623e-06, - "loss": 0.9719, - "num_input_tokens_seen": 93822155, - "step": 4399 - }, - { - "epoch": 0.5290687188120002, - "flos": 22197714773040.0, - "grad_norm": 6.578427414274102, - "learning_rate": 1.908891857592847e-06, - "loss": 0.9443, - "num_input_tokens_seen": 93842945, - "step": 4400 - }, - { - "epoch": 0.5291889617026393, - "flos": 14278181967840.0, - "grad_norm": 7.620960353698639, - "learning_rate": 1.9081137015461034e-06, - "loss": 1.124, - "num_input_tokens_seen": 93858740, - "step": 4401 - }, - { - "epoch": 0.5293092045932785, - "flos": 13935773525400.0, - "grad_norm": 11.268196140585028, - "learning_rate": 1.9073355594384383e-06, - "loss": 1.1273, - "num_input_tokens_seen": 93876700, - "step": 4402 - }, - { - "epoch": 0.5294294474839175, - "flos": 17295396254520.0, - "grad_norm": 4.358784297176956, - "learning_rate": 1.906557431387895e-06, - "loss": 1.0372, - "num_input_tokens_seen": 93895410, - "step": 4403 - }, - { - 
"epoch": 0.5295496903745566, - "flos": 13383748935720.0, - "grad_norm": 9.335016870795004, - "learning_rate": 1.905779317512516e-06, - "loss": 1.0214, - "num_input_tokens_seen": 93912675, - "step": 4404 - }, - { - "epoch": 0.5296699332651957, - "flos": 14855538554160.0, - "grad_norm": 11.178006198370118, - "learning_rate": 1.9050012179303385e-06, - "loss": 1.0234, - "num_input_tokens_seen": 93930905, - "step": 4405 - }, - { - "epoch": 0.5297901761558348, - "flos": 15662078911080.0, - "grad_norm": 15.545357573560144, - "learning_rate": 1.904223132759401e-06, - "loss": 0.9016, - "num_input_tokens_seen": 93949225, - "step": 4406 - }, - { - "epoch": 0.5299104190464738, - "flos": 15484632288360.0, - "grad_norm": 4.507369047728606, - "learning_rate": 1.9034450621177383e-06, - "loss": 0.9032, - "num_input_tokens_seen": 93967265, - "step": 4407 - }, - { - "epoch": 0.530030661937113, - "flos": 10398090942360.0, - "grad_norm": 7.559325000965023, - "learning_rate": 1.9026670061233824e-06, - "loss": 0.9303, - "num_input_tokens_seen": 93984420, - "step": 4408 - }, - { - "epoch": 0.5301509048277521, - "flos": 15091559852640.0, - "grad_norm": 4.146785888634324, - "learning_rate": 1.901888964894365e-06, - "loss": 1.0321, - "num_input_tokens_seen": 94003180, - "step": 4409 - }, - { - "epoch": 0.5302711477183911, - "flos": 18473203129920.0, - "grad_norm": 3.993410404673711, - "learning_rate": 1.9011109385487134e-06, - "loss": 0.9039, - "num_input_tokens_seen": 94024150, - "step": 4410 - }, - { - "epoch": 0.5303913906090303, - "flos": 15825783606840.0, - "grad_norm": 5.276534463877435, - "learning_rate": 1.900332927204454e-06, - "loss": 0.8715, - "num_input_tokens_seen": 94042320, - "step": 4411 - }, - { - "epoch": 0.5305116334996693, - "flos": 17739469960680.0, - "grad_norm": 3.9667156141664446, - "learning_rate": 1.8995549309796097e-06, - "loss": 0.9919, - "num_input_tokens_seen": 94061345, - "step": 4412 - }, - { - "epoch": 0.5306318763903084, - "flos": 14327650160280.0, - "grad_norm": 3.50340840727136, - "learning_rate": 1.8987769499922028e-06, - "loss": 0.9918, - "num_input_tokens_seen": 94080035, - "step": 4413 - }, - { - "epoch": 0.5307521192809476, - "flos": 14383557280320.0, - "grad_norm": 4.217638561638621, - "learning_rate": 1.897998984360252e-06, - "loss": 0.926, - "num_input_tokens_seen": 94098725, - "step": 4414 - }, - { - "epoch": 0.5308723621715866, - "flos": 20548846470600.0, - "grad_norm": 3.2117942539508837, - "learning_rate": 1.897221034201775e-06, - "loss": 1.009, - "num_input_tokens_seen": 94122185, - "step": 4415 - }, - { - "epoch": 0.5309926050622257, - "flos": 19552380236040.0, - "grad_norm": 3.7063211418129973, - "learning_rate": 1.8964430996347842e-06, - "loss": 0.8942, - "num_input_tokens_seen": 94143455, - "step": 4416 - }, - { - "epoch": 0.5311128479528648, - "flos": 14560942579920.0, - "grad_norm": 4.313763198500731, - "learning_rate": 1.8956651807772931e-06, - "loss": 1.0569, - "num_input_tokens_seen": 94161210, - "step": 4417 - }, - { - "epoch": 0.5312330908435039, - "flos": 15196628549520.0, - "grad_norm": 3.3145197342949646, - "learning_rate": 1.8948872777473115e-06, - "loss": 1.0546, - "num_input_tokens_seen": 94178885, - "step": 4418 - }, - { - "epoch": 0.531353333734143, - "flos": 17582357500320.0, - "grad_norm": 6.726387260892439, - "learning_rate": 1.8941093906628458e-06, - "loss": 0.8712, - "num_input_tokens_seen": 94196390, - "step": 4419 - }, - { - "epoch": 0.531473576624782, - "flos": 21724169759640.0, - "grad_norm": 5.4203204034223695, - "learning_rate": 
1.893331519641902e-06, - "loss": 0.9487, - "num_input_tokens_seen": 94218255, - "step": 4420 - }, - { - "epoch": 0.5315938195154212, - "flos": 16350329890680.0, - "grad_norm": 4.899864950070631, - "learning_rate": 1.8925536648024815e-06, - "loss": 0.9763, - "num_input_tokens_seen": 94235395, - "step": 4421 - }, - { - "epoch": 0.5317140624060602, - "flos": 16167640141200.0, - "grad_norm": 11.286910140296888, - "learning_rate": 1.8917758262625849e-06, - "loss": 0.9883, - "num_input_tokens_seen": 94255355, - "step": 4422 - }, - { - "epoch": 0.5318343052966993, - "flos": 16219683904680.0, - "grad_norm": 2.617997411413348, - "learning_rate": 1.8909980041402089e-06, - "loss": 1.0398, - "num_input_tokens_seen": 94273670, - "step": 4423 - }, - { - "epoch": 0.5319545481873384, - "flos": 9611332839720.0, - "grad_norm": 6.034162855641883, - "learning_rate": 1.8902201985533494e-06, - "loss": 0.8859, - "num_input_tokens_seen": 94290655, - "step": 4424 - }, - { - "epoch": 0.5320747910779775, - "flos": 15746138891280.0, - "grad_norm": 3.409135069127603, - "learning_rate": 1.8894424096199983e-06, - "loss": 0.9791, - "num_input_tokens_seen": 94309580, - "step": 4425 - }, - { - "epoch": 0.5321950339686166, - "flos": 13177413575400.0, - "grad_norm": 7.226605785343339, - "learning_rate": 1.8886646374581463e-06, - "loss": 1.0859, - "num_input_tokens_seen": 94328525, - "step": 4426 - }, - { - "epoch": 0.5323152768592557, - "flos": 16245261193800.0, - "grad_norm": 5.247025510027644, - "learning_rate": 1.8878868821857795e-06, - "loss": 0.9259, - "num_input_tokens_seen": 94347895, - "step": 4427 - }, - { - "epoch": 0.5324355197498948, - "flos": 24216807100920.0, - "grad_norm": 5.091396419930667, - "learning_rate": 1.8871091439208838e-06, - "loss": 0.9668, - "num_input_tokens_seen": 94369225, - "step": 4428 - }, - { - "epoch": 0.5325557626405338, - "flos": 16532467732080.0, - "grad_norm": 9.63342669308664, - "learning_rate": 1.8863314227814414e-06, - "loss": 0.9905, - "num_input_tokens_seen": 94387255, - "step": 4429 - }, - { - "epoch": 0.532676005531173, - "flos": 19019953931280.0, - "grad_norm": 4.412111968790739, - "learning_rate": 1.8855537188854313e-06, - "loss": 0.69, - "num_input_tokens_seen": 94405950, - "step": 4430 - }, - { - "epoch": 0.5327962484218121, - "flos": 12678996488760.0, - "grad_norm": 4.2346605718394255, - "learning_rate": 1.8847760323508315e-06, - "loss": 1.0108, - "num_input_tokens_seen": 94424575, - "step": 4431 - }, - { - "epoch": 0.5329164913124511, - "flos": 12700342482600.0, - "grad_norm": 2.9113924204417487, - "learning_rate": 1.883998363295616e-06, - "loss": 0.9775, - "num_input_tokens_seen": 94441775, - "step": 4432 - }, - { - "epoch": 0.5330367342030903, - "flos": 40969745172480.0, - "grad_norm": 0.9524809115041546, - "learning_rate": 1.8832207118377565e-06, - "loss": 0.9175, - "num_input_tokens_seen": 94496865, - "step": 4433 - }, - { - "epoch": 0.5331569770937293, - "flos": 12338305093680.0, - "grad_norm": 3.414613308327922, - "learning_rate": 1.882443078095222e-06, - "loss": 0.9111, - "num_input_tokens_seen": 94515465, - "step": 4434 - }, - { - "epoch": 0.5332772199843684, - "flos": 40608964907520.0, - "grad_norm": 0.8596555078641868, - "learning_rate": 1.8816654621859794e-06, - "loss": 0.9193, - "num_input_tokens_seen": 94574850, - "step": 4435 - }, - { - "epoch": 0.5333974628750076, - "flos": 13256261090400.0, - "grad_norm": 3.3246230922842233, - "learning_rate": 1.8808878642279915e-06, - "loss": 0.9334, - "num_input_tokens_seen": 94589975, - "step": 4436 - }, - { - "epoch": 
0.5335177057656466, - "flos": 16927073245800.0, - "grad_norm": 4.822744785264295, - "learning_rate": 1.8801102843392209e-06, - "loss": 0.89, - "num_input_tokens_seen": 94609100, - "step": 4437 - }, - { - "epoch": 0.5336379486562857, - "flos": 17841564486240.0, - "grad_norm": 3.69210966966939, - "learning_rate": 1.8793327226376238e-06, - "loss": 1.0851, - "num_input_tokens_seen": 94628140, - "step": 4438 - }, - { - "epoch": 0.5337581915469248, - "flos": 15195800687400.0, - "grad_norm": 2.5906806157564937, - "learning_rate": 1.8785551792411569e-06, - "loss": 1.0246, - "num_input_tokens_seen": 94646870, - "step": 4439 - }, - { - "epoch": 0.5338784344375639, - "flos": 10502270454000.0, - "grad_norm": 3.254389512901701, - "learning_rate": 1.8777776542677733e-06, - "loss": 1.049, - "num_input_tokens_seen": 94664640, - "step": 4440 - }, - { - "epoch": 0.5339986773282029, - "flos": 14776752362280.0, - "grad_norm": 4.556192924654837, - "learning_rate": 1.8770001478354216e-06, - "loss": 0.9404, - "num_input_tokens_seen": 94684035, - "step": 4441 - }, - { - "epoch": 0.5341189202188421, - "flos": 12732726638040.0, - "grad_norm": 6.0626101139726085, - "learning_rate": 1.8762226600620504e-06, - "loss": 1.0543, - "num_input_tokens_seen": 94702370, - "step": 4442 - }, - { - "epoch": 0.5342391631094812, - "flos": 7746409462440.0, - "grad_norm": 5.0091596627312045, - "learning_rate": 1.8754451910656031e-06, - "loss": 0.8244, - "num_input_tokens_seen": 94715990, - "step": 4443 - }, - { - "epoch": 0.5343594060001202, - "flos": 10842225971640.0, - "grad_norm": 7.150811273392396, - "learning_rate": 1.8746677409640212e-06, - "loss": 1.0424, - "num_input_tokens_seen": 94732810, - "step": 4444 - }, - { - "epoch": 0.5344796488907594, - "flos": 19150875871320.0, - "grad_norm": 6.291202081702073, - "learning_rate": 1.8738903098752432e-06, - "loss": 1.0664, - "num_input_tokens_seen": 94751660, - "step": 4445 - }, - { - "epoch": 0.5345998917813984, - "flos": 18081111864120.0, - "grad_norm": 3.7807345708960907, - "learning_rate": 1.8731128979172052e-06, - "loss": 0.9533, - "num_input_tokens_seen": 94770580, - "step": 4446 - }, - { - "epoch": 0.5347201346720375, - "flos": 23429527751760.0, - "grad_norm": 4.638384037122204, - "learning_rate": 1.8723355052078394e-06, - "loss": 0.8824, - "num_input_tokens_seen": 94790335, - "step": 4447 - }, - { - "epoch": 0.5348403775626767, - "flos": 12731745468120.0, - "grad_norm": 3.418991476453905, - "learning_rate": 1.8715581318650765e-06, - "loss": 0.9888, - "num_input_tokens_seen": 94809110, - "step": 4448 - }, - { - "epoch": 0.5349606204533157, - "flos": 12469748280240.0, - "grad_norm": 5.753801830455267, - "learning_rate": 1.8707807780068422e-06, - "loss": 1.0348, - "num_input_tokens_seen": 94826645, - "step": 4449 - }, - { - "epoch": 0.5350808633439548, - "flos": 20780728458480.0, - "grad_norm": 5.246625996827479, - "learning_rate": 1.8700034437510611e-06, - "loss": 0.898, - "num_input_tokens_seen": 94846460, - "step": 4450 - }, - { - "epoch": 0.5352011062345938, - "flos": 13832759153040.0, - "grad_norm": 2.987913420733982, - "learning_rate": 1.8692261292156549e-06, - "loss": 1.0363, - "num_input_tokens_seen": 94865415, - "step": 4451 - }, - { - "epoch": 0.535321349125233, - "flos": 16691511870720.0, - "grad_norm": 4.030044730342884, - "learning_rate": 1.8684488345185401e-06, - "loss": 1.041, - "num_input_tokens_seen": 94885310, - "step": 4452 - }, - { - "epoch": 0.535441592015872, - "flos": 14534721398040.0, - "grad_norm": 3.5500826006668658, - "learning_rate": 
1.8676715597776332e-06, - "loss": 1.003, - "num_input_tokens_seen": 94903375, - "step": 4453 - }, - { - "epoch": 0.5355618349065111, - "flos": 13885354824600.0, - "grad_norm": 3.6157468888979487, - "learning_rate": 1.8668943051108455e-06, - "loss": 0.9818, - "num_input_tokens_seen": 94920400, - "step": 4454 - }, - { - "epoch": 0.5356820777971503, - "flos": 17084584306440.0, - "grad_norm": 3.3819153192838285, - "learning_rate": 1.8661170706360856e-06, - "loss": 0.985, - "num_input_tokens_seen": 94939285, - "step": 4455 - }, - { - "epoch": 0.5358023206877893, - "flos": 14827661648040.0, - "grad_norm": 4.624485296528955, - "learning_rate": 1.8653398564712594e-06, - "loss": 1.0541, - "num_input_tokens_seen": 94957950, - "step": 4456 - }, - { - "epoch": 0.5359225635784284, - "flos": 15930607011240.0, - "grad_norm": 28.066487528153015, - "learning_rate": 1.8645626627342704e-06, - "loss": 1.0517, - "num_input_tokens_seen": 94978435, - "step": 4457 - }, - { - "epoch": 0.5360428064690675, - "flos": 17137179978000.0, - "grad_norm": 5.2447199837092855, - "learning_rate": 1.8637854895430172e-06, - "loss": 1.0189, - "num_input_tokens_seen": 94997420, - "step": 4458 - }, - { - "epoch": 0.5361630493597066, - "flos": 15223156347000.0, - "grad_norm": 4.220643021465857, - "learning_rate": 1.8630083370153978e-06, - "loss": 0.914, - "num_input_tokens_seen": 95016780, - "step": 4459 - }, - { - "epoch": 0.5362832922503457, - "flos": 49330653466560.0, - "grad_norm": 0.7769585042990707, - "learning_rate": 1.8622312052693041e-06, - "loss": 0.8168, - "num_input_tokens_seen": 95077680, - "step": 4460 - }, - { - "epoch": 0.5364035351409848, - "flos": 6856882279920.0, - "grad_norm": 3.899011237175973, - "learning_rate": 1.8614540944226267e-06, - "loss": 0.9531, - "num_input_tokens_seen": 95094070, - "step": 4461 - }, - { - "epoch": 0.5365237780316239, - "flos": 16556021358240.0, - "grad_norm": 15.353013111330286, - "learning_rate": 1.8606770045932537e-06, - "loss": 0.9132, - "num_input_tokens_seen": 95112905, - "step": 4462 - }, - { - "epoch": 0.5366440209222629, - "flos": 18919668437760.0, - "grad_norm": 9.380843719081772, - "learning_rate": 1.859899935899068e-06, - "loss": 1.0401, - "num_input_tokens_seen": 95132480, - "step": 4463 - }, - { - "epoch": 0.5367642638129021, - "flos": 13910226897840.0, - "grad_norm": 3.2544224800117396, - "learning_rate": 1.8591228884579506e-06, - "loss": 1.0243, - "num_input_tokens_seen": 95150695, - "step": 4464 - }, - { - "epoch": 0.5368845067035412, - "flos": 17005951422360.0, - "grad_norm": 3.5477781784318103, - "learning_rate": 1.8583458623877795e-06, - "loss": 1.0469, - "num_input_tokens_seen": 95169515, - "step": 4465 - }, - { - "epoch": 0.5370047495941802, - "flos": 11944895380800.0, - "grad_norm": 7.964971386989199, - "learning_rate": 1.8575688578064281e-06, - "loss": 0.9601, - "num_input_tokens_seen": 95187360, - "step": 4466 - }, - { - "epoch": 0.5371249924848194, - "flos": 14724647275680.0, - "grad_norm": 3.70723293708457, - "learning_rate": 1.8567918748317674e-06, - "loss": 0.9822, - "num_input_tokens_seen": 95206430, - "step": 4467 - }, - { - "epoch": 0.5372452353754584, - "flos": 12731898775920.0, - "grad_norm": 9.973947156828485, - "learning_rate": 1.8560149135816659e-06, - "loss": 1.0475, - "num_input_tokens_seen": 95222985, - "step": 4468 - }, - { - "epoch": 0.5373654782660975, - "flos": 10869642954360.0, - "grad_norm": 4.991588001342507, - "learning_rate": 1.8552379741739873e-06, - "loss": 1.0621, - "num_input_tokens_seen": 95240050, - "step": 4469 - }, - { - 
"epoch": 0.5374857211567367, - "flos": 49410972736440.0, - "grad_norm": 0.893356284539432, - "learning_rate": 1.8544610567265935e-06, - "loss": 0.8114, - "num_input_tokens_seen": 95293710, - "step": 4470 - }, - { - "epoch": 0.5376059640473757, - "flos": 10660057468680.0, - "grad_norm": 4.111043729029428, - "learning_rate": 1.853684161357341e-06, - "loss": 1.0655, - "num_input_tokens_seen": 95311090, - "step": 4471 - }, - { - "epoch": 0.5377262069380148, - "flos": 14043141839280.0, - "grad_norm": 4.866629432291923, - "learning_rate": 1.852907288184085e-06, - "loss": 1.0054, - "num_input_tokens_seen": 95329695, - "step": 4472 - }, - { - "epoch": 0.5378464498286539, - "flos": 21380473531680.0, - "grad_norm": 3.582344998229558, - "learning_rate": 1.8521304373246762e-06, - "loss": 0.9276, - "num_input_tokens_seen": 95350460, - "step": 4473 - }, - { - "epoch": 0.537966692719293, - "flos": 15091314560160.0, - "grad_norm": 7.0192785456391125, - "learning_rate": 1.8513536088969626e-06, - "loss": 1.1128, - "num_input_tokens_seen": 95367845, - "step": 4474 - }, - { - "epoch": 0.538086935609932, - "flos": 15301145338320.0, - "grad_norm": 3.5069169768379296, - "learning_rate": 1.8505768030187884e-06, - "loss": 1.0218, - "num_input_tokens_seen": 95387695, - "step": 4475 - }, - { - "epoch": 0.5382071785005712, - "flos": 16166475001920.0, - "grad_norm": 2.7396797385616543, - "learning_rate": 1.849800019807995e-06, - "loss": 1.0347, - "num_input_tokens_seen": 95408640, - "step": 4476 - }, - { - "epoch": 0.5383274213912103, - "flos": 17738243498280.0, - "grad_norm": 3.6984876738071226, - "learning_rate": 1.8490232593824186e-06, - "loss": 0.9405, - "num_input_tokens_seen": 95424815, - "step": 4477 - }, - { - "epoch": 0.5384476642818493, - "flos": 15852495373680.0, - "grad_norm": 2.6547103009695756, - "learning_rate": 1.8482465218598935e-06, - "loss": 1.0677, - "num_input_tokens_seen": 95444480, - "step": 4478 - }, - { - "epoch": 0.5385679071724885, - "flos": 16140499112520.0, - "grad_norm": 3.6901331406637885, - "learning_rate": 1.8474698073582508e-06, - "loss": 1.0694, - "num_input_tokens_seen": 95465570, - "step": 4479 - }, - { - "epoch": 0.5386881500631275, - "flos": 11283172198680.0, - "grad_norm": 5.029054150696209, - "learning_rate": 1.8466931159953166e-06, - "loss": 1.0858, - "num_input_tokens_seen": 95481925, - "step": 4480 - }, - { - "epoch": 0.5388083929537666, - "flos": 17109885641520.0, - "grad_norm": 5.267230686134048, - "learning_rate": 1.8459164478889158e-06, - "loss": 1.0705, - "num_input_tokens_seen": 95503040, - "step": 4481 - }, - { - "epoch": 0.5389286358444056, - "flos": 15799777055880.0, - "grad_norm": 7.338882196529063, - "learning_rate": 1.8451398031568663e-06, - "loss": 0.9808, - "num_input_tokens_seen": 95522385, - "step": 4482 - }, - { - "epoch": 0.5390488787350448, - "flos": 17269573672920.0, - "grad_norm": 3.8118513212079925, - "learning_rate": 1.844363181916986e-06, - "loss": 0.9745, - "num_input_tokens_seen": 95542830, - "step": 4483 - }, - { - "epoch": 0.5391691216256839, - "flos": 11735248572000.0, - "grad_norm": 4.286608384736328, - "learning_rate": 1.8435865842870868e-06, - "loss": 1.0551, - "num_input_tokens_seen": 95560490, - "step": 4484 - }, - { - "epoch": 0.5392893645163229, - "flos": 16585216711440.0, - "grad_norm": 2.442439136976742, - "learning_rate": 1.8428100103849787e-06, - "loss": 0.9392, - "num_input_tokens_seen": 95580005, - "step": 4485 - }, - { - "epoch": 0.5394096074069621, - "flos": 11080178948400.0, - "grad_norm": 5.074269494910811, - "learning_rate": 
1.842033460328467e-06, - "loss": 0.9352, - "num_input_tokens_seen": 95598445, - "step": 4486 - }, - { - "epoch": 0.5395298502976011, - "flos": 16271605021920.0, - "grad_norm": 6.646523976887603, - "learning_rate": 1.8412569342353541e-06, - "loss": 0.9725, - "num_input_tokens_seen": 95618320, - "step": 4487 - }, - { - "epoch": 0.5396500931882402, - "flos": 16953417073920.0, - "grad_norm": 4.671889493275492, - "learning_rate": 1.840480432223438e-06, - "loss": 1.0701, - "num_input_tokens_seen": 95637045, - "step": 4488 - }, - { - "epoch": 0.5397703360788794, - "flos": 18736334795520.0, - "grad_norm": 7.088946224181415, - "learning_rate": 1.8397039544105131e-06, - "loss": 0.9887, - "num_input_tokens_seen": 95655850, - "step": 4489 - }, - { - "epoch": 0.5398905789695184, - "flos": 15064970732040.0, - "grad_norm": 3.4230568991547705, - "learning_rate": 1.8389275009143711e-06, - "loss": 0.9268, - "num_input_tokens_seen": 95675310, - "step": 4490 - }, - { - "epoch": 0.5400108218601575, - "flos": 17842974918000.0, - "grad_norm": 5.069565728302966, - "learning_rate": 1.8381510718527988e-06, - "loss": 0.9561, - "num_input_tokens_seen": 95694640, - "step": 4491 - }, - { - "epoch": 0.5401310647507966, - "flos": 18760869591600.0, - "grad_norm": 2.6028626088753035, - "learning_rate": 1.8373746673435812e-06, - "loss": 0.8671, - "num_input_tokens_seen": 95715385, - "step": 4492 - }, - { - "epoch": 0.5402513076414357, - "flos": 19837593772920.0, - "grad_norm": 14.418010553481798, - "learning_rate": 1.8365982875044964e-06, - "loss": 1.016, - "num_input_tokens_seen": 95735415, - "step": 4493 - }, - { - "epoch": 0.5403715505320748, - "flos": 16271880975960.0, - "grad_norm": 3.8150913624554046, - "learning_rate": 1.8358219324533217e-06, - "loss": 0.9864, - "num_input_tokens_seen": 95755400, - "step": 4494 - }, - { - "epoch": 0.5404917934227139, - "flos": 21539640316560.0, - "grad_norm": 3.2627635630114593, - "learning_rate": 1.8350456023078292e-06, - "loss": 0.9326, - "num_input_tokens_seen": 95777495, - "step": 4495 - }, - { - "epoch": 0.540612036313353, - "flos": 14147995905240.0, - "grad_norm": 3.997293152458535, - "learning_rate": 1.8342692971857874e-06, - "loss": 0.988, - "num_input_tokens_seen": 95796415, - "step": 4496 - }, - { - "epoch": 0.540732279203992, - "flos": 17267764640880.0, - "grad_norm": 5.800486958549155, - "learning_rate": 1.833493017204962e-06, - "loss": 0.9324, - "num_input_tokens_seen": 95816240, - "step": 4497 - }, - { - "epoch": 0.5408525220946312, - "flos": 14330961608760.0, - "grad_norm": 3.061319551873283, - "learning_rate": 1.8327167624831134e-06, - "loss": 0.9889, - "num_input_tokens_seen": 95833690, - "step": 4498 - }, - { - "epoch": 0.5409727649852702, - "flos": 17163554467680.0, - "grad_norm": 7.314932394178123, - "learning_rate": 1.831940533137999e-06, - "loss": 0.9303, - "num_input_tokens_seen": 95852315, - "step": 4499 - }, - { - "epoch": 0.5410930078759093, - "flos": 17003805113160.0, - "grad_norm": 6.075592690271949, - "learning_rate": 1.8311643292873718e-06, - "loss": 0.9488, - "num_input_tokens_seen": 95870855, - "step": 4500 - }, - { - "epoch": 0.5412132507665485, - "flos": 14986307186400.0, - "grad_norm": 3.3672587305310238, - "learning_rate": 1.8303881510489818e-06, - "loss": 1.1127, - "num_input_tokens_seen": 95888965, - "step": 4501 - }, - { - "epoch": 0.5413334936571875, - "flos": 21542369195400.0, - "grad_norm": 7.238242250636977, - "learning_rate": 1.829611998540574e-06, - "loss": 0.9157, - "num_input_tokens_seen": 95909890, - "step": 4502 - }, - { - "epoch": 
0.5414537365478266, - "flos": 17267795302440.0, - "grad_norm": 3.088911800924656, - "learning_rate": 1.8288358718798914e-06, - "loss": 1.0305, - "num_input_tokens_seen": 95928800, - "step": 4503 - }, - { - "epoch": 0.5415739794384657, - "flos": 11787844243560.0, - "grad_norm": 4.207396425002525, - "learning_rate": 1.8280597711846703e-06, - "loss": 0.9493, - "num_input_tokens_seen": 95946760, - "step": 4504 - }, - { - "epoch": 0.5416942223291048, - "flos": 16481711754120.0, - "grad_norm": 6.124743709262486, - "learning_rate": 1.8272836965726455e-06, - "loss": 1.0619, - "num_input_tokens_seen": 95965415, - "step": 4505 - }, - { - "epoch": 0.5418144652197439, - "flos": 14410115739360.0, - "grad_norm": 4.731831991261975, - "learning_rate": 1.8265076481615461e-06, - "loss": 1.0086, - "num_input_tokens_seen": 95985050, - "step": 4506 - }, - { - "epoch": 0.541934708110383, - "flos": 8772316342680.0, - "grad_norm": 3.6816461942514245, - "learning_rate": 1.8257316260690987e-06, - "loss": 1.0952, - "num_input_tokens_seen": 96002555, - "step": 4507 - }, - { - "epoch": 0.5420549510010221, - "flos": 15093522192480.0, - "grad_norm": 6.466096342964889, - "learning_rate": 1.8249556304130254e-06, - "loss": 0.99, - "num_input_tokens_seen": 96023555, - "step": 4508 - }, - { - "epoch": 0.5421751938916611, - "flos": 21011874568920.0, - "grad_norm": 7.53877205560994, - "learning_rate": 1.824179661311044e-06, - "loss": 0.9184, - "num_input_tokens_seen": 96042025, - "step": 4509 - }, - { - "epoch": 0.5422954367823003, - "flos": 13282482272280.0, - "grad_norm": 5.240410357124675, - "learning_rate": 1.823403718880868e-06, - "loss": 1.0151, - "num_input_tokens_seen": 96060505, - "step": 4510 - }, - { - "epoch": 0.5424156796729394, - "flos": 28543516742040.0, - "grad_norm": 3.1993837418821496, - "learning_rate": 1.822627803240207e-06, - "loss": 0.9, - "num_input_tokens_seen": 96082555, - "step": 4511 - }, - { - "epoch": 0.5425359225635784, - "flos": 8116817457240.0, - "grad_norm": 3.527388224949752, - "learning_rate": 1.8218519145067675e-06, - "loss": 1.0808, - "num_input_tokens_seen": 96097895, - "step": 4512 - }, - { - "epoch": 0.5426561654542175, - "flos": 14357182790640.0, - "grad_norm": 4.013405984577925, - "learning_rate": 1.8210760527982508e-06, - "loss": 1.1157, - "num_input_tokens_seen": 96117900, - "step": 4513 - }, - { - "epoch": 0.5427764083448566, - "flos": 15537258621480.0, - "grad_norm": 4.4321465235014665, - "learning_rate": 1.8203002182323552e-06, - "loss": 0.9709, - "num_input_tokens_seen": 96135175, - "step": 4514 - }, - { - "epoch": 0.5428966512354957, - "flos": 13933381923720.0, - "grad_norm": 20.435372058084575, - "learning_rate": 1.819524410926773e-06, - "loss": 0.9849, - "num_input_tokens_seen": 96152575, - "step": 4515 - }, - { - "epoch": 0.5430168941261347, - "flos": 15904109875320.0, - "grad_norm": 3.212209927417237, - "learning_rate": 1.8187486309991944e-06, - "loss": 0.9973, - "num_input_tokens_seen": 96173175, - "step": 4516 - }, - { - "epoch": 0.5431371370167739, - "flos": 13303123050240.0, - "grad_norm": 2.822983763250452, - "learning_rate": 1.817972878567304e-06, - "loss": 0.9982, - "num_input_tokens_seen": 96191550, - "step": 4517 - }, - { - "epoch": 0.543257379907413, - "flos": 13333912804560.0, - "grad_norm": 5.5459831272973235, - "learning_rate": 1.8171971537487834e-06, - "loss": 0.9918, - "num_input_tokens_seen": 96209920, - "step": 4518 - }, - { - "epoch": 0.543377622798052, - "flos": 12390655472760.0, - "grad_norm": 4.52428516621784, - "learning_rate": 1.8164214566613093e-06, - 
"loss": 1.0258, - "num_input_tokens_seen": 96228265, - "step": 4519 - }, - { - "epoch": 0.5434978656886912, - "flos": 13465754591400.0, - "grad_norm": 6.066152514514097, - "learning_rate": 1.8156457874225547e-06, - "loss": 0.8693, - "num_input_tokens_seen": 96246445, - "step": 4520 - }, - { - "epoch": 0.5436181085793302, - "flos": 12233726981760.0, - "grad_norm": 3.5937964394927464, - "learning_rate": 1.814870146150187e-06, - "loss": 1.027, - "num_input_tokens_seen": 96264275, - "step": 4521 - }, - { - "epoch": 0.5437383514699693, - "flos": 13544264829240.0, - "grad_norm": 3.839513661660947, - "learning_rate": 1.814094532961871e-06, - "loss": 1.0129, - "num_input_tokens_seen": 96282570, - "step": 4522 - }, - { - "epoch": 0.5438585943606085, - "flos": 16062816736800.0, - "grad_norm": 3.9254871770687956, - "learning_rate": 1.8133189479752666e-06, - "loss": 1.051, - "num_input_tokens_seen": 96301220, - "step": 4523 - }, - { - "epoch": 0.5439788372512475, - "flos": 15484632288360.0, - "grad_norm": 4.540303623616917, - "learning_rate": 1.8125433913080292e-06, - "loss": 1.0446, - "num_input_tokens_seen": 96318640, - "step": 4524 - }, - { - "epoch": 0.5440990801418866, - "flos": 11604479939760.0, - "grad_norm": 4.076567955495943, - "learning_rate": 1.811767863077811e-06, - "loss": 1.0412, - "num_input_tokens_seen": 96337310, - "step": 4525 - }, - { - "epoch": 0.5442193230325257, - "flos": 15353158440240.0, - "grad_norm": 2.7937072204455027, - "learning_rate": 1.8109923634022577e-06, - "loss": 1.013, - "num_input_tokens_seen": 96357055, - "step": 4526 - }, - { - "epoch": 0.5443395659231648, - "flos": 10943799250680.0, - "grad_norm": 3.4508455097346307, - "learning_rate": 1.8102168923990128e-06, - "loss": 1.0824, - "num_input_tokens_seen": 96370320, - "step": 4527 - }, - { - "epoch": 0.5444598088138038, - "flos": 13308458161680.0, - "grad_norm": 3.0054295996666176, - "learning_rate": 1.809441450185714e-06, - "loss": 1.0179, - "num_input_tokens_seen": 96388525, - "step": 4528 - }, - { - "epoch": 0.544580051704443, - "flos": 15013141599480.0, - "grad_norm": 6.16791741686207, - "learning_rate": 1.8086660368799958e-06, - "loss": 0.9651, - "num_input_tokens_seen": 96406295, - "step": 4529 - }, - { - "epoch": 0.5447002945950821, - "flos": 23169860842440.0, - "grad_norm": 2.8096476183914008, - "learning_rate": 1.807890652599488e-06, - "loss": 0.99, - "num_input_tokens_seen": 96430400, - "step": 4530 - }, - { - "epoch": 0.5448205374857211, - "flos": 8297268912840.0, - "grad_norm": 3.5489667866416483, - "learning_rate": 1.8071152974618156e-06, - "loss": 1.0456, - "num_input_tokens_seen": 96447920, - "step": 4531 - }, - { - "epoch": 0.5449407803763603, - "flos": 17162971898040.0, - "grad_norm": 3.683889011106983, - "learning_rate": 1.806339971584599e-06, - "loss": 1.0139, - "num_input_tokens_seen": 96464300, - "step": 4532 - }, - { - "epoch": 0.5450610232669993, - "flos": 16534031471640.0, - "grad_norm": 3.9818229131326084, - "learning_rate": 1.8055646750854546e-06, - "loss": 1.0827, - "num_input_tokens_seen": 96483530, - "step": 4533 - }, - { - "epoch": 0.5451812661576384, - "flos": 12600271620000.0, - "grad_norm": 9.615002480762973, - "learning_rate": 1.8047894080819945e-06, - "loss": 1.0452, - "num_input_tokens_seen": 96500500, - "step": 4534 - }, - { - "epoch": 0.5453015090482776, - "flos": 50895854841000.0, - "grad_norm": 0.7399141127129161, - "learning_rate": 1.8040141706918258e-06, - "loss": 0.8935, - "num_input_tokens_seen": 96561460, - "step": 4535 - }, - { - "epoch": 0.5454217519389166, - "flos": 
18181765296360.0, - "grad_norm": 2.9154420915903136, - "learning_rate": 1.8032389630325525e-06, - "loss": 0.9934, - "num_input_tokens_seen": 96579930, - "step": 4536 - }, - { - "epoch": 0.5455419948295557, - "flos": 16821146025240.0, - "grad_norm": 3.7155488926413547, - "learning_rate": 1.8024637852217707e-06, - "loss": 0.992, - "num_input_tokens_seen": 96599375, - "step": 4537 - }, - { - "epoch": 0.5456622377201948, - "flos": 16638486937320.0, - "grad_norm": 3.8731182820888366, - "learning_rate": 1.8016886373770766e-06, - "loss": 1.075, - "num_input_tokens_seen": 96617610, - "step": 4538 - }, - { - "epoch": 0.5457824806108339, - "flos": 17058271139880.0, - "grad_norm": 4.740864853647157, - "learning_rate": 1.8009135196160579e-06, - "loss": 1.0011, - "num_input_tokens_seen": 96636205, - "step": 4539 - }, - { - "epoch": 0.545902723501473, - "flos": 16036288939320.0, - "grad_norm": 5.523650039835366, - "learning_rate": 1.8001384320563e-06, - "loss": 1.0648, - "num_input_tokens_seen": 96656180, - "step": 4540 - }, - { - "epoch": 0.5460229663921121, - "flos": 39490596779640.0, - "grad_norm": 3.508421439350431, - "learning_rate": 1.7993633748153833e-06, - "loss": 0.8359, - "num_input_tokens_seen": 96710505, - "step": 4541 - }, - { - "epoch": 0.5461432092827512, - "flos": 10896170751840.0, - "grad_norm": 3.484103478770579, - "learning_rate": 1.7985883480108834e-06, - "loss": 0.9523, - "num_input_tokens_seen": 96727860, - "step": 4542 - }, - { - "epoch": 0.5462634521733902, - "flos": 17084829598920.0, - "grad_norm": 2.5004485681557593, - "learning_rate": 1.797813351760371e-06, - "loss": 0.9515, - "num_input_tokens_seen": 96749285, - "step": 4543 - }, - { - "epoch": 0.5463836950640293, - "flos": 16219561258440.0, - "grad_norm": 3.6295278486987783, - "learning_rate": 1.7970383861814116e-06, - "loss": 1.0004, - "num_input_tokens_seen": 96768775, - "step": 4544 - }, - { - "epoch": 0.5465039379546685, - "flos": 14514233927880.0, - "grad_norm": 3.8216917204558993, - "learning_rate": 1.7962634513915684e-06, - "loss": 0.9785, - "num_input_tokens_seen": 96785845, - "step": 4545 - }, - { - "epoch": 0.5466241808453075, - "flos": 12282244665840.0, - "grad_norm": 15.006992998838491, - "learning_rate": 1.7954885475083969e-06, - "loss": 1.0247, - "num_input_tokens_seen": 96803235, - "step": 4546 - }, - { - "epoch": 0.5467444237359466, - "flos": 15354354241080.0, - "grad_norm": 12.335623313763374, - "learning_rate": 1.7947136746494513e-06, - "loss": 0.958, - "num_input_tokens_seen": 96823870, - "step": 4547 - }, - { - "epoch": 0.5468646666265857, - "flos": 17189377049280.0, - "grad_norm": 10.957210894249261, - "learning_rate": 1.793938832932277e-06, - "loss": 1.1032, - "num_input_tokens_seen": 96841700, - "step": 4548 - }, - { - "epoch": 0.5469849095172248, - "flos": 19601633797560.0, - "grad_norm": 4.036442687301665, - "learning_rate": 1.7931640224744185e-06, - "loss": 0.9287, - "num_input_tokens_seen": 96861970, - "step": 4549 - }, - { - "epoch": 0.5471051524078638, - "flos": 19916165333880.0, - "grad_norm": 3.637227449988414, - "learning_rate": 1.7923892433934127e-06, - "loss": 0.9605, - "num_input_tokens_seen": 96882765, - "step": 4550 - }, - { - "epoch": 0.547225395298503, - "flos": 12862974023760.0, - "grad_norm": 2.908570050622794, - "learning_rate": 1.7916144958067939e-06, - "loss": 1.0057, - "num_input_tokens_seen": 96900345, - "step": 4551 - }, - { - "epoch": 0.5473456381891421, - "flos": 15170683321680.0, - "grad_norm": 2.705831154353568, - "learning_rate": 1.7908397798320905e-06, - "loss": 1.0124, - 
"num_input_tokens_seen": 96919800, - "step": 4552 - }, - { - "epoch": 0.5474658810797811, - "flos": 14142568809120.0, - "grad_norm": 2.66959520307458, - "learning_rate": 1.7900650955868265e-06, - "loss": 0.9824, - "num_input_tokens_seen": 96939165, - "step": 4553 - }, - { - "epoch": 0.5475861239704203, - "flos": 36094757200080.0, - "grad_norm": 2.4212284332559535, - "learning_rate": 1.7892904431885202e-06, - "loss": 0.9957, - "num_input_tokens_seen": 96966060, - "step": 4554 - }, - { - "epoch": 0.5477063668610593, - "flos": 14699008663440.0, - "grad_norm": 3.1546643494828337, - "learning_rate": 1.788515822754686e-06, - "loss": 0.9824, - "num_input_tokens_seen": 96986200, - "step": 4555 - }, - { - "epoch": 0.5478266097516984, - "flos": 13911667991160.0, - "grad_norm": 5.436298456842232, - "learning_rate": 1.7877412344028335e-06, - "loss": 1.0041, - "num_input_tokens_seen": 97005725, - "step": 4556 - }, - { - "epoch": 0.5479468526423376, - "flos": 9087093171480.0, - "grad_norm": 4.366257788659113, - "learning_rate": 1.7869666782504668e-06, - "loss": 0.9917, - "num_input_tokens_seen": 97022025, - "step": 4557 - }, - { - "epoch": 0.5480670955329766, - "flos": 13178026806600.0, - "grad_norm": 3.6474527014214244, - "learning_rate": 1.7861921544150867e-06, - "loss": 0.9181, - "num_input_tokens_seen": 97040595, - "step": 4558 - }, - { - "epoch": 0.5481873384236157, - "flos": 11283908076120.0, - "grad_norm": 2.958432227259251, - "learning_rate": 1.7854176630141856e-06, - "loss": 0.988, - "num_input_tokens_seen": 97057450, - "step": 4559 - }, - { - "epoch": 0.5483075813142548, - "flos": 16192788168480.0, - "grad_norm": 3.867585626010616, - "learning_rate": 1.784643204165255e-06, - "loss": 1.0431, - "num_input_tokens_seen": 97076490, - "step": 4560 - }, - { - "epoch": 0.5484278242048939, - "flos": 13701346628040.0, - "grad_norm": 5.759213239661523, - "learning_rate": 1.7838687779857783e-06, - "loss": 0.997, - "num_input_tokens_seen": 97094085, - "step": 4561 - }, - { - "epoch": 0.5485480670955329, - "flos": 16215360624720.0, - "grad_norm": 4.456695303262615, - "learning_rate": 1.7830943845932366e-06, - "loss": 0.8661, - "num_input_tokens_seen": 97113130, - "step": 4562 - }, - { - "epoch": 0.5486683099861721, - "flos": 16111518390240.0, - "grad_norm": 2.895741980413045, - "learning_rate": 1.7823200241051044e-06, - "loss": 0.9803, - "num_input_tokens_seen": 97131765, - "step": 4563 - }, - { - "epoch": 0.5487885528768112, - "flos": 16455613218480.0, - "grad_norm": 3.936913825105725, - "learning_rate": 1.7815456966388513e-06, - "loss": 1.0261, - "num_input_tokens_seen": 97150580, - "step": 4564 - }, - { - "epoch": 0.5489087957674502, - "flos": 15668211223080.0, - "grad_norm": 3.790345405399926, - "learning_rate": 1.780771402311943e-06, - "loss": 1.0546, - "num_input_tokens_seen": 97169135, - "step": 4565 - }, - { - "epoch": 0.5490290386580894, - "flos": 17293893838080.0, - "grad_norm": 11.137784320186322, - "learning_rate": 1.7799971412418374e-06, - "loss": 1.0075, - "num_input_tokens_seen": 97190250, - "step": 4566 - }, - { - "epoch": 0.5491492815487284, - "flos": 12966570965760.0, - "grad_norm": 6.493648653827482, - "learning_rate": 1.7792229135459918e-06, - "loss": 0.9671, - "num_input_tokens_seen": 97206620, - "step": 4567 - }, - { - "epoch": 0.5492695244393675, - "flos": 46213153686360.0, - "grad_norm": 0.810496059801148, - "learning_rate": 1.7784487193418538e-06, - "loss": 0.8635, - "num_input_tokens_seen": 97264190, - "step": 4568 - }, - { - "epoch": 0.5493897673300067, - "flos": 12308281878360.0, 
- "grad_norm": 4.132495658858385, - "learning_rate": 1.7776745587468698e-06, - "loss": 0.8277, - "num_input_tokens_seen": 97281335, - "step": 4569 - }, - { - "epoch": 0.5495100102206457, - "flos": 14121560092440.0, - "grad_norm": 5.263422021261583, - "learning_rate": 1.7769004318784776e-06, - "loss": 1.0411, - "num_input_tokens_seen": 97298700, - "step": 4570 - }, - { - "epoch": 0.5496302531112848, - "flos": 11840010653280.0, - "grad_norm": 4.224172540478596, - "learning_rate": 1.776126338854113e-06, - "loss": 1.0306, - "num_input_tokens_seen": 97316210, - "step": 4571 - }, - { - "epoch": 0.5497504960019239, - "flos": 17478269973360.0, - "grad_norm": 2.7488358374321162, - "learning_rate": 1.7753522797912044e-06, - "loss": 1.0745, - "num_input_tokens_seen": 97336015, - "step": 4572 - }, - { - "epoch": 0.549870738892563, - "flos": 10922514579960.0, - "grad_norm": 5.068727407158149, - "learning_rate": 1.7745782548071765e-06, - "loss": 0.9246, - "num_input_tokens_seen": 97352630, - "step": 4573 - }, - { - "epoch": 0.549990981783202, - "flos": 14960484604800.0, - "grad_norm": 4.9532287779324395, - "learning_rate": 1.7738042640194482e-06, - "loss": 0.9639, - "num_input_tokens_seen": 97372015, - "step": 4574 - }, - { - "epoch": 0.5501112246738411, - "flos": 15195126133080.0, - "grad_norm": 6.76764041051785, - "learning_rate": 1.7730303075454335e-06, - "loss": 0.9308, - "num_input_tokens_seen": 97390625, - "step": 4575 - }, - { - "epoch": 0.5502314675644803, - "flos": 12364464952440.0, - "grad_norm": 2.5268602336156767, - "learning_rate": 1.7722563855025402e-06, - "loss": 1.08, - "num_input_tokens_seen": 97408375, - "step": 4576 - }, - { - "epoch": 0.5503517104551193, - "flos": 17289141296280.0, - "grad_norm": 88.21490593378338, - "learning_rate": 1.7714824980081721e-06, - "loss": 0.937, - "num_input_tokens_seen": 97427390, - "step": 4577 - }, - { - "epoch": 0.5504719533457584, - "flos": 15930974949960.0, - "grad_norm": 3.7153431649955566, - "learning_rate": 1.7707086451797276e-06, - "loss": 0.971, - "num_input_tokens_seen": 97447985, - "step": 4578 - }, - { - "epoch": 0.5505921962363975, - "flos": 48186150593400.0, - "grad_norm": 0.878547798812581, - "learning_rate": 1.7699348271345993e-06, - "loss": 0.7713, - "num_input_tokens_seen": 97510330, - "step": 4579 - }, - { - "epoch": 0.5507124391270366, - "flos": 32654870976120.0, - "grad_norm": 1.0762131366331351, - "learning_rate": 1.7691610439901753e-06, - "loss": 0.7941, - "num_input_tokens_seen": 97572985, - "step": 4580 - }, - { - "epoch": 0.5508326820176757, - "flos": 15826396838040.0, - "grad_norm": 2.707793421603799, - "learning_rate": 1.7683872958638367e-06, - "loss": 0.9802, - "num_input_tokens_seen": 97591585, - "step": 4581 - }, - { - "epoch": 0.5509529249083148, - "flos": 14201266131120.0, - "grad_norm": 4.802645216677004, - "learning_rate": 1.7676135828729614e-06, - "loss": 1.0634, - "num_input_tokens_seen": 97611015, - "step": 4582 - }, - { - "epoch": 0.5510731677989539, - "flos": 15510700162440.0, - "grad_norm": 3.1580782590208396, - "learning_rate": 1.7668399051349205e-06, - "loss": 1.0601, - "num_input_tokens_seen": 97630415, - "step": 4583 - }, - { - "epoch": 0.5511934106895929, - "flos": 15246740634720.0, - "grad_norm": 4.567550163473808, - "learning_rate": 1.766066262767081e-06, - "loss": 1.0497, - "num_input_tokens_seen": 97647975, - "step": 4584 - }, - { - "epoch": 0.5513136535802321, - "flos": 14960331297000.0, - "grad_norm": 4.219320075053794, - "learning_rate": 1.765292655886803e-06, - "loss": 0.9961, - 
"num_input_tokens_seen": 97666340, - "step": 4585 - }, - { - "epoch": 0.5514338964708712, - "flos": 19808091804120.0, - "grad_norm": 2.9820534828962266, - "learning_rate": 1.764519084611443e-06, - "loss": 0.9411, - "num_input_tokens_seen": 97686515, - "step": 4586 - }, - { - "epoch": 0.5515541393615102, - "flos": 15563786418960.0, - "grad_norm": 5.204161211453285, - "learning_rate": 1.7637455490583505e-06, - "loss": 0.9966, - "num_input_tokens_seen": 97705560, - "step": 4587 - }, - { - "epoch": 0.5516743822521494, - "flos": 14541007017840.0, - "grad_norm": 4.339256102625186, - "learning_rate": 1.7629720493448701e-06, - "loss": 1.0066, - "num_input_tokens_seen": 97722575, - "step": 4588 - }, - { - "epoch": 0.5517946251427884, - "flos": 10556000603280.0, - "grad_norm": 4.848070543462086, - "learning_rate": 1.7621985855883418e-06, - "loss": 1.0768, - "num_input_tokens_seen": 97738995, - "step": 4589 - }, - { - "epoch": 0.5519148680334275, - "flos": 13045633111680.0, - "grad_norm": 3.9869419580372387, - "learning_rate": 1.7614251579060983e-06, - "loss": 0.9454, - "num_input_tokens_seen": 97757310, - "step": 4590 - }, - { - "epoch": 0.5520351109240667, - "flos": 17866221928560.0, - "grad_norm": 3.3974793234358067, - "learning_rate": 1.76065176641547e-06, - "loss": 1.0792, - "num_input_tokens_seen": 97779740, - "step": 4591 - }, - { - "epoch": 0.5521553538147057, - "flos": 14960852543520.0, - "grad_norm": 2.3140299092225165, - "learning_rate": 1.759878411233777e-06, - "loss": 1.0033, - "num_input_tokens_seen": 97797920, - "step": 4592 - }, - { - "epoch": 0.5522755967053448, - "flos": 13387213692000.0, - "grad_norm": 20.488637613082876, - "learning_rate": 1.7591050924783388e-06, - "loss": 0.9866, - "num_input_tokens_seen": 97814830, - "step": 4593 - }, - { - "epoch": 0.5523958395959839, - "flos": 46305400303080.0, - "grad_norm": 0.9089792852255136, - "learning_rate": 1.7583318102664661e-06, - "loss": 0.8644, - "num_input_tokens_seen": 97882115, - "step": 4594 - }, - { - "epoch": 0.552516082486623, - "flos": 7644774860280.0, - "grad_norm": 4.542969169846283, - "learning_rate": 1.757558564715466e-06, - "loss": 1.0183, - "num_input_tokens_seen": 97899910, - "step": 4595 - }, - { - "epoch": 0.552636325377262, - "flos": 15773034627480.0, - "grad_norm": 5.419126842027271, - "learning_rate": 1.7567853559426386e-06, - "loss": 0.9531, - "num_input_tokens_seen": 97916680, - "step": 4596 - }, - { - "epoch": 0.5527565682679012, - "flos": 17058393786120.0, - "grad_norm": 5.653234194180182, - "learning_rate": 1.7560121840652797e-06, - "loss": 0.9798, - "num_input_tokens_seen": 97935785, - "step": 4597 - }, - { - "epoch": 0.5528768111585403, - "flos": 13990392859920.0, - "grad_norm": 7.987238241810827, - "learning_rate": 1.7552390492006782e-06, - "loss": 0.916, - "num_input_tokens_seen": 97953825, - "step": 4598 - }, - { - "epoch": 0.5529970540491793, - "flos": 18659756235960.0, - "grad_norm": 3.5026798887706314, - "learning_rate": 1.7544659514661184e-06, - "loss": 0.8832, - "num_input_tokens_seen": 97976635, - "step": 4599 - }, - { - "epoch": 0.5531172969398185, - "flos": 17373017307120.0, - "grad_norm": 2.8112949756842696, - "learning_rate": 1.7536928909788786e-06, - "loss": 1.0261, - "num_input_tokens_seen": 97995660, - "step": 4600 - }, - { - "epoch": 0.5532375398304575, - "flos": 46468711946640.0, - "grad_norm": 1.1559824762930107, - "learning_rate": 1.752919867856231e-06, - "loss": 0.8707, - "num_input_tokens_seen": 98047025, - "step": 4601 - }, - { - "epoch": 0.5533577827210966, - "flos": 
13964447632080.0, - "grad_norm": 2.733162834695938, - "learning_rate": 1.7521468822154436e-06, - "loss": 1.0097, - "num_input_tokens_seen": 98065660, - "step": 4602 - }, - { - "epoch": 0.5534780256117358, - "flos": 23036547300720.0, - "grad_norm": 3.61924220610107, - "learning_rate": 1.751373934173777e-06, - "loss": 0.9662, - "num_input_tokens_seen": 98088125, - "step": 4603 - }, - { - "epoch": 0.5535982685023748, - "flos": 16508116905360.0, - "grad_norm": 3.65245459077299, - "learning_rate": 1.750601023848487e-06, - "loss": 0.9586, - "num_input_tokens_seen": 98108570, - "step": 4604 - }, - { - "epoch": 0.5537185113930139, - "flos": 17320176343080.0, - "grad_norm": 2.3414388833175903, - "learning_rate": 1.749828151356823e-06, - "loss": 0.9591, - "num_input_tokens_seen": 98128485, - "step": 4605 - }, - { - "epoch": 0.553838754283653, - "flos": 16742635787400.0, - "grad_norm": 2.715655440147828, - "learning_rate": 1.7490553168160297e-06, - "loss": 0.9878, - "num_input_tokens_seen": 98149275, - "step": 4606 - }, - { - "epoch": 0.5539589971742921, - "flos": 12233818966440.0, - "grad_norm": 3.2909142000320606, - "learning_rate": 1.748282520343345e-06, - "loss": 0.9867, - "num_input_tokens_seen": 98168025, - "step": 4607 - }, - { - "epoch": 0.5540792400649311, - "flos": 19628437549080.0, - "grad_norm": 3.905769745868189, - "learning_rate": 1.7475097620560023e-06, - "loss": 1.0083, - "num_input_tokens_seen": 98187810, - "step": 4608 - }, - { - "epoch": 0.5541994829555702, - "flos": 16979668917360.0, - "grad_norm": 11.445328498282334, - "learning_rate": 1.746737042071228e-06, - "loss": 0.9339, - "num_input_tokens_seen": 98206035, - "step": 4609 - }, - { - "epoch": 0.5543197258462094, - "flos": 14274655888440.0, - "grad_norm": 4.28132172684555, - "learning_rate": 1.7459643605062424e-06, - "loss": 1.0237, - "num_input_tokens_seen": 98223015, - "step": 4610 - }, - { - "epoch": 0.5544399687368484, - "flos": 14849958150240.0, - "grad_norm": 3.7797040064539194, - "learning_rate": 1.745191717478262e-06, - "loss": 1.0339, - "num_input_tokens_seen": 98241315, - "step": 4611 - }, - { - "epoch": 0.5545602116274875, - "flos": 18158058362400.0, - "grad_norm": 4.01654992046032, - "learning_rate": 1.7444191131044948e-06, - "loss": 1.0292, - "num_input_tokens_seen": 98261310, - "step": 4612 - }, - { - "epoch": 0.5546804545181266, - "flos": 14907551656080.0, - "grad_norm": 3.024260337502256, - "learning_rate": 1.7436465475021456e-06, - "loss": 0.9508, - "num_input_tokens_seen": 98281080, - "step": 4613 - }, - { - "epoch": 0.5548006974087657, - "flos": 19103308695600.0, - "grad_norm": 2.939548995270067, - "learning_rate": 1.7428740207884111e-06, - "loss": 0.9332, - "num_input_tokens_seen": 98301680, - "step": 4614 - }, - { - "epoch": 0.5549209402994048, - "flos": 24006700368720.0, - "grad_norm": 6.809797678447335, - "learning_rate": 1.7421015330804833e-06, - "loss": 0.8493, - "num_input_tokens_seen": 98321320, - "step": 4615 - }, - { - "epoch": 0.5550411831900439, - "flos": 16901281325760.0, - "grad_norm": 3.228100574028021, - "learning_rate": 1.7413290844955475e-06, - "loss": 0.9509, - "num_input_tokens_seen": 98341070, - "step": 4616 - }, - { - "epoch": 0.555161426080683, - "flos": 15378582421560.0, - "grad_norm": 4.214570864804967, - "learning_rate": 1.7405566751507843e-06, - "loss": 1.0117, - "num_input_tokens_seen": 98358835, - "step": 4617 - }, - { - "epoch": 0.555281668971322, - "flos": 35438399790960.0, - "grad_norm": 5.892738995048867, - "learning_rate": 1.7397843051633668e-06, - "loss": 0.8982, - 
"num_input_tokens_seen": 98381250, - "step": 4618 - }, - { - "epoch": 0.5554019118619612, - "flos": 14725229845320.0, - "grad_norm": 3.6787196220779492, - "learning_rate": 1.739011974650464e-06, - "loss": 0.9419, - "num_input_tokens_seen": 98400300, - "step": 4619 - }, - { - "epoch": 0.5555221547526003, - "flos": 18132726365760.0, - "grad_norm": 8.761575508052236, - "learning_rate": 1.7382396837292365e-06, - "loss": 0.9813, - "num_input_tokens_seen": 98420480, - "step": 4620 - }, - { - "epoch": 0.5556423976432393, - "flos": 15458840368320.0, - "grad_norm": 5.436273721558546, - "learning_rate": 1.737467432516841e-06, - "loss": 0.9603, - "num_input_tokens_seen": 98440300, - "step": 4621 - }, - { - "epoch": 0.5557626405338785, - "flos": 17713800686880.0, - "grad_norm": 5.9910185667210945, - "learning_rate": 1.7366952211304274e-06, - "loss": 0.9495, - "num_input_tokens_seen": 98457865, - "step": 4622 - }, - { - "epoch": 0.5558828834245175, - "flos": 13256046459480.0, - "grad_norm": 7.83837365934638, - "learning_rate": 1.735923049687139e-06, - "loss": 1.0587, - "num_input_tokens_seen": 98474160, - "step": 4623 - }, - { - "epoch": 0.5560031263151566, - "flos": 19418576109360.0, - "grad_norm": 5.687247469818681, - "learning_rate": 1.7351509183041144e-06, - "loss": 0.9703, - "num_input_tokens_seen": 98494210, - "step": 4624 - }, - { - "epoch": 0.5561233692057957, - "flos": 16638272306400.0, - "grad_norm": 3.0623992591948697, - "learning_rate": 1.7343788270984852e-06, - "loss": 0.9559, - "num_input_tokens_seen": 98513070, - "step": 4625 - }, - { - "epoch": 0.5562436120964348, - "flos": 26891950222320.0, - "grad_norm": 2.5687191555278255, - "learning_rate": 1.7336067761873764e-06, - "loss": 0.9728, - "num_input_tokens_seen": 98535215, - "step": 4626 - }, - { - "epoch": 0.5563638549870739, - "flos": 17897073006000.0, - "grad_norm": 4.097569205611467, - "learning_rate": 1.7328347656879076e-06, - "loss": 0.9838, - "num_input_tokens_seen": 98554795, - "step": 4627 - }, - { - "epoch": 0.556484097877713, - "flos": 9578458099320.0, - "grad_norm": 5.269881070729565, - "learning_rate": 1.7320627957171927e-06, - "loss": 0.9153, - "num_input_tokens_seen": 98569175, - "step": 4628 - }, - { - "epoch": 0.5566043407683521, - "flos": 17556565580280.0, - "grad_norm": 2.7381886073397563, - "learning_rate": 1.7312908663923382e-06, - "loss": 1.0284, - "num_input_tokens_seen": 98585070, - "step": 4629 - }, - { - "epoch": 0.5567245836589911, - "flos": 14613936851760.0, - "grad_norm": 4.2943882630229036, - "learning_rate": 1.7305189778304463e-06, - "loss": 0.8951, - "num_input_tokens_seen": 98602965, - "step": 4630 - }, - { - "epoch": 0.5568448265496303, - "flos": 14698211462880.0, - "grad_norm": 6.93010475596949, - "learning_rate": 1.729747130148611e-06, - "loss": 1.0275, - "num_input_tokens_seen": 98621880, - "step": 4631 - }, - { - "epoch": 0.5569650694402694, - "flos": 18003429488400.0, - "grad_norm": 9.2617537595553, - "learning_rate": 1.7289753234639208e-06, - "loss": 0.9958, - "num_input_tokens_seen": 98640575, - "step": 4632 - }, - { - "epoch": 0.5570853123309084, - "flos": 13984720471320.0, - "grad_norm": 4.76261691853541, - "learning_rate": 1.7282035578934592e-06, - "loss": 0.9877, - "num_input_tokens_seen": 98658460, - "step": 4633 - }, - { - "epoch": 0.5572055552215476, - "flos": 11395231731240.0, - "grad_norm": 4.0110970584650225, - "learning_rate": 1.727431833554301e-06, - "loss": 1.0099, - "num_input_tokens_seen": 98676655, - "step": 4634 - }, - { - "epoch": 0.5573257981121866, - "flos": 12128688946440.0, - 
"grad_norm": 4.916306412424344, - "learning_rate": 1.7266601505635175e-06, - "loss": 0.9967, - "num_input_tokens_seen": 98693715, - "step": 4635 - }, - { - "epoch": 0.5574460410028257, - "flos": 13334771328240.0, - "grad_norm": 3.3946998124143835, - "learning_rate": 1.7258885090381717e-06, - "loss": 0.9812, - "num_input_tokens_seen": 98711475, - "step": 4636 - }, - { - "epoch": 0.5575662838934649, - "flos": 21122063746320.0, - "grad_norm": 5.1233059944477395, - "learning_rate": 1.7251169090953213e-06, - "loss": 1.0155, - "num_input_tokens_seen": 98731670, - "step": 4637 - }, - { - "epoch": 0.5576865267841039, - "flos": 15668671146480.0, - "grad_norm": 3.5372533489817792, - "learning_rate": 1.7243453508520168e-06, - "loss": 0.983, - "num_input_tokens_seen": 98748375, - "step": 4638 - }, - { - "epoch": 0.557806769674743, - "flos": 12176225460600.0, - "grad_norm": 3.8199736087945957, - "learning_rate": 1.7235738344253038e-06, - "loss": 1.0767, - "num_input_tokens_seen": 98761725, - "step": 4639 - }, - { - "epoch": 0.557927012565382, - "flos": 17578402159080.0, - "grad_norm": 5.481614333672609, - "learning_rate": 1.72280235993222e-06, - "loss": 1.0434, - "num_input_tokens_seen": 98779750, - "step": 4640 - }, - { - "epoch": 0.5580472554560212, - "flos": 12023988188280.0, - "grad_norm": 6.530726361615738, - "learning_rate": 1.722030927489798e-06, - "loss": 0.916, - "num_input_tokens_seen": 98796750, - "step": 4641 - }, - { - "epoch": 0.5581674983466602, - "flos": 16713440434200.0, - "grad_norm": 3.0495870404447443, - "learning_rate": 1.7212595372150634e-06, - "loss": 0.9692, - "num_input_tokens_seen": 98816450, - "step": 4642 - }, - { - "epoch": 0.5582877412372993, - "flos": 9506570758440.0, - "grad_norm": 6.836256130966978, - "learning_rate": 1.720488189225035e-06, - "loss": 0.9628, - "num_input_tokens_seen": 98833870, - "step": 4643 - }, - { - "epoch": 0.5584079841279385, - "flos": 15559585785240.0, - "grad_norm": 4.9868175297796276, - "learning_rate": 1.7197168836367265e-06, - "loss": 1.015, - "num_input_tokens_seen": 98850400, - "step": 4644 - }, - { - "epoch": 0.5585282270185775, - "flos": 13361023171680.0, - "grad_norm": 3.7063787788546048, - "learning_rate": 1.7189456205671433e-06, - "loss": 1.046, - "num_input_tokens_seen": 98868965, - "step": 4645 - }, - { - "epoch": 0.5586484699092166, - "flos": 15534499081080.0, - "grad_norm": 9.449457336668573, - "learning_rate": 1.7181744001332866e-06, - "loss": 1.0502, - "num_input_tokens_seen": 98887295, - "step": 4646 - }, - { - "epoch": 0.5587687127998557, - "flos": 16271421052560.0, - "grad_norm": 2.928549095228151, - "learning_rate": 1.7174032224521493e-06, - "loss": 0.8712, - "num_input_tokens_seen": 98905725, - "step": 4647 - }, - { - "epoch": 0.5588889556904948, - "flos": 14409962431560.0, - "grad_norm": 4.162092419164195, - "learning_rate": 1.7166320876407184e-06, - "loss": 0.9265, - "num_input_tokens_seen": 98924865, - "step": 4648 - }, - { - "epoch": 0.5590091985811338, - "flos": 11656768995720.0, - "grad_norm": 5.734902173166602, - "learning_rate": 1.7158609958159742e-06, - "loss": 0.9053, - "num_input_tokens_seen": 98941990, - "step": 4649 - }, - { - "epoch": 0.559129441471773, - "flos": 10261711244640.0, - "grad_norm": 3.8149854969233945, - "learning_rate": 1.7150899470948911e-06, - "loss": 1.0097, - "num_input_tokens_seen": 98956975, - "step": 4650 - }, - { - "epoch": 0.5592496843624121, - "flos": 43320202233120.0, - "grad_norm": 0.832924698935157, - "learning_rate": 1.7143189415944365e-06, - "loss": 0.8319, - 
"num_input_tokens_seen": 99021155, - "step": 4651 - }, - { - "epoch": 0.5593699272530511, - "flos": 14829010756680.0, - "grad_norm": 2.980732077468783, - "learning_rate": 1.7135479794315714e-06, - "loss": 0.9876, - "num_input_tokens_seen": 99037830, - "step": 4652 - }, - { - "epoch": 0.5594901701436903, - "flos": 9087215817720.0, - "grad_norm": 2.909339165390048, - "learning_rate": 1.7127770607232502e-06, - "loss": 1.0131, - "num_input_tokens_seen": 99056095, - "step": 4653 - }, - { - "epoch": 0.5596104130343293, - "flos": 16428717482280.0, - "grad_norm": 3.957523838725679, - "learning_rate": 1.7120061855864204e-06, - "loss": 1.0324, - "num_input_tokens_seen": 99075825, - "step": 4654 - }, - { - "epoch": 0.5597306559249684, - "flos": 18473847022680.0, - "grad_norm": 3.5780519702904834, - "learning_rate": 1.7112353541380233e-06, - "loss": 0.935, - "num_input_tokens_seen": 99095405, - "step": 4655 - }, - { - "epoch": 0.5598508988156076, - "flos": 15983325329040.0, - "grad_norm": 2.8477995827267946, - "learning_rate": 1.7104645664949931e-06, - "loss": 0.9543, - "num_input_tokens_seen": 99117595, - "step": 4656 - }, - { - "epoch": 0.5599711417062466, - "flos": 16429085421000.0, - "grad_norm": 3.789396983061983, - "learning_rate": 1.7096938227742584e-06, - "loss": 0.952, - "num_input_tokens_seen": 99138445, - "step": 4657 - }, - { - "epoch": 0.5600913845968857, - "flos": 15872461597320.0, - "grad_norm": 3.8845341202544352, - "learning_rate": 1.70892312309274e-06, - "loss": 1.0702, - "num_input_tokens_seen": 99156055, - "step": 4658 - }, - { - "epoch": 0.5602116274875248, - "flos": 12490818320040.0, - "grad_norm": 4.133038173924739, - "learning_rate": 1.7081524675673523e-06, - "loss": 0.9032, - "num_input_tokens_seen": 99171265, - "step": 4659 - }, - { - "epoch": 0.5603318703781639, - "flos": 50215790497920.0, - "grad_norm": 0.8359609939268441, - "learning_rate": 1.7073818563150026e-06, - "loss": 0.8743, - "num_input_tokens_seen": 99233065, - "step": 4660 - }, - { - "epoch": 0.560452113268803, - "flos": 13146286543920.0, - "grad_norm": 4.388074032001642, - "learning_rate": 1.7066112894525935e-06, - "loss": 1.0859, - "num_input_tokens_seen": 99250865, - "step": 4661 - }, - { - "epoch": 0.5605723561594421, - "flos": 17975705890080.0, - "grad_norm": 2.574168828500433, - "learning_rate": 1.7058407670970177e-06, - "loss": 0.9555, - "num_input_tokens_seen": 99272060, - "step": 4662 - }, - { - "epoch": 0.5606925990500812, - "flos": 14619731886600.0, - "grad_norm": 5.969860992091238, - "learning_rate": 1.7050702893651643e-06, - "loss": 0.8324, - "num_input_tokens_seen": 99291360, - "step": 4663 - }, - { - "epoch": 0.5608128419407202, - "flos": 24976608144240.0, - "grad_norm": 3.481651439155622, - "learning_rate": 1.7042998563739134e-06, - "loss": 0.9821, - "num_input_tokens_seen": 99309430, - "step": 4664 - }, - { - "epoch": 0.5609330848313594, - "flos": 17526021118440.0, - "grad_norm": 3.759624972003823, - "learning_rate": 1.703529468240139e-06, - "loss": 0.9345, - "num_input_tokens_seen": 99328020, - "step": 4665 - }, - { - "epoch": 0.5610533277219985, - "flos": 13302019234080.0, - "grad_norm": 5.528378188317656, - "learning_rate": 1.7027591250807088e-06, - "loss": 0.952, - "num_input_tokens_seen": 99344915, - "step": 4666 - }, - { - "epoch": 0.5611735706126375, - "flos": 10607615104920.0, - "grad_norm": 4.255936459547426, - "learning_rate": 1.7019888270124825e-06, - "loss": 1.0454, - "num_input_tokens_seen": 99361800, - "step": 4667 - }, - { - "epoch": 0.5612938135032767, - "flos": 11653365562560.0, 
- "grad_norm": 3.29753113495514, - "learning_rate": 1.7012185741523147e-06, - "loss": 1.0485, - "num_input_tokens_seen": 99377845, - "step": 4668 - }, - { - "epoch": 0.5614140563939157, - "flos": 18264506829480.0, - "grad_norm": 3.695467213785237, - "learning_rate": 1.7004483666170514e-06, - "loss": 0.8361, - "num_input_tokens_seen": 99398060, - "step": 4669 - }, - { - "epoch": 0.5615342992845548, - "flos": 17582541469680.0, - "grad_norm": 5.279247197613571, - "learning_rate": 1.699678204523533e-06, - "loss": 1.0148, - "num_input_tokens_seen": 99417645, - "step": 4670 - }, - { - "epoch": 0.5616545421751938, - "flos": 15640702255680.0, - "grad_norm": 4.11500981048119, - "learning_rate": 1.6989080879885918e-06, - "loss": 0.8963, - "num_input_tokens_seen": 99435225, - "step": 4671 - }, - { - "epoch": 0.561774785065833, - "flos": 38459993132640.0, - "grad_norm": 0.8941851950186662, - "learning_rate": 1.6981380171290544e-06, - "loss": 0.8689, - "num_input_tokens_seen": 99495970, - "step": 4672 - }, - { - "epoch": 0.5618950279564721, - "flos": 14011708192200.0, - "grad_norm": 6.058284591765578, - "learning_rate": 1.6973679920617396e-06, - "loss": 0.9659, - "num_input_tokens_seen": 99513225, - "step": 4673 - }, - { - "epoch": 0.5620152708471111, - "flos": 11892820955760.0, - "grad_norm": 10.691277801110015, - "learning_rate": 1.6965980129034603e-06, - "loss": 1.08, - "num_input_tokens_seen": 99530330, - "step": 4674 - }, - { - "epoch": 0.5621355137377503, - "flos": 19077639421800.0, - "grad_norm": 3.6392969156700135, - "learning_rate": 1.6958280797710209e-06, - "loss": 0.995, - "num_input_tokens_seen": 99551975, - "step": 4675 - }, - { - "epoch": 0.5622557566283893, - "flos": 43812272376840.0, - "grad_norm": 0.8164956805879393, - "learning_rate": 1.6950581927812198e-06, - "loss": 0.7871, - "num_input_tokens_seen": 99611265, - "step": 4676 - }, - { - "epoch": 0.5623759995190284, - "flos": 18840882245880.0, - "grad_norm": 9.802708885887862, - "learning_rate": 1.6942883520508486e-06, - "loss": 1.0043, - "num_input_tokens_seen": 99629720, - "step": 4677 - }, - { - "epoch": 0.5624962424096676, - "flos": 13754402223000.0, - "grad_norm": 3.3095723295951363, - "learning_rate": 1.693518557696691e-06, - "loss": 1.0058, - "num_input_tokens_seen": 99648580, - "step": 4678 - }, - { - "epoch": 0.5626164853003066, - "flos": 14671683665400.0, - "grad_norm": 10.294715295961435, - "learning_rate": 1.6927488098355252e-06, - "loss": 1.1147, - "num_input_tokens_seen": 99665930, - "step": 4679 - }, - { - "epoch": 0.5627367281909457, - "flos": 44931381930240.0, - "grad_norm": 0.9141665668423742, - "learning_rate": 1.6919791085841201e-06, - "loss": 0.9045, - "num_input_tokens_seen": 99723060, - "step": 4680 - }, - { - "epoch": 0.5628569710815848, - "flos": 9008429625840.0, - "grad_norm": 9.148361609687363, - "learning_rate": 1.6912094540592396e-06, - "loss": 1.014, - "num_input_tokens_seen": 99738300, - "step": 4681 - }, - { - "epoch": 0.5629772139722239, - "flos": 9707724315120.0, - "grad_norm": 6.384145123660719, - "learning_rate": 1.6904398463776393e-06, - "loss": 1.0242, - "num_input_tokens_seen": 99751820, - "step": 4682 - }, - { - "epoch": 0.5630974568628629, - "flos": 15246679311600.0, - "grad_norm": 6.047028295658628, - "learning_rate": 1.6896702856560683e-06, - "loss": 0.9595, - "num_input_tokens_seen": 99770635, - "step": 4683 - }, - { - "epoch": 0.5632176997535021, - "flos": 10056724992960.0, - "grad_norm": 5.478286188934555, - "learning_rate": 1.6889007720112677e-06, - "loss": 0.9002, - 
"num_input_tokens_seen": 99788100, - "step": 4684 - }, - { - "epoch": 0.5633379426441412, - "flos": 14775679207680.0, - "grad_norm": 5.318211077451802, - "learning_rate": 1.6881313055599734e-06, - "loss": 1.0066, - "num_input_tokens_seen": 99807750, - "step": 4685 - }, - { - "epoch": 0.5634581855347802, - "flos": 16061252997240.0, - "grad_norm": 14.106647823539893, - "learning_rate": 1.6873618864189117e-06, - "loss": 1.0463, - "num_input_tokens_seen": 99823240, - "step": 4686 - }, - { - "epoch": 0.5635784284254194, - "flos": 15275169448920.0, - "grad_norm": 3.833588481096402, - "learning_rate": 1.686592514704803e-06, - "loss": 1.005, - "num_input_tokens_seen": 99840355, - "step": 4687 - }, - { - "epoch": 0.5636986713160584, - "flos": 14067983250960.0, - "grad_norm": 14.737275923173257, - "learning_rate": 1.685823190534361e-06, - "loss": 0.9316, - "num_input_tokens_seen": 99858315, - "step": 4688 - }, - { - "epoch": 0.5638189142066975, - "flos": 14043019193040.0, - "grad_norm": 2.713669689084786, - "learning_rate": 1.6850539140242907e-06, - "loss": 1.0495, - "num_input_tokens_seen": 99877295, - "step": 4689 - }, - { - "epoch": 0.5639391570973367, - "flos": 16271880975960.0, - "grad_norm": 3.2393874981105673, - "learning_rate": 1.684284685291292e-06, - "loss": 1.0452, - "num_input_tokens_seen": 99898660, - "step": 4690 - }, - { - "epoch": 0.5640593999879757, - "flos": 16869970324920.0, - "grad_norm": 6.0029705959084785, - "learning_rate": 1.683515504452055e-06, - "loss": 1.0244, - "num_input_tokens_seen": 99915755, - "step": 4691 - }, - { - "epoch": 0.5641796428786148, - "flos": 16139732573520.0, - "grad_norm": 4.241886472471524, - "learning_rate": 1.6827463716232648e-06, - "loss": 0.8896, - "num_input_tokens_seen": 99936135, - "step": 4692 - }, - { - "epoch": 0.5642998857692539, - "flos": 14042620592760.0, - "grad_norm": 6.754610818061008, - "learning_rate": 1.6819772869215972e-06, - "loss": 0.9819, - "num_input_tokens_seen": 99954935, - "step": 4693 - }, - { - "epoch": 0.564420128659893, - "flos": 16481865061920.0, - "grad_norm": 4.143256736614091, - "learning_rate": 1.6812082504637228e-06, - "loss": 1.0433, - "num_input_tokens_seen": 99975975, - "step": 4694 - }, - { - "epoch": 0.564540371550532, - "flos": 16533786179160.0, - "grad_norm": 6.26576076750505, - "learning_rate": 1.6804392623663025e-06, - "loss": 0.9772, - "num_input_tokens_seen": 99996900, - "step": 4695 - }, - { - "epoch": 0.5646606144411712, - "flos": 17792832171240.0, - "grad_norm": 4.665261579336971, - "learning_rate": 1.6796703227459935e-06, - "loss": 1.0042, - "num_input_tokens_seen": 100014575, - "step": 4696 - }, - { - "epoch": 0.5647808573318103, - "flos": 26078940276240.0, - "grad_norm": 5.376895304243237, - "learning_rate": 1.6789014317194407e-06, - "loss": 0.9857, - "num_input_tokens_seen": 100035775, - "step": 4697 - }, - { - "epoch": 0.5649011002224493, - "flos": 16009270556880.0, - "grad_norm": 7.287766058172583, - "learning_rate": 1.6781325894032853e-06, - "loss": 0.9434, - "num_input_tokens_seen": 100054455, - "step": 4698 - }, - { - "epoch": 0.5650213431130885, - "flos": 13125001873200.0, - "grad_norm": 4.108851178220105, - "learning_rate": 1.6773637959141608e-06, - "loss": 1.1482, - "num_input_tokens_seen": 100071150, - "step": 4699 - }, - { - "epoch": 0.5651415860037275, - "flos": 12413841160200.0, - "grad_norm": 3.123096496684036, - "learning_rate": 1.6765950513686915e-06, - "loss": 0.8664, - "num_input_tokens_seen": 100088980, - "step": 4700 - }, - { - "epoch": 0.5652618288943666, - "flos": 
18159223501680.0, - "grad_norm": 8.811759755312872, - "learning_rate": 1.675826355883496e-06, - "loss": 0.9871, - "num_input_tokens_seen": 100107915, - "step": 4701 - }, - { - "epoch": 0.5653820717850057, - "flos": 13964447632080.0, - "grad_norm": 2.473052320051066, - "learning_rate": 1.6750577095751848e-06, - "loss": 1.0257, - "num_input_tokens_seen": 100126745, - "step": 4702 - }, - { - "epoch": 0.5655023146756448, - "flos": 19208316069360.0, - "grad_norm": 3.0984439882488486, - "learning_rate": 1.6742891125603605e-06, - "loss": 0.9646, - "num_input_tokens_seen": 100147370, - "step": 4703 - }, - { - "epoch": 0.5656225575662839, - "flos": 19703452369080.0, - "grad_norm": 3.486169053837756, - "learning_rate": 1.6735205649556185e-06, - "loss": 0.9427, - "num_input_tokens_seen": 100166960, - "step": 4704 - }, - { - "epoch": 0.5657428004569229, - "flos": 17317784741400.0, - "grad_norm": 2.751767197576089, - "learning_rate": 1.6727520668775476e-06, - "loss": 1.0723, - "num_input_tokens_seen": 100186965, - "step": 4705 - }, - { - "epoch": 0.5658630433475621, - "flos": 15589179738720.0, - "grad_norm": 3.1894324933311573, - "learning_rate": 1.6719836184427275e-06, - "loss": 0.9806, - "num_input_tokens_seen": 100206990, - "step": 4706 - }, - { - "epoch": 0.5659832862382012, - "flos": 21672831212040.0, - "grad_norm": 4.346775017033439, - "learning_rate": 1.671215219767733e-06, - "loss": 0.8721, - "num_input_tokens_seen": 100226170, - "step": 4707 - }, - { - "epoch": 0.5661035291288402, - "flos": 9296617334040.0, - "grad_norm": 4.665459862844339, - "learning_rate": 1.670446870969127e-06, - "loss": 0.9871, - "num_input_tokens_seen": 100243555, - "step": 4708 - }, - { - "epoch": 0.5662237720194794, - "flos": 11420134466040.0, - "grad_norm": 15.587904207056168, - "learning_rate": 1.6696785721634685e-06, - "loss": 1.0446, - "num_input_tokens_seen": 100257760, - "step": 4709 - }, - { - "epoch": 0.5663440149101184, - "flos": 12522650567400.0, - "grad_norm": 3.0725913872408706, - "learning_rate": 1.6689103234673086e-06, - "loss": 0.9551, - "num_input_tokens_seen": 100275800, - "step": 4710 - }, - { - "epoch": 0.5664642578007575, - "flos": 16612970971320.0, - "grad_norm": 3.5277294062170883, - "learning_rate": 1.668142124997189e-06, - "loss": 0.9786, - "num_input_tokens_seen": 100295180, - "step": 4711 - }, - { - "epoch": 0.5665845006913967, - "flos": 46909928579640.0, - "grad_norm": 0.7489386439664467, - "learning_rate": 1.6673739768696453e-06, - "loss": 0.855, - "num_input_tokens_seen": 100361470, - "step": 4712 - }, - { - "epoch": 0.5667047435820357, - "flos": 18605504840160.0, - "grad_norm": 5.396619565183675, - "learning_rate": 1.6666058792012052e-06, - "loss": 0.9955, - "num_input_tokens_seen": 100382075, - "step": 4713 - }, - { - "epoch": 0.5668249864726748, - "flos": 51474161935680.0, - "grad_norm": 0.8400603259086644, - "learning_rate": 1.6658378321083878e-06, - "loss": 0.928, - "num_input_tokens_seen": 100446125, - "step": 4714 - }, - { - "epoch": 0.5669452293633139, - "flos": 15769508548080.0, - "grad_norm": 3.169378641886604, - "learning_rate": 1.6650698357077055e-06, - "loss": 1.054, - "num_input_tokens_seen": 100462890, - "step": 4715 - }, - { - "epoch": 0.567065472253953, - "flos": 12915171095040.0, - "grad_norm": 7.600367547232656, - "learning_rate": 1.6643018901156632e-06, - "loss": 1.0315, - "num_input_tokens_seen": 100481705, - "step": 4716 - }, - { - "epoch": 0.567185715144592, - "flos": 14458572100320.0, - "grad_norm": 12.833366559138367, - "learning_rate": 1.6635339954487566e-06, - 
"loss": 1.0048, - "num_input_tokens_seen": 100497300, - "step": 4717 - }, - { - "epoch": 0.5673059580352312, - "flos": 16507197058560.0, - "grad_norm": 3.58855135196141, - "learning_rate": 1.6627661518234765e-06, - "loss": 1.0486, - "num_input_tokens_seen": 100516275, - "step": 4718 - }, - { - "epoch": 0.5674262009258703, - "flos": 15427130767200.0, - "grad_norm": 6.4514876313506715, - "learning_rate": 1.661998359356302e-06, - "loss": 1.0752, - "num_input_tokens_seen": 100535025, - "step": 4719 - }, - { - "epoch": 0.5675464438165093, - "flos": 48311952052920.0, - "grad_norm": 0.8229952947660526, - "learning_rate": 1.6612306181637077e-06, - "loss": 0.8118, - "num_input_tokens_seen": 100594070, - "step": 4720 - }, - { - "epoch": 0.5676666867071485, - "flos": 13386508476120.0, - "grad_norm": 3.799519940219165, - "learning_rate": 1.6604629283621598e-06, - "loss": 0.8793, - "num_input_tokens_seen": 100611720, - "step": 4721 - }, - { - "epoch": 0.5677869295977875, - "flos": 23927883515280.0, - "grad_norm": 2.8298985099791523, - "learning_rate": 1.6596952900681152e-06, - "loss": 0.9659, - "num_input_tokens_seen": 100632200, - "step": 4722 - }, - { - "epoch": 0.5679071724884266, - "flos": 19968270420480.0, - "grad_norm": 5.026159329264186, - "learning_rate": 1.658927703398025e-06, - "loss": 1.0511, - "num_input_tokens_seen": 100651985, - "step": 4723 - }, - { - "epoch": 0.5680274153790658, - "flos": 16743524972640.0, - "grad_norm": 4.073117276427746, - "learning_rate": 1.6581601684683309e-06, - "loss": 0.9853, - "num_input_tokens_seen": 100672130, - "step": 4724 - }, - { - "epoch": 0.5681476582697048, - "flos": 15956552239080.0, - "grad_norm": 7.229806598698878, - "learning_rate": 1.6573926853954674e-06, - "loss": 0.9128, - "num_input_tokens_seen": 100689435, - "step": 4725 - }, - { - "epoch": 0.5682679011603439, - "flos": 13859348273640.0, - "grad_norm": 2.891315109766634, - "learning_rate": 1.6566252542958608e-06, - "loss": 1.0518, - "num_input_tokens_seen": 100708655, - "step": 4726 - }, - { - "epoch": 0.568388144050983, - "flos": 20491436934120.0, - "grad_norm": 7.403817207820988, - "learning_rate": 1.6558578752859305e-06, - "loss": 0.9982, - "num_input_tokens_seen": 100727335, - "step": 4727 - }, - { - "epoch": 0.5685083869416221, - "flos": 15060586128960.0, - "grad_norm": 5.608788079714238, - "learning_rate": 1.6550905484820865e-06, - "loss": 1.012, - "num_input_tokens_seen": 100745515, - "step": 4728 - }, - { - "epoch": 0.5686286298322611, - "flos": 17661205015320.0, - "grad_norm": 6.576020182669081, - "learning_rate": 1.6543232740007328e-06, - "loss": 1.0198, - "num_input_tokens_seen": 100762350, - "step": 4729 - }, - { - "epoch": 0.5687488727229003, - "flos": 18946318881480.0, - "grad_norm": 12.082846445381, - "learning_rate": 1.653556051958263e-06, - "loss": 0.8721, - "num_input_tokens_seen": 100781750, - "step": 4730 - }, - { - "epoch": 0.5688691156135394, - "flos": 14772674374800.0, - "grad_norm": 4.036468676482044, - "learning_rate": 1.6527888824710642e-06, - "loss": 0.9621, - "num_input_tokens_seen": 100801070, - "step": 4731 - }, - { - "epoch": 0.5689893585041784, - "flos": 18418093210440.0, - "grad_norm": 5.461279949753098, - "learning_rate": 1.6520217656555166e-06, - "loss": 0.976, - "num_input_tokens_seen": 100820080, - "step": 4732 - }, - { - "epoch": 0.5691096013948175, - "flos": 16691358562920.0, - "grad_norm": 2.2609745046333023, - "learning_rate": 1.65125470162799e-06, - "loss": 0.9299, - "num_input_tokens_seen": 100840155, - "step": 4733 - }, - { - "epoch": 
0.5692298442854566, - "flos": 12808875935760.0, - "grad_norm": 3.3575837793641456, - "learning_rate": 1.6504876905048485e-06, - "loss": 0.927, - "num_input_tokens_seen": 100856835, - "step": 4734 - }, - { - "epoch": 0.5693500871760957, - "flos": 16376428426320.0, - "grad_norm": 3.7268299190770082, - "learning_rate": 1.6497207324024464e-06, - "loss": 0.9563, - "num_input_tokens_seen": 100875455, - "step": 4735 - }, - { - "epoch": 0.5694703300667348, - "flos": 13466061207000.0, - "grad_norm": 2.7112894406930033, - "learning_rate": 1.6489538274371305e-06, - "loss": 1.0465, - "num_input_tokens_seen": 100893780, - "step": 4736 - }, - { - "epoch": 0.5695905729573739, - "flos": 15563939726760.0, - "grad_norm": 3.9267054746185655, - "learning_rate": 1.6481869757252396e-06, - "loss": 1.0566, - "num_input_tokens_seen": 100911835, - "step": 4737 - }, - { - "epoch": 0.569710815848013, - "flos": 20284273711680.0, - "grad_norm": 2.817558571342927, - "learning_rate": 1.647420177383105e-06, - "loss": 0.9439, - "num_input_tokens_seen": 100934425, - "step": 4738 - }, - { - "epoch": 0.569831058738652, - "flos": 20204720980800.0, - "grad_norm": 4.076777560788836, - "learning_rate": 1.646653432527049e-06, - "loss": 0.96, - "num_input_tokens_seen": 100954785, - "step": 4739 - }, - { - "epoch": 0.5699513016292912, - "flos": 18396563247240.0, - "grad_norm": 2.455282441030843, - "learning_rate": 1.645886741273387e-06, - "loss": 0.9741, - "num_input_tokens_seen": 100976320, - "step": 4740 - }, - { - "epoch": 0.5700715445199303, - "flos": 12781581599280.0, - "grad_norm": 4.957432170794407, - "learning_rate": 1.645120103738424e-06, - "loss": 0.9618, - "num_input_tokens_seen": 100993550, - "step": 4741 - }, - { - "epoch": 0.5701917874105693, - "flos": 8064191124120.0, - "grad_norm": 4.542565889610209, - "learning_rate": 1.6443535200384591e-06, - "loss": 1.0613, - "num_input_tokens_seen": 101011445, - "step": 4742 - }, - { - "epoch": 0.5703120303012085, - "flos": 15458196475560.0, - "grad_norm": 3.9953352431096425, - "learning_rate": 1.6435869902897827e-06, - "loss": 0.9323, - "num_input_tokens_seen": 101029745, - "step": 4743 - }, - { - "epoch": 0.5704322731918475, - "flos": 40605101550960.0, - "grad_norm": 0.8692961821489478, - "learning_rate": 1.6428205146086764e-06, - "loss": 0.8663, - "num_input_tokens_seen": 101091445, - "step": 4744 - }, - { - "epoch": 0.5705525160824866, - "flos": 14724831245040.0, - "grad_norm": 4.072513622169825, - "learning_rate": 1.6420540931114142e-06, - "loss": 0.938, - "num_input_tokens_seen": 101111755, - "step": 4745 - }, - { - "epoch": 0.5706727589731257, - "flos": 13309163377560.0, - "grad_norm": 2.398721684323038, - "learning_rate": 1.6412877259142616e-06, - "loss": 1.0107, - "num_input_tokens_seen": 101131395, - "step": 4746 - }, - { - "epoch": 0.5707930018637648, - "flos": 19678825588320.0, - "grad_norm": 5.412020916252008, - "learning_rate": 1.6405214131334757e-06, - "loss": 0.9636, - "num_input_tokens_seen": 101149640, - "step": 4747 - }, - { - "epoch": 0.5709132447544039, - "flos": 19652113821480.0, - "grad_norm": 4.337294797413241, - "learning_rate": 1.6397551548853052e-06, - "loss": 1.0276, - "num_input_tokens_seen": 101167525, - "step": 4748 - }, - { - "epoch": 0.571033487645043, - "flos": 15403454494800.0, - "grad_norm": 3.214098485913031, - "learning_rate": 1.6389889512859917e-06, - "loss": 0.9326, - "num_input_tokens_seen": 101186905, - "step": 4749 - }, - { - "epoch": 0.5711537305356821, - "flos": 50261855257200.0, - "grad_norm": 0.9253752578741176, - "learning_rate": 
[Deleted file continues: per-step trainer log history for steps 4750-5078 (epochs approximately 0.571-0.611; "num_input_tokens_seen" rising from 101,248,105 to 108,027,030). Each record carries "epoch", "flos", "grad_norm", "learning_rate", "loss", "num_input_tokens_seen", and "step". Over this span the learning rate decays from about 1.638e-06 to about 1.391e-06 while the training loss fluctuates between roughly 0.80 and 1.13; occasional records with unusually long token counts show much smaller gradient norms (around 0.6-1.3). The excerpt breaks off mid-record after step 5078.]
3.8622630815157897, - "learning_rate": 1.389824224353122e-06, - "loss": 0.8688, - "num_input_tokens_seen": 108048340, - "step": 5079 - }, - { - "epoch": 0.610833884446582, - "flos": 18968155460280.0, - "grad_norm": 3.410536003161002, - "learning_rate": 1.389082437103115e-06, - "loss": 0.9984, - "num_input_tokens_seen": 108067330, - "step": 5080 - }, - { - "epoch": 0.6109541273372212, - "flos": 15065430655440.0, - "grad_norm": 7.685959576355772, - "learning_rate": 1.3883407425288172e-06, - "loss": 1.0, - "num_input_tokens_seen": 108087385, - "step": 5081 - }, - { - "epoch": 0.6110743702278603, - "flos": 14252574017160.0, - "grad_norm": 3.9112070598055597, - "learning_rate": 1.3875991407427417e-06, - "loss": 1.0071, - "num_input_tokens_seen": 108105330, - "step": 5082 - }, - { - "epoch": 0.6111946131184993, - "flos": 48909428170680.0, - "grad_norm": 0.7590710129619733, - "learning_rate": 1.38685763185739e-06, - "loss": 0.841, - "num_input_tokens_seen": 108158710, - "step": 5083 - }, - { - "epoch": 0.6113148560091385, - "flos": 14147290689360.0, - "grad_norm": 4.204019557671465, - "learning_rate": 1.3861162159852476e-06, - "loss": 0.8948, - "num_input_tokens_seen": 108176565, - "step": 5084 - }, - { - "epoch": 0.6114350988997775, - "flos": 16874048312400.0, - "grad_norm": 8.937395001841718, - "learning_rate": 1.3853748932387875e-06, - "loss": 1.0265, - "num_input_tokens_seen": 108196925, - "step": 5085 - }, - { - "epoch": 0.6115553417904166, - "flos": 17083787105880.0, - "grad_norm": 3.430775254834121, - "learning_rate": 1.3846336637304671e-06, - "loss": 0.9768, - "num_input_tokens_seen": 108214915, - "step": 5086 - }, - { - "epoch": 0.6116755846810558, - "flos": 16875489405720.0, - "grad_norm": 4.62887884848166, - "learning_rate": 1.3838925275727316e-06, - "loss": 1.0404, - "num_input_tokens_seen": 108235375, - "step": 5087 - }, - { - "epoch": 0.6117958275716948, - "flos": 13230346524120.0, - "grad_norm": 4.785236566609125, - "learning_rate": 1.3831514848780089e-06, - "loss": 1.0191, - "num_input_tokens_seen": 108254670, - "step": 5088 - }, - { - "epoch": 0.6119160704623339, - "flos": 11655941133600.0, - "grad_norm": 3.751256859279566, - "learning_rate": 1.3824105357587152e-06, - "loss": 1.146, - "num_input_tokens_seen": 108271495, - "step": 5089 - }, - { - "epoch": 0.612036313352973, - "flos": 17006166053280.0, - "grad_norm": 17.00050622522937, - "learning_rate": 1.381669680327253e-06, - "loss": 1.0564, - "num_input_tokens_seen": 108292895, - "step": 5090 - }, - { - "epoch": 0.6121565562436121, - "flos": 19204084774080.0, - "grad_norm": 3.424833368856236, - "learning_rate": 1.380928918696008e-06, - "loss": 0.9343, - "num_input_tokens_seen": 108311385, - "step": 5091 - }, - { - "epoch": 0.6122767991342511, - "flos": 11079381747840.0, - "grad_norm": 3.762551220516701, - "learning_rate": 1.3801882509773548e-06, - "loss": 0.945, - "num_input_tokens_seen": 108328965, - "step": 5092 - }, - { - "epoch": 0.6123970420248903, - "flos": 19915858718280.0, - "grad_norm": 5.750125386073456, - "learning_rate": 1.3794476772836503e-06, - "loss": 1.0338, - "num_input_tokens_seen": 108349785, - "step": 5093 - }, - { - "epoch": 0.6125172849155294, - "flos": 15247445850600.0, - "grad_norm": 2.33383400404726, - "learning_rate": 1.3787071977272402e-06, - "loss": 1.065, - "num_input_tokens_seen": 108368765, - "step": 5094 - }, - { - "epoch": 0.6126375278061684, - "flos": 11495241270720.0, - "grad_norm": 4.445953912836744, - "learning_rate": 1.3779668124204535e-06, - "loss": 0.934, - "num_input_tokens_seen": 
108384900, - "step": 5095 - }, - { - "epoch": 0.6127577706968076, - "flos": 14514724512840.0, - "grad_norm": 2.2414734663778813, - "learning_rate": 1.3772265214756074e-06, - "loss": 1.0389, - "num_input_tokens_seen": 108404380, - "step": 5096 - }, - { - "epoch": 0.6128780135874466, - "flos": 12942005508120.0, - "grad_norm": 3.193761920346822, - "learning_rate": 1.3764863250050025e-06, - "loss": 0.9678, - "num_input_tokens_seen": 108422340, - "step": 5097 - }, - { - "epoch": 0.6129982564780857, - "flos": 17740389807480.0, - "grad_norm": 5.246570738739883, - "learning_rate": 1.3757462231209272e-06, - "loss": 1.0287, - "num_input_tokens_seen": 108442365, - "step": 5098 - }, - { - "epoch": 0.6131184993687249, - "flos": 15983110698120.0, - "grad_norm": 3.53039108228078, - "learning_rate": 1.3750062159356525e-06, - "loss": 1.1107, - "num_input_tokens_seen": 108461435, - "step": 5099 - }, - { - "epoch": 0.6132387422593639, - "flos": 11235053114880.0, - "grad_norm": 3.4237622495712747, - "learning_rate": 1.3742663035614382e-06, - "loss": 1.0535, - "num_input_tokens_seen": 108478525, - "step": 5100 - }, - { - "epoch": 0.613358985150003, - "flos": 18081387818160.0, - "grad_norm": 2.8017092222226507, - "learning_rate": 1.3735264861105283e-06, - "loss": 1.0322, - "num_input_tokens_seen": 108498885, - "step": 5101 - }, - { - "epoch": 0.6134792280406421, - "flos": 15170499352320.0, - "grad_norm": 3.9106152367039404, - "learning_rate": 1.372786763695152e-06, - "loss": 1.0097, - "num_input_tokens_seen": 108517365, - "step": 5102 - }, - { - "epoch": 0.6135994709312812, - "flos": 15062119206960.0, - "grad_norm": 2.7031377891282085, - "learning_rate": 1.3720471364275257e-06, - "loss": 0.9958, - "num_input_tokens_seen": 108536730, - "step": 5103 - }, - { - "epoch": 0.6137197138219203, - "flos": 10450778598600.0, - "grad_norm": 6.489826163151806, - "learning_rate": 1.3713076044198486e-06, - "loss": 1.0001, - "num_input_tokens_seen": 108553260, - "step": 5104 - }, - { - "epoch": 0.6138399567125594, - "flos": 14252941955880.0, - "grad_norm": 4.102884831484999, - "learning_rate": 1.3705681677843086e-06, - "loss": 1.0288, - "num_input_tokens_seen": 108571575, - "step": 5105 - }, - { - "epoch": 0.6139601996031985, - "flos": 43035325973400.0, - "grad_norm": 0.8230689829286363, - "learning_rate": 1.3698288266330768e-06, - "loss": 0.8589, - "num_input_tokens_seen": 108631920, - "step": 5106 - }, - { - "epoch": 0.6140804424938375, - "flos": 16769838139200.0, - "grad_norm": 5.915305970663098, - "learning_rate": 1.3690895810783113e-06, - "loss": 0.9546, - "num_input_tokens_seen": 108650435, - "step": 5107 - }, - { - "epoch": 0.6142006853844767, - "flos": 15197088472920.0, - "grad_norm": 3.9714294759978688, - "learning_rate": 1.3683504312321543e-06, - "loss": 0.9385, - "num_input_tokens_seen": 108670490, - "step": 5108 - }, - { - "epoch": 0.6143209282751158, - "flos": 8483944665120.0, - "grad_norm": 3.3972745462129184, - "learning_rate": 1.3676113772067355e-06, - "loss": 1.0181, - "num_input_tokens_seen": 108687265, - "step": 5109 - }, - { - "epoch": 0.6144411711657548, - "flos": 17844630642240.0, - "grad_norm": 3.641732894562944, - "learning_rate": 1.3668724191141671e-06, - "loss": 0.946, - "num_input_tokens_seen": 108706255, - "step": 5110 - }, - { - "epoch": 0.6145614140563939, - "flos": 14273245456680.0, - "grad_norm": 5.561912934259332, - "learning_rate": 1.3661335570665493e-06, - "loss": 0.8985, - "num_input_tokens_seen": 108723885, - "step": 5111 - }, - { - "epoch": 0.614681656947033, - "flos": 11892636986400.0, - 
"grad_norm": 4.207880102748467, - "learning_rate": 1.3653947911759676e-06, - "loss": 0.9143, - "num_input_tokens_seen": 108741155, - "step": 5112 - }, - { - "epoch": 0.6148018998376721, - "flos": 27704530906560.0, - "grad_norm": 3.691355225548482, - "learning_rate": 1.3646561215544904e-06, - "loss": 0.9633, - "num_input_tokens_seen": 108765515, - "step": 5113 - }, - { - "epoch": 0.6149221427283111, - "flos": 16582641140400.0, - "grad_norm": 6.802247338024122, - "learning_rate": 1.363917548314176e-06, - "loss": 1.0192, - "num_input_tokens_seen": 108784500, - "step": 5114 - }, - { - "epoch": 0.6150423856189503, - "flos": 15901810258320.0, - "grad_norm": 6.3454250886496215, - "learning_rate": 1.3631790715670626e-06, - "loss": 0.9511, - "num_input_tokens_seen": 108802625, - "step": 5115 - }, - { - "epoch": 0.6151626285095894, - "flos": 13251416563920.0, - "grad_norm": 2.8196459376136174, - "learning_rate": 1.3624406914251783e-06, - "loss": 1.0835, - "num_input_tokens_seen": 108819465, - "step": 5116 - }, - { - "epoch": 0.6152828714002284, - "flos": 11210824934400.0, - "grad_norm": 3.783534849292464, - "learning_rate": 1.3617024080005335e-06, - "loss": 1.1004, - "num_input_tokens_seen": 108836085, - "step": 5117 - }, - { - "epoch": 0.6154031142908676, - "flos": 17692791970200.0, - "grad_norm": 2.818678956077489, - "learning_rate": 1.3609642214051266e-06, - "loss": 0.9663, - "num_input_tokens_seen": 108860030, - "step": 5118 - }, - { - "epoch": 0.6155233571815066, - "flos": 13649333526120.0, - "grad_norm": 11.628437805479223, - "learning_rate": 1.3602261317509385e-06, - "loss": 0.8855, - "num_input_tokens_seen": 108876410, - "step": 5119 - }, - { - "epoch": 0.6156436000721457, - "flos": 13308151546080.0, - "grad_norm": 4.5335721083944085, - "learning_rate": 1.3594881391499387e-06, - "loss": 1.0447, - "num_input_tokens_seen": 108895045, - "step": 5120 - }, - { - "epoch": 0.6157638429627849, - "flos": 12784433124360.0, - "grad_norm": 2.3454611063415833, - "learning_rate": 1.3587502437140778e-06, - "loss": 1.017, - "num_input_tokens_seen": 108912930, - "step": 5121 - }, - { - "epoch": 0.6158840858534239, - "flos": 18185628652920.0, - "grad_norm": 9.329442121338776, - "learning_rate": 1.3580124455552952e-06, - "loss": 1.0781, - "num_input_tokens_seen": 108932015, - "step": 5122 - }, - { - "epoch": 0.616004328744063, - "flos": 17526848980560.0, - "grad_norm": 4.266584910165776, - "learning_rate": 1.3572747447855148e-06, - "loss": 1.0908, - "num_input_tokens_seen": 108952145, - "step": 5123 - }, - { - "epoch": 0.6161245716347021, - "flos": 15406520650800.0, - "grad_norm": 2.8230200791977573, - "learning_rate": 1.356537141516644e-06, - "loss": 0.9122, - "num_input_tokens_seen": 108969285, - "step": 5124 - }, - { - "epoch": 0.6162448145253412, - "flos": 25108357946400.0, - "grad_norm": 6.942999507200215, - "learning_rate": 1.3557996358605775e-06, - "loss": 0.8483, - "num_input_tokens_seen": 108988925, - "step": 5125 - }, - { - "epoch": 0.6163650574159802, - "flos": 15352851824640.0, - "grad_norm": 3.3376800488543696, - "learning_rate": 1.3550622279291941e-06, - "loss": 0.9291, - "num_input_tokens_seen": 109006790, - "step": 5126 - }, - { - "epoch": 0.6164853003066194, - "flos": 17478116665560.0, - "grad_norm": 1.7151030950449278, - "learning_rate": 1.354324917834358e-06, - "loss": 1.0611, - "num_input_tokens_seen": 109027755, - "step": 5127 - }, - { - "epoch": 0.6166055431972585, - "flos": 15511711993920.0, - "grad_norm": 4.251384733914554, - "learning_rate": 1.353587705687918e-06, - "loss": 0.9972, - 
"num_input_tokens_seen": 109045650, - "step": 5128 - }, - { - "epoch": 0.6167257860878975, - "flos": 12600424927800.0, - "grad_norm": 6.711071932315499, - "learning_rate": 1.3528505916017096e-06, - "loss": 0.9288, - "num_input_tokens_seen": 109063070, - "step": 5129 - }, - { - "epoch": 0.6168460289785367, - "flos": 16501616654640.0, - "grad_norm": 4.165579056253928, - "learning_rate": 1.3521135756875514e-06, - "loss": 1.1037, - "num_input_tokens_seen": 109079105, - "step": 5130 - }, - { - "epoch": 0.6169662718691757, - "flos": 18657456618960.0, - "grad_norm": 5.173766006551877, - "learning_rate": 1.3513766580572496e-06, - "loss": 1.0867, - "num_input_tokens_seen": 109101645, - "step": 5131 - }, - { - "epoch": 0.6170865147598148, - "flos": 13492251727320.0, - "grad_norm": 3.504482804475958, - "learning_rate": 1.3506398388225924e-06, - "loss": 0.9977, - "num_input_tokens_seen": 109118685, - "step": 5132 - }, - { - "epoch": 0.617206757650454, - "flos": 12941974846560.0, - "grad_norm": 4.469515648187679, - "learning_rate": 1.349903118095355e-06, - "loss": 0.9399, - "num_input_tokens_seen": 109137540, - "step": 5133 - }, - { - "epoch": 0.617327000541093, - "flos": 12889164544080.0, - "grad_norm": 8.085387398256364, - "learning_rate": 1.349166495987298e-06, - "loss": 0.9562, - "num_input_tokens_seen": 109155825, - "step": 5134 - }, - { - "epoch": 0.6174472434317321, - "flos": 44263582211160.0, - "grad_norm": 0.9340191115521292, - "learning_rate": 1.348429972610166e-06, - "loss": 0.9064, - "num_input_tokens_seen": 109219850, - "step": 5135 - }, - { - "epoch": 0.6175674863223712, - "flos": 51014972080440.0, - "grad_norm": 0.8809446644567804, - "learning_rate": 1.3476935480756897e-06, - "loss": 0.8371, - "num_input_tokens_seen": 109276320, - "step": 5136 - }, - { - "epoch": 0.6176877292130103, - "flos": 15511252070520.0, - "grad_norm": 4.458573284406333, - "learning_rate": 1.346957222495583e-06, - "loss": 0.972, - "num_input_tokens_seen": 109293835, - "step": 5137 - }, - { - "epoch": 0.6178079721036493, - "flos": 12568194080160.0, - "grad_norm": 4.957580680248368, - "learning_rate": 1.3462209959815466e-06, - "loss": 0.9405, - "num_input_tokens_seen": 109308295, - "step": 5138 - }, - { - "epoch": 0.6179282149942885, - "flos": 16083856115040.0, - "grad_norm": 3.0462371941110633, - "learning_rate": 1.345484868645265e-06, - "loss": 0.9573, - "num_input_tokens_seen": 109326825, - "step": 5139 - }, - { - "epoch": 0.6180484578849276, - "flos": 15852618019920.0, - "grad_norm": 3.62198197582864, - "learning_rate": 1.3447488405984088e-06, - "loss": 1.0125, - "num_input_tokens_seen": 109344805, - "step": 5140 - }, - { - "epoch": 0.6181687007755666, - "flos": 25134364497360.0, - "grad_norm": 3.807247358169556, - "learning_rate": 1.3440129119526322e-06, - "loss": 0.9171, - "num_input_tokens_seen": 109366950, - "step": 5141 - }, - { - "epoch": 0.6182889436662057, - "flos": 44053542350160.0, - "grad_norm": 0.9190857604610382, - "learning_rate": 1.3432770828195762e-06, - "loss": 0.7961, - "num_input_tokens_seen": 109427655, - "step": 5142 - }, - { - "epoch": 0.6184091865568448, - "flos": 13911882622080.0, - "grad_norm": 3.2579355925580153, - "learning_rate": 1.3425413533108635e-06, - "loss": 0.9427, - "num_input_tokens_seen": 109445975, - "step": 5143 - }, - { - "epoch": 0.6185294294474839, - "flos": 16769991447000.0, - "grad_norm": 4.047228941296932, - "learning_rate": 1.341805723538105e-06, - "loss": 0.9232, - "num_input_tokens_seen": 109465800, - "step": 5144 - }, - { - "epoch": 0.618649672338123, - "flos": 
19052031471120.0, - "grad_norm": 3.86333148487783, - "learning_rate": 1.3410701936128948e-06, - "loss": 0.9967, - "num_input_tokens_seen": 109488300, - "step": 5145 - }, - { - "epoch": 0.6187699152287621, - "flos": 10207061248560.0, - "grad_norm": 11.97917530004218, - "learning_rate": 1.340334763646812e-06, - "loss": 1.0783, - "num_input_tokens_seen": 109502155, - "step": 5146 - }, - { - "epoch": 0.6188901581194012, - "flos": 14253156586800.0, - "grad_norm": 2.570118534456815, - "learning_rate": 1.3395994337514218e-06, - "loss": 0.9723, - "num_input_tokens_seen": 109522045, - "step": 5147 - }, - { - "epoch": 0.6190104010100402, - "flos": 18288857656200.0, - "grad_norm": 2.425659176615931, - "learning_rate": 1.3388642040382725e-06, - "loss": 1.0143, - "num_input_tokens_seen": 109542190, - "step": 5148 - }, - { - "epoch": 0.6191306439006794, - "flos": 21696384838200.0, - "grad_norm": 4.866352334170143, - "learning_rate": 1.3381290746188975e-06, - "loss": 1.0632, - "num_input_tokens_seen": 109561280, - "step": 5149 - }, - { - "epoch": 0.6192508867913185, - "flos": 18996798905400.0, - "grad_norm": 3.4963775690050274, - "learning_rate": 1.3373940456048152e-06, - "loss": 0.8992, - "num_input_tokens_seen": 109581025, - "step": 5150 - }, - { - "epoch": 0.6193711296819575, - "flos": 26210015524080.0, - "grad_norm": 2.8822179661960137, - "learning_rate": 1.3366591171075299e-06, - "loss": 0.8226, - "num_input_tokens_seen": 109604250, - "step": 5151 - }, - { - "epoch": 0.6194913725725967, - "flos": 17923999403760.0, - "grad_norm": 2.4741253819869833, - "learning_rate": 1.335924289238529e-06, - "loss": 1.1349, - "num_input_tokens_seen": 109623180, - "step": 5152 - }, - { - "epoch": 0.6196116154632357, - "flos": 15039362781360.0, - "grad_norm": 5.03327038875068, - "learning_rate": 1.3351895621092859e-06, - "loss": 0.9877, - "num_input_tokens_seen": 109643245, - "step": 5153 - }, - { - "epoch": 0.6197318583538748, - "flos": 11499871166280.0, - "grad_norm": 3.8425772513849776, - "learning_rate": 1.3344549358312567e-06, - "loss": 0.9901, - "num_input_tokens_seen": 109661365, - "step": 5154 - }, - { - "epoch": 0.619852101244514, - "flos": 17372710691520.0, - "grad_norm": 3.4123729304874684, - "learning_rate": 1.3337204105158852e-06, - "loss": 1.0016, - "num_input_tokens_seen": 109679955, - "step": 5155 - }, - { - "epoch": 0.619972344135153, - "flos": 11840102637960.0, - "grad_norm": 5.0061672322911885, - "learning_rate": 1.332985986274597e-06, - "loss": 0.9383, - "num_input_tokens_seen": 109697305, - "step": 5156 - }, - { - "epoch": 0.6200925870257921, - "flos": 8798598847680.0, - "grad_norm": 4.319145500302485, - "learning_rate": 1.3322516632188047e-06, - "loss": 0.9814, - "num_input_tokens_seen": 109713920, - "step": 5157 - }, - { - "epoch": 0.6202128299164312, - "flos": 18891484916040.0, - "grad_norm": 2.7700626094917333, - "learning_rate": 1.3315174414599045e-06, - "loss": 0.9046, - "num_input_tokens_seen": 109734960, - "step": 5158 - }, - { - "epoch": 0.6203330728070703, - "flos": 13308519484800.0, - "grad_norm": 3.1273192382583774, - "learning_rate": 1.3307833211092768e-06, - "loss": 0.9856, - "num_input_tokens_seen": 109753345, - "step": 5159 - }, - { - "epoch": 0.6204533156977093, - "flos": 14644542636720.0, - "grad_norm": 2.6418675315215308, - "learning_rate": 1.3300493022782873e-06, - "loss": 0.9707, - "num_input_tokens_seen": 109773635, - "step": 5160 - }, - { - "epoch": 0.6205735585883485, - "flos": 12286291991760.0, - "grad_norm": 3.796096282348919, - "learning_rate": 1.3293153850782855e-06, - 
"loss": 0.9487, - "num_input_tokens_seen": 109791675, - "step": 5161 - }, - { - "epoch": 0.6206938014789876, - "flos": 16322851584840.0, - "grad_norm": 2.628819003648382, - "learning_rate": 1.3285815696206069e-06, - "loss": 0.934, - "num_input_tokens_seen": 109812940, - "step": 5162 - }, - { - "epoch": 0.6208140443696266, - "flos": 17054131829280.0, - "grad_norm": 3.308631797670843, - "learning_rate": 1.32784785601657e-06, - "loss": 1.0007, - "num_input_tokens_seen": 109832070, - "step": 5163 - }, - { - "epoch": 0.6209342872602658, - "flos": 25027578753120.0, - "grad_norm": 3.68828173610924, - "learning_rate": 1.3271142443774798e-06, - "loss": 0.9701, - "num_input_tokens_seen": 109854025, - "step": 5164 - }, - { - "epoch": 0.6210545301509048, - "flos": 19207978792200.0, - "grad_norm": 3.1508946071808754, - "learning_rate": 1.3263807348146228e-06, - "loss": 1.0469, - "num_input_tokens_seen": 109873600, - "step": 5165 - }, - { - "epoch": 0.6211747730415439, - "flos": 23979038093520.0, - "grad_norm": 3.644351868952663, - "learning_rate": 1.3256473274392733e-06, - "loss": 0.9636, - "num_input_tokens_seen": 109894665, - "step": 5166 - }, - { - "epoch": 0.6212950159321831, - "flos": 24373704930360.0, - "grad_norm": 3.6104581454462994, - "learning_rate": 1.3249140223626873e-06, - "loss": 0.9286, - "num_input_tokens_seen": 109916005, - "step": 5167 - }, - { - "epoch": 0.6214152588228221, - "flos": 19917085180680.0, - "grad_norm": 4.358460251739278, - "learning_rate": 1.3241808196961077e-06, - "loss": 0.992, - "num_input_tokens_seen": 109936850, - "step": 5168 - }, - { - "epoch": 0.6215355017134612, - "flos": 14357612052480.0, - "grad_norm": 3.1831241565570654, - "learning_rate": 1.3234477195507608e-06, - "loss": 0.9329, - "num_input_tokens_seen": 109955400, - "step": 5169 - }, - { - "epoch": 0.6216557446041003, - "flos": 29592118724760.0, - "grad_norm": 5.919670560279206, - "learning_rate": 1.322714722037857e-06, - "loss": 0.8587, - "num_input_tokens_seen": 109976565, - "step": 5170 - }, - { - "epoch": 0.6217759874947394, - "flos": 19890434736960.0, - "grad_norm": 6.989953870280016, - "learning_rate": 1.321981827268591e-06, - "loss": 0.9933, - "num_input_tokens_seen": 109996940, - "step": 5171 - }, - { - "epoch": 0.6218962303853784, - "flos": 15461538585600.0, - "grad_norm": 4.683887361131518, - "learning_rate": 1.3212490353541426e-06, - "loss": 1.043, - "num_input_tokens_seen": 110018920, - "step": 5172 - }, - { - "epoch": 0.6220164732760175, - "flos": 15087083264880.0, - "grad_norm": 4.569209769476607, - "learning_rate": 1.3205163464056762e-06, - "loss": 1.0287, - "num_input_tokens_seen": 110035245, - "step": 5173 - }, - { - "epoch": 0.6221367161666567, - "flos": 18601641483600.0, - "grad_norm": 8.42045669078273, - "learning_rate": 1.319783760534339e-06, - "loss": 0.9469, - "num_input_tokens_seen": 110054210, - "step": 5174 - }, - { - "epoch": 0.6222569590572957, - "flos": 11520941206080.0, - "grad_norm": 2.838513762077338, - "learning_rate": 1.319051277851266e-06, - "loss": 0.9869, - "num_input_tokens_seen": 110070215, - "step": 5175 - }, - { - "epoch": 0.6223772019479348, - "flos": 12915723003120.0, - "grad_norm": 21.133869543804117, - "learning_rate": 1.3183188984675716e-06, - "loss": 1.068, - "num_input_tokens_seen": 110088300, - "step": 5176 - }, - { - "epoch": 0.6224974448385739, - "flos": 19574799384480.0, - "grad_norm": 7.528482395554617, - "learning_rate": 1.3175866224943586e-06, - "loss": 0.9551, - "num_input_tokens_seen": 110106740, - "step": 5177 - }, - { - "epoch": 
0.622617687729213, - "flos": 14042589931200.0, - "grad_norm": 4.113573605543912, - "learning_rate": 1.316854450042712e-06, - "loss": 0.9568, - "num_input_tokens_seen": 110124400, - "step": 5178 - }, - { - "epoch": 0.622737930619852, - "flos": 16376060487600.0, - "grad_norm": 4.727328555237624, - "learning_rate": 1.3161223812237024e-06, - "loss": 0.9725, - "num_input_tokens_seen": 110143475, - "step": 5179 - }, - { - "epoch": 0.6228581735104912, - "flos": 8897627217240.0, - "grad_norm": 7.035017707981361, - "learning_rate": 1.3153904161483842e-06, - "loss": 1.076, - "num_input_tokens_seen": 110158495, - "step": 5180 - }, - { - "epoch": 0.6229784164011303, - "flos": 16924773628800.0, - "grad_norm": 5.706468746342042, - "learning_rate": 1.3146585549277953e-06, - "loss": 1.0798, - "num_input_tokens_seen": 110176855, - "step": 5181 - }, - { - "epoch": 0.6230986592917693, - "flos": 15926529023760.0, - "grad_norm": 4.9318189169504025, - "learning_rate": 1.3139267976729591e-06, - "loss": 1.0089, - "num_input_tokens_seen": 110196765, - "step": 5182 - }, - { - "epoch": 0.6232189021824085, - "flos": 24632727946920.0, - "grad_norm": 3.4351071439021945, - "learning_rate": 1.3131951444948815e-06, - "loss": 0.9461, - "num_input_tokens_seen": 110215885, - "step": 5183 - }, - { - "epoch": 0.6233391450730476, - "flos": 15799869040560.0, - "grad_norm": 5.593518129796253, - "learning_rate": 1.3124635955045546e-06, - "loss": 0.9682, - "num_input_tokens_seen": 110235420, - "step": 5184 - }, - { - "epoch": 0.6234593879636866, - "flos": 14430327255480.0, - "grad_norm": 2.7497289697429195, - "learning_rate": 1.3117321508129537e-06, - "loss": 1.07, - "num_input_tokens_seen": 110253220, - "step": 5185 - }, - { - "epoch": 0.6235796308543258, - "flos": 14669598679320.0, - "grad_norm": 5.664854470493925, - "learning_rate": 1.3110008105310388e-06, - "loss": 0.9949, - "num_input_tokens_seen": 110272760, - "step": 5186 - }, - { - "epoch": 0.6236998737449648, - "flos": 18949323714360.0, - "grad_norm": 3.9938402431363365, - "learning_rate": 1.3102695747697526e-06, - "loss": 1.0088, - "num_input_tokens_seen": 110295350, - "step": 5187 - }, - { - "epoch": 0.6238201166356039, - "flos": 8927129186040.0, - "grad_norm": 29.26607547704552, - "learning_rate": 1.3095384436400237e-06, - "loss": 1.1129, - "num_input_tokens_seen": 110306600, - "step": 5188 - }, - { - "epoch": 0.623940359526243, - "flos": 7330335308640.0, - "grad_norm": 5.412293864987511, - "learning_rate": 1.3088074172527633e-06, - "loss": 1.046, - "num_input_tokens_seen": 110323450, - "step": 5189 - }, - { - "epoch": 0.6240606024168821, - "flos": 20703046082760.0, - "grad_norm": 3.7878773517027935, - "learning_rate": 1.3080764957188684e-06, - "loss": 0.9453, - "num_input_tokens_seen": 110343415, - "step": 5190 - }, - { - "epoch": 0.6241808453075212, - "flos": 15642541949280.0, - "grad_norm": 27.32028010614322, - "learning_rate": 1.3073456791492192e-06, - "loss": 0.9279, - "num_input_tokens_seen": 110362845, - "step": 5191 - }, - { - "epoch": 0.6243010881981603, - "flos": 15010044781920.0, - "grad_norm": 12.190466260622157, - "learning_rate": 1.3066149676546801e-06, - "loss": 1.0136, - "num_input_tokens_seen": 110380745, - "step": 5192 - }, - { - "epoch": 0.6244213310887994, - "flos": 15877244800680.0, - "grad_norm": 17.89227356623916, - "learning_rate": 1.3058843613460985e-06, - "loss": 0.8863, - "num_input_tokens_seen": 110398405, - "step": 5193 - }, - { - "epoch": 0.6245415739794384, - "flos": 10765524765840.0, - "grad_norm": 3.3995628861329306, - "learning_rate": 
1.3051538603343075e-06, - "loss": 0.9658, - "num_input_tokens_seen": 110416055, - "step": 5194 - }, - { - "epoch": 0.6246618168700776, - "flos": 13387121707320.0, - "grad_norm": 4.128729319322875, - "learning_rate": 1.3044234647301235e-06, - "loss": 0.9026, - "num_input_tokens_seen": 110433800, - "step": 5195 - }, - { - "epoch": 0.6247820597607167, - "flos": 10109443310760.0, - "grad_norm": 3.95338288321704, - "learning_rate": 1.303693174644347e-06, - "loss": 0.9478, - "num_input_tokens_seen": 110450995, - "step": 5196 - }, - { - "epoch": 0.6249023026513557, - "flos": 16087995425640.0, - "grad_norm": 5.186015088913781, - "learning_rate": 1.3029629901877625e-06, - "loss": 1.0319, - "num_input_tokens_seen": 110470090, - "step": 5197 - }, - { - "epoch": 0.6250225455419949, - "flos": 14383833234360.0, - "grad_norm": 3.617413705298746, - "learning_rate": 1.3022329114711376e-06, - "loss": 1.0066, - "num_input_tokens_seen": 110488520, - "step": 5198 - }, - { - "epoch": 0.6251427884326339, - "flos": 16662469825320.0, - "grad_norm": 3.5486459218155266, - "learning_rate": 1.3015029386052256e-06, - "loss": 0.9203, - "num_input_tokens_seen": 110508410, - "step": 5199 - }, - { - "epoch": 0.625263031323273, - "flos": 22617284344680.0, - "grad_norm": 5.0395865234432184, - "learning_rate": 1.3007730717007622e-06, - "loss": 0.9478, - "num_input_tokens_seen": 110528945, - "step": 5200 - }, - { - "epoch": 0.6253832742139122, - "flos": 17163462483000.0, - "grad_norm": 5.356161416388439, - "learning_rate": 1.3000433108684676e-06, - "loss": 0.9829, - "num_input_tokens_seen": 110549165, - "step": 5201 - }, - { - "epoch": 0.6255035171045512, - "flos": 19702992445680.0, - "grad_norm": 3.26409569664523, - "learning_rate": 1.2993136562190467e-06, - "loss": 1.0293, - "num_input_tokens_seen": 110568005, - "step": 5202 - }, - { - "epoch": 0.6256237599951903, - "flos": 14355557727960.0, - "grad_norm": 3.2432250787037296, - "learning_rate": 1.2985841078631871e-06, - "loss": 0.9252, - "num_input_tokens_seen": 110587045, - "step": 5203 - }, - { - "epoch": 0.6257440028858293, - "flos": 17189469033960.0, - "grad_norm": 3.3034062347938975, - "learning_rate": 1.2978546659115608e-06, - "loss": 1.0073, - "num_input_tokens_seen": 110604845, - "step": 5204 - }, - { - "epoch": 0.6258642457764685, - "flos": 11211039565320.0, - "grad_norm": 3.635425702423555, - "learning_rate": 1.2971253304748228e-06, - "loss": 1.0842, - "num_input_tokens_seen": 110622280, - "step": 5205 - }, - { - "epoch": 0.6259844886671075, - "flos": 8378998614480.0, - "grad_norm": 2.9828918926050187, - "learning_rate": 1.296396101663614e-06, - "loss": 0.9777, - "num_input_tokens_seen": 110638560, - "step": 5206 - }, - { - "epoch": 0.6261047315577466, - "flos": 11236862146920.0, - "grad_norm": 3.7475795563847747, - "learning_rate": 1.2956669795885565e-06, - "loss": 1.067, - "num_input_tokens_seen": 110654910, - "step": 5207 - }, - { - "epoch": 0.6262249744483858, - "flos": 22276562288040.0, - "grad_norm": 4.3687468390937685, - "learning_rate": 1.294937964360259e-06, - "loss": 0.9175, - "num_input_tokens_seen": 110674900, - "step": 5208 - }, - { - "epoch": 0.6263452173390248, - "flos": 19365612499080.0, - "grad_norm": 10.50110838353337, - "learning_rate": 1.2942090560893108e-06, - "loss": 0.9113, - "num_input_tokens_seen": 110694025, - "step": 5209 - }, - { - "epoch": 0.6264654602296639, - "flos": 26655591646680.0, - "grad_norm": 2.8149979412823427, - "learning_rate": 1.2934802548862882e-06, - "loss": 0.8295, - "num_input_tokens_seen": 110716530, - "step": 5210 - 
}, - { - "epoch": 0.626585703120303, - "flos": 10424036170200.0, - "grad_norm": 3.8052807605839174, - "learning_rate": 1.292751560861749e-06, - "loss": 1.0609, - "num_input_tokens_seen": 110731155, - "step": 5211 - }, - { - "epoch": 0.6267059460109421, - "flos": 15879268463640.0, - "grad_norm": 3.330158883734824, - "learning_rate": 1.2920229741262354e-06, - "loss": 1.0221, - "num_input_tokens_seen": 110748880, - "step": 5212 - }, - { - "epoch": 0.6268261889015811, - "flos": 12574234407480.0, - "grad_norm": 4.029147287274173, - "learning_rate": 1.2912944947902739e-06, - "loss": 0.9741, - "num_input_tokens_seen": 110765085, - "step": 5213 - }, - { - "epoch": 0.6269464317922203, - "flos": 23423640732240.0, - "grad_norm": 10.044012489520732, - "learning_rate": 1.2905661229643742e-06, - "loss": 0.9416, - "num_input_tokens_seen": 110784565, - "step": 5214 - }, - { - "epoch": 0.6270666746828594, - "flos": 12703592607960.0, - "grad_norm": 6.420670222298886, - "learning_rate": 1.2898378587590299e-06, - "loss": 1.048, - "num_input_tokens_seen": 110800885, - "step": 5215 - }, - { - "epoch": 0.6271869175734984, - "flos": 12364802229600.0, - "grad_norm": 4.242099824283895, - "learning_rate": 1.2891097022847173e-06, - "loss": 1.0987, - "num_input_tokens_seen": 110817950, - "step": 5216 - }, - { - "epoch": 0.6273071604641376, - "flos": 19128242091960.0, - "grad_norm": 8.737604134611706, - "learning_rate": 1.2883816536518978e-06, - "loss": 0.9034, - "num_input_tokens_seen": 110838810, - "step": 5217 - }, - { - "epoch": 0.6274274033547766, - "flos": 18548340596160.0, - "grad_norm": 3.406984162612082, - "learning_rate": 1.2876537129710155e-06, - "loss": 1.0462, - "num_input_tokens_seen": 110856260, - "step": 5218 - }, - { - "epoch": 0.6275476462454157, - "flos": 14383465295640.0, - "grad_norm": 3.943783424102803, - "learning_rate": 1.286925880352499e-06, - "loss": 0.9733, - "num_input_tokens_seen": 110874840, - "step": 5219 - }, - { - "epoch": 0.6276678891360549, - "flos": 18734127163200.0, - "grad_norm": 2.8538818959832066, - "learning_rate": 1.2861981559067592e-06, - "loss": 0.9278, - "num_input_tokens_seen": 110895165, - "step": 5220 - }, - { - "epoch": 0.6277881320266939, - "flos": 9816441737640.0, - "grad_norm": 3.1248174995537865, - "learning_rate": 1.2854705397441917e-06, - "loss": 1.0323, - "num_input_tokens_seen": 110910425, - "step": 5221 - }, - { - "epoch": 0.627908374917333, - "flos": 19257538969320.0, - "grad_norm": 3.8169945660683116, - "learning_rate": 1.2847430319751747e-06, - "loss": 0.9952, - "num_input_tokens_seen": 110928240, - "step": 5222 - }, - { - "epoch": 0.6280286178079721, - "flos": 16901005371720.0, - "grad_norm": 5.024117099004732, - "learning_rate": 1.2840156327100712e-06, - "loss": 0.9111, - "num_input_tokens_seen": 110945085, - "step": 5223 - }, - { - "epoch": 0.6281488606986112, - "flos": 18762280023360.0, - "grad_norm": 6.677438457723849, - "learning_rate": 1.2832883420592272e-06, - "loss": 0.9471, - "num_input_tokens_seen": 110963700, - "step": 5224 - }, - { - "epoch": 0.6282691035892503, - "flos": 25789556767200.0, - "grad_norm": 3.332768455961903, - "learning_rate": 1.282561160132972e-06, - "loss": 0.8626, - "num_input_tokens_seen": 110983940, - "step": 5225 - }, - { - "epoch": 0.6283893464798894, - "flos": 18889675884000.0, - "grad_norm": 2.858808951293373, - "learning_rate": 1.2818340870416186e-06, - "loss": 1.0423, - "num_input_tokens_seen": 111004795, - "step": 5226 - }, - { - "epoch": 0.6285095893705285, - "flos": 15800053009920.0, - "grad_norm": 4.159595261838533, 
- "learning_rate": 1.2811071228954626e-06, - "loss": 0.9872, - "num_input_tokens_seen": 111023150, - "step": 5227 - }, - { - "epoch": 0.6286298322611675, - "flos": 18893968502400.0, - "grad_norm": 8.932926314841442, - "learning_rate": 1.2803802678047846e-06, - "loss": 1.0385, - "num_input_tokens_seen": 111043020, - "step": 5228 - }, - { - "epoch": 0.6287500751518067, - "flos": 15481842086400.0, - "grad_norm": 4.122307794738941, - "learning_rate": 1.279653521879848e-06, - "loss": 0.9544, - "num_input_tokens_seen": 111062805, - "step": 5229 - }, - { - "epoch": 0.6288703180424458, - "flos": 14198843867880.0, - "grad_norm": 8.615522844651899, - "learning_rate": 1.2789268852308997e-06, - "loss": 1.0736, - "num_input_tokens_seen": 111077735, - "step": 5230 - }, - { - "epoch": 0.6289905609330848, - "flos": 15718967201040.0, - "grad_norm": 4.006808923067293, - "learning_rate": 1.2782003579681688e-06, - "loss": 0.9241, - "num_input_tokens_seen": 111096985, - "step": 5231 - }, - { - "epoch": 0.629110803823724, - "flos": 18157905054600.0, - "grad_norm": 2.124606713196703, - "learning_rate": 1.2774739402018701e-06, - "loss": 0.9715, - "num_input_tokens_seen": 111117540, - "step": 5232 - }, - { - "epoch": 0.629231046714363, - "flos": 14829317372280.0, - "grad_norm": 2.5890633398318266, - "learning_rate": 1.2767476320422002e-06, - "loss": 0.9675, - "num_input_tokens_seen": 111137185, - "step": 5233 - }, - { - "epoch": 0.6293512896050021, - "flos": 46575768096840.0, - "grad_norm": 0.7076738924980358, - "learning_rate": 1.2760214335993392e-06, - "loss": 0.8266, - "num_input_tokens_seen": 111203550, - "step": 5234 - }, - { - "epoch": 0.6294715324956413, - "flos": 24740770815120.0, - "grad_norm": 3.4592806120059265, - "learning_rate": 1.2752953449834514e-06, - "loss": 0.8067, - "num_input_tokens_seen": 111225720, - "step": 5235 - }, - { - "epoch": 0.6295917753862803, - "flos": 16193125445640.0, - "grad_norm": 5.272561776245602, - "learning_rate": 1.2745693663046836e-06, - "loss": 1.0229, - "num_input_tokens_seen": 111244510, - "step": 5236 - }, - { - "epoch": 0.6297120182769194, - "flos": 14803372144440.0, - "grad_norm": 5.355755798972042, - "learning_rate": 1.2738434976731662e-06, - "loss": 1.0318, - "num_input_tokens_seen": 111262415, - "step": 5237 - }, - { - "epoch": 0.6298322611675584, - "flos": 13830766151640.0, - "grad_norm": 5.417485023692598, - "learning_rate": 1.2731177391990125e-06, - "loss": 0.9758, - "num_input_tokens_seen": 111282060, - "step": 5238 - }, - { - "epoch": 0.6299525040581976, - "flos": 8877109085520.0, - "grad_norm": 8.121087173818886, - "learning_rate": 1.2723920909923203e-06, - "loss": 1.0447, - "num_input_tokens_seen": 111297525, - "step": 5239 - }, - { - "epoch": 0.6300727469488366, - "flos": 41308228935240.0, - "grad_norm": 1.0691456804027069, - "learning_rate": 1.2716665531631688e-06, - "loss": 0.8833, - "num_input_tokens_seen": 111351530, - "step": 5240 - }, - { - "epoch": 0.6301929898394757, - "flos": 16008289386960.0, - "grad_norm": 2.2459054353802186, - "learning_rate": 1.270941125821623e-06, - "loss": 0.9905, - "num_input_tokens_seen": 111371675, - "step": 5241 - }, - { - "epoch": 0.6303132327301149, - "flos": 20152585232640.0, - "grad_norm": 3.901090831302479, - "learning_rate": 1.2702158090777278e-06, - "loss": 0.9869, - "num_input_tokens_seen": 111392485, - "step": 5242 - }, - { - "epoch": 0.6304334756207539, - "flos": 17975583243840.0, - "grad_norm": 3.1351632219622516, - "learning_rate": 1.2694906030415148e-06, - "loss": 0.9736, - "num_input_tokens_seen": 
111409950, - "step": 5243 - }, - { - "epoch": 0.630553718511393, - "flos": 12777902212080.0, - "grad_norm": 6.956470567577236, - "learning_rate": 1.2687655078229958e-06, - "loss": 1.0275, - "num_input_tokens_seen": 111424000, - "step": 5244 - }, - { - "epoch": 0.6306739614020321, - "flos": 19440903273120.0, - "grad_norm": 4.731546998875048, - "learning_rate": 1.2680405235321678e-06, - "loss": 0.9342, - "num_input_tokens_seen": 111445055, - "step": 5245 - }, - { - "epoch": 0.6307942042926712, - "flos": 10844188311480.0, - "grad_norm": 3.0903966435539867, - "learning_rate": 1.267315650279011e-06, - "loss": 1.0165, - "num_input_tokens_seen": 111463245, - "step": 5246 - }, - { - "epoch": 0.6309144471833102, - "flos": 13908816466080.0, - "grad_norm": 2.9867609682125207, - "learning_rate": 1.2665908881734874e-06, - "loss": 0.9652, - "num_input_tokens_seen": 111481800, - "step": 5247 - }, - { - "epoch": 0.6310346900739494, - "flos": 12390624811200.0, - "grad_norm": 3.69365024076932, - "learning_rate": 1.2658662373255432e-06, - "loss": 1.0765, - "num_input_tokens_seen": 111499910, - "step": 5248 - }, - { - "epoch": 0.6311549329645885, - "flos": 39400828201200.0, - "grad_norm": 0.8044457674352236, - "learning_rate": 1.2651416978451063e-06, - "loss": 0.8, - "num_input_tokens_seen": 111565015, - "step": 5249 - }, - { - "epoch": 0.6312751758552275, - "flos": 29933086073880.0, - "grad_norm": 3.910383311261341, - "learning_rate": 1.2644172698420903e-06, - "loss": 0.8748, - "num_input_tokens_seen": 111586840, - "step": 5250 - }, - { - "epoch": 0.6313954187458667, - "flos": 13938379758000.0, - "grad_norm": 6.987550018076933, - "learning_rate": 1.2636929534263892e-06, - "loss": 1.0712, - "num_input_tokens_seen": 111605545, - "step": 5251 - }, - { - "epoch": 0.6315156616365057, - "flos": 16272310237800.0, - "grad_norm": 2.7856107664611223, - "learning_rate": 1.2629687487078821e-06, - "loss": 0.9955, - "num_input_tokens_seen": 111624075, - "step": 5252 - }, - { - "epoch": 0.6316359045271448, - "flos": 16869265109040.0, - "grad_norm": 6.847771564737833, - "learning_rate": 1.2622446557964293e-06, - "loss": 0.9916, - "num_input_tokens_seen": 111641800, - "step": 5253 - }, - { - "epoch": 0.631756147417784, - "flos": 23612769409320.0, - "grad_norm": 4.172614899933783, - "learning_rate": 1.261520674801876e-06, - "loss": 0.9395, - "num_input_tokens_seen": 111662115, - "step": 5254 - }, - { - "epoch": 0.631876390308423, - "flos": 22276133026200.0, - "grad_norm": 3.54123255743221, - "learning_rate": 1.2607968058340488e-06, - "loss": 0.943, - "num_input_tokens_seen": 111681530, - "step": 5255 - }, - { - "epoch": 0.6319966331990621, - "flos": 17556381610920.0, - "grad_norm": 3.5315338290766927, - "learning_rate": 1.2600730490027583e-06, - "loss": 0.9546, - "num_input_tokens_seen": 111701490, - "step": 5256 - }, - { - "epoch": 0.6321168760897012, - "flos": 12389275702560.0, - "grad_norm": 3.1433166811985047, - "learning_rate": 1.2593494044177984e-06, - "loss": 1.0338, - "num_input_tokens_seen": 111719515, - "step": 5257 - }, - { - "epoch": 0.6322371189803403, - "flos": 12966938904480.0, - "grad_norm": 4.085406237868882, - "learning_rate": 1.2586258721889448e-06, - "loss": 1.0225, - "num_input_tokens_seen": 111736585, - "step": 5258 - }, - { - "epoch": 0.6323573618709794, - "flos": 14305200350280.0, - "grad_norm": 4.0084368422077885, - "learning_rate": 1.2579024524259573e-06, - "loss": 1.041, - "num_input_tokens_seen": 111752565, - "step": 5259 - }, - { - "epoch": 0.6324776047616185, - "flos": 14221906909080.0, - 
"grad_norm": 3.068031764615778, - "learning_rate": 1.2571791452385768e-06, - "loss": 1.1379, - "num_input_tokens_seen": 111769550, - "step": 5260 - }, - { - "epoch": 0.6325978476522576, - "flos": 21988681195440.0, - "grad_norm": 2.710513738662475, - "learning_rate": 1.2564559507365301e-06, - "loss": 0.9964, - "num_input_tokens_seen": 111791675, - "step": 5261 - }, - { - "epoch": 0.6327180905428966, - "flos": 17451404898720.0, - "grad_norm": 2.819920562815701, - "learning_rate": 1.2557328690295244e-06, - "loss": 1.0076, - "num_input_tokens_seen": 111809585, - "step": 5262 - }, - { - "epoch": 0.6328383334335358, - "flos": 15323625809880.0, - "grad_norm": 2.6006056483657356, - "learning_rate": 1.255009900227251e-06, - "loss": 0.989, - "num_input_tokens_seen": 111828330, - "step": 5263 - }, - { - "epoch": 0.6329585763241748, - "flos": 16297764880680.0, - "grad_norm": 2.7658132715312718, - "learning_rate": 1.254287044439383e-06, - "loss": 1.0237, - "num_input_tokens_seen": 111847655, - "step": 5264 - }, - { - "epoch": 0.6330788192148139, - "flos": 50803332270240.0, - "grad_norm": 0.9762686935960008, - "learning_rate": 1.2535643017755776e-06, - "loss": 0.8, - "num_input_tokens_seen": 111909565, - "step": 5265 - }, - { - "epoch": 0.6331990621054531, - "flos": 15085580848440.0, - "grad_norm": 4.110435436042872, - "learning_rate": 1.2528416723454737e-06, - "loss": 0.9509, - "num_input_tokens_seen": 111925955, - "step": 5266 - }, - { - "epoch": 0.6333193049960921, - "flos": 24583842324120.0, - "grad_norm": 2.658180791689567, - "learning_rate": 1.2521191562586945e-06, - "loss": 0.9314, - "num_input_tokens_seen": 111949325, - "step": 5267 - }, - { - "epoch": 0.6334395478867312, - "flos": 12991473700560.0, - "grad_norm": 6.26279756672818, - "learning_rate": 1.2513967536248445e-06, - "loss": 0.9976, - "num_input_tokens_seen": 111965365, - "step": 5268 - }, - { - "epoch": 0.6335597907773702, - "flos": 16796028659520.0, - "grad_norm": 2.7754746383418123, - "learning_rate": 1.2506744645535117e-06, - "loss": 1.0426, - "num_input_tokens_seen": 111985515, - "step": 5269 - }, - { - "epoch": 0.6336800336680094, - "flos": 16140407127840.0, - "grad_norm": 4.669910643088229, - "learning_rate": 1.249952289154267e-06, - "loss": 0.8228, - "num_input_tokens_seen": 112005275, - "step": 5270 - }, - { - "epoch": 0.6338002765586485, - "flos": 16795691382360.0, - "grad_norm": 5.163506228312717, - "learning_rate": 1.2492302275366635e-06, - "loss": 0.9946, - "num_input_tokens_seen": 112024670, - "step": 5271 - }, - { - "epoch": 0.6339205194492875, - "flos": 18867471366480.0, - "grad_norm": 14.997300477300751, - "learning_rate": 1.2485082798102377e-06, - "loss": 0.8637, - "num_input_tokens_seen": 112044805, - "step": 5272 - }, - { - "epoch": 0.6340407623399267, - "flos": 13144968096840.0, - "grad_norm": 6.757349040285406, - "learning_rate": 1.2477864460845084e-06, - "loss": 0.9085, - "num_input_tokens_seen": 112060925, - "step": 5273 - }, - { - "epoch": 0.6341610052305657, - "flos": 12548227856520.0, - "grad_norm": 4.172259480907844, - "learning_rate": 1.2470647264689776e-06, - "loss": 0.9449, - "num_input_tokens_seen": 112079125, - "step": 5274 - }, - { - "epoch": 0.6342812481212048, - "flos": 16770114093240.0, - "grad_norm": 5.894545163895938, - "learning_rate": 1.2463431210731282e-06, - "loss": 0.9356, - "num_input_tokens_seen": 112098430, - "step": 5275 - }, - { - "epoch": 0.634401491011844, - "flos": 12626799417480.0, - "grad_norm": 5.621937235837074, - "learning_rate": 1.2456216300064289e-06, - "loss": 0.9827, - 
"num_input_tokens_seen": 112115700, - "step": 5276 - }, - { - "epoch": 0.634521733902483, - "flos": 15167831796600.0, - "grad_norm": 3.9201718465934285, - "learning_rate": 1.244900253378328e-06, - "loss": 0.9953, - "num_input_tokens_seen": 112135475, - "step": 5277 - }, - { - "epoch": 0.6346419767931221, - "flos": 11709150036360.0, - "grad_norm": 3.564198942135679, - "learning_rate": 1.2441789912982583e-06, - "loss": 0.9156, - "num_input_tokens_seen": 112152280, - "step": 5278 - }, - { - "epoch": 0.6347622196837612, - "flos": 17318857896000.0, - "grad_norm": 2.8723321229169896, - "learning_rate": 1.2434578438756346e-06, - "loss": 0.8675, - "num_input_tokens_seen": 112172430, - "step": 5279 - }, - { - "epoch": 0.6348824625744003, - "flos": 46190034870120.0, - "grad_norm": 21.178049145872713, - "learning_rate": 1.242736811219855e-06, - "loss": 1.0077, - "num_input_tokens_seen": 112198110, - "step": 5280 - }, - { - "epoch": 0.6350027054650393, - "flos": 20388759838920.0, - "grad_norm": 3.5464254085925644, - "learning_rate": 1.2420158934402988e-06, - "loss": 1.0468, - "num_input_tokens_seen": 112218445, - "step": 5281 - }, - { - "epoch": 0.6351229483556785, - "flos": 16349961951960.0, - "grad_norm": 3.2237594288410394, - "learning_rate": 1.2412950906463286e-06, - "loss": 1.0612, - "num_input_tokens_seen": 112235470, - "step": 5282 - }, - { - "epoch": 0.6352431912463176, - "flos": 15585132412800.0, - "grad_norm": 5.632324536228047, - "learning_rate": 1.2405744029472902e-06, - "loss": 1.1184, - "num_input_tokens_seen": 112254675, - "step": 5283 - }, - { - "epoch": 0.6353634341369566, - "flos": 9689751092880.0, - "grad_norm": 5.756412082175359, - "learning_rate": 1.2398538304525108e-06, - "loss": 0.9742, - "num_input_tokens_seen": 112273020, - "step": 5284 - }, - { - "epoch": 0.6354836770275958, - "flos": 13700457442800.0, - "grad_norm": 3.1851504773295125, - "learning_rate": 1.2391333732713016e-06, - "loss": 0.9831, - "num_input_tokens_seen": 112290545, - "step": 5285 - }, - { - "epoch": 0.6356039199182348, - "flos": 15351226761960.0, - "grad_norm": 8.78143505838142, - "learning_rate": 1.2384130315129543e-06, - "loss": 0.9992, - "num_input_tokens_seen": 112308590, - "step": 5286 - }, - { - "epoch": 0.6357241628088739, - "flos": 12835005132960.0, - "grad_norm": 4.09802395722669, - "learning_rate": 1.2376928052867447e-06, - "loss": 0.9495, - "num_input_tokens_seen": 112327430, - "step": 5287 - }, - { - "epoch": 0.6358444056995131, - "flos": 17738826067920.0, - "grad_norm": 6.642412730409299, - "learning_rate": 1.2369726947019299e-06, - "loss": 1.0128, - "num_input_tokens_seen": 112347625, - "step": 5288 - }, - { - "epoch": 0.6359646485901521, - "flos": 16558811560200.0, - "grad_norm": 3.2412712548997433, - "learning_rate": 1.2362526998677511e-06, - "loss": 0.8898, - "num_input_tokens_seen": 112363710, - "step": 5289 - }, - { - "epoch": 0.6360848914807912, - "flos": 14830022588160.0, - "grad_norm": 2.83479189261979, - "learning_rate": 1.2355328208934301e-06, - "loss": 1.0771, - "num_input_tokens_seen": 112382305, - "step": 5290 - }, - { - "epoch": 0.6362051343714303, - "flos": 13099731199680.0, - "grad_norm": 4.029958828699214, - "learning_rate": 1.2348130578881728e-06, - "loss": 0.9567, - "num_input_tokens_seen": 112400245, - "step": 5291 - }, - { - "epoch": 0.6363253772620694, - "flos": 17346888109920.0, - "grad_norm": 3.3302082830593536, - "learning_rate": 1.2340934109611664e-06, - "loss": 0.9668, - "num_input_tokens_seen": 112420725, - "step": 5292 - }, - { - "epoch": 0.6364456201527084, - 
"flos": 18474214961400.0, - "grad_norm": 4.769642821560507, - "learning_rate": 1.2333738802215798e-06, - "loss": 0.8904, - "num_input_tokens_seen": 112440665, - "step": 5293 - }, - { - "epoch": 0.6365658630433476, - "flos": 14724463306320.0, - "grad_norm": 3.2955439624195195, - "learning_rate": 1.2326544657785668e-06, - "loss": 1.0238, - "num_input_tokens_seen": 112460075, - "step": 5294 - }, - { - "epoch": 0.6366861059339867, - "flos": 15222849731400.0, - "grad_norm": 4.543076859136598, - "learning_rate": 1.2319351677412608e-06, - "loss": 0.9768, - "num_input_tokens_seen": 112476840, - "step": 5295 - }, - { - "epoch": 0.6368063488246257, - "flos": 15820632464760.0, - "grad_norm": 3.7162028332593477, - "learning_rate": 1.2312159862187796e-06, - "loss": 0.9712, - "num_input_tokens_seen": 112494970, - "step": 5296 - }, - { - "epoch": 0.6369265917152649, - "flos": 15932078766120.0, - "grad_norm": 3.5151365491843443, - "learning_rate": 1.2304969213202217e-06, - "loss": 0.9843, - "num_input_tokens_seen": 112515950, - "step": 5297 - }, - { - "epoch": 0.6370468346059039, - "flos": 17583062716200.0, - "grad_norm": 5.033746893526326, - "learning_rate": 1.2297779731546692e-06, - "loss": 1.0072, - "num_input_tokens_seen": 112534765, - "step": 5298 - }, - { - "epoch": 0.637167077496543, - "flos": 17998033053840.0, - "grad_norm": 3.0679671087180482, - "learning_rate": 1.2290591418311853e-06, - "loss": 1.003, - "num_input_tokens_seen": 112551880, - "step": 5299 - }, - { - "epoch": 0.637287320387182, - "flos": 19704954785520.0, - "grad_norm": 3.173850660977683, - "learning_rate": 1.2283404274588172e-06, - "loss": 0.9399, - "num_input_tokens_seen": 112570545, - "step": 5300 - }, - { - "epoch": 0.6374075632778212, - "flos": 45224935411440.0, - "grad_norm": 0.7482297190815028, - "learning_rate": 1.227621830146592e-06, - "loss": 0.7775, - "num_input_tokens_seen": 112625630, - "step": 5301 - }, - { - "epoch": 0.6375278061684603, - "flos": 18186824453760.0, - "grad_norm": 4.569758663691636, - "learning_rate": 1.2269033500035217e-06, - "loss": 1.0191, - "num_input_tokens_seen": 112645485, - "step": 5302 - }, - { - "epoch": 0.6376480490590993, - "flos": 18264445506360.0, - "grad_norm": 3.2104044747951415, - "learning_rate": 1.2261849871385988e-06, - "loss": 0.9663, - "num_input_tokens_seen": 112666310, - "step": 5303 - }, - { - "epoch": 0.6377682919497385, - "flos": 22483664187360.0, - "grad_norm": 7.784056130719472, - "learning_rate": 1.2254667416607972e-06, - "loss": 0.8355, - "num_input_tokens_seen": 112687630, - "step": 5304 - }, - { - "epoch": 0.6378885348403776, - "flos": 16376336441640.0, - "grad_norm": 2.9937851427918423, - "learning_rate": 1.2247486136790756e-06, - "loss": 1.0628, - "num_input_tokens_seen": 112706830, - "step": 5305 - }, - { - "epoch": 0.6380087777310166, - "flos": 13255678520760.0, - "grad_norm": 5.433294138542736, - "learning_rate": 1.2240306033023726e-06, - "loss": 1.0245, - "num_input_tokens_seen": 112724375, - "step": 5306 - }, - { - "epoch": 0.6381290206216558, - "flos": 16586504496960.0, - "grad_norm": 3.0161594055710075, - "learning_rate": 1.223312710639611e-06, - "loss": 0.9527, - "num_input_tokens_seen": 112742815, - "step": 5307 - }, - { - "epoch": 0.6382492635122948, - "flos": 13387612292280.0, - "grad_norm": 4.375993234423441, - "learning_rate": 1.2225949357996928e-06, - "loss": 1.0783, - "num_input_tokens_seen": 112760660, - "step": 5308 - }, - { - "epoch": 0.6383695064029339, - "flos": 19811617883520.0, - "grad_norm": 3.552530359384097, - "learning_rate": 
1.221877278891505e-06, - "loss": 1.0341, - "num_input_tokens_seen": 112779635, - "step": 5309 - }, - { - "epoch": 0.638489749293573, - "flos": 18788501205240.0, - "grad_norm": 4.042875810374991, - "learning_rate": 1.221159740023915e-06, - "loss": 0.923, - "num_input_tokens_seen": 112799185, - "step": 5310 - }, - { - "epoch": 0.6386099921842121, - "flos": 17058516432360.0, - "grad_norm": 10.76905128203672, - "learning_rate": 1.2204423193057735e-06, - "loss": 0.9523, - "num_input_tokens_seen": 112817735, - "step": 5311 - }, - { - "epoch": 0.6387302350748512, - "flos": 51373023466560.0, - "grad_norm": 0.9325782967492426, - "learning_rate": 1.2197250168459122e-06, - "loss": 0.919, - "num_input_tokens_seen": 112873855, - "step": 5312 - }, - { - "epoch": 0.6388504779654903, - "flos": 10264777400640.0, - "grad_norm": 7.234680828443859, - "learning_rate": 1.2190078327531454e-06, - "loss": 0.9692, - "num_input_tokens_seen": 112889820, - "step": 5313 - }, - { - "epoch": 0.6389707208561294, - "flos": 15878256632160.0, - "grad_norm": 2.7033634852347883, - "learning_rate": 1.2182907671362697e-06, - "loss": 0.9551, - "num_input_tokens_seen": 112910235, - "step": 5314 - }, - { - "epoch": 0.6390909637467684, - "flos": 13780194143040.0, - "grad_norm": 4.155917276243889, - "learning_rate": 1.2175738201040626e-06, - "loss": 1.0073, - "num_input_tokens_seen": 112926995, - "step": 5315 - }, - { - "epoch": 0.6392112066374076, - "flos": 17131262296920.0, - "grad_norm": 5.443620766508312, - "learning_rate": 1.2168569917652855e-06, - "loss": 1.019, - "num_input_tokens_seen": 112946570, - "step": 5316 - }, - { - "epoch": 0.6393314495280467, - "flos": 19075370466360.0, - "grad_norm": 3.0476026399772476, - "learning_rate": 1.2161402822286797e-06, - "loss": 0.8769, - "num_input_tokens_seen": 112966975, - "step": 5317 - }, - { - "epoch": 0.6394516924186857, - "flos": 14378712753840.0, - "grad_norm": 33.56815340246708, - "learning_rate": 1.2154236916029703e-06, - "loss": 1.009, - "num_input_tokens_seen": 112984670, - "step": 5318 - }, - { - "epoch": 0.6395719353093249, - "flos": 13019810530080.0, - "grad_norm": 4.220001035211084, - "learning_rate": 1.2147072199968627e-06, - "loss": 0.9493, - "num_input_tokens_seen": 113003025, - "step": 5319 - }, - { - "epoch": 0.6396921781999639, - "flos": 12443067174960.0, - "grad_norm": 8.649908834870024, - "learning_rate": 1.2139908675190454e-06, - "loss": 0.938, - "num_input_tokens_seen": 113021955, - "step": 5320 - }, - { - "epoch": 0.639812421090603, - "flos": 15091345221720.0, - "grad_norm": 3.295867621383715, - "learning_rate": 1.2132746342781883e-06, - "loss": 0.9823, - "num_input_tokens_seen": 113042835, - "step": 5321 - }, - { - "epoch": 0.6399326639812422, - "flos": 7853992407240.0, - "grad_norm": 22.047732503478038, - "learning_rate": 1.2125585203829442e-06, - "loss": 1.0126, - "num_input_tokens_seen": 113058195, - "step": 5322 - }, - { - "epoch": 0.6400529068718812, - "flos": 17002394681400.0, - "grad_norm": 3.6022666524141735, - "learning_rate": 1.211842525941946e-06, - "loss": 0.9639, - "num_input_tokens_seen": 113077710, - "step": 5323 - }, - { - "epoch": 0.6401731497625203, - "flos": 31453945284480.0, - "grad_norm": 5.441928331861438, - "learning_rate": 1.2111266510638105e-06, - "loss": 1.0273, - "num_input_tokens_seen": 113100355, - "step": 5324 - }, - { - "epoch": 0.6402933926531594, - "flos": 14667299062320.0, - "grad_norm": 3.6471637732445177, - "learning_rate": 1.2104108958571346e-06, - "loss": 1.0439, - "num_input_tokens_seen": 113118345, - "step": 5325 - }, - { 
- "epoch": 0.6404136355437985, - "flos": 17687272889400.0, - "grad_norm": 2.3425663311502243, - "learning_rate": 1.2096952604304975e-06, - "loss": 0.979, - "num_input_tokens_seen": 113138495, - "step": 5326 - }, - { - "epoch": 0.6405338784344375, - "flos": 28910398657440.0, - "grad_norm": 2.4547517084554182, - "learning_rate": 1.2089797448924616e-06, - "loss": 0.9241, - "num_input_tokens_seen": 113162090, - "step": 5327 - }, - { - "epoch": 0.6406541213250767, - "flos": 14828826787320.0, - "grad_norm": 4.73397211053882, - "learning_rate": 1.2082643493515692e-06, - "loss": 0.8857, - "num_input_tokens_seen": 113180130, - "step": 5328 - }, - { - "epoch": 0.6407743642157158, - "flos": 16561049854080.0, - "grad_norm": 59.827531255215185, - "learning_rate": 1.207549073916346e-06, - "loss": 1.047, - "num_input_tokens_seen": 113200785, - "step": 5329 - }, - { - "epoch": 0.6408946071063548, - "flos": 10607829735840.0, - "grad_norm": 4.06485363198215, - "learning_rate": 1.2068339186952976e-06, - "loss": 1.0097, - "num_input_tokens_seen": 113218045, - "step": 5330 - }, - { - "epoch": 0.6410148499969939, - "flos": 16009393203120.0, - "grad_norm": 5.982584154977405, - "learning_rate": 1.2061188837969136e-06, - "loss": 0.9517, - "num_input_tokens_seen": 113237375, - "step": 5331 - }, - { - "epoch": 0.641135092887633, - "flos": 8745972514560.0, - "grad_norm": 4.0020834741918385, - "learning_rate": 1.2054039693296631e-06, - "loss": 1.0534, - "num_input_tokens_seen": 113255090, - "step": 5332 - }, - { - "epoch": 0.6412553357782721, - "flos": 15721021525560.0, - "grad_norm": 2.892131021320165, - "learning_rate": 1.2046891754019992e-06, - "loss": 1.0433, - "num_input_tokens_seen": 113275420, - "step": 5333 - }, - { - "epoch": 0.6413755786689112, - "flos": 11237414055000.0, - "grad_norm": 6.40830402475045, - "learning_rate": 1.2039745021223548e-06, - "loss": 1.0449, - "num_input_tokens_seen": 113292560, - "step": 5334 - }, - { - "epoch": 0.6414958215595503, - "flos": 48723636055560.0, - "grad_norm": 0.8736843708805597, - "learning_rate": 1.2032599495991456e-06, - "loss": 0.8502, - "num_input_tokens_seen": 113357020, - "step": 5335 - }, - { - "epoch": 0.6416160644501894, - "flos": 31506050371080.0, - "grad_norm": 3.0154705106861632, - "learning_rate": 1.2025455179407685e-06, - "loss": 0.9184, - "num_input_tokens_seen": 113377900, - "step": 5336 - }, - { - "epoch": 0.6417363073408284, - "flos": 14881974366960.0, - "grad_norm": 4.774015650723153, - "learning_rate": 1.2018312072556022e-06, - "loss": 0.9653, - "num_input_tokens_seen": 113396120, - "step": 5337 - }, - { - "epoch": 0.6418565502314676, - "flos": 15956521577520.0, - "grad_norm": 3.138662151898443, - "learning_rate": 1.2011170176520077e-06, - "loss": 0.976, - "num_input_tokens_seen": 113416755, - "step": 5338 - }, - { - "epoch": 0.6419767931221066, - "flos": 17818164167880.0, - "grad_norm": 3.8319248569662787, - "learning_rate": 1.2004029492383256e-06, - "loss": 1.0327, - "num_input_tokens_seen": 113437815, - "step": 5339 - }, - { - "epoch": 0.6420970360127457, - "flos": 13806691278960.0, - "grad_norm": 4.670937536587609, - "learning_rate": 1.1996890021228814e-06, - "loss": 0.9704, - "num_input_tokens_seen": 113454310, - "step": 5340 - }, - { - "epoch": 0.6422172789033849, - "flos": 28857588354960.0, - "grad_norm": 5.119712893661655, - "learning_rate": 1.1989751764139785e-06, - "loss": 0.9245, - "num_input_tokens_seen": 113477680, - "step": 5341 - }, - { - "epoch": 0.6423375217940239, - "flos": 19706457201960.0, - "grad_norm": 4.645612816929992, - 
"learning_rate": 1.1982614722199044e-06, - "loss": 1.0436, - "num_input_tokens_seen": 113498575, - "step": 5342 - }, - { - "epoch": 0.642457764684663, - "flos": 13020147807240.0, - "grad_norm": 4.0841452641222435, - "learning_rate": 1.1975478896489276e-06, - "loss": 1.0035, - "num_input_tokens_seen": 113516130, - "step": 5343 - }, - { - "epoch": 0.6425780075753021, - "flos": 14012137454040.0, - "grad_norm": 2.601388553153869, - "learning_rate": 1.1968344288092981e-06, - "loss": 0.9885, - "num_input_tokens_seen": 113532430, - "step": 5344 - }, - { - "epoch": 0.6426982504659412, - "flos": 14593572027840.0, - "grad_norm": 2.6748790250143863, - "learning_rate": 1.1961210898092468e-06, - "loss": 0.8723, - "num_input_tokens_seen": 113551100, - "step": 5345 - }, - { - "epoch": 0.6428184933565803, - "flos": 12647532180120.0, - "grad_norm": 3.6544498022183305, - "learning_rate": 1.1954078727569874e-06, - "loss": 1.018, - "num_input_tokens_seen": 113568120, - "step": 5346 - }, - { - "epoch": 0.6429387362472194, - "flos": 15958085317080.0, - "grad_norm": 2.4835135447849135, - "learning_rate": 1.1946947777607141e-06, - "loss": 1.0113, - "num_input_tokens_seen": 113588975, - "step": 5347 - }, - { - "epoch": 0.6430589791378585, - "flos": 17607505527600.0, - "grad_norm": 4.059977995714212, - "learning_rate": 1.1939818049286024e-06, - "loss": 1.0258, - "num_input_tokens_seen": 113606855, - "step": 5348 - }, - { - "epoch": 0.6431792220284975, - "flos": 17714720533680.0, - "grad_norm": 4.842091725655668, - "learning_rate": 1.1932689543688101e-06, - "loss": 0.9731, - "num_input_tokens_seen": 113627680, - "step": 5349 - }, - { - "epoch": 0.6432994649191367, - "flos": 14933987468880.0, - "grad_norm": 16.70671660987917, - "learning_rate": 1.1925562261894756e-06, - "loss": 0.9507, - "num_input_tokens_seen": 113646480, - "step": 5350 - }, - { - "epoch": 0.6434197078097758, - "flos": 22014657084840.0, - "grad_norm": 5.9005826371438665, - "learning_rate": 1.1918436204987207e-06, - "loss": 1.0068, - "num_input_tokens_seen": 113668060, - "step": 5351 - }, - { - "epoch": 0.6435399507004148, - "flos": 10607707089600.0, - "grad_norm": 3.468602907653375, - "learning_rate": 1.191131137404645e-06, - "loss": 1.0357, - "num_input_tokens_seen": 113684520, - "step": 5352 - }, - { - "epoch": 0.643660193591054, - "flos": 14121744061800.0, - "grad_norm": 6.504593619833578, - "learning_rate": 1.190418777015333e-06, - "loss": 0.9957, - "num_input_tokens_seen": 113703150, - "step": 5353 - }, - { - "epoch": 0.643780436481693, - "flos": 17162849251800.0, - "grad_norm": 2.3771631528897093, - "learning_rate": 1.1897065394388487e-06, - "loss": 0.9605, - "num_input_tokens_seen": 113723310, - "step": 5354 - }, - { - "epoch": 0.6439006793723321, - "flos": 16455122633520.0, - "grad_norm": 14.36696340397552, - "learning_rate": 1.1889944247832385e-06, - "loss": 0.9932, - "num_input_tokens_seen": 113743270, - "step": 5355 - }, - { - "epoch": 0.6440209222629713, - "flos": 16791552071760.0, - "grad_norm": 4.059301202703793, - "learning_rate": 1.1882824331565283e-06, - "loss": 0.929, - "num_input_tokens_seen": 113762450, - "step": 5356 - }, - { - "epoch": 0.6441411651536103, - "flos": 11708996728560.0, - "grad_norm": 4.339934944684849, - "learning_rate": 1.1875705646667287e-06, - "loss": 1.1168, - "num_input_tokens_seen": 113780060, - "step": 5357 - }, - { - "epoch": 0.6442614080442494, - "flos": 18080897233200.0, - "grad_norm": 4.633906269098569, - "learning_rate": 1.1868588194218282e-06, - "loss": 0.9756, - "num_input_tokens_seen": 113800160, - 
"step": 5358 - }, - { - "epoch": 0.6443816509348885, - "flos": 20152983832920.0, - "grad_norm": 3.2831608612408396, - "learning_rate": 1.1861471975297979e-06, - "loss": 0.9606, - "num_input_tokens_seen": 113821575, - "step": 5359 - }, - { - "epoch": 0.6445018938255276, - "flos": 26187657698760.0, - "grad_norm": 3.5694890314295136, - "learning_rate": 1.185435699098591e-06, - "loss": 0.9417, - "num_input_tokens_seen": 113847490, - "step": 5360 - }, - { - "epoch": 0.6446221367161666, - "flos": 10240365250800.0, - "grad_norm": 6.51158723200644, - "learning_rate": 1.1847243242361403e-06, - "loss": 0.9957, - "num_input_tokens_seen": 113865800, - "step": 5361 - }, - { - "epoch": 0.6447423796068057, - "flos": 17504951078640.0, - "grad_norm": 2.520305473190133, - "learning_rate": 1.1840130730503624e-06, - "loss": 1.005, - "num_input_tokens_seen": 113886800, - "step": 5362 - }, - { - "epoch": 0.6448626224974449, - "flos": 17819697245880.0, - "grad_norm": 2.742294787620301, - "learning_rate": 1.1833019456491518e-06, - "loss": 0.9708, - "num_input_tokens_seen": 113908050, - "step": 5363 - }, - { - "epoch": 0.6449828653880839, - "flos": 16010895619560.0, - "grad_norm": 6.866841417082768, - "learning_rate": 1.1825909421403871e-06, - "loss": 0.9875, - "num_input_tokens_seen": 113926865, - "step": 5364 - }, - { - "epoch": 0.645103108278723, - "flos": 18284595699360.0, - "grad_norm": 2.7345680386659357, - "learning_rate": 1.181880062631926e-06, - "loss": 0.9829, - "num_input_tokens_seen": 113945920, - "step": 5365 - }, - { - "epoch": 0.6452233511693621, - "flos": 19546033293120.0, - "grad_norm": 3.0140742134115803, - "learning_rate": 1.1811693072316093e-06, - "loss": 1.0814, - "num_input_tokens_seen": 113963320, - "step": 5366 - }, - { - "epoch": 0.6453435940600012, - "flos": 13623418959840.0, - "grad_norm": 5.747835166420641, - "learning_rate": 1.1804586760472574e-06, - "loss": 1.0526, - "num_input_tokens_seen": 113979505, - "step": 5367 - }, - { - "epoch": 0.6454638369506402, - "flos": 18315477438360.0, - "grad_norm": 6.237908836527834, - "learning_rate": 1.1797481691866736e-06, - "loss": 1.0146, - "num_input_tokens_seen": 113996450, - "step": 5368 - }, - { - "epoch": 0.6455840798412794, - "flos": 14902093898400.0, - "grad_norm": 3.953549207762033, - "learning_rate": 1.1790377867576393e-06, - "loss": 1.0538, - "num_input_tokens_seen": 114013920, - "step": 5369 - }, - { - "epoch": 0.6457043227319185, - "flos": 18552755860800.0, - "grad_norm": 2.338826106274129, - "learning_rate": 1.1783275288679203e-06, - "loss": 0.9822, - "num_input_tokens_seen": 114030805, - "step": 5370 - }, - { - "epoch": 0.6458245656225575, - "flos": 43211699441520.0, - "grad_norm": 0.9488851643956256, - "learning_rate": 1.177617395625262e-06, - "loss": 0.8832, - "num_input_tokens_seen": 114088500, - "step": 5371 - }, - { - "epoch": 0.6459448085131967, - "flos": 16403477470320.0, - "grad_norm": 2.9624274136306314, - "learning_rate": 1.1769073871373908e-06, - "loss": 0.9835, - "num_input_tokens_seen": 114108425, - "step": 5372 - }, - { - "epoch": 0.6460650514038357, - "flos": 16058953380240.0, - "grad_norm": 4.694544470710187, - "learning_rate": 1.176197503512015e-06, - "loss": 1.0666, - "num_input_tokens_seen": 114127860, - "step": 5373 - }, - { - "epoch": 0.6461852942944748, - "flos": 14383863895920.0, - "grad_norm": 10.650324899794908, - "learning_rate": 1.1754877448568223e-06, - "loss": 1.0311, - "num_input_tokens_seen": 114147035, - "step": 5374 - }, - { - "epoch": 0.646305537185114, - "flos": 16611928478280.0, - "grad_norm": 
3.423292867860281, - "learning_rate": 1.1747781112794837e-06, - "loss": 1.1259, - "num_input_tokens_seen": 114163250, - "step": 5375 - }, - { - "epoch": 0.646425780075753, - "flos": 17265955608840.0, - "grad_norm": 3.0103764690808528, - "learning_rate": 1.1740686028876487e-06, - "loss": 1.0542, - "num_input_tokens_seen": 114181835, - "step": 5376 - }, - { - "epoch": 0.6465460229663921, - "flos": 14777150962560.0, - "grad_norm": 4.767464104663801, - "learning_rate": 1.1733592197889507e-06, - "loss": 0.9667, - "num_input_tokens_seen": 114198465, - "step": 5377 - }, - { - "epoch": 0.6466662658570312, - "flos": 16242256360920.0, - "grad_norm": 13.418548704715436, - "learning_rate": 1.1726499620910014e-06, - "loss": 0.9523, - "num_input_tokens_seen": 114218465, - "step": 5378 - }, - { - "epoch": 0.6467865087476703, - "flos": 10817629852440.0, - "grad_norm": 5.947420205298148, - "learning_rate": 1.1719408299013955e-06, - "loss": 0.9853, - "num_input_tokens_seen": 114236910, - "step": 5379 - }, - { - "epoch": 0.6469067516383094, - "flos": 13885784086440.0, - "grad_norm": 4.833078355750325, - "learning_rate": 1.1712318233277067e-06, - "loss": 0.9674, - "num_input_tokens_seen": 114255650, - "step": 5380 - }, - { - "epoch": 0.6470269945289485, - "flos": 46606870014840.0, - "grad_norm": 0.740730300692879, - "learning_rate": 1.1705229424774916e-06, - "loss": 0.8211, - "num_input_tokens_seen": 114309640, - "step": 5381 - }, - { - "epoch": 0.6471472374195876, - "flos": 21879227895480.0, - "grad_norm": 2.8944854526783517, - "learning_rate": 1.1698141874582867e-06, - "loss": 0.8655, - "num_input_tokens_seen": 114330405, - "step": 5382 - }, - { - "epoch": 0.6472674803102266, - "flos": 14567166876600.0, - "grad_norm": 4.328781353591579, - "learning_rate": 1.169105558377609e-06, - "loss": 0.945, - "num_input_tokens_seen": 114350215, - "step": 5383 - }, - { - "epoch": 0.6473877232008658, - "flos": 17580732437640.0, - "grad_norm": 2.4409760363135846, - "learning_rate": 1.1683970553429587e-06, - "loss": 1.0048, - "num_input_tokens_seen": 114371390, - "step": 5384 - }, - { - "epoch": 0.6475079660915048, - "flos": 11234807822400.0, - "grad_norm": 3.1761543458289037, - "learning_rate": 1.1676886784618128e-06, - "loss": 1.0472, - "num_input_tokens_seen": 114387775, - "step": 5385 - }, - { - "epoch": 0.6476282089821439, - "flos": 12309998925720.0, - "grad_norm": 5.065490969391449, - "learning_rate": 1.1669804278416332e-06, - "loss": 1.049, - "num_input_tokens_seen": 114402220, - "step": 5386 - }, - { - "epoch": 0.6477484518727831, - "flos": 14331697486200.0, - "grad_norm": 4.050630336735003, - "learning_rate": 1.1662723035898602e-06, - "loss": 0.9389, - "num_input_tokens_seen": 114421700, - "step": 5387 - }, - { - "epoch": 0.6478686947634221, - "flos": 18080437309800.0, - "grad_norm": 5.871250010915378, - "learning_rate": 1.165564305813915e-06, - "loss": 1.0494, - "num_input_tokens_seen": 114440420, - "step": 5388 - }, - { - "epoch": 0.6479889376540612, - "flos": 14252144755320.0, - "grad_norm": 2.827026828397029, - "learning_rate": 1.1648564346212019e-06, - "loss": 1.0393, - "num_input_tokens_seen": 114459260, - "step": 5389 - }, - { - "epoch": 0.6481091805447003, - "flos": 19048658699520.0, - "grad_norm": 5.106995007854805, - "learning_rate": 1.164148690119104e-06, - "loss": 0.9887, - "num_input_tokens_seen": 114480260, - "step": 5390 - }, - { - "epoch": 0.6482294234353394, - "flos": 17032509881400.0, - "grad_norm": 3.6367761789739204, - "learning_rate": 1.163441072414985e-06, - "loss": 0.9705, - 
"num_input_tokens_seen": 114500185, - "step": 5391 - }, - { - "epoch": 0.6483496663259785, - "flos": 18654513109200.0, - "grad_norm": 3.7757948941679427, - "learning_rate": 1.16273358161619e-06, - "loss": 0.9209, - "num_input_tokens_seen": 114520240, - "step": 5392 - }, - { - "epoch": 0.6484699092166175, - "flos": 14854802676720.0, - "grad_norm": 3.880512131781993, - "learning_rate": 1.1620262178300446e-06, - "loss": 1.0743, - "num_input_tokens_seen": 114538575, - "step": 5393 - }, - { - "epoch": 0.6485901521072567, - "flos": 23587345428000.0, - "grad_norm": 5.853224636681984, - "learning_rate": 1.1613189811638563e-06, - "loss": 0.9718, - "num_input_tokens_seen": 114560020, - "step": 5394 - }, - { - "epoch": 0.6487103949978957, - "flos": 15826580807400.0, - "grad_norm": 5.3056781725781, - "learning_rate": 1.1606118717249117e-06, - "loss": 1.0008, - "num_input_tokens_seen": 114579840, - "step": 5395 - }, - { - "epoch": 0.6488306378885348, - "flos": 16298224804080.0, - "grad_norm": 4.5463258601759176, - "learning_rate": 1.1599048896204787e-06, - "loss": 0.8922, - "num_input_tokens_seen": 114599440, - "step": 5396 - }, - { - "epoch": 0.648950880779174, - "flos": 14462466118440.0, - "grad_norm": 5.037857739640769, - "learning_rate": 1.1591980349578061e-06, - "loss": 1.0331, - "num_input_tokens_seen": 114617830, - "step": 5397 - }, - { - "epoch": 0.649071123669813, - "flos": 42895236226920.0, - "grad_norm": 0.7787269669091643, - "learning_rate": 1.158491307844123e-06, - "loss": 0.8042, - "num_input_tokens_seen": 114677470, - "step": 5398 - }, - { - "epoch": 0.6491913665604521, - "flos": 14512762173000.0, - "grad_norm": 4.109624006610338, - "learning_rate": 1.1577847083866387e-06, - "loss": 1.0719, - "num_input_tokens_seen": 114696225, - "step": 5399 - }, - { - "epoch": 0.6493116094510912, - "flos": 11997950975760.0, - "grad_norm": 5.0786611420699925, - "learning_rate": 1.1570782366925453e-06, - "loss": 0.9463, - "num_input_tokens_seen": 114714460, - "step": 5400 - }, - { - "epoch": 0.6494318523417303, - "flos": 13331275910400.0, - "grad_norm": 3.6808402489172654, - "learning_rate": 1.1563718928690132e-06, - "loss": 0.9889, - "num_input_tokens_seen": 114731615, - "step": 5401 - }, - { - "epoch": 0.6495520952323693, - "flos": 13460235510600.0, - "grad_norm": 4.779593785474911, - "learning_rate": 1.1556656770231942e-06, - "loss": 0.948, - "num_input_tokens_seen": 114747530, - "step": 5402 - }, - { - "epoch": 0.6496723381230085, - "flos": 16164911262360.0, - "grad_norm": 2.214580648091059, - "learning_rate": 1.1549595892622207e-06, - "loss": 0.9882, - "num_input_tokens_seen": 114766020, - "step": 5403 - }, - { - "epoch": 0.6497925810136476, - "flos": 44485161914880.0, - "grad_norm": 0.9450374234575208, - "learning_rate": 1.1542536296932047e-06, - "loss": 0.862, - "num_input_tokens_seen": 114829275, - "step": 5404 - }, - { - "epoch": 0.6499128239042866, - "flos": 14304740426880.0, - "grad_norm": 3.6394140497124177, - "learning_rate": 1.1535477984232414e-06, - "loss": 0.9229, - "num_input_tokens_seen": 114848870, - "step": 5405 - }, - { - "epoch": 0.6500330667949258, - "flos": 17399207827440.0, - "grad_norm": 4.261302280793502, - "learning_rate": 1.152842095559404e-06, - "loss": 0.9912, - "num_input_tokens_seen": 114869250, - "step": 5406 - }, - { - "epoch": 0.6501533096855648, - "flos": 18127759193040.0, - "grad_norm": 3.136567488618209, - "learning_rate": 1.1521365212087474e-06, - "loss": 0.9998, - "num_input_tokens_seen": 114888955, - "step": 5407 - }, - { - "epoch": 0.6502735525762039, - "flos": 
32028971592240.0, - "grad_norm": 2.814061896765187, - "learning_rate": 1.1514310754783062e-06, - "loss": 0.9213, - "num_input_tokens_seen": 114911625, - "step": 5408 - }, - { - "epoch": 0.6503937954668431, - "flos": 20414919697680.0, - "grad_norm": 3.8256105222460794, - "learning_rate": 1.1507257584750964e-06, - "loss": 0.9432, - "num_input_tokens_seen": 114931525, - "step": 5409 - }, - { - "epoch": 0.6505140383574821, - "flos": 14855078630760.0, - "grad_norm": 3.3915321085963694, - "learning_rate": 1.150020570306113e-06, - "loss": 0.9862, - "num_input_tokens_seen": 114950385, - "step": 5410 - }, - { - "epoch": 0.6506342812481212, - "flos": 14619977179080.0, - "grad_norm": 8.1373844542848, - "learning_rate": 1.1493155110783338e-06, - "loss": 0.9677, - "num_input_tokens_seen": 114968630, - "step": 5411 - }, - { - "epoch": 0.6507545241387603, - "flos": 21541633317960.0, - "grad_norm": 3.0763033206043464, - "learning_rate": 1.1486105808987155e-06, - "loss": 0.9313, - "num_input_tokens_seen": 114989840, - "step": 5412 - }, - { - "epoch": 0.6508747670293994, - "flos": 12128198361480.0, - "grad_norm": 8.122884152087988, - "learning_rate": 1.1479057798741947e-06, - "loss": 1.04, - "num_input_tokens_seen": 115007615, - "step": 5413 - }, - { - "epoch": 0.6509950099200384, - "flos": 49094233567800.0, - "grad_norm": 0.810983783672686, - "learning_rate": 1.14720110811169e-06, - "loss": 0.81, - "num_input_tokens_seen": 115064565, - "step": 5414 - }, - { - "epoch": 0.6511152528106776, - "flos": 15879391109880.0, - "grad_norm": 5.030575535270736, - "learning_rate": 1.146496565718098e-06, - "loss": 0.9893, - "num_input_tokens_seen": 115084855, - "step": 5415 - }, - { - "epoch": 0.6512354957013167, - "flos": 14567442830640.0, - "grad_norm": 6.405373415637062, - "learning_rate": 1.1457921528002996e-06, - "loss": 0.9825, - "num_input_tokens_seen": 115103010, - "step": 5416 - }, - { - "epoch": 0.6513557385919557, - "flos": 23057954617680.0, - "grad_norm": 5.327560618649565, - "learning_rate": 1.1450878694651522e-06, - "loss": 0.9515, - "num_input_tokens_seen": 115123295, - "step": 5417 - }, - { - "epoch": 0.6514759814825949, - "flos": 8509613938920.0, - "grad_norm": 6.407656333823063, - "learning_rate": 1.1443837158194954e-06, - "loss": 0.8501, - "num_input_tokens_seen": 115138160, - "step": 5418 - }, - { - "epoch": 0.651596224373234, - "flos": 16007952109800.0, - "grad_norm": 3.6383624597962996, - "learning_rate": 1.1436796919701484e-06, - "loss": 0.9707, - "num_input_tokens_seen": 115156595, - "step": 5419 - }, - { - "epoch": 0.651716467263873, - "flos": 19811464575720.0, - "grad_norm": 4.114430613842339, - "learning_rate": 1.1429757980239115e-06, - "loss": 0.842, - "num_input_tokens_seen": 115176740, - "step": 5420 - }, - { - "epoch": 0.6518367101545122, - "flos": 17294169792120.0, - "grad_norm": 4.266767746999587, - "learning_rate": 1.1422720340875636e-06, - "loss": 1.0214, - "num_input_tokens_seen": 115195210, - "step": 5421 - }, - { - "epoch": 0.6519569530451512, - "flos": 14200836869280.0, - "grad_norm": 3.23991001522238, - "learning_rate": 1.1415684002678671e-06, - "loss": 1.0167, - "num_input_tokens_seen": 115213690, - "step": 5422 - }, - { - "epoch": 0.6520771959357903, - "flos": 15324300364200.0, - "grad_norm": 6.147904834673449, - "learning_rate": 1.1408648966715617e-06, - "loss": 1.0054, - "num_input_tokens_seen": 115230930, - "step": 5423 - }, - { - "epoch": 0.6521974388264293, - "flos": 16140499112520.0, - "grad_norm": 3.626225231550247, - "learning_rate": 1.1401615234053683e-06, - "loss": 
0.9502, - "num_input_tokens_seen": 115249470, - "step": 5424 - }, - { - "epoch": 0.6523176817170685, - "flos": 16350299229120.0, - "grad_norm": 8.892262607995267, - "learning_rate": 1.1394582805759885e-06, - "loss": 0.9786, - "num_input_tokens_seen": 115268470, - "step": 5425 - }, - { - "epoch": 0.6524379246077076, - "flos": 15405294188400.0, - "grad_norm": 3.49001604879892, - "learning_rate": 1.1387551682901022e-06, - "loss": 0.9864, - "num_input_tokens_seen": 115288795, - "step": 5426 - }, - { - "epoch": 0.6525581674983466, - "flos": 13754432884560.0, - "grad_norm": 5.830670248406635, - "learning_rate": 1.138052186654373e-06, - "loss": 0.9346, - "num_input_tokens_seen": 115305985, - "step": 5427 - }, - { - "epoch": 0.6526784103889858, - "flos": 12155032774560.0, - "grad_norm": 3.323927977560917, - "learning_rate": 1.1373493357754417e-06, - "loss": 1.1003, - "num_input_tokens_seen": 115324610, - "step": 5428 - }, - { - "epoch": 0.6527986532796248, - "flos": 13465631945160.0, - "grad_norm": 4.285395265240501, - "learning_rate": 1.1366466157599303e-06, - "loss": 0.9982, - "num_input_tokens_seen": 115343605, - "step": 5429 - }, - { - "epoch": 0.6529188961702639, - "flos": 10050470034720.0, - "grad_norm": 3.2503205654798646, - "learning_rate": 1.1359440267144412e-06, - "loss": 0.9818, - "num_input_tokens_seen": 115360780, - "step": 5430 - }, - { - "epoch": 0.653039139060903, - "flos": 26181556048320.0, - "grad_norm": 6.194570797637505, - "learning_rate": 1.1352415687455556e-06, - "loss": 0.9739, - "num_input_tokens_seen": 115381760, - "step": 5431 - }, - { - "epoch": 0.6531593819515421, - "flos": 18056209129320.0, - "grad_norm": 6.843981037856052, - "learning_rate": 1.1345392419598362e-06, - "loss": 0.857, - "num_input_tokens_seen": 115400360, - "step": 5432 - }, - { - "epoch": 0.6532796248421812, - "flos": 15039117488880.0, - "grad_norm": 5.899942895645262, - "learning_rate": 1.1338370464638263e-06, - "loss": 0.9415, - "num_input_tokens_seen": 115419480, - "step": 5433 - }, - { - "epoch": 0.6533998677328203, - "flos": 12521884028400.0, - "grad_norm": 6.491740627952971, - "learning_rate": 1.1331349823640474e-06, - "loss": 0.8676, - "num_input_tokens_seen": 115436630, - "step": 5434 - }, - { - "epoch": 0.6535201106234594, - "flos": 20229010484400.0, - "grad_norm": 4.622074954935076, - "learning_rate": 1.132433049767003e-06, - "loss": 1.0022, - "num_input_tokens_seen": 115454265, - "step": 5435 - }, - { - "epoch": 0.6536403535140984, - "flos": 16769807477640.0, - "grad_norm": 3.629914865257326, - "learning_rate": 1.1317312487791748e-06, - "loss": 1.0346, - "num_input_tokens_seen": 115475635, - "step": 5436 - }, - { - "epoch": 0.6537605964047376, - "flos": 15327213212400.0, - "grad_norm": 7.782550749008237, - "learning_rate": 1.1310295795070253e-06, - "loss": 0.9412, - "num_input_tokens_seen": 115495295, - "step": 5437 - }, - { - "epoch": 0.6538808392953767, - "flos": 19103615311200.0, - "grad_norm": 6.765100049098787, - "learning_rate": 1.1303280420569982e-06, - "loss": 1.0243, - "num_input_tokens_seen": 115516900, - "step": 5438 - }, - { - "epoch": 0.6540010821860157, - "flos": 21909557726400.0, - "grad_norm": 3.408536650944226, - "learning_rate": 1.1296266365355158e-06, - "loss": 1.0054, - "num_input_tokens_seen": 115540005, - "step": 5439 - }, - { - "epoch": 0.6541213250766549, - "flos": 19156210982760.0, - "grad_norm": 4.623723034229626, - "learning_rate": 1.1289253630489806e-06, - "loss": 0.9476, - "num_input_tokens_seen": 115560775, - "step": 5440 - }, - { - "epoch": 0.6542415679672939, - 
"flos": 13597412408880.0, - "grad_norm": 56.314528959008435, - "learning_rate": 1.1282242217037753e-06, - "loss": 0.9688, - "num_input_tokens_seen": 115577995, - "step": 5441 - }, - { - "epoch": 0.654361810857933, - "flos": 34439296662240.0, - "grad_norm": 4.1485209483573735, - "learning_rate": 1.127523212606262e-06, - "loss": 0.8533, - "num_input_tokens_seen": 115600540, - "step": 5442 - }, - { - "epoch": 0.6544820537485722, - "flos": 19182033564360.0, - "grad_norm": 7.564403704169827, - "learning_rate": 1.1268223358627835e-06, - "loss": 0.9496, - "num_input_tokens_seen": 115622750, - "step": 5443 - }, - { - "epoch": 0.6546022966392112, - "flos": 14829072079800.0, - "grad_norm": 8.49247947548898, - "learning_rate": 1.126121591579663e-06, - "loss": 0.9314, - "num_input_tokens_seen": 115641675, - "step": 5444 - }, - { - "epoch": 0.6547225395298503, - "flos": 17740052530320.0, - "grad_norm": 5.1053557379778844, - "learning_rate": 1.1254209798632018e-06, - "loss": 0.9168, - "num_input_tokens_seen": 115662415, - "step": 5445 - }, - { - "epoch": 0.6548427824204894, - "flos": 16036350262440.0, - "grad_norm": 4.853286903564485, - "learning_rate": 1.124720500819683e-06, - "loss": 1.0786, - "num_input_tokens_seen": 115680290, - "step": 5446 - }, - { - "epoch": 0.6549630253111285, - "flos": 13072835463480.0, - "grad_norm": 3.132318700367801, - "learning_rate": 1.1240201545553682e-06, - "loss": 1.0482, - "num_input_tokens_seen": 115697810, - "step": 5447 - }, - { - "epoch": 0.6550832682017675, - "flos": 17919522816000.0, - "grad_norm": 4.077171945104387, - "learning_rate": 1.1233199411764987e-06, - "loss": 0.9644, - "num_input_tokens_seen": 115716965, - "step": 5448 - }, - { - "epoch": 0.6552035110924067, - "flos": 16167272202480.0, - "grad_norm": 3.5341087180288, - "learning_rate": 1.1226198607892978e-06, - "loss": 0.9176, - "num_input_tokens_seen": 115737245, - "step": 5449 - }, - { - "epoch": 0.6553237539830458, - "flos": 15485214858000.0, - "grad_norm": 6.49948352815918, - "learning_rate": 1.1219199134999664e-06, - "loss": 1.0278, - "num_input_tokens_seen": 115755465, - "step": 5450 - }, - { - "epoch": 0.6554439968736848, - "flos": 14829838618800.0, - "grad_norm": 3.9111982818724584, - "learning_rate": 1.1212200994146863e-06, - "loss": 0.9987, - "num_input_tokens_seen": 115772940, - "step": 5451 - }, - { - "epoch": 0.655564239764324, - "flos": 11416731032880.0, - "grad_norm": 13.44153566306489, - "learning_rate": 1.120520418639618e-06, - "loss": 0.9836, - "num_input_tokens_seen": 115791195, - "step": 5452 - }, - { - "epoch": 0.655684482654963, - "flos": 21070173290640.0, - "grad_norm": 4.1337589557505705, - "learning_rate": 1.119820871280903e-06, - "loss": 1.0552, - "num_input_tokens_seen": 115811990, - "step": 5453 - }, - { - "epoch": 0.6558047255456021, - "flos": 21227623028160.0, - "grad_norm": 2.9495159075253548, - "learning_rate": 1.1191214574446614e-06, - "loss": 0.9406, - "num_input_tokens_seen": 115831955, - "step": 5454 - }, - { - "epoch": 0.6559249684362413, - "flos": 20702739467160.0, - "grad_norm": 3.025734514328098, - "learning_rate": 1.118422177236995e-06, - "loss": 1.0305, - "num_input_tokens_seen": 115853500, - "step": 5455 - }, - { - "epoch": 0.6560452113268803, - "flos": 14353564726560.0, - "grad_norm": 3.940437795259627, - "learning_rate": 1.1177230307639835e-06, - "loss": 1.0705, - "num_input_tokens_seen": 115870760, - "step": 5456 - }, - { - "epoch": 0.6561654542175194, - "flos": 17818194829440.0, - "grad_norm": 16.27058585642595, - "learning_rate": 1.1170240181316865e-06, - 
"loss": 1.0118, - "num_input_tokens_seen": 115891925, - "step": 5457 - }, - { - "epoch": 0.6562856971081584, - "flos": 16245169209120.0, - "grad_norm": 3.0818214490141242, - "learning_rate": 1.1163251394461442e-06, - "loss": 1.0244, - "num_input_tokens_seen": 115910125, - "step": 5458 - }, - { - "epoch": 0.6564059399987976, - "flos": 13380866749080.0, - "grad_norm": 2.8580220674873433, - "learning_rate": 1.1156263948133746e-06, - "loss": 1.0481, - "num_input_tokens_seen": 115926500, - "step": 5459 - }, - { - "epoch": 0.6565261828894366, - "flos": 18133554227880.0, - "grad_norm": 11.775648328422834, - "learning_rate": 1.1149277843393787e-06, - "loss": 1.0034, - "num_input_tokens_seen": 115947380, - "step": 5460 - }, - { - "epoch": 0.6566464257800757, - "flos": 13964784909240.0, - "grad_norm": 3.8528648171054694, - "learning_rate": 1.1142293081301342e-06, - "loss": 0.8561, - "num_input_tokens_seen": 115964980, - "step": 5461 - }, - { - "epoch": 0.6567666686707149, - "flos": 16717733052600.0, - "grad_norm": 2.671363558535479, - "learning_rate": 1.1135309662915995e-06, - "loss": 0.8955, - "num_input_tokens_seen": 115984865, - "step": 5462 - }, - { - "epoch": 0.6568869115613539, - "flos": 23377882588560.0, - "grad_norm": 7.78246399015483, - "learning_rate": 1.112832758929712e-06, - "loss": 0.8217, - "num_input_tokens_seen": 116007195, - "step": 5463 - }, - { - "epoch": 0.657007154451993, - "flos": 13072774140360.0, - "grad_norm": 5.344886346648893, - "learning_rate": 1.11213468615039e-06, - "loss": 0.9731, - "num_input_tokens_seen": 116026345, - "step": 5464 - }, - { - "epoch": 0.6571273973426321, - "flos": 17898330129960.0, - "grad_norm": 3.8420539153256352, - "learning_rate": 1.1114367480595292e-06, - "loss": 0.9867, - "num_input_tokens_seen": 116047145, - "step": 5465 - }, - { - "epoch": 0.6572476402332712, - "flos": 12416845993080.0, - "grad_norm": 5.789331015512737, - "learning_rate": 1.1107389447630086e-06, - "loss": 1.0419, - "num_input_tokens_seen": 116065565, - "step": 5466 - }, - { - "epoch": 0.6573678831239103, - "flos": 12045671459280.0, - "grad_norm": 2.9620090448008316, - "learning_rate": 1.1100412763666818e-06, - "loss": 1.013, - "num_input_tokens_seen": 116080545, - "step": 5467 - }, - { - "epoch": 0.6574881260145494, - "flos": 17001566819280.0, - "grad_norm": 25.569022318887164, - "learning_rate": 1.1093437429763865e-06, - "loss": 1.0203, - "num_input_tokens_seen": 116100530, - "step": 5468 - }, - { - "epoch": 0.6576083689051885, - "flos": 7881072112800.0, - "grad_norm": 8.445240228645439, - "learning_rate": 1.1086463446979361e-06, - "loss": 0.9618, - "num_input_tokens_seen": 116118600, - "step": 5469 - }, - { - "epoch": 0.6577286117958275, - "flos": 15956889516240.0, - "grad_norm": 3.9471868613801835, - "learning_rate": 1.1079490816371277e-06, - "loss": 0.9935, - "num_input_tokens_seen": 116138085, - "step": 5470 - }, - { - "epoch": 0.6578488546864667, - "flos": 15537687883320.0, - "grad_norm": 4.228889769460889, - "learning_rate": 1.1072519538997352e-06, - "loss": 0.9517, - "num_input_tokens_seen": 116156945, - "step": 5471 - }, - { - "epoch": 0.6579690975771058, - "flos": 16738097876520.0, - "grad_norm": 4.6649191627162985, - "learning_rate": 1.1065549615915095e-06, - "loss": 1.0432, - "num_input_tokens_seen": 116176495, - "step": 5472 - }, - { - "epoch": 0.6580893404677448, - "flos": 23350925529240.0, - "grad_norm": 18.46732723739331, - "learning_rate": 1.105858104818187e-06, - "loss": 0.9909, - "num_input_tokens_seen": 116197370, - "step": 5473 - }, - { - "epoch": 
0.658209583358384, - "flos": 11237015454720.0, - "grad_norm": 16.300091814589955, - "learning_rate": 1.105161383685478e-06, - "loss": 0.9683, - "num_input_tokens_seen": 116213475, - "step": 5474 - }, - { - "epoch": 0.658329826249023, - "flos": 44881331168160.0, - "grad_norm": 0.787025352518033, - "learning_rate": 1.1044647982990771e-06, - "loss": 0.8268, - "num_input_tokens_seen": 116275080, - "step": 5475 - }, - { - "epoch": 0.6584500691396621, - "flos": 22616916405960.0, - "grad_norm": 4.296357168726158, - "learning_rate": 1.1037683487646536e-06, - "loss": 0.8588, - "num_input_tokens_seen": 116295085, - "step": 5476 - }, - { - "epoch": 0.6585703120303013, - "flos": 13046675604720.0, - "grad_norm": 3.9360729235119742, - "learning_rate": 1.1030720351878583e-06, - "loss": 0.9908, - "num_input_tokens_seen": 116312925, - "step": 5477 - }, - { - "epoch": 0.6586905549209403, - "flos": 41728135784040.0, - "grad_norm": 0.8025254026296269, - "learning_rate": 1.102375857674323e-06, - "loss": 0.8416, - "num_input_tokens_seen": 116374560, - "step": 5478 - }, - { - "epoch": 0.6588107978115794, - "flos": 16191776337000.0, - "grad_norm": 5.216193572378376, - "learning_rate": 1.1016798163296561e-06, - "loss": 1.131, - "num_input_tokens_seen": 116393480, - "step": 5479 - }, - { - "epoch": 0.6589310407022185, - "flos": 14671836973200.0, - "grad_norm": 3.0053881894949384, - "learning_rate": 1.1009839112594471e-06, - "loss": 0.8811, - "num_input_tokens_seen": 116411225, - "step": 5480 - }, - { - "epoch": 0.6590512835928576, - "flos": 18238837555680.0, - "grad_norm": 33.49870092805372, - "learning_rate": 1.1002881425692638e-06, - "loss": 0.9446, - "num_input_tokens_seen": 116431375, - "step": 5481 - }, - { - "epoch": 0.6591715264834966, - "flos": 16869142462800.0, - "grad_norm": 5.985815447561479, - "learning_rate": 1.0995925103646532e-06, - "loss": 0.9933, - "num_input_tokens_seen": 116449695, - "step": 5482 - }, - { - "epoch": 0.6592917693741358, - "flos": 25528632733920.0, - "grad_norm": 5.101818598209192, - "learning_rate": 1.0988970147511437e-06, - "loss": 0.8988, - "num_input_tokens_seen": 116471295, - "step": 5483 - }, - { - "epoch": 0.6594120122647749, - "flos": 15195923333640.0, - "grad_norm": 6.066493341394321, - "learning_rate": 1.0982016558342405e-06, - "loss": 1.0286, - "num_input_tokens_seen": 116489985, - "step": 5484 - }, - { - "epoch": 0.6595322551554139, - "flos": 13725973408800.0, - "grad_norm": 2.570296452712776, - "learning_rate": 1.0975064337194291e-06, - "loss": 0.9415, - "num_input_tokens_seen": 116507750, - "step": 5485 - }, - { - "epoch": 0.6596524980460531, - "flos": 11919072799200.0, - "grad_norm": 2.9815604634043624, - "learning_rate": 1.0968113485121743e-06, - "loss": 0.9312, - "num_input_tokens_seen": 116527060, - "step": 5486 - }, - { - "epoch": 0.6597727409366921, - "flos": 15484693611480.0, - "grad_norm": 7.10372640041638, - "learning_rate": 1.0961164003179185e-06, - "loss": 1.0215, - "num_input_tokens_seen": 116545290, - "step": 5487 - }, - { - "epoch": 0.6598929838273312, - "flos": 16873067142480.0, - "grad_norm": 5.932990359276802, - "learning_rate": 1.0954215892420884e-06, - "loss": 1.0536, - "num_input_tokens_seen": 116565710, - "step": 5488 - }, - { - "epoch": 0.6600132267179702, - "flos": 14173818486840.0, - "grad_norm": 6.942196193005333, - "learning_rate": 1.094726915390082e-06, - "loss": 0.9285, - "num_input_tokens_seen": 116583765, - "step": 5489 - }, - { - "epoch": 0.6601334696086094, - "flos": 15875282460840.0, - "grad_norm": 3.5941510847428906, - 
"learning_rate": 1.0940323788672836e-06, - "loss": 0.9266, - "num_input_tokens_seen": 116602660, - "step": 5490 - }, - { - "epoch": 0.6602537124992485, - "flos": 18290881319160.0, - "grad_norm": 2.9993767090856345, - "learning_rate": 1.0933379797790522e-06, - "loss": 0.9707, - "num_input_tokens_seen": 116621795, - "step": 5491 - }, - { - "epoch": 0.6603739553898875, - "flos": 18395367446400.0, - "grad_norm": 4.3542202789559115, - "learning_rate": 1.0926437182307293e-06, - "loss": 0.9376, - "num_input_tokens_seen": 116640325, - "step": 5492 - }, - { - "epoch": 0.6604941982805267, - "flos": 17476522264440.0, - "grad_norm": 2.369396827977571, - "learning_rate": 1.0919495943276338e-06, - "loss": 0.9972, - "num_input_tokens_seen": 116661065, - "step": 5493 - }, - { - "epoch": 0.6606144411711657, - "flos": 9349580944320.0, - "grad_norm": 5.168211039589178, - "learning_rate": 1.0912556081750611e-06, - "loss": 0.9689, - "num_input_tokens_seen": 116678715, - "step": 5494 - }, - { - "epoch": 0.6607346840618048, - "flos": 17897103667560.0, - "grad_norm": 5.951386605906138, - "learning_rate": 1.0905617598782909e-06, - "loss": 0.9882, - "num_input_tokens_seen": 116698640, - "step": 5495 - }, - { - "epoch": 0.660854926952444, - "flos": 12494436384120.0, - "grad_norm": 4.096822735695305, - "learning_rate": 1.0898680495425775e-06, - "loss": 1.0363, - "num_input_tokens_seen": 116716650, - "step": 5496 - }, - { - "epoch": 0.660975169843083, - "flos": 11918122290840.0, - "grad_norm": 4.93319785129677, - "learning_rate": 1.0891744772731594e-06, - "loss": 1.0336, - "num_input_tokens_seen": 116734185, - "step": 5497 - }, - { - "epoch": 0.6610954127337221, - "flos": 19129775169960.0, - "grad_norm": 3.379435415121228, - "learning_rate": 1.088481043175248e-06, - "loss": 0.8821, - "num_input_tokens_seen": 116754475, - "step": 5498 - }, - { - "epoch": 0.6612156556243612, - "flos": 18838459982640.0, - "grad_norm": 4.548567099839593, - "learning_rate": 1.0877877473540368e-06, - "loss": 0.988, - "num_input_tokens_seen": 116774780, - "step": 5499 - }, - { - "epoch": 0.6613358985150003, - "flos": 14042559269640.0, - "grad_norm": 3.839813697984512, - "learning_rate": 1.0870945899147002e-06, - "loss": 0.9547, - "num_input_tokens_seen": 116791145, - "step": 5500 - }, - { - "epoch": 0.6614561414056394, - "flos": 19101591648240.0, - "grad_norm": 4.178732276214797, - "learning_rate": 1.0864015709623879e-06, - "loss": 0.9765, - "num_input_tokens_seen": 116811735, - "step": 5501 - }, - { - "epoch": 0.6615763842962785, - "flos": 16272586191840.0, - "grad_norm": 8.72387457204814, - "learning_rate": 1.0857086906022313e-06, - "loss": 1.0218, - "num_input_tokens_seen": 116829790, - "step": 5502 - }, - { - "epoch": 0.6616966271869176, - "flos": 17634799864080.0, - "grad_norm": 4.83726470927049, - "learning_rate": 1.0850159489393388e-06, - "loss": 0.9581, - "num_input_tokens_seen": 116848770, - "step": 5503 - }, - { - "epoch": 0.6618168700775566, - "flos": 12181407264240.0, - "grad_norm": 4.46439197086856, - "learning_rate": 1.0843233460787992e-06, - "loss": 1.0438, - "num_input_tokens_seen": 116865705, - "step": 5504 - }, - { - "epoch": 0.6619371129681958, - "flos": 18106811799480.0, - "grad_norm": 7.278296399585868, - "learning_rate": 1.0836308821256805e-06, - "loss": 1.0039, - "num_input_tokens_seen": 116886225, - "step": 5505 - }, - { - "epoch": 0.6620573558588349, - "flos": 12783789231600.0, - "grad_norm": 5.859299804204991, - "learning_rate": 1.0829385571850282e-06, - "loss": 1.0122, - "num_input_tokens_seen": 116902925, - 
"step": 5506 - }, - { - "epoch": 0.6621775987494739, - "flos": 12600700881840.0, - "grad_norm": 13.333436075450507, - "learning_rate": 1.0822463713618679e-06, - "loss": 1.0521, - "num_input_tokens_seen": 116919500, - "step": 5507 - }, - { - "epoch": 0.6622978416401131, - "flos": 12389980918440.0, - "grad_norm": 14.54895856872822, - "learning_rate": 1.0815543247612034e-06, - "loss": 1.0804, - "num_input_tokens_seen": 116936290, - "step": 5508 - }, - { - "epoch": 0.6624180845307521, - "flos": 15376160158320.0, - "grad_norm": 3.009112347225523, - "learning_rate": 1.0808624174880168e-06, - "loss": 1.058, - "num_input_tokens_seen": 116956660, - "step": 5509 - }, - { - "epoch": 0.6625383274213912, - "flos": 16927410522960.0, - "grad_norm": 5.266225175018073, - "learning_rate": 1.080170649647272e-06, - "loss": 1.0244, - "num_input_tokens_seen": 116976185, - "step": 5510 - }, - { - "epoch": 0.6626585703120303, - "flos": 23724982249680.0, - "grad_norm": 3.8984976784871788, - "learning_rate": 1.0794790213439068e-06, - "loss": 0.8939, - "num_input_tokens_seen": 117002805, - "step": 5511 - }, - { - "epoch": 0.6627788132026694, - "flos": 15690477063720.0, - "grad_norm": 3.8872734862446134, - "learning_rate": 1.078787532682843e-06, - "loss": 1.0096, - "num_input_tokens_seen": 117020000, - "step": 5512 - }, - { - "epoch": 0.6628990560933085, - "flos": 25815716625960.0, - "grad_norm": 5.4581111639492415, - "learning_rate": 1.0780961837689773e-06, - "loss": 0.9915, - "num_input_tokens_seen": 117039230, - "step": 5513 - }, - { - "epoch": 0.6630192989839476, - "flos": 13123652764560.0, - "grad_norm": 3.8756153479488065, - "learning_rate": 1.0774049747071883e-06, - "loss": 0.9294, - "num_input_tokens_seen": 117056830, - "step": 5514 - }, - { - "epoch": 0.6631395418745867, - "flos": 25554148699920.0, - "grad_norm": 4.838883644176103, - "learning_rate": 1.076713905602332e-06, - "loss": 0.9108, - "num_input_tokens_seen": 117077125, - "step": 5515 - }, - { - "epoch": 0.6632597847652257, - "flos": 14226475481520.0, - "grad_norm": 4.369614692653788, - "learning_rate": 1.07602297655924e-06, - "loss": 1.0369, - "num_input_tokens_seen": 117095165, - "step": 5516 - }, - { - "epoch": 0.6633800276558649, - "flos": 15064878747360.0, - "grad_norm": 5.0687435879178775, - "learning_rate": 1.0753321876827292e-06, - "loss": 1.032, - "num_input_tokens_seen": 117114170, - "step": 5517 - }, - { - "epoch": 0.663500270546504, - "flos": 17058025847400.0, - "grad_norm": 4.831954079630841, - "learning_rate": 1.0746415390775893e-06, - "loss": 0.9678, - "num_input_tokens_seen": 117132020, - "step": 5518 - }, - { - "epoch": 0.663620513437143, - "flos": 12706321486800.0, - "grad_norm": 12.768366730511923, - "learning_rate": 1.0739510308485939e-06, - "loss": 1.0018, - "num_input_tokens_seen": 117148955, - "step": 5519 - }, - { - "epoch": 0.6637407563277821, - "flos": 41391246422400.0, - "grad_norm": 0.9282856477878213, - "learning_rate": 1.07326066310049e-06, - "loss": 0.8719, - "num_input_tokens_seen": 117212800, - "step": 5520 - }, - { - "epoch": 0.6638609992184212, - "flos": 19442528335800.0, - "grad_norm": 13.950605722491552, - "learning_rate": 1.0725704359380059e-06, - "loss": 1.0162, - "num_input_tokens_seen": 117232375, - "step": 5521 - }, - { - "epoch": 0.6639812421090603, - "flos": 13203880049760.0, - "grad_norm": 7.32042349585674, - "learning_rate": 1.0718803494658497e-06, - "loss": 0.9503, - "num_input_tokens_seen": 117250985, - "step": 5522 - }, - { - "epoch": 0.6641014849996993, - "flos": 11263113990360.0, - "grad_norm": 
7.383450854925467, - "learning_rate": 1.071190403788707e-06, - "loss": 1.0573, - "num_input_tokens_seen": 117266010, - "step": 5523 - }, - { - "epoch": 0.6642217278903385, - "flos": 18867502028040.0, - "grad_norm": 15.448655765658717, - "learning_rate": 1.0705005990112415e-06, - "loss": 0.9724, - "num_input_tokens_seen": 117285510, - "step": 5524 - }, - { - "epoch": 0.6643419707809776, - "flos": 10869888246840.0, - "grad_norm": 7.672426409823697, - "learning_rate": 1.0698109352380957e-06, - "loss": 0.9586, - "num_input_tokens_seen": 117302830, - "step": 5525 - }, - { - "epoch": 0.6644622136716166, - "flos": 17869870654200.0, - "grad_norm": 8.496238324919336, - "learning_rate": 1.0691214125738909e-06, - "loss": 1.0076, - "num_input_tokens_seen": 117322755, - "step": 5526 - }, - { - "epoch": 0.6645824565622558, - "flos": 47401324169040.0, - "grad_norm": 0.7824397374064852, - "learning_rate": 1.0684320311232287e-06, - "loss": 0.844, - "num_input_tokens_seen": 117385380, - "step": 5527 - }, - { - "epoch": 0.6647026994528948, - "flos": 17844507996000.0, - "grad_norm": 4.985307851332613, - "learning_rate": 1.0677427909906865e-06, - "loss": 1.0269, - "num_input_tokens_seen": 117405550, - "step": 5528 - }, - { - "epoch": 0.6648229423435339, - "flos": 12914312571360.0, - "grad_norm": 4.60304865777028, - "learning_rate": 1.0670536922808216e-06, - "loss": 0.9295, - "num_input_tokens_seen": 117425395, - "step": 5529 - }, - { - "epoch": 0.6649431852341731, - "flos": 12968226690000.0, - "grad_norm": 9.049111625105157, - "learning_rate": 1.06636473509817e-06, - "loss": 0.9401, - "num_input_tokens_seen": 117441495, - "step": 5530 - }, - { - "epoch": 0.6650634281248121, - "flos": 12050209370160.0, - "grad_norm": 8.752372202154056, - "learning_rate": 1.0656759195472447e-06, - "loss": 1.0277, - "num_input_tokens_seen": 117458505, - "step": 5531 - }, - { - "epoch": 0.6651836710154512, - "flos": 49623164454720.0, - "grad_norm": 0.8014188507630501, - "learning_rate": 1.0649872457325414e-06, - "loss": 0.8721, - "num_input_tokens_seen": 117519510, - "step": 5532 - }, - { - "epoch": 0.6653039139060903, - "flos": 47889714925560.0, - "grad_norm": 1.062856165710416, - "learning_rate": 1.0642987137585278e-06, - "loss": 0.8431, - "num_input_tokens_seen": 117578755, - "step": 5533 - }, - { - "epoch": 0.6654241567967294, - "flos": 15248580328320.0, - "grad_norm": 4.0187121447356855, - "learning_rate": 1.0636103237296561e-06, - "loss": 1.0486, - "num_input_tokens_seen": 117597400, - "step": 5534 - }, - { - "epoch": 0.6655443996873684, - "flos": 17871495716880.0, - "grad_norm": 4.425881447798373, - "learning_rate": 1.062922075750353e-06, - "loss": 1.0622, - "num_input_tokens_seen": 117617135, - "step": 5535 - }, - { - "epoch": 0.6656646425780076, - "flos": 12364802229600.0, - "grad_norm": 14.990748286182631, - "learning_rate": 1.0622339699250267e-06, - "loss": 0.9363, - "num_input_tokens_seen": 117634775, - "step": 5536 - }, - { - "epoch": 0.6657848854686467, - "flos": 16659342346200.0, - "grad_norm": 4.22528722565487, - "learning_rate": 1.0615460063580624e-06, - "loss": 1.0364, - "num_input_tokens_seen": 117652970, - "step": 5537 - }, - { - "epoch": 0.6659051283592857, - "flos": 8090473629120.0, - "grad_norm": 15.657204604598913, - "learning_rate": 1.060858185153821e-06, - "loss": 0.9511, - "num_input_tokens_seen": 117670790, - "step": 5538 - }, - { - "epoch": 0.6660253712499249, - "flos": 14619394609440.0, - "grad_norm": 11.850850495669208, - "learning_rate": 1.0601705064166474e-06, - "loss": 0.9808, - 
"num_input_tokens_seen": 117688905, - "step": 5539 - }, - { - "epoch": 0.666145614140564, - "flos": 15091559852640.0, - "grad_norm": 4.478386365952476, - "learning_rate": 1.0594829702508596e-06, - "loss": 0.9616, - "num_input_tokens_seen": 117706340, - "step": 5540 - }, - { - "epoch": 0.666265857031203, - "flos": 24056413853640.0, - "grad_norm": 6.659484936841845, - "learning_rate": 1.0587955767607592e-06, - "loss": 0.7899, - "num_input_tokens_seen": 117727920, - "step": 5541 - }, - { - "epoch": 0.6663860999218422, - "flos": 12364219659960.0, - "grad_norm": 7.868548217804108, - "learning_rate": 1.0581083260506206e-06, - "loss": 0.9997, - "num_input_tokens_seen": 117744425, - "step": 5542 - }, - { - "epoch": 0.6665063428124812, - "flos": 12522558582720.0, - "grad_norm": 5.189070865908079, - "learning_rate": 1.0574212182246993e-06, - "loss": 0.9898, - "num_input_tokens_seen": 117762840, - "step": 5543 - }, - { - "epoch": 0.6666265857031203, - "flos": 19706794479120.0, - "grad_norm": 3.9256571128538718, - "learning_rate": 1.0567342533872303e-06, - "loss": 0.9617, - "num_input_tokens_seen": 117782590, - "step": 5544 - }, - { - "epoch": 0.6667468285937594, - "flos": 17819298645600.0, - "grad_norm": 8.070936417073145, - "learning_rate": 1.0560474316424255e-06, - "loss": 1.0342, - "num_input_tokens_seen": 117802070, - "step": 5545 - }, - { - "epoch": 0.6668670714843985, - "flos": 16189936643400.0, - "grad_norm": 4.538499176734715, - "learning_rate": 1.0553607530944746e-06, - "loss": 0.9587, - "num_input_tokens_seen": 117819845, - "step": 5546 - }, - { - "epoch": 0.6669873143750376, - "flos": 15747304030560.0, - "grad_norm": 11.547359306698992, - "learning_rate": 1.0546742178475463e-06, - "loss": 1.1141, - "num_input_tokens_seen": 117838560, - "step": 5547 - }, - { - "epoch": 0.6671075572656767, - "flos": 14561187872400.0, - "grad_norm": 5.994046118206591, - "learning_rate": 1.0539878260057868e-06, - "loss": 1.0859, - "num_input_tokens_seen": 117857320, - "step": 5548 - }, - { - "epoch": 0.6672278001563158, - "flos": 12705309655320.0, - "grad_norm": 6.20926667777937, - "learning_rate": 1.0533015776733226e-06, - "loss": 0.8984, - "num_input_tokens_seen": 117873190, - "step": 5549 - }, - { - "epoch": 0.6673480430469548, - "flos": 15874853199000.0, - "grad_norm": 4.169535806191299, - "learning_rate": 1.0526154729542566e-06, - "loss": 1.0066, - "num_input_tokens_seen": 117892970, - "step": 5550 - }, - { - "epoch": 0.6674682859375939, - "flos": 14697904847280.0, - "grad_norm": 4.135756368961814, - "learning_rate": 1.0519295119526699e-06, - "loss": 1.0118, - "num_input_tokens_seen": 117908995, - "step": 5551 - }, - { - "epoch": 0.667588528828233, - "flos": 18651814891920.0, - "grad_norm": 3.2763129012361807, - "learning_rate": 1.0512436947726227e-06, - "loss": 1.0537, - "num_input_tokens_seen": 117930130, - "step": 5552 - }, - { - "epoch": 0.6677087717188721, - "flos": 16398571620720.0, - "grad_norm": 14.232155317980379, - "learning_rate": 1.0505580215181517e-06, - "loss": 0.8813, - "num_input_tokens_seen": 117948090, - "step": 5553 - }, - { - "epoch": 0.6678290146095112, - "flos": 50807103642120.0, - "grad_norm": 0.8141065267127163, - "learning_rate": 1.0498724922932753e-06, - "loss": 0.8222, - "num_input_tokens_seen": 118005925, - "step": 5554 - }, - { - "epoch": 0.6679492575001503, - "flos": 13229886600720.0, - "grad_norm": 5.108320868239925, - "learning_rate": 1.0491871072019851e-06, - "loss": 1.0825, - "num_input_tokens_seen": 118023535, - "step": 5555 - }, - { - "epoch": 0.6680695003907894, - 
"flos": 21171378630960.0, - "grad_norm": 3.8520347816346803, - "learning_rate": 1.0485018663482555e-06, - "loss": 0.8621, - "num_input_tokens_seen": 118043275, - "step": 5556 - }, - { - "epoch": 0.6681897432814284, - "flos": 20098763098680.0, - "grad_norm": 5.558121404463286, - "learning_rate": 1.0478167698360354e-06, - "loss": 0.9201, - "num_input_tokens_seen": 118062295, - "step": 5557 - }, - { - "epoch": 0.6683099861720676, - "flos": 17819053353120.0, - "grad_norm": 4.099353478792942, - "learning_rate": 1.0471318177692556e-06, - "loss": 0.9207, - "num_input_tokens_seen": 118082315, - "step": 5558 - }, - { - "epoch": 0.6684302290627067, - "flos": 16345209410160.0, - "grad_norm": 5.185596714390906, - "learning_rate": 1.046447010251821e-06, - "loss": 0.9583, - "num_input_tokens_seen": 118099365, - "step": 5559 - }, - { - "epoch": 0.6685504719533457, - "flos": 18915989050560.0, - "grad_norm": 3.7787214920528283, - "learning_rate": 1.0457623473876157e-06, - "loss": 0.9838, - "num_input_tokens_seen": 118118590, - "step": 5560 - }, - { - "epoch": 0.6686707148439849, - "flos": 20650205118720.0, - "grad_norm": 7.596023533194569, - "learning_rate": 1.0450778292805046e-06, - "loss": 0.9329, - "num_input_tokens_seen": 118138295, - "step": 5561 - }, - { - "epoch": 0.6687909577346239, - "flos": 16796028659520.0, - "grad_norm": 7.436667002984687, - "learning_rate": 1.0443934560343267e-06, - "loss": 1.0171, - "num_input_tokens_seen": 118159425, - "step": 5562 - }, - { - "epoch": 0.668911200625263, - "flos": 16454938664160.0, - "grad_norm": 4.779584607067154, - "learning_rate": 1.0437092277529034e-06, - "loss": 0.999, - "num_input_tokens_seen": 118178400, - "step": 5563 - }, - { - "epoch": 0.6690314435159022, - "flos": 13151468347560.0, - "grad_norm": 6.259984700242136, - "learning_rate": 1.0430251445400292e-06, - "loss": 0.959, - "num_input_tokens_seen": 118196165, - "step": 5564 - }, - { - "epoch": 0.6691516864065412, - "flos": 22643505526560.0, - "grad_norm": 10.682232545825144, - "learning_rate": 1.0423412064994787e-06, - "loss": 0.8447, - "num_input_tokens_seen": 118216655, - "step": 5565 - }, - { - "epoch": 0.6692719292971803, - "flos": 24924840334800.0, - "grad_norm": 3.6714556515444015, - "learning_rate": 1.0416574137350064e-06, - "loss": 0.9622, - "num_input_tokens_seen": 118237080, - "step": 5566 - }, - { - "epoch": 0.6693921721878194, - "flos": 14515000466880.0, - "grad_norm": 4.77462049643841, - "learning_rate": 1.0409737663503428e-06, - "loss": 1.0314, - "num_input_tokens_seen": 118255180, - "step": 5567 - }, - { - "epoch": 0.6695124150784585, - "flos": 11758618228800.0, - "grad_norm": 6.290219104631996, - "learning_rate": 1.040290264449196e-06, - "loss": 1.0585, - "num_input_tokens_seen": 118273005, - "step": 5568 - }, - { - "epoch": 0.6696326579690975, - "flos": 18972999986760.0, - "grad_norm": 4.880653819701175, - "learning_rate": 1.0396069081352532e-06, - "loss": 0.8643, - "num_input_tokens_seen": 118291880, - "step": 5569 - }, - { - "epoch": 0.6697529008597367, - "flos": 47949148125000.0, - "grad_norm": 0.820796351559842, - "learning_rate": 1.0389236975121782e-06, - "loss": 0.8215, - "num_input_tokens_seen": 118346450, - "step": 5570 - }, - { - "epoch": 0.6698731437503758, - "flos": 14829593326320.0, - "grad_norm": 4.2829443926927695, - "learning_rate": 1.0382406326836147e-06, - "loss": 0.937, - "num_input_tokens_seen": 118365315, - "step": 5571 - }, - { - "epoch": 0.6699933866410148, - "flos": 14486019744600.0, - "grad_norm": 3.7022903725100185, - "learning_rate": 
[Removed trainer-state log history (excerpt): per-step training records for steps 5572–5901 (epoch ≈ 0.670–0.710), each entry carrying "epoch", "flos", "grad_norm", "learning_rate", "loss", "num_input_tokens_seen", and "step". Across the excerpt the learning rate decays from ≈1.0376e-06 to ≈8.208e-07, the loss fluctuates between ≈0.72 and ≈1.13, and num_input_tokens_seen grows from 118,383,595 to 125,227,535; the final record is truncated mid-entry. A minimal parsing sketch follows.]
- "step": 5902 - }, - { - "epoch": 0.709793783442554, - "flos": 16794618227760.0, - "grad_norm": 4.872084895341725, - "learning_rate": 8.201482546387288e-07, - "loss": 1.0282, - "num_input_tokens_seen": 125246730, - "step": 5903 - }, - { - "epoch": 0.709914026333193, - "flos": 18499362988680.0, - "grad_norm": 2.5847308982809643, - "learning_rate": 8.195193577191553e-07, - "loss": 1.1379, - "num_input_tokens_seen": 125268280, - "step": 5904 - }, - { - "epoch": 0.7100342692238322, - "flos": 17685739811400.0, - "grad_norm": 2.936298205685172, - "learning_rate": 8.188906398775579e-07, - "loss": 1.0725, - "num_input_tokens_seen": 125288545, - "step": 5905 - }, - { - "epoch": 0.7101545121144712, - "flos": 17736219835320.0, - "grad_norm": 7.53493678602447, - "learning_rate": 8.18262101209311e-07, - "loss": 0.9098, - "num_input_tokens_seen": 125307475, - "step": 5906 - }, - { - "epoch": 0.7102747550051103, - "flos": 16900698756120.0, - "grad_norm": 4.896345309278678, - "learning_rate": 8.176337418097626e-07, - "loss": 0.9404, - "num_input_tokens_seen": 125327665, - "step": 5907 - }, - { - "epoch": 0.7103949978957494, - "flos": 10817077944360.0, - "grad_norm": 6.006594530717494, - "learning_rate": 8.170055617742364e-07, - "loss": 1.024, - "num_input_tokens_seen": 125344665, - "step": 5908 - }, - { - "epoch": 0.7105152407863885, - "flos": 16088700641520.0, - "grad_norm": 8.493593831202055, - "learning_rate": 8.163775611980252e-07, - "loss": 0.9307, - "num_input_tokens_seen": 125363495, - "step": 5909 - }, - { - "epoch": 0.7106354836770276, - "flos": 12207628446120.0, - "grad_norm": 4.6446068118327775, - "learning_rate": 8.157497401763982e-07, - "loss": 1.017, - "num_input_tokens_seen": 125380880, - "step": 5910 - }, - { - "epoch": 0.7107557265676667, - "flos": 14331329547480.0, - "grad_norm": 6.823991515159922, - "learning_rate": 8.151220988045935e-07, - "loss": 1.0054, - "num_input_tokens_seen": 125399855, - "step": 5911 - }, - { - "epoch": 0.7108759694583058, - "flos": 15275261433600.0, - "grad_norm": 4.690401120469169, - "learning_rate": 8.144946371778234e-07, - "loss": 1.0535, - "num_input_tokens_seen": 125419685, - "step": 5912 - }, - { - "epoch": 0.7109962123489448, - "flos": 17111480042640.0, - "grad_norm": 3.9921283754871446, - "learning_rate": 8.138673553912751e-07, - "loss": 1.0084, - "num_input_tokens_seen": 125439965, - "step": 5913 - }, - { - "epoch": 0.711116455239584, - "flos": 21723985790280.0, - "grad_norm": 8.419884950843997, - "learning_rate": 8.132402535401059e-07, - "loss": 0.7976, - "num_input_tokens_seen": 125460940, - "step": 5914 - }, - { - "epoch": 0.711236698130223, - "flos": 17818256152560.0, - "grad_norm": 7.2011228745451135, - "learning_rate": 8.126133317194465e-07, - "loss": 0.9705, - "num_input_tokens_seen": 125480850, - "step": 5915 - }, - { - "epoch": 0.7113569410208621, - "flos": 17215383600240.0, - "grad_norm": 10.732426673395196, - "learning_rate": 8.11986590024401e-07, - "loss": 0.9658, - "num_input_tokens_seen": 125500310, - "step": 5916 - }, - { - "epoch": 0.7114771839115013, - "flos": 25287368308680.0, - "grad_norm": 7.891228801929047, - "learning_rate": 8.113600285500442e-07, - "loss": 0.9171, - "num_input_tokens_seen": 125520240, - "step": 5917 - }, - { - "epoch": 0.7115974268021403, - "flos": 14981462659920.0, - "grad_norm": 3.985293054466694, - "learning_rate": 8.107336473914268e-07, - "loss": 0.9713, - "num_input_tokens_seen": 125538590, - "step": 5918 - }, - { - "epoch": 0.7117176696927794, - "flos": 40610743278000.0, - "grad_norm": 0.8117363715806191, 
- "learning_rate": 8.101074466435694e-07, - "loss": 0.8378, - "num_input_tokens_seen": 125597785, - "step": 5919 - }, - { - "epoch": 0.7118379125834186, - "flos": 11263788544680.0, - "grad_norm": 4.620011190755923, - "learning_rate": 8.094814264014662e-07, - "loss": 0.9083, - "num_input_tokens_seen": 125616260, - "step": 5920 - }, - { - "epoch": 0.7119581554740576, - "flos": 14331084255000.0, - "grad_norm": 4.481620341258139, - "learning_rate": 8.088555867600844e-07, - "loss": 1.044, - "num_input_tokens_seen": 125632145, - "step": 5921 - }, - { - "epoch": 0.7120783983646967, - "flos": 24767881182240.0, - "grad_norm": 5.208244485733071, - "learning_rate": 8.08229927814362e-07, - "loss": 0.8438, - "num_input_tokens_seen": 125654755, - "step": 5922 - }, - { - "epoch": 0.7121986412553358, - "flos": 18762249361800.0, - "grad_norm": 7.503264416118718, - "learning_rate": 8.076044496592134e-07, - "loss": 0.8717, - "num_input_tokens_seen": 125676325, - "step": 5923 - }, - { - "epoch": 0.7123188841459749, - "flos": 7828507102800.0, - "grad_norm": 4.669007304708977, - "learning_rate": 8.069791523895204e-07, - "loss": 1.0107, - "num_input_tokens_seen": 125692495, - "step": 5924 - }, - { - "epoch": 0.7124391270366139, - "flos": 14775188622720.0, - "grad_norm": 6.985335384282433, - "learning_rate": 8.063540361001422e-07, - "loss": 0.9992, - "num_input_tokens_seen": 125710785, - "step": 5925 - }, - { - "epoch": 0.7125593699272531, - "flos": 12470085557400.0, - "grad_norm": 6.238830045424652, - "learning_rate": 8.057291008859069e-07, - "loss": 1.0164, - "num_input_tokens_seen": 125728665, - "step": 5926 - }, - { - "epoch": 0.7126796128178922, - "flos": 20411087002680.0, - "grad_norm": 7.057327131531816, - "learning_rate": 8.051043468416187e-07, - "loss": 0.9085, - "num_input_tokens_seen": 125749635, - "step": 5927 - }, - { - "epoch": 0.7127998557085312, - "flos": 11342206797840.0, - "grad_norm": 5.469384728790199, - "learning_rate": 8.044797740620506e-07, - "loss": 1.0436, - "num_input_tokens_seen": 125767960, - "step": 5928 - }, - { - "epoch": 0.7129200985991703, - "flos": 16637873706120.0, - "grad_norm": 5.561349471187171, - "learning_rate": 8.038553826419494e-07, - "loss": 1.0085, - "num_input_tokens_seen": 125786390, - "step": 5929 - }, - { - "epoch": 0.7130403414898094, - "flos": 15196444580160.0, - "grad_norm": 4.063950969099904, - "learning_rate": 8.032311726760364e-07, - "loss": 1.0327, - "num_input_tokens_seen": 125807330, - "step": 5930 - }, - { - "epoch": 0.7131605843804485, - "flos": 53715514069680.0, - "grad_norm": 3.979254689998922, - "learning_rate": 8.026071442590022e-07, - "loss": 0.9266, - "num_input_tokens_seen": 125833980, - "step": 5931 - }, - { - "epoch": 0.7132808272710875, - "flos": 13019718545400.0, - "grad_norm": 8.79523394439959, - "learning_rate": 8.019832974855134e-07, - "loss": 1.0325, - "num_input_tokens_seen": 125851660, - "step": 5932 - }, - { - "epoch": 0.7134010701617267, - "flos": 16529616207000.0, - "grad_norm": 6.597285041733077, - "learning_rate": 8.013596324502052e-07, - "loss": 1.055, - "num_input_tokens_seen": 125869845, - "step": 5933 - }, - { - "epoch": 0.7135213130523658, - "flos": 16816700099040.0, - "grad_norm": 4.33312237054868, - "learning_rate": 8.007361492476872e-07, - "loss": 1.0065, - "num_input_tokens_seen": 125888890, - "step": 5934 - }, - { - "epoch": 0.7136415559430048, - "flos": 17635075818120.0, - "grad_norm": 33.154550866571284, - "learning_rate": 8.001128479725426e-07, - "loss": 1.0105, - "num_input_tokens_seen": 125910515, - "step": 5935 - }, - 
{ - "epoch": 0.713761798833644, - "flos": 12968288013120.0, - "grad_norm": 4.170704652450667, - "learning_rate": 7.994897287193248e-07, - "loss": 1.027, - "num_input_tokens_seen": 125929615, - "step": 5936 - }, - { - "epoch": 0.713882041724283, - "flos": 10999737032280.0, - "grad_norm": 8.295788939922604, - "learning_rate": 7.988667915825605e-07, - "loss": 1.0518, - "num_input_tokens_seen": 125946400, - "step": 5937 - }, - { - "epoch": 0.7140022846149221, - "flos": 17110713503640.0, - "grad_norm": 6.267201974338963, - "learning_rate": 7.982440366567491e-07, - "loss": 0.9736, - "num_input_tokens_seen": 125964610, - "step": 5938 - }, - { - "epoch": 0.7141225275055613, - "flos": 19863722970120.0, - "grad_norm": 5.349263876781318, - "learning_rate": 7.97621464036361e-07, - "loss": 0.979, - "num_input_tokens_seen": 125986090, - "step": 5939 - }, - { - "epoch": 0.7142427703962003, - "flos": 13964018370240.0, - "grad_norm": 7.1468406686429455, - "learning_rate": 7.969990738158417e-07, - "loss": 0.9045, - "num_input_tokens_seen": 126004220, - "step": 5940 - }, - { - "epoch": 0.7143630132868394, - "flos": 14934048792000.0, - "grad_norm": 6.490568801570469, - "learning_rate": 7.963768660896062e-07, - "loss": 1.0712, - "num_input_tokens_seen": 126022350, - "step": 5941 - }, - { - "epoch": 0.7144832561774785, - "flos": 17159691111120.0, - "grad_norm": 3.841894306601646, - "learning_rate": 7.957548409520432e-07, - "loss": 1.0508, - "num_input_tokens_seen": 126041295, - "step": 5942 - }, - { - "epoch": 0.7146034990681176, - "flos": 11551546991040.0, - "grad_norm": 8.127826375143151, - "learning_rate": 7.951329984975135e-07, - "loss": 1.0578, - "num_input_tokens_seen": 126057955, - "step": 5943 - }, - { - "epoch": 0.7147237419587567, - "flos": 49862558524800.0, - "grad_norm": 0.7965670345831317, - "learning_rate": 7.94511338820349e-07, - "loss": 0.7917, - "num_input_tokens_seen": 126119980, - "step": 5944 - }, - { - "epoch": 0.7148439848493958, - "flos": 15820387172280.0, - "grad_norm": 16.893754046429407, - "learning_rate": 7.938898620148575e-07, - "loss": 1.0125, - "num_input_tokens_seen": 126137460, - "step": 5945 - }, - { - "epoch": 0.7149642277400349, - "flos": 12705462963120.0, - "grad_norm": 13.008250626067516, - "learning_rate": 7.932685681753135e-07, - "loss": 0.9436, - "num_input_tokens_seen": 126154460, - "step": 5946 - }, - { - "epoch": 0.7150844706306739, - "flos": 22587107821560.0, - "grad_norm": 5.206495321843375, - "learning_rate": 7.92647457395969e-07, - "loss": 0.8514, - "num_input_tokens_seen": 126176005, - "step": 5947 - }, - { - "epoch": 0.7152047135213131, - "flos": 7670996042160.0, - "grad_norm": 7.324610927000395, - "learning_rate": 7.920265297710444e-07, - "loss": 0.9681, - "num_input_tokens_seen": 126193115, - "step": 5948 - }, - { - "epoch": 0.7153249564119522, - "flos": 14907612979200.0, - "grad_norm": 6.539825504319907, - "learning_rate": 7.914057853947363e-07, - "loss": 0.9546, - "num_input_tokens_seen": 126212015, - "step": 5949 - }, - { - "epoch": 0.7154451993025912, - "flos": 17241543459000.0, - "grad_norm": 4.9906286154813335, - "learning_rate": 7.907852243612089e-07, - "loss": 0.8624, - "num_input_tokens_seen": 126232140, - "step": 5950 - }, - { - "epoch": 0.7155654421932304, - "flos": 16585983250440.0, - "grad_norm": 8.0156407049371, - "learning_rate": 7.901648467646009e-07, - "loss": 0.956, - "num_input_tokens_seen": 126250800, - "step": 5951 - }, - { - "epoch": 0.7156856850838694, - "flos": 16141234989960.0, - "grad_norm": 3.210720676795834, - "learning_rate": 
7.895446526990244e-07, - "loss": 0.9489, - "num_input_tokens_seen": 126270535, - "step": 5952 - }, - { - "epoch": 0.7158059279745085, - "flos": 14095461556800.0, - "grad_norm": 3.624724410532079, - "learning_rate": 7.889246422585609e-07, - "loss": 0.9807, - "num_input_tokens_seen": 126289640, - "step": 5953 - }, - { - "epoch": 0.7159261708651476, - "flos": 17163830421720.0, - "grad_norm": 5.150592266210656, - "learning_rate": 7.883048155372675e-07, - "loss": 0.9664, - "num_input_tokens_seen": 126307875, - "step": 5954 - }, - { - "epoch": 0.7160464137557867, - "flos": 12024080172960.0, - "grad_norm": 7.746343180631693, - "learning_rate": 7.876851726291698e-07, - "loss": 0.9331, - "num_input_tokens_seen": 126325895, - "step": 5955 - }, - { - "epoch": 0.7161666566464258, - "flos": 17949546031320.0, - "grad_norm": 6.077912550010585, - "learning_rate": 7.870657136282666e-07, - "loss": 1.0015, - "num_input_tokens_seen": 126344475, - "step": 5956 - }, - { - "epoch": 0.7162868995370649, - "flos": 18840759599640.0, - "grad_norm": 11.851212297067176, - "learning_rate": 7.86446438628531e-07, - "loss": 1.0465, - "num_input_tokens_seen": 126365265, - "step": 5957 - }, - { - "epoch": 0.716407142427704, - "flos": 50130657363120.0, - "grad_norm": 0.7802517425996779, - "learning_rate": 7.858273477239059e-07, - "loss": 0.8277, - "num_input_tokens_seen": 126433405, - "step": 5958 - }, - { - "epoch": 0.716527385318343, - "flos": 14724248675400.0, - "grad_norm": 3.380580668032664, - "learning_rate": 7.852084410083067e-07, - "loss": 0.9365, - "num_input_tokens_seen": 126451945, - "step": 5959 - }, - { - "epoch": 0.7166476282089821, - "flos": 18052529742120.0, - "grad_norm": 3.4429648222531912, - "learning_rate": 7.84589718575621e-07, - "loss": 0.864, - "num_input_tokens_seen": 126472110, - "step": 5960 - }, - { - "epoch": 0.7167678710996213, - "flos": 17162941236480.0, - "grad_norm": 9.869053849126223, - "learning_rate": 7.83971180519708e-07, - "loss": 0.9135, - "num_input_tokens_seen": 126490685, - "step": 5961 - }, - { - "epoch": 0.7168881139902603, - "flos": 21541725302640.0, - "grad_norm": 10.602450868384343, - "learning_rate": 7.833528269344008e-07, - "loss": 0.9741, - "num_input_tokens_seen": 126510310, - "step": 5962 - }, - { - "epoch": 0.7170083568808994, - "flos": 10315226763000.0, - "grad_norm": 13.238360780964998, - "learning_rate": 7.827346579135023e-07, - "loss": 0.9989, - "num_input_tokens_seen": 126527370, - "step": 5963 - }, - { - "epoch": 0.7171285997715385, - "flos": 16586105896680.0, - "grad_norm": 46.88452507391165, - "learning_rate": 7.821166735507885e-07, - "loss": 1.0561, - "num_input_tokens_seen": 126546120, - "step": 5964 - }, - { - "epoch": 0.7172488426621776, - "flos": 11707862250840.0, - "grad_norm": 5.263066878786651, - "learning_rate": 7.81498873940007e-07, - "loss": 0.9149, - "num_input_tokens_seen": 126563055, - "step": 5965 - }, - { - "epoch": 0.7173690855528166, - "flos": 18893416594320.0, - "grad_norm": 5.010648260758481, - "learning_rate": 7.808812591748768e-07, - "loss": 0.9924, - "num_input_tokens_seen": 126583155, - "step": 5966 - }, - { - "epoch": 0.7174893284434558, - "flos": 16193033460960.0, - "grad_norm": 4.202262931822442, - "learning_rate": 7.802638293490915e-07, - "loss": 0.8684, - "num_input_tokens_seen": 126602520, - "step": 5967 - }, - { - "epoch": 0.7176095713340949, - "flos": 16558995529560.0, - "grad_norm": 14.27464973300571, - "learning_rate": 7.796465845563123e-07, - "loss": 1.0048, - "num_input_tokens_seen": 126621115, - "step": 5968 - }, - { - "epoch": 
0.7177298142247339, - "flos": 18210500726160.0, - "grad_norm": 4.532468855467322, - "learning_rate": 7.790295248901766e-07, - "loss": 1.0253, - "num_input_tokens_seen": 126641965, - "step": 5969 - }, - { - "epoch": 0.7178500571153731, - "flos": 22567141597920.0, - "grad_norm": 7.924772857395258, - "learning_rate": 7.784126504442902e-07, - "loss": 0.8463, - "num_input_tokens_seen": 126664915, - "step": 5970 - }, - { - "epoch": 0.7179703000060121, - "flos": 13780500758640.0, - "grad_norm": 2.96258531278589, - "learning_rate": 7.777959613122351e-07, - "loss": 0.9014, - "num_input_tokens_seen": 126684460, - "step": 5971 - }, - { - "epoch": 0.7180905428966512, - "flos": 20544553852200.0, - "grad_norm": 7.6098432142270305, - "learning_rate": 7.771794575875604e-07, - "loss": 1.0031, - "num_input_tokens_seen": 126706050, - "step": 5972 - }, - { - "epoch": 0.7182107857872904, - "flos": 14226322173720.0, - "grad_norm": 5.451230307484209, - "learning_rate": 7.765631393637888e-07, - "loss": 0.9912, - "num_input_tokens_seen": 126723965, - "step": 5973 - }, - { - "epoch": 0.7183310286779294, - "flos": 16166873602200.0, - "grad_norm": 9.228100395751149, - "learning_rate": 7.75947006734417e-07, - "loss": 0.6918, - "num_input_tokens_seen": 126741465, - "step": 5974 - }, - { - "epoch": 0.7184512715685685, - "flos": 12148900462560.0, - "grad_norm": 8.287518816850339, - "learning_rate": 7.753310597929101e-07, - "loss": 1.0592, - "num_input_tokens_seen": 126757825, - "step": 5975 - }, - { - "epoch": 0.7185715144592076, - "flos": 46904072221680.0, - "grad_norm": 0.8010656456568218, - "learning_rate": 7.747152986327095e-07, - "loss": 0.8059, - "num_input_tokens_seen": 126818090, - "step": 5976 - }, - { - "epoch": 0.7186917573498467, - "flos": 11447214171600.0, - "grad_norm": 4.9885263405186935, - "learning_rate": 7.740997233472228e-07, - "loss": 0.904, - "num_input_tokens_seen": 126835430, - "step": 5977 - }, - { - "epoch": 0.7188120002404857, - "flos": 20834427946200.0, - "grad_norm": 5.731360035279925, - "learning_rate": 7.734843340298329e-07, - "loss": 0.9342, - "num_input_tokens_seen": 126854975, - "step": 5978 - }, - { - "epoch": 0.7189322431311249, - "flos": 23823274741800.0, - "grad_norm": 6.578974218213355, - "learning_rate": 7.72869130773895e-07, - "loss": 0.9715, - "num_input_tokens_seen": 126875295, - "step": 5979 - }, - { - "epoch": 0.719052486021764, - "flos": 43915685349480.0, - "grad_norm": 0.8022191605600503, - "learning_rate": 7.722541136727343e-07, - "loss": 0.842, - "num_input_tokens_seen": 126931030, - "step": 5980 - }, - { - "epoch": 0.719172728912403, - "flos": 11027705923080.0, - "grad_norm": 8.553185513121704, - "learning_rate": 7.716392828196483e-07, - "loss": 1.0251, - "num_input_tokens_seen": 126948550, - "step": 5981 - }, - { - "epoch": 0.7192929718030422, - "flos": 10995383090760.0, - "grad_norm": 7.941040691137452, - "learning_rate": 7.710246383079064e-07, - "loss": 0.9921, - "num_input_tokens_seen": 126963655, - "step": 5982 - }, - { - "epoch": 0.7194132146936812, - "flos": 15529409262120.0, - "grad_norm": 11.954546994665094, - "learning_rate": 7.704101802307492e-07, - "loss": 1.145, - "num_input_tokens_seen": 126975675, - "step": 5983 - }, - { - "epoch": 0.7195334575843203, - "flos": 19465683361680.0, - "grad_norm": 10.114617947101133, - "learning_rate": 7.697959086813912e-07, - "loss": 1.1017, - "num_input_tokens_seen": 126991560, - "step": 5984 - }, - { - "epoch": 0.7196537004749595, - "flos": 13308611469480.0, - "grad_norm": 4.509059475694092, - "learning_rate": 
7.691818237530145e-07, - "loss": 1.0308, - "num_input_tokens_seen": 127010140, - "step": 5985 - }, - { - "epoch": 0.7197739433655985, - "flos": 17447909480880.0, - "grad_norm": 11.489542975171828, - "learning_rate": 7.685679255387774e-07, - "loss": 1.0038, - "num_input_tokens_seen": 127028175, - "step": 5986 - }, - { - "epoch": 0.7198941862562376, - "flos": 12783911877840.0, - "grad_norm": 13.195220567508347, - "learning_rate": 7.679542141318065e-07, - "loss": 0.9896, - "num_input_tokens_seen": 127045000, - "step": 5987 - }, - { - "epoch": 0.7200144291468767, - "flos": 20676272992800.0, - "grad_norm": 7.325421776199562, - "learning_rate": 7.673406896252013e-07, - "loss": 0.9937, - "num_input_tokens_seen": 127066095, - "step": 5988 - }, - { - "epoch": 0.7201346720375158, - "flos": 18054492081960.0, - "grad_norm": 4.148798407823615, - "learning_rate": 7.667273521120347e-07, - "loss": 1.0102, - "num_input_tokens_seen": 127085375, - "step": 5989 - }, - { - "epoch": 0.7202549149281549, - "flos": 10135603169520.0, - "grad_norm": 5.307934403612825, - "learning_rate": 7.661142016853468e-07, - "loss": 1.0277, - "num_input_tokens_seen": 127102455, - "step": 5990 - }, - { - "epoch": 0.7203751578187939, - "flos": 16349226074520.0, - "grad_norm": 11.980117857389931, - "learning_rate": 7.655012384381543e-07, - "loss": 0.9744, - "num_input_tokens_seen": 127121660, - "step": 5991 - }, - { - "epoch": 0.7204954007094331, - "flos": 16845312882600.0, - "grad_norm": 4.578469533617502, - "learning_rate": 7.648884624634415e-07, - "loss": 1.0395, - "num_input_tokens_seen": 127139930, - "step": 5992 - }, - { - "epoch": 0.7206156436000721, - "flos": 11499257935080.0, - "grad_norm": 4.756389536671755, - "learning_rate": 7.642758738541683e-07, - "loss": 1.1127, - "num_input_tokens_seen": 127156230, - "step": 5993 - }, - { - "epoch": 0.7207358864907112, - "flos": 38903361622920.0, - "grad_norm": 0.7793826866229718, - "learning_rate": 7.636634727032621e-07, - "loss": 0.8488, - "num_input_tokens_seen": 127213055, - "step": 5994 - }, - { - "epoch": 0.7208561293813504, - "flos": 13570884611400.0, - "grad_norm": 8.17910172807045, - "learning_rate": 7.630512591036231e-07, - "loss": 1.0211, - "num_input_tokens_seen": 127232085, - "step": 5995 - }, - { - "epoch": 0.7209763722719894, - "flos": 12573989115000.0, - "grad_norm": 6.443793914378883, - "learning_rate": 7.624392331481255e-07, - "loss": 0.8736, - "num_input_tokens_seen": 127249460, - "step": 5996 - }, - { - "epoch": 0.7210966151626285, - "flos": 47844968613360.0, - "grad_norm": 2.1405129507769507, - "learning_rate": 7.618273949296115e-07, - "loss": 0.7707, - "num_input_tokens_seen": 127308690, - "step": 5997 - }, - { - "epoch": 0.7212168580532676, - "flos": 15012742999200.0, - "grad_norm": 33.85948315626473, - "learning_rate": 7.612157445408987e-07, - "loss": 0.9148, - "num_input_tokens_seen": 127326220, - "step": 5998 - }, - { - "epoch": 0.7213371009439067, - "flos": 15877919355000.0, - "grad_norm": 4.444083045465086, - "learning_rate": 7.606042820747716e-07, - "loss": 0.9622, - "num_input_tokens_seen": 127342345, - "step": 5999 - }, - { - "epoch": 0.7214573438345457, - "flos": 13125645765960.0, - "grad_norm": 12.262107626638482, - "learning_rate": 7.599930076239889e-07, - "loss": 1.0791, - "num_input_tokens_seen": 127359350, - "step": 6000 - }, - { - "epoch": 0.7215775867251849, - "flos": 25501338397440.0, - "grad_norm": 11.327042647646453, - "learning_rate": 7.593819212812818e-07, - "loss": 0.9392, - "num_input_tokens_seen": 127380650, - "step": 6001 - }, - { - 
"epoch": 0.721697829615824, - "flos": 14459307977760.0, - "grad_norm": 3.652376388975151, - "learning_rate": 7.587710231393508e-07, - "loss": 0.9513, - "num_input_tokens_seen": 127398725, - "step": 6002 - }, - { - "epoch": 0.721818072506463, - "flos": 14357274775320.0, - "grad_norm": 9.126818332048192, - "learning_rate": 7.581603132908685e-07, - "loss": 1.0549, - "num_input_tokens_seen": 127416415, - "step": 6003 - }, - { - "epoch": 0.7219383153971022, - "flos": 12889103220960.0, - "grad_norm": 6.429530275074958, - "learning_rate": 7.575497918284795e-07, - "loss": 1.0059, - "num_input_tokens_seen": 127433680, - "step": 6004 - }, - { - "epoch": 0.7220585582877412, - "flos": 12281232834360.0, - "grad_norm": 6.726878089014464, - "learning_rate": 7.569394588447984e-07, - "loss": 0.9769, - "num_input_tokens_seen": 127450415, - "step": 6005 - }, - { - "epoch": 0.7221788011783803, - "flos": 12018223815000.0, - "grad_norm": 7.015045074078813, - "learning_rate": 7.563293144324146e-07, - "loss": 1.0031, - "num_input_tokens_seen": 127465685, - "step": 6006 - }, - { - "epoch": 0.7222990440690195, - "flos": 18709990967400.0, - "grad_norm": 4.254060712659972, - "learning_rate": 7.557193586838834e-07, - "loss": 1.0282, - "num_input_tokens_seen": 127480770, - "step": 6007 - }, - { - "epoch": 0.7224192869596585, - "flos": 12468337848480.0, - "grad_norm": 5.0233344131262685, - "learning_rate": 7.551095916917371e-07, - "loss": 0.924, - "num_input_tokens_seen": 127497820, - "step": 6008 - }, - { - "epoch": 0.7225395298502976, - "flos": 9112670460600.0, - "grad_norm": 7.04528574140031, - "learning_rate": 7.545000135484758e-07, - "loss": 0.8856, - "num_input_tokens_seen": 127514975, - "step": 6009 - }, - { - "epoch": 0.7226597727409367, - "flos": 21123106239360.0, - "grad_norm": 7.716188569497188, - "learning_rate": 7.538906243465714e-07, - "loss": 0.8593, - "num_input_tokens_seen": 127534830, - "step": 6010 - }, - { - "epoch": 0.7227800156315758, - "flos": 9715880290080.0, - "grad_norm": 4.628327075119284, - "learning_rate": 7.5328142417847e-07, - "loss": 1.0091, - "num_input_tokens_seen": 127551315, - "step": 6011 - }, - { - "epoch": 0.7229002585222148, - "flos": 14408950600080.0, - "grad_norm": 4.471476116824295, - "learning_rate": 7.526724131365838e-07, - "loss": 0.9186, - "num_input_tokens_seen": 127571990, - "step": 6012 - }, - { - "epoch": 0.723020501412854, - "flos": 11735125925760.0, - "grad_norm": 5.019588247487677, - "learning_rate": 7.520635913133017e-07, - "loss": 0.9237, - "num_input_tokens_seen": 127590340, - "step": 6013 - }, - { - "epoch": 0.7231407443034931, - "flos": 20335581597720.0, - "grad_norm": 14.190579924504496, - "learning_rate": 7.514549588009798e-07, - "loss": 1.0494, - "num_input_tokens_seen": 127610935, - "step": 6014 - }, - { - "epoch": 0.7232609871941321, - "flos": 21385134088800.0, - "grad_norm": 9.97861482903564, - "learning_rate": 7.508465156919492e-07, - "loss": 0.9449, - "num_input_tokens_seen": 127634165, - "step": 6015 - }, - { - "epoch": 0.7233812300847713, - "flos": 11787384320160.0, - "grad_norm": 31.211752961812618, - "learning_rate": 7.502382620785083e-07, - "loss": 0.8312, - "num_input_tokens_seen": 127650435, - "step": 6016 - }, - { - "epoch": 0.7235014729754103, - "flos": 48160879919880.0, - "grad_norm": 1.8533764416097354, - "learning_rate": 7.496301980529289e-07, - "loss": 0.9228, - "num_input_tokens_seen": 127713365, - "step": 6017 - }, - { - "epoch": 0.7236217158660494, - "flos": 19182094887480.0, - "grad_norm": 8.141797821435798, - "learning_rate": 
7.490223237074547e-07, - "loss": 0.9683, - "num_input_tokens_seen": 127732795, - "step": 6018 - }, - { - "epoch": 0.7237419587566886, - "flos": 20964399377880.0, - "grad_norm": 4.314148297643188, - "learning_rate": 7.484146391342989e-07, - "loss": 0.888, - "num_input_tokens_seen": 127752310, - "step": 6019 - }, - { - "epoch": 0.7238622016473276, - "flos": 12626431478760.0, - "grad_norm": 8.792161346065818, - "learning_rate": 7.478071444256484e-07, - "loss": 0.7814, - "num_input_tokens_seen": 127769790, - "step": 6020 - }, - { - "epoch": 0.7239824445379667, - "flos": 18316703900760.0, - "grad_norm": 4.316789345980223, - "learning_rate": 7.471998396736579e-07, - "loss": 1.0023, - "num_input_tokens_seen": 127789890, - "step": 6021 - }, - { - "epoch": 0.7241026874286057, - "flos": 16455214618200.0, - "grad_norm": 3.656464529672138, - "learning_rate": 7.465927249704549e-07, - "loss": 0.9833, - "num_input_tokens_seen": 127807495, - "step": 6022 - }, - { - "epoch": 0.7242229303192449, - "flos": 14384017203720.0, - "grad_norm": 7.389319790518796, - "learning_rate": 7.459858004081398e-07, - "loss": 0.9933, - "num_input_tokens_seen": 127825185, - "step": 6023 - }, - { - "epoch": 0.724343173209884, - "flos": 44607493070040.0, - "grad_norm": 0.6694594363643941, - "learning_rate": 7.453790660787815e-07, - "loss": 0.8305, - "num_input_tokens_seen": 127893000, - "step": 6024 - }, - { - "epoch": 0.724463416100523, - "flos": 24976914759840.0, - "grad_norm": 12.09798292261559, - "learning_rate": 7.447725220744214e-07, - "loss": 0.8646, - "num_input_tokens_seen": 127914965, - "step": 6025 - }, - { - "epoch": 0.7245836589911622, - "flos": 15537503913960.0, - "grad_norm": 5.659780843960317, - "learning_rate": 7.441661684870717e-07, - "loss": 0.9808, - "num_input_tokens_seen": 127934940, - "step": 6026 - }, - { - "epoch": 0.7247039018818012, - "flos": 16353334723560.0, - "grad_norm": 25.47882756911854, - "learning_rate": 7.435600054087152e-07, - "loss": 1.0434, - "num_input_tokens_seen": 127956825, - "step": 6027 - }, - { - "epoch": 0.7248241447724403, - "flos": 22616885744400.0, - "grad_norm": 9.618724808588683, - "learning_rate": 7.42954032931308e-07, - "loss": 0.9699, - "num_input_tokens_seen": 127977585, - "step": 6028 - }, - { - "epoch": 0.7249443876630794, - "flos": 24898281875760.0, - "grad_norm": 3.987661644221328, - "learning_rate": 7.423482511467733e-07, - "loss": 0.9738, - "num_input_tokens_seen": 127998075, - "step": 6029 - }, - { - "epoch": 0.7250646305537185, - "flos": 18762402669600.0, - "grad_norm": 8.571122999648159, - "learning_rate": 7.417426601470099e-07, - "loss": 0.8811, - "num_input_tokens_seen": 128018155, - "step": 6030 - }, - { - "epoch": 0.7251848734443576, - "flos": 21437668437240.0, - "grad_norm": 12.351728656575323, - "learning_rate": 7.411372600238841e-07, - "loss": 1.0091, - "num_input_tokens_seen": 128038490, - "step": 6031 - }, - { - "epoch": 0.7253051163349967, - "flos": 12600424927800.0, - "grad_norm": 6.008965469664891, - "learning_rate": 7.405320508692346e-07, - "loss": 0.9529, - "num_input_tokens_seen": 128056950, - "step": 6032 - }, - { - "epoch": 0.7254253592256358, - "flos": 8903667544560.0, - "grad_norm": 5.053463252535489, - "learning_rate": 7.399270327748727e-07, - "loss": 0.9679, - "num_input_tokens_seen": 128074330, - "step": 6033 - }, - { - "epoch": 0.7255456021162748, - "flos": 19366133745600.0, - "grad_norm": 8.952568103453482, - "learning_rate": 7.39322205832577e-07, - "loss": 0.9733, - "num_input_tokens_seen": 128094940, - "step": 6034 - }, - { - "epoch": 
0.725665845006914, - "flos": 15117965003880.0, - "grad_norm": 3.554488023463823, - "learning_rate": 7.387175701341009e-07, - "loss": 1.033, - "num_input_tokens_seen": 128113330, - "step": 6035 - }, - { - "epoch": 0.7257860878975531, - "flos": 11341562905080.0, - "grad_norm": 6.585790666457034, - "learning_rate": 7.381131257711659e-07, - "loss": 0.9473, - "num_input_tokens_seen": 128130155, - "step": 6036 - }, - { - "epoch": 0.7259063307881921, - "flos": 8536049751720.0, - "grad_norm": 34.30207285498119, - "learning_rate": 7.375088728354677e-07, - "loss": 1.0623, - "num_input_tokens_seen": 128144905, - "step": 6037 - }, - { - "epoch": 0.7260265736788313, - "flos": 21697641962160.0, - "grad_norm": 9.19693152630331, - "learning_rate": 7.369048114186691e-07, - "loss": 0.8968, - "num_input_tokens_seen": 128165670, - "step": 6038 - }, - { - "epoch": 0.7261468165694703, - "flos": 15013080276360.0, - "grad_norm": 7.164286669236785, - "learning_rate": 7.363009416124055e-07, - "loss": 1.068, - "num_input_tokens_seen": 128184715, - "step": 6039 - }, - { - "epoch": 0.7262670594601094, - "flos": 15849183925200.0, - "grad_norm": 6.43879642209896, - "learning_rate": 7.356972635082852e-07, - "loss": 0.8557, - "num_input_tokens_seen": 128203290, - "step": 6040 - }, - { - "epoch": 0.7263873023507486, - "flos": 18025940621520.0, - "grad_norm": 12.774682780468623, - "learning_rate": 7.35093777197884e-07, - "loss": 0.9815, - "num_input_tokens_seen": 128223080, - "step": 6041 - }, - { - "epoch": 0.7265075452413876, - "flos": 16979638255800.0, - "grad_norm": 5.938006248977061, - "learning_rate": 7.344904827727525e-07, - "loss": 1.0821, - "num_input_tokens_seen": 128239980, - "step": 6042 - }, - { - "epoch": 0.7266277881320267, - "flos": 20461413718800.0, - "grad_norm": 7.215818660445237, - "learning_rate": 7.338873803244076e-07, - "loss": 0.9364, - "num_input_tokens_seen": 128254935, - "step": 6043 - }, - { - "epoch": 0.7267480310226658, - "flos": 17687426197200.0, - "grad_norm": 20.053638248019283, - "learning_rate": 7.332844699443401e-07, - "loss": 1.0276, - "num_input_tokens_seen": 128273255, - "step": 6044 - }, - { - "epoch": 0.7268682739133049, - "flos": 19365520514400.0, - "grad_norm": 9.050937616662853, - "learning_rate": 7.326817517240121e-07, - "loss": 0.9803, - "num_input_tokens_seen": 128294680, - "step": 6045 - }, - { - "epoch": 0.7269885168039439, - "flos": 23899822639800.0, - "grad_norm": 14.640225823333571, - "learning_rate": 7.320792257548545e-07, - "loss": 1.0594, - "num_input_tokens_seen": 128315575, - "step": 6046 - }, - { - "epoch": 0.7271087596945831, - "flos": 17291992821360.0, - "grad_norm": 5.4866229284002435, - "learning_rate": 7.314768921282704e-07, - "loss": 0.9883, - "num_input_tokens_seen": 128335950, - "step": 6047 - }, - { - "epoch": 0.7272290025852222, - "flos": 16926766630200.0, - "grad_norm": 8.034382843642856, - "learning_rate": 7.30874750935633e-07, - "loss": 0.9425, - "num_input_tokens_seen": 128355355, - "step": 6048 - }, - { - "epoch": 0.7273492454758612, - "flos": 11834184956880.0, - "grad_norm": 12.231990109187057, - "learning_rate": 7.30272802268286e-07, - "loss": 1.0163, - "num_input_tokens_seen": 128372070, - "step": 6049 - }, - { - "epoch": 0.7274694883665004, - "flos": 19963211263080.0, - "grad_norm": 23.17165110731349, - "learning_rate": 7.29671046217547e-07, - "loss": 0.9881, - "num_input_tokens_seen": 128390900, - "step": 6050 - }, - { - "epoch": 0.7275897312571394, - "flos": 21646763337960.0, - "grad_norm": 5.886679868816521, - "learning_rate": 
7.290694828746988e-07, - "loss": 1.0409, - "num_input_tokens_seen": 128410285, - "step": 6051 - }, - { - "epoch": 0.7277099741477785, - "flos": 13620138172920.0, - "grad_norm": 6.829819248650603, - "learning_rate": 7.284681123310004e-07, - "loss": 1.0857, - "num_input_tokens_seen": 128428720, - "step": 6052 - }, - { - "epoch": 0.7278302170384175, - "flos": 14672143588800.0, - "grad_norm": 5.671781849161904, - "learning_rate": 7.27866934677678e-07, - "loss": 1.0146, - "num_input_tokens_seen": 128448110, - "step": 6053 - }, - { - "epoch": 0.7279504599290567, - "flos": 13539665595240.0, - "grad_norm": 3.7151088038410722, - "learning_rate": 7.272659500059297e-07, - "loss": 0.9975, - "num_input_tokens_seen": 128465170, - "step": 6054 - }, - { - "epoch": 0.7280707028196958, - "flos": 13518104970480.0, - "grad_norm": 6.427894017692385, - "learning_rate": 7.266651584069264e-07, - "loss": 1.0337, - "num_input_tokens_seen": 128482555, - "step": 6055 - }, - { - "epoch": 0.7281909457103348, - "flos": 26550860226960.0, - "grad_norm": 7.095309317192636, - "learning_rate": 7.260645599718045e-07, - "loss": 0.805, - "num_input_tokens_seen": 128508630, - "step": 6056 - }, - { - "epoch": 0.728311188600974, - "flos": 14671469034480.0, - "grad_norm": 4.881981960614923, - "learning_rate": 7.254641547916767e-07, - "loss": 0.9017, - "num_input_tokens_seen": 128525845, - "step": 6057 - }, - { - "epoch": 0.728431431491613, - "flos": 20545780314600.0, - "grad_norm": 5.132269936601975, - "learning_rate": 7.248639429576226e-07, - "loss": 0.9188, - "num_input_tokens_seen": 128545020, - "step": 6058 - }, - { - "epoch": 0.7285516743822521, - "flos": 18497983218480.0, - "grad_norm": 6.7171046881707674, - "learning_rate": 7.242639245606959e-07, - "loss": 0.9458, - "num_input_tokens_seen": 128564530, - "step": 6059 - }, - { - "epoch": 0.7286719172728913, - "flos": 11604786555360.0, - "grad_norm": 4.105556091482919, - "learning_rate": 7.236640996919168e-07, - "loss": 1.0488, - "num_input_tokens_seen": 128583295, - "step": 6060 - }, - { - "epoch": 0.7287921601635303, - "flos": 15642603272400.0, - "grad_norm": 7.211614607971383, - "learning_rate": 7.230644684422782e-07, - "loss": 0.9291, - "num_input_tokens_seen": 128603245, - "step": 6061 - }, - { - "epoch": 0.7289124030541694, - "flos": 17497745612040.0, - "grad_norm": 6.044536917265826, - "learning_rate": 7.224650309027451e-07, - "loss": 1.0512, - "num_input_tokens_seen": 128622715, - "step": 6062 - }, - { - "epoch": 0.7290326459448085, - "flos": 15193194454800.0, - "grad_norm": 4.816714138514251, - "learning_rate": 7.218657871642506e-07, - "loss": 0.9106, - "num_input_tokens_seen": 128641240, - "step": 6063 - }, - { - "epoch": 0.7291528888354476, - "flos": 13177076298240.0, - "grad_norm": 8.610816682546998, - "learning_rate": 7.212667373177012e-07, - "loss": 0.8417, - "num_input_tokens_seen": 128655955, - "step": 6064 - }, - { - "epoch": 0.7292731317260867, - "flos": 13438030993080.0, - "grad_norm": 4.557891817811824, - "learning_rate": 7.206678814539704e-07, - "loss": 0.9821, - "num_input_tokens_seen": 128673975, - "step": 6065 - }, - { - "epoch": 0.7293933746167258, - "flos": 14959166157720.0, - "grad_norm": 3.5439902518506456, - "learning_rate": 7.20069219663904e-07, - "loss": 0.9648, - "num_input_tokens_seen": 128693580, - "step": 6066 - }, - { - "epoch": 0.7295136175073649, - "flos": 15954988499520.0, - "grad_norm": 11.29994778241271, - "learning_rate": 7.1947075203832e-07, - "loss": 1.009, - "num_input_tokens_seen": 128713280, - "step": 6067 - }, - { - "epoch": 
0.7296338603980039, - "flos": 40158912197160.0, - "grad_norm": 0.8612257362458029, - "learning_rate": 7.188724786680049e-07, - "loss": 0.8414, - "num_input_tokens_seen": 128773470, - "step": 6068 - }, - { - "epoch": 0.7297541032886431, - "flos": 17949576692880.0, - "grad_norm": 5.869486067725074, - "learning_rate": 7.182743996437162e-07, - "loss": 0.974, - "num_input_tokens_seen": 128792725, - "step": 6069 - }, - { - "epoch": 0.7298743461792822, - "flos": 18840085045320.0, - "grad_norm": 4.453391833425561, - "learning_rate": 7.176765150561819e-07, - "loss": 0.9076, - "num_input_tokens_seen": 128811050, - "step": 6070 - }, - { - "epoch": 0.7299945890699212, - "flos": 13881859406760.0, - "grad_norm": 10.595184859156872, - "learning_rate": 7.170788249961002e-07, - "loss": 1.0058, - "num_input_tokens_seen": 128829280, - "step": 6071 - }, - { - "epoch": 0.7301148319605604, - "flos": 16296998341680.0, - "grad_norm": 3.075612238280175, - "learning_rate": 7.164813295541418e-07, - "loss": 1.102, - "num_input_tokens_seen": 128848565, - "step": 6072 - }, - { - "epoch": 0.7302350748511994, - "flos": 18050598063840.0, - "grad_norm": 5.598693708923349, - "learning_rate": 7.15884028820944e-07, - "loss": 0.9214, - "num_input_tokens_seen": 128867340, - "step": 6073 - }, - { - "epoch": 0.7303553177418385, - "flos": 19811648545080.0, - "grad_norm": 4.282927247228665, - "learning_rate": 7.152869228871185e-07, - "loss": 0.8121, - "num_input_tokens_seen": 128889545, - "step": 6074 - }, - { - "epoch": 0.7304755606324776, - "flos": 17373477230520.0, - "grad_norm": 4.7461861809415105, - "learning_rate": 7.146900118432457e-07, - "loss": 0.9455, - "num_input_tokens_seen": 128909010, - "step": 6075 - }, - { - "epoch": 0.7305958035231167, - "flos": 16952006642160.0, - "grad_norm": 3.130407766038994, - "learning_rate": 7.140932957798753e-07, - "loss": 1.0887, - "num_input_tokens_seen": 128927170, - "step": 6076 - }, - { - "epoch": 0.7307160464137558, - "flos": 11839796022360.0, - "grad_norm": 6.388406158491303, - "learning_rate": 7.134967747875309e-07, - "loss": 0.9319, - "num_input_tokens_seen": 128945100, - "step": 6077 - }, - { - "epoch": 0.7308362893043949, - "flos": 15484448319000.0, - "grad_norm": 4.463860671381429, - "learning_rate": 7.129004489567014e-07, - "loss": 1.0462, - "num_input_tokens_seen": 128962300, - "step": 6078 - }, - { - "epoch": 0.730956532195034, - "flos": 7513730274000.0, - "grad_norm": 8.400681931199376, - "learning_rate": 7.123043183778512e-07, - "loss": 1.0114, - "num_input_tokens_seen": 128979350, - "step": 6079 - }, - { - "epoch": 0.731076775085673, - "flos": 14042620592760.0, - "grad_norm": 6.650041577202027, - "learning_rate": 7.117083831414114e-07, - "loss": 0.8755, - "num_input_tokens_seen": 128998345, - "step": 6080 - }, - { - "epoch": 0.7311970179763122, - "flos": 14513160773280.0, - "grad_norm": 9.077447600049291, - "learning_rate": 7.11112643337787e-07, - "loss": 0.9279, - "num_input_tokens_seen": 129017110, - "step": 6081 - }, - { - "epoch": 0.7313172608669513, - "flos": 13123867395480.0, - "grad_norm": 8.163698033132526, - "learning_rate": 7.10517099057349e-07, - "loss": 0.9953, - "num_input_tokens_seen": 129033780, - "step": 6082 - }, - { - "epoch": 0.7314375037575903, - "flos": 11447398140960.0, - "grad_norm": 6.65500052459967, - "learning_rate": 7.099217503904411e-07, - "loss": 0.8144, - "num_input_tokens_seen": 129051355, - "step": 6083 - }, - { - "epoch": 0.7315577466482295, - "flos": 12731745468120.0, - "grad_norm": 15.342225910017863, - "learning_rate": 
7.093265974273788e-07, - "loss": 1.131, - "num_input_tokens_seen": 129068970, - "step": 6084 - }, - { - "epoch": 0.7316779895388685, - "flos": 13046123696640.0, - "grad_norm": 4.355112706159234, - "learning_rate": 7.087316402584447e-07, - "loss": 0.9495, - "num_input_tokens_seen": 129087515, - "step": 6085 - }, - { - "epoch": 0.7317982324295076, - "flos": 12702826068960.0, - "grad_norm": 10.564097266781092, - "learning_rate": 7.081368789738953e-07, - "loss": 1.0826, - "num_input_tokens_seen": 129104435, - "step": 6086 - }, - { - "epoch": 0.7319184753201466, - "flos": 19387357093200.0, - "grad_norm": 3.8513999424921854, - "learning_rate": 7.075423136639537e-07, - "loss": 1.0091, - "num_input_tokens_seen": 129123410, - "step": 6087 - }, - { - "epoch": 0.7320387182107858, - "flos": 26731710282840.0, - "grad_norm": 2.7086038796692344, - "learning_rate": 7.069479444188149e-07, - "loss": 0.9595, - "num_input_tokens_seen": 129143720, - "step": 6088 - }, - { - "epoch": 0.7321589611014249, - "flos": 12653541845880.0, - "grad_norm": 5.436169867224293, - "learning_rate": 7.063537713286453e-07, - "loss": 1.0452, - "num_input_tokens_seen": 129161120, - "step": 6089 - }, - { - "epoch": 0.7322792039920639, - "flos": 18576033532920.0, - "grad_norm": 5.0808687749778505, - "learning_rate": 7.057597944835803e-07, - "loss": 1.031, - "num_input_tokens_seen": 129180115, - "step": 6090 - }, - { - "epoch": 0.7323994468827031, - "flos": 18050076817320.0, - "grad_norm": 4.638490744818891, - "learning_rate": 7.051660139737253e-07, - "loss": 0.973, - "num_input_tokens_seen": 129198055, - "step": 6091 - }, - { - "epoch": 0.7325196897733421, - "flos": 19156149659640.0, - "grad_norm": 15.733572976394003, - "learning_rate": 7.045724298891565e-07, - "loss": 0.9877, - "num_input_tokens_seen": 129217245, - "step": 6092 - }, - { - "epoch": 0.7326399326639812, - "flos": 18343078390440.0, - "grad_norm": 6.629585838212706, - "learning_rate": 7.039790423199192e-07, - "loss": 0.9176, - "num_input_tokens_seen": 129236605, - "step": 6093 - }, - { - "epoch": 0.7327601755546204, - "flos": 14933956807320.0, - "grad_norm": 25.871174847468392, - "learning_rate": 7.033858513560322e-07, - "loss": 1.005, - "num_input_tokens_seen": 129252620, - "step": 6094 - }, - { - "epoch": 0.7328804184452594, - "flos": 11525233824480.0, - "grad_norm": 6.303360167065813, - "learning_rate": 7.027928570874794e-07, - "loss": 0.9804, - "num_input_tokens_seen": 129270530, - "step": 6095 - }, - { - "epoch": 0.7330006613358985, - "flos": 12652867291560.0, - "grad_norm": 3.850731688490275, - "learning_rate": 7.022000596042194e-07, - "loss": 1.0736, - "num_input_tokens_seen": 129287350, - "step": 6096 - }, - { - "epoch": 0.7331209042265376, - "flos": 15983141359680.0, - "grad_norm": 7.384515604237594, - "learning_rate": 7.016074589961784e-07, - "loss": 1.0414, - "num_input_tokens_seen": 129305635, - "step": 6097 - }, - { - "epoch": 0.7332411471171767, - "flos": 23587253443320.0, - "grad_norm": 5.518477699068132, - "learning_rate": 7.01015055353253e-07, - "loss": 0.8949, - "num_input_tokens_seen": 129327780, - "step": 6098 - }, - { - "epoch": 0.7333613900078157, - "flos": 16162887599400.0, - "grad_norm": 9.36665440713571, - "learning_rate": 7.004228487653123e-07, - "loss": 0.9983, - "num_input_tokens_seen": 129348305, - "step": 6099 - }, - { - "epoch": 0.7334816328984549, - "flos": 15878256632160.0, - "grad_norm": 7.008216123322765, - "learning_rate": 6.998308393221906e-07, - "loss": 1.0077, - "num_input_tokens_seen": 129366430, - "step": 6100 - }, - { - "epoch": 
0.733601875789094, - "flos": 14720569288200.0, - "grad_norm": 5.048487355521198, - "learning_rate": 6.992390271136977e-07, - "loss": 0.941, - "num_input_tokens_seen": 129381860, - "step": 6101 - }, - { - "epoch": 0.733722118679733, - "flos": 16035001153800.0, - "grad_norm": 6.874397667595055, - "learning_rate": 6.986474122296094e-07, - "loss": 1.0817, - "num_input_tokens_seen": 129400695, - "step": 6102 - }, - { - "epoch": 0.7338423615703722, - "flos": 14252666001840.0, - "grad_norm": 5.860772294330017, - "learning_rate": 6.980559947596751e-07, - "loss": 0.9507, - "num_input_tokens_seen": 129418955, - "step": 6103 - }, - { - "epoch": 0.7339626044610112, - "flos": 15404803603440.0, - "grad_norm": 4.895913673986195, - "learning_rate": 6.974647747936109e-07, - "loss": 0.9861, - "num_input_tokens_seen": 129437060, - "step": 6104 - }, - { - "epoch": 0.7340828473516503, - "flos": 10791561978360.0, - "grad_norm": 4.9743632153465995, - "learning_rate": 6.968737524211039e-07, - "loss": 1.047, - "num_input_tokens_seen": 129453590, - "step": 6105 - }, - { - "epoch": 0.7342030902422895, - "flos": 16297979511600.0, - "grad_norm": 6.512327681767618, - "learning_rate": 6.962829277318132e-07, - "loss": 1.0101, - "num_input_tokens_seen": 129472905, - "step": 6106 - }, - { - "epoch": 0.7343233331329285, - "flos": 18394784876760.0, - "grad_norm": 4.789685356621005, - "learning_rate": 6.956923008153652e-07, - "loss": 1.0489, - "num_input_tokens_seen": 129492390, - "step": 6107 - }, - { - "epoch": 0.7344435760235676, - "flos": 13099056645360.0, - "grad_norm": 131.0684831133137, - "learning_rate": 6.951018717613593e-07, - "loss": 1.0666, - "num_input_tokens_seen": 129511125, - "step": 6108 - }, - { - "epoch": 0.7345638189142067, - "flos": 12496153431480.0, - "grad_norm": 6.164001232466429, - "learning_rate": 6.945116406593614e-07, - "loss": 0.9892, - "num_input_tokens_seen": 129529700, - "step": 6109 - }, - { - "epoch": 0.7346840618048458, - "flos": 14378314153560.0, - "grad_norm": 7.309295212863486, - "learning_rate": 6.939216075989089e-07, - "loss": 0.9532, - "num_input_tokens_seen": 129547350, - "step": 6110 - }, - { - "epoch": 0.7348043046954849, - "flos": 20676947547120.0, - "grad_norm": 7.717664881737652, - "learning_rate": 6.933317726695109e-07, - "loss": 0.8923, - "num_input_tokens_seen": 129568300, - "step": 6111 - }, - { - "epoch": 0.734924547586124, - "flos": 12705309655320.0, - "grad_norm": 5.112356639987933, - "learning_rate": 6.92742135960644e-07, - "loss": 1.0068, - "num_input_tokens_seen": 129585720, - "step": 6112 - }, - { - "epoch": 0.7350447904767631, - "flos": 45524958481800.0, - "grad_norm": 0.8600528471256508, - "learning_rate": 6.921526975617556e-07, - "loss": 0.8254, - "num_input_tokens_seen": 129644900, - "step": 6113 - }, - { - "epoch": 0.7351650333674021, - "flos": 15327673135800.0, - "grad_norm": 5.11030705434588, - "learning_rate": 6.915634575622631e-07, - "loss": 0.9714, - "num_input_tokens_seen": 129663135, - "step": 6114 - }, - { - "epoch": 0.7352852762580413, - "flos": 12888643297560.0, - "grad_norm": 5.539868208767047, - "learning_rate": 6.909744160515532e-07, - "loss": 0.9393, - "num_input_tokens_seen": 129680995, - "step": 6115 - }, - { - "epoch": 0.7354055191486804, - "flos": 27782366590080.0, - "grad_norm": 7.207625102566075, - "learning_rate": 6.903855731189849e-07, - "loss": 0.8983, - "num_input_tokens_seen": 129703350, - "step": 6116 - }, - { - "epoch": 0.7355257620393194, - "flos": 11525877717240.0, - "grad_norm": 13.9670928040335, - "learning_rate": 
6.897969288538825e-07, - "loss": 1.0304, - "num_input_tokens_seen": 129721015, - "step": 6117 - }, - { - "epoch": 0.7356460049299585, - "flos": 12836936811240.0, - "grad_norm": 9.722945408642055, - "learning_rate": 6.892084833455452e-07, - "loss": 1.0354, - "num_input_tokens_seen": 129740305, - "step": 6118 - }, - { - "epoch": 0.7357662478205976, - "flos": 15144615447600.0, - "grad_norm": 4.308882370772756, - "learning_rate": 6.886202366832384e-07, - "loss": 1.0658, - "num_input_tokens_seen": 129761710, - "step": 6119 - }, - { - "epoch": 0.7358864907112367, - "flos": 10057522193520.0, - "grad_norm": 4.892628609707098, - "learning_rate": 6.880321889561987e-07, - "loss": 0.9592, - "num_input_tokens_seen": 129779405, - "step": 6120 - }, - { - "epoch": 0.7360067336018757, - "flos": 15852066111840.0, - "grad_norm": 3.5964171753744885, - "learning_rate": 6.874443402536338e-07, - "loss": 0.8723, - "num_input_tokens_seen": 129798215, - "step": 6121 - }, - { - "epoch": 0.7361269764925149, - "flos": 18183819620880.0, - "grad_norm": 5.201491824145768, - "learning_rate": 6.868566906647177e-07, - "loss": 1.0382, - "num_input_tokens_seen": 129818885, - "step": 6122 - }, - { - "epoch": 0.736247219383154, - "flos": 14462803395600.0, - "grad_norm": 7.788194858662316, - "learning_rate": 6.862692402785984e-07, - "loss": 1.052, - "num_input_tokens_seen": 129838855, - "step": 6123 - }, - { - "epoch": 0.736367462273793, - "flos": 50377103592000.0, - "grad_norm": 0.6950462131435777, - "learning_rate": 6.856819891843899e-07, - "loss": 0.7425, - "num_input_tokens_seen": 129903280, - "step": 6124 - }, - { - "epoch": 0.7364877051644322, - "flos": 15925241238240.0, - "grad_norm": 7.240548649016145, - "learning_rate": 6.8509493747118e-07, - "loss": 0.9421, - "num_input_tokens_seen": 129921810, - "step": 6125 - }, - { - "epoch": 0.7366079480550712, - "flos": 8536325705760.0, - "grad_norm": 42.388230500469554, - "learning_rate": 6.845080852280221e-07, - "loss": 1.0949, - "num_input_tokens_seen": 129938600, - "step": 6126 - }, - { - "epoch": 0.7367281909457103, - "flos": 10634234887080.0, - "grad_norm": 5.563406709644635, - "learning_rate": 6.839214325439409e-07, - "loss": 0.975, - "num_input_tokens_seen": 129956015, - "step": 6127 - }, - { - "epoch": 0.7368484338363495, - "flos": 16714850865960.0, - "grad_norm": 8.948071183516223, - "learning_rate": 6.833349795079327e-07, - "loss": 0.9485, - "num_input_tokens_seen": 129974845, - "step": 6128 - }, - { - "epoch": 0.7369686767269885, - "flos": 19523215544400.0, - "grad_norm": 8.049606067188261, - "learning_rate": 6.827487262089613e-07, - "loss": 0.9081, - "num_input_tokens_seen": 129995070, - "step": 6129 - }, - { - "epoch": 0.7370889196176276, - "flos": 50342603788920.0, - "grad_norm": 0.8999304598857388, - "learning_rate": 6.821626727359606e-07, - "loss": 0.8262, - "num_input_tokens_seen": 130060350, - "step": 6130 - }, - { - "epoch": 0.7372091625082667, - "flos": 12784126508760.0, - "grad_norm": 8.01360880161932, - "learning_rate": 6.815768191778348e-07, - "loss": 0.9883, - "num_input_tokens_seen": 130078150, - "step": 6131 - }, - { - "epoch": 0.7373294053989058, - "flos": 24056536499880.0, - "grad_norm": 5.011272312418173, - "learning_rate": 6.809911656234569e-07, - "loss": 0.9614, - "num_input_tokens_seen": 130099845, - "step": 6132 - }, - { - "epoch": 0.7374496482895448, - "flos": 15274617540840.0, - "grad_norm": 5.615887529947821, - "learning_rate": 6.804057121616707e-07, - "loss": 1.0112, - "num_input_tokens_seen": 130117770, - "step": 6133 - }, - { - "epoch": 
0.737569891180184, - "flos": 17740236499680.0, - "grad_norm": 5.718191317591611, - "learning_rate": 6.798204588812888e-07, - "loss": 0.9445, - "num_input_tokens_seen": 130136905, - "step": 6134 - }, - { - "epoch": 0.7376901340708231, - "flos": 14669292063720.0, - "grad_norm": 8.290351443876688, - "learning_rate": 6.792354058710937e-07, - "loss": 0.981, - "num_input_tokens_seen": 130154095, - "step": 6135 - }, - { - "epoch": 0.7378103769614621, - "flos": 16927318538280.0, - "grad_norm": 9.28271020776096, - "learning_rate": 6.786505532198374e-07, - "loss": 0.8796, - "num_input_tokens_seen": 130172760, - "step": 6136 - }, - { - "epoch": 0.7379306198521013, - "flos": 15800053009920.0, - "grad_norm": 6.793160241155196, - "learning_rate": 6.780659010162411e-07, - "loss": 1.0811, - "num_input_tokens_seen": 130191430, - "step": 6137 - }, - { - "epoch": 0.7380508627427403, - "flos": 10529104867080.0, - "grad_norm": 7.699471928707646, - "learning_rate": 6.774814493489975e-07, - "loss": 1.0649, - "num_input_tokens_seen": 130208825, - "step": 6138 - }, - { - "epoch": 0.7381711056333794, - "flos": 15403239863880.0, - "grad_norm": 6.805194542407276, - "learning_rate": 6.768971983067655e-07, - "loss": 0.8978, - "num_input_tokens_seen": 130228875, - "step": 6139 - }, - { - "epoch": 0.7382913485240186, - "flos": 37484566276320.0, - "grad_norm": 1.0475022443870226, - "learning_rate": 6.763131479781772e-07, - "loss": 0.9322, - "num_input_tokens_seen": 130278355, - "step": 6140 - }, - { - "epoch": 0.7384115914146576, - "flos": 15484908242400.0, - "grad_norm": 6.105386900642986, - "learning_rate": 6.757292984518316e-07, - "loss": 1.0018, - "num_input_tokens_seen": 130297475, - "step": 6141 - }, - { - "epoch": 0.7385318343052967, - "flos": 44018117152200.0, - "grad_norm": 0.8617197820081436, - "learning_rate": 6.751456498162981e-07, - "loss": 0.8422, - "num_input_tokens_seen": 130356230, - "step": 6142 - }, - { - "epoch": 0.7386520771959358, - "flos": 12045242197440.0, - "grad_norm": 5.988831934582388, - "learning_rate": 6.745622021601174e-07, - "loss": 1.0812, - "num_input_tokens_seen": 130372975, - "step": 6143 - }, - { - "epoch": 0.7387723200865749, - "flos": 13308734115720.0, - "grad_norm": 4.895943671532022, - "learning_rate": 6.739789555717954e-07, - "loss": 0.9234, - "num_input_tokens_seen": 130389670, - "step": 6144 - }, - { - "epoch": 0.738892562977214, - "flos": 16006786970520.0, - "grad_norm": 6.200058955250666, - "learning_rate": 6.733959101398124e-07, - "loss": 1.0139, - "num_input_tokens_seen": 130407520, - "step": 6145 - }, - { - "epoch": 0.7390128058678531, - "flos": 15270478230240.0, - "grad_norm": 7.353080892694025, - "learning_rate": 6.728130659526143e-07, - "loss": 1.0455, - "num_input_tokens_seen": 130425050, - "step": 6146 - }, - { - "epoch": 0.7391330487584922, - "flos": 18343415667600.0, - "grad_norm": 5.731933739331766, - "learning_rate": 6.7223042309862e-07, - "loss": 0.9205, - "num_input_tokens_seen": 130444970, - "step": 6147 - }, - { - "epoch": 0.7392532916491312, - "flos": 20205211565760.0, - "grad_norm": 7.332473213154676, - "learning_rate": 6.716479816662144e-07, - "loss": 0.961, - "num_input_tokens_seen": 130466420, - "step": 6148 - }, - { - "epoch": 0.7393735345397703, - "flos": 16769132923320.0, - "grad_norm": 6.415170106959028, - "learning_rate": 6.710657417437531e-07, - "loss": 0.9548, - "num_input_tokens_seen": 130485845, - "step": 6149 - }, - { - "epoch": 0.7394937774304094, - "flos": 14174033117760.0, - "grad_norm": 6.1353236797477475, - "learning_rate": 
6.704837034195628e-07, - "loss": 1.0031, - "num_input_tokens_seen": 130504030, - "step": 6150 - }, - { - "epoch": 0.7396140203210485, - "flos": 16691849147880.0, - "grad_norm": 5.169374535687075, - "learning_rate": 6.699018667819376e-07, - "loss": 1.0731, - "num_input_tokens_seen": 130523150, - "step": 6151 - }, - { - "epoch": 0.7397342632116876, - "flos": 18184524836760.0, - "grad_norm": 3.7158005502102602, - "learning_rate": 6.693202319191415e-07, - "loss": 0.9501, - "num_input_tokens_seen": 130544605, - "step": 6152 - }, - { - "epoch": 0.7398545061023267, - "flos": 17609590513680.0, - "grad_norm": 3.3424912694961098, - "learning_rate": 6.687387989194084e-07, - "loss": 0.962, - "num_input_tokens_seen": 130563840, - "step": 6153 - }, - { - "epoch": 0.7399747489929658, - "flos": 11682959516040.0, - "grad_norm": 5.966008220691241, - "learning_rate": 6.681575678709404e-07, - "loss": 1.0188, - "num_input_tokens_seen": 130582250, - "step": 6154 - }, - { - "epoch": 0.7400949918836048, - "flos": 17136566746800.0, - "grad_norm": 8.921092518499783, - "learning_rate": 6.67576538861911e-07, - "loss": 0.9305, - "num_input_tokens_seen": 130600545, - "step": 6155 - }, - { - "epoch": 0.740215234774244, - "flos": 15488066383080.0, - "grad_norm": 3.230670521349568, - "learning_rate": 6.669957119804612e-07, - "loss": 1.0437, - "num_input_tokens_seen": 130621900, - "step": 6156 - }, - { - "epoch": 0.7403354776648831, - "flos": 13281593087040.0, - "grad_norm": 28.4956297619011, - "learning_rate": 6.66415087314702e-07, - "loss": 0.926, - "num_input_tokens_seen": 130636575, - "step": 6157 - }, - { - "epoch": 0.7404557205555221, - "flos": 11971085901120.0, - "grad_norm": 15.29871479695195, - "learning_rate": 6.65834664952714e-07, - "loss": 0.9469, - "num_input_tokens_seen": 130653745, - "step": 6158 - }, - { - "epoch": 0.7405759634461613, - "flos": 15064817424240.0, - "grad_norm": 3.9256486101659744, - "learning_rate": 6.652544449825457e-07, - "loss": 0.983, - "num_input_tokens_seen": 130673720, - "step": 6159 - }, - { - "epoch": 0.7406962063368003, - "flos": 14536469106960.0, - "grad_norm": 6.8115698503123046, - "learning_rate": 6.646744274922182e-07, - "loss": 0.987, - "num_input_tokens_seen": 130691885, - "step": 6160 - }, - { - "epoch": 0.7408164492274394, - "flos": 14042559269640.0, - "grad_norm": 8.36618671433767, - "learning_rate": 6.640946125697171e-07, - "loss": 0.975, - "num_input_tokens_seen": 130709135, - "step": 6161 - }, - { - "epoch": 0.7409366921180786, - "flos": 20807409563760.0, - "grad_norm": 7.99913759351533, - "learning_rate": 6.635150003030017e-07, - "loss": 0.9807, - "num_input_tokens_seen": 130727380, - "step": 6162 - }, - { - "epoch": 0.7410569350087176, - "flos": 16297887526920.0, - "grad_norm": 14.848224037913697, - "learning_rate": 6.629355907799981e-07, - "loss": 1.0898, - "num_input_tokens_seen": 130746905, - "step": 6163 - }, - { - "epoch": 0.7411771778993567, - "flos": 21694698452400.0, - "grad_norm": 3.565095591714753, - "learning_rate": 6.623563840886015e-07, - "loss": 0.9182, - "num_input_tokens_seen": 130767550, - "step": 6164 - }, - { - "epoch": 0.7412974207899958, - "flos": 14567350845960.0, - "grad_norm": 3.9832234233126886, - "learning_rate": 6.617773803166795e-07, - "loss": 0.9265, - "num_input_tokens_seen": 130785595, - "step": 6165 - }, - { - "epoch": 0.7414176636806349, - "flos": 15694677697440.0, - "grad_norm": 9.495267391929428, - "learning_rate": 6.611985795520634e-07, - "loss": 1.0485, - "num_input_tokens_seen": 130803860, - "step": 6166 - }, - { - "epoch": 
0.7415379065712739, - "flos": 17897073006000.0, - "grad_norm": 5.793903422047495, - "learning_rate": 6.606199818825588e-07, - "loss": 0.9976, - "num_input_tokens_seen": 130824035, - "step": 6167 - }, - { - "epoch": 0.7416581494619131, - "flos": 11943699579960.0, - "grad_norm": 7.945693225422155, - "learning_rate": 6.600415873959377e-07, - "loss": 1.0427, - "num_input_tokens_seen": 130841630, - "step": 6168 - }, - { - "epoch": 0.7417783923525522, - "flos": 20256948713640.0, - "grad_norm": 12.949413177273408, - "learning_rate": 6.594633961799437e-07, - "loss": 0.8701, - "num_input_tokens_seen": 130860390, - "step": 6169 - }, - { - "epoch": 0.7418986352431912, - "flos": 14252757986520.0, - "grad_norm": 5.103537301427369, - "learning_rate": 6.588854083222857e-07, - "loss": 1.0461, - "num_input_tokens_seen": 130879545, - "step": 6170 - }, - { - "epoch": 0.7420188781338304, - "flos": 12941300292240.0, - "grad_norm": 13.12298510389726, - "learning_rate": 6.583076239106444e-07, - "loss": 1.0218, - "num_input_tokens_seen": 130897770, - "step": 6171 - }, - { - "epoch": 0.7421391210244694, - "flos": 9716186905680.0, - "grad_norm": 6.318377789289241, - "learning_rate": 6.577300430326707e-07, - "loss": 0.9813, - "num_input_tokens_seen": 130912435, - "step": 6172 - }, - { - "epoch": 0.7422593639151085, - "flos": 11289948403440.0, - "grad_norm": 5.41038857522379, - "learning_rate": 6.571526657759821e-07, - "loss": 0.9496, - "num_input_tokens_seen": 130927895, - "step": 6173 - }, - { - "epoch": 0.7423796068057477, - "flos": 21460731478440.0, - "grad_norm": 30.63859910117975, - "learning_rate": 6.565754922281663e-07, - "loss": 0.933, - "num_input_tokens_seen": 130949860, - "step": 6174 - }, - { - "epoch": 0.7424998496963867, - "flos": 14567228199720.0, - "grad_norm": 7.459525185372788, - "learning_rate": 6.559985224767801e-07, - "loss": 1.0156, - "num_input_tokens_seen": 130967455, - "step": 6175 - }, - { - "epoch": 0.7426200925870258, - "flos": 15537166636800.0, - "grad_norm": 5.67486274990194, - "learning_rate": 6.55421756609349e-07, - "loss": 0.9833, - "num_input_tokens_seen": 130985430, - "step": 6176 - }, - { - "epoch": 0.7427403354776649, - "flos": 18814661064000.0, - "grad_norm": 8.387239536840465, - "learning_rate": 6.54845194713369e-07, - "loss": 1.0173, - "num_input_tokens_seen": 131006100, - "step": 6177 - }, - { - "epoch": 0.742860578368304, - "flos": 14118677905800.0, - "grad_norm": 9.08782073534538, - "learning_rate": 6.542688368763034e-07, - "loss": 1.0282, - "num_input_tokens_seen": 131024225, - "step": 6178 - }, - { - "epoch": 0.742980821258943, - "flos": 17661358323120.0, - "grad_norm": 6.566903226564295, - "learning_rate": 6.536926831855854e-07, - "loss": 1.0035, - "num_input_tokens_seen": 131043110, - "step": 6179 - }, - { - "epoch": 0.7431010641495821, - "flos": 17949883308480.0, - "grad_norm": 5.648484268100384, - "learning_rate": 6.531167337286165e-07, - "loss": 0.9511, - "num_input_tokens_seen": 131062850, - "step": 6180 - }, - { - "epoch": 0.7432213070402213, - "flos": 15458717722080.0, - "grad_norm": 12.158116371427438, - "learning_rate": 6.52540988592768e-07, - "loss": 1.0264, - "num_input_tokens_seen": 131083590, - "step": 6181 - }, - { - "epoch": 0.7433415499308603, - "flos": 10450594629240.0, - "grad_norm": 3.6037240317021335, - "learning_rate": 6.519654478653814e-07, - "loss": 1.0576, - "num_input_tokens_seen": 131101675, - "step": 6182 - }, - { - "epoch": 0.7434617928214994, - "flos": 48087551485680.0, - "grad_norm": 0.7646321067186693, - "learning_rate": 
6.51390111633763e-07, - "loss": 0.805, - "num_input_tokens_seen": 131166670, - "step": 6183 - }, - { - "epoch": 0.7435820357121385, - "flos": 19493774898720.0, - "grad_norm": 5.560624738763763, - "learning_rate": 6.508149799851932e-07, - "loss": 0.9807, - "num_input_tokens_seen": 131188055, - "step": 6184 - }, - { - "epoch": 0.7437022786027776, - "flos": 17057933862720.0, - "grad_norm": 21.555735182041026, - "learning_rate": 6.502400530069183e-07, - "loss": 0.8471, - "num_input_tokens_seen": 131207660, - "step": 6185 - }, - { - "epoch": 0.7438225214934167, - "flos": 15533057987760.0, - "grad_norm": 4.756827216393925, - "learning_rate": 6.496653307861535e-07, - "loss": 0.908, - "num_input_tokens_seen": 131228050, - "step": 6186 - }, - { - "epoch": 0.7439427643840558, - "flos": 14357520067800.0, - "grad_norm": 4.870767565637555, - "learning_rate": 6.490908134100857e-07, - "loss": 0.8816, - "num_input_tokens_seen": 131246235, - "step": 6187 - }, - { - "epoch": 0.7440630072746949, - "flos": 14802912221040.0, - "grad_norm": 12.37888837664377, - "learning_rate": 6.48516500965866e-07, - "loss": 0.9153, - "num_input_tokens_seen": 131265890, - "step": 6188 - }, - { - "epoch": 0.7441832501653339, - "flos": 18866030273160.0, - "grad_norm": 3.3100872249577673, - "learning_rate": 6.479423935406192e-07, - "loss": 1.0436, - "num_input_tokens_seen": 131285595, - "step": 6189 - }, - { - "epoch": 0.7443034930559731, - "flos": 49126035153600.0, - "grad_norm": 0.9170513032458335, - "learning_rate": 6.473684912214357e-07, - "loss": 0.9405, - "num_input_tokens_seen": 131348875, - "step": 6190 - }, - { - "epoch": 0.7444237359466122, - "flos": 13224674135520.0, - "grad_norm": 5.65228284105702, - "learning_rate": 6.467947940953778e-07, - "loss": 0.9318, - "num_input_tokens_seen": 131367120, - "step": 6191 - }, - { - "epoch": 0.7445439788372512, - "flos": 16216709733360.0, - "grad_norm": 10.094358065687029, - "learning_rate": 6.462213022494732e-07, - "loss": 0.9541, - "num_input_tokens_seen": 131386085, - "step": 6192 - }, - { - "epoch": 0.7446642217278904, - "flos": 48008520001320.0, - "grad_norm": 0.785766174875724, - "learning_rate": 6.456480157707201e-07, - "loss": 0.8783, - "num_input_tokens_seen": 131450580, - "step": 6193 - }, - { - "epoch": 0.7447844646185294, - "flos": 12335698861080.0, - "grad_norm": 22.159329607907253, - "learning_rate": 6.450749347460866e-07, - "loss": 1.0741, - "num_input_tokens_seen": 131467275, - "step": 6194 - }, - { - "epoch": 0.7449047075091685, - "flos": 18946962774240.0, - "grad_norm": 5.128028533046807, - "learning_rate": 6.445020592625083e-07, - "loss": 1.0245, - "num_input_tokens_seen": 131487645, - "step": 6195 - }, - { - "epoch": 0.7450249503998077, - "flos": 10002166981560.0, - "grad_norm": 6.427769983379035, - "learning_rate": 6.4392938940689e-07, - "loss": 1.0296, - "num_input_tokens_seen": 131502780, - "step": 6196 - }, - { - "epoch": 0.7451451932904467, - "flos": 13909061758560.0, - "grad_norm": 76.10478616067783, - "learning_rate": 6.433569252661049e-07, - "loss": 0.928, - "num_input_tokens_seen": 131520500, - "step": 6197 - }, - { - "epoch": 0.7452654361810858, - "flos": 8798782817040.0, - "grad_norm": 5.088688494601804, - "learning_rate": 6.427846669269952e-07, - "loss": 0.9267, - "num_input_tokens_seen": 131537840, - "step": 6198 - }, - { - "epoch": 0.7453856790717249, - "flos": 15721021525560.0, - "grad_norm": 9.575366570450246, - "learning_rate": 6.422126144763729e-07, - "loss": 1.0356, - "num_input_tokens_seen": 131556950, - "step": 6199 - }, - { - "epoch": 
0.745505921962364, - "flos": 14199763714680.0, - "grad_norm": 4.798385133895411, - "learning_rate": 6.416407680010174e-07, - "loss": 0.9927, - "num_input_tokens_seen": 131571030, - "step": 6200 - }, - { - "epoch": 0.745626164853003, - "flos": 17553591408960.0, - "grad_norm": 7.050190959535633, - "learning_rate": 6.410691275876774e-07, - "loss": 1.032, - "num_input_tokens_seen": 131590170, - "step": 6201 - }, - { - "epoch": 0.7457464077436422, - "flos": 10267383633240.0, - "grad_norm": 9.319457456981091, - "learning_rate": 6.404976933230704e-07, - "loss": 0.985, - "num_input_tokens_seen": 131606410, - "step": 6202 - }, - { - "epoch": 0.7458666506342813, - "flos": 24268850864400.0, - "grad_norm": 3.395501212200588, - "learning_rate": 6.399264652938813e-07, - "loss": 0.9548, - "num_input_tokens_seen": 131627035, - "step": 6203 - }, - { - "epoch": 0.7459868935249203, - "flos": 17267274055920.0, - "grad_norm": 6.0188157376099145, - "learning_rate": 6.393554435867679e-07, - "loss": 0.9667, - "num_input_tokens_seen": 131647605, - "step": 6204 - }, - { - "epoch": 0.7461071364155595, - "flos": 15564001049880.0, - "grad_norm": 11.422489509803128, - "learning_rate": 6.387846282883502e-07, - "loss": 1.0476, - "num_input_tokens_seen": 131663855, - "step": 6205 - }, - { - "epoch": 0.7462273793061985, - "flos": 16268416219680.0, - "grad_norm": 24.85461878998172, - "learning_rate": 6.38214019485223e-07, - "loss": 0.9947, - "num_input_tokens_seen": 131682400, - "step": 6206 - }, - { - "epoch": 0.7463476221968376, - "flos": 14168636683200.0, - "grad_norm": 7.879757609633196, - "learning_rate": 6.376436172639461e-07, - "loss": 0.9405, - "num_input_tokens_seen": 131699965, - "step": 6207 - }, - { - "epoch": 0.7464678650874768, - "flos": 11918643537360.0, - "grad_norm": 9.75925143576216, - "learning_rate": 6.370734217110487e-07, - "loss": 0.8751, - "num_input_tokens_seen": 131718430, - "step": 6208 - }, - { - "epoch": 0.7465881079781158, - "flos": 34387130252520.0, - "grad_norm": 7.07891935560535, - "learning_rate": 6.36503432913031e-07, - "loss": 0.8712, - "num_input_tokens_seen": 131741295, - "step": 6209 - }, - { - "epoch": 0.7467083508687549, - "flos": 13959541782480.0, - "grad_norm": 11.997534180656274, - "learning_rate": 6.359336509563569e-07, - "loss": 0.9145, - "num_input_tokens_seen": 131757035, - "step": 6210 - }, - { - "epoch": 0.7468285937593939, - "flos": 12679701704640.0, - "grad_norm": 6.321063587652824, - "learning_rate": 6.353640759274641e-07, - "loss": 1.0299, - "num_input_tokens_seen": 131775645, - "step": 6211 - }, - { - "epoch": 0.7469488366500331, - "flos": 16448837013720.0, - "grad_norm": 6.393139332893125, - "learning_rate": 6.347947079127556e-07, - "loss": 0.9697, - "num_input_tokens_seen": 131793265, - "step": 6212 - }, - { - "epoch": 0.7470690795406721, - "flos": 11813912117640.0, - "grad_norm": 4.646845184209996, - "learning_rate": 6.342255469986053e-07, - "loss": 0.994, - "num_input_tokens_seen": 131811730, - "step": 6213 - }, - { - "epoch": 0.7471893224313112, - "flos": 17924122050000.0, - "grad_norm": 5.040325816688729, - "learning_rate": 6.336565932713533e-07, - "loss": 0.9917, - "num_input_tokens_seen": 131830875, - "step": 6214 - }, - { - "epoch": 0.7473095653219504, - "flos": 16007369540160.0, - "grad_norm": 4.813144194848651, - "learning_rate": 6.330878468173088e-07, - "loss": 1.0005, - "num_input_tokens_seen": 131850660, - "step": 6215 - }, - { - "epoch": 0.7474298082125894, - "flos": 12836722180320.0, - "grad_norm": 5.38167264299877, - "learning_rate": 
6.32519307722752e-07, - "loss": 0.9636, - "num_input_tokens_seen": 131868275, - "step": 6216 - }, - { - "epoch": 0.7475500511032285, - "flos": 44444861528880.0, - "grad_norm": 0.7708486134500994, - "learning_rate": 6.31950976073929e-07, - "loss": 0.8062, - "num_input_tokens_seen": 131922085, - "step": 6217 - }, - { - "epoch": 0.7476702939938676, - "flos": 12600700881840.0, - "grad_norm": 7.248948382698645, - "learning_rate": 6.31382851957055e-07, - "loss": 1.0265, - "num_input_tokens_seen": 131938625, - "step": 6218 - }, - { - "epoch": 0.7477905368845067, - "flos": 19889729521080.0, - "grad_norm": 7.540839836642503, - "learning_rate": 6.308149354583143e-07, - "loss": 0.928, - "num_input_tokens_seen": 131957750, - "step": 6219 - }, - { - "epoch": 0.7479107797751458, - "flos": 19129744508400.0, - "grad_norm": 8.515100865463863, - "learning_rate": 6.302472266638586e-07, - "loss": 1.0341, - "num_input_tokens_seen": 131978010, - "step": 6220 - }, - { - "epoch": 0.7480310226657849, - "flos": 24212269190040.0, - "grad_norm": 5.938960166878503, - "learning_rate": 6.296797256598101e-07, - "loss": 0.9386, - "num_input_tokens_seen": 131999210, - "step": 6221 - }, - { - "epoch": 0.748151265556424, - "flos": 17660806415040.0, - "grad_norm": 10.194478833252862, - "learning_rate": 6.291124325322576e-07, - "loss": 1.0397, - "num_input_tokens_seen": 132019055, - "step": 6222 - }, - { - "epoch": 0.748271508447063, - "flos": 27415331366880.0, - "grad_norm": 3.9000074631057147, - "learning_rate": 6.285453473672595e-07, - "loss": 0.8533, - "num_input_tokens_seen": 132041345, - "step": 6223 - }, - { - "epoch": 0.7483917513377022, - "flos": 15300010860600.0, - "grad_norm": 6.3096131626050385, - "learning_rate": 6.279784702508415e-07, - "loss": 0.9686, - "num_input_tokens_seen": 132061815, - "step": 6224 - }, - { - "epoch": 0.7485119942283412, - "flos": 44609945994840.0, - "grad_norm": 0.8490038939500167, - "learning_rate": 6.274118012689979e-07, - "loss": 0.873, - "num_input_tokens_seen": 132123435, - "step": 6225 - }, - { - "epoch": 0.7486322371189803, - "flos": 21334439433960.0, - "grad_norm": 6.31232142431895, - "learning_rate": 6.268453405076943e-07, - "loss": 0.912, - "num_input_tokens_seen": 132145550, - "step": 6226 - }, - { - "epoch": 0.7487524800096195, - "flos": 13436620561320.0, - "grad_norm": 6.128596592818764, - "learning_rate": 6.262790880528592e-07, - "loss": 1.0528, - "num_input_tokens_seen": 132162890, - "step": 6227 - }, - { - "epoch": 0.7488727229002585, - "flos": 13255954474800.0, - "grad_norm": 10.366370231421524, - "learning_rate": 6.257130439903951e-07, - "loss": 1.0097, - "num_input_tokens_seen": 132179105, - "step": 6228 - }, - { - "epoch": 0.7489929657908976, - "flos": 16796212628880.0, - "grad_norm": 3.635062359848226, - "learning_rate": 6.251472084061695e-07, - "loss": 1.0342, - "num_input_tokens_seen": 132197745, - "step": 6229 - }, - { - "epoch": 0.7491132086815367, - "flos": 14587593023640.0, - "grad_norm": 5.479474347931439, - "learning_rate": 6.245815813860191e-07, - "loss": 1.1269, - "num_input_tokens_seen": 132212975, - "step": 6230 - }, - { - "epoch": 0.7492334515721758, - "flos": 16350360552240.0, - "grad_norm": 4.719802840042893, - "learning_rate": 6.240161630157495e-07, - "loss": 0.9117, - "num_input_tokens_seen": 132232050, - "step": 6231 - }, - { - "epoch": 0.7493536944628149, - "flos": 11604020016360.0, - "grad_norm": 5.497650685103586, - "learning_rate": 6.23450953381133e-07, - "loss": 0.9202, - "num_input_tokens_seen": 132249860, - "step": 6232 - }, - { - "epoch": 
0.749473937353454, - "flos": 10841735386680.0, - "grad_norm": 8.094472963307181, - "learning_rate": 6.228859525679131e-07, - "loss": 0.9083, - "num_input_tokens_seen": 132263995, - "step": 6233 - }, - { - "epoch": 0.7495941802440931, - "flos": 13438092316200.0, - "grad_norm": 12.733636865110455, - "learning_rate": 6.223211606617986e-07, - "loss": 1.0307, - "num_input_tokens_seen": 132282135, - "step": 6234 - }, - { - "epoch": 0.7497144231347321, - "flos": 15983478636840.0, - "grad_norm": 9.058041653211173, - "learning_rate": 6.217565777484701e-07, - "loss": 1.0564, - "num_input_tokens_seen": 132300950, - "step": 6235 - }, - { - "epoch": 0.7498346660253713, - "flos": 17241880736160.0, - "grad_norm": 4.174604230384801, - "learning_rate": 6.211922039135722e-07, - "loss": 1.0251, - "num_input_tokens_seen": 132320815, - "step": 6236 - }, - { - "epoch": 0.7499549089160104, - "flos": 17344741800720.0, - "grad_norm": 4.149197898471895, - "learning_rate": 6.206280392427201e-07, - "loss": 1.0326, - "num_input_tokens_seen": 132340120, - "step": 6237 - }, - { - "epoch": 0.7500751518066494, - "flos": 24294765430680.0, - "grad_norm": 3.45886443935562, - "learning_rate": 6.200640838214983e-07, - "loss": 0.9596, - "num_input_tokens_seen": 132362615, - "step": 6238 - }, - { - "epoch": 0.7501953946972886, - "flos": 13361053833240.0, - "grad_norm": 7.706047695254875, - "learning_rate": 6.195003377354578e-07, - "loss": 0.8932, - "num_input_tokens_seen": 132381605, - "step": 6239 - }, - { - "epoch": 0.7503156375879276, - "flos": 14376137182800.0, - "grad_norm": 7.680539585470191, - "learning_rate": 6.189368010701183e-07, - "loss": 0.9489, - "num_input_tokens_seen": 132398385, - "step": 6240 - }, - { - "epoch": 0.7504358804785667, - "flos": 9506356127520.0, - "grad_norm": 5.519298680853103, - "learning_rate": 6.183734739109683e-07, - "loss": 0.9903, - "num_input_tokens_seen": 132415925, - "step": 6241 - }, - { - "epoch": 0.7505561233692057, - "flos": 20991632391240.0, - "grad_norm": 10.369736772689231, - "learning_rate": 6.178103563434629e-07, - "loss": 0.9204, - "num_input_tokens_seen": 132434645, - "step": 6242 - }, - { - "epoch": 0.7506763662598449, - "flos": 14409441185040.0, - "grad_norm": 3.658050876103038, - "learning_rate": 6.172474484530283e-07, - "loss": 1.0789, - "num_input_tokens_seen": 132453100, - "step": 6243 - }, - { - "epoch": 0.750796609150484, - "flos": 26575517669280.0, - "grad_norm": 5.338438015063379, - "learning_rate": 6.166847503250563e-07, - "loss": 0.9792, - "num_input_tokens_seen": 132475060, - "step": 6244 - }, - { - "epoch": 0.750916852041123, - "flos": 13911392037120.0, - "grad_norm": 10.513544701592457, - "learning_rate": 6.161222620449078e-07, - "loss": 1.0154, - "num_input_tokens_seen": 132493555, - "step": 6245 - }, - { - "epoch": 0.7510370949317622, - "flos": 17865670020480.0, - "grad_norm": 4.4249679478061, - "learning_rate": 6.155599836979117e-07, - "loss": 1.0221, - "num_input_tokens_seen": 132511960, - "step": 6246 - }, - { - "epoch": 0.7511573378224012, - "flos": 13518196955160.0, - "grad_norm": 5.196634913052452, - "learning_rate": 6.149979153693649e-07, - "loss": 1.0423, - "num_input_tokens_seen": 132528935, - "step": 6247 - }, - { - "epoch": 0.7512775807130403, - "flos": 14147290689360.0, - "grad_norm": 11.68391371594637, - "learning_rate": 6.144360571445343e-07, - "loss": 0.9877, - "num_input_tokens_seen": 132547800, - "step": 6248 - }, - { - "epoch": 0.7513978236036795, - "flos": 14723788752000.0, - "grad_norm": 4.137063852239277, - "learning_rate": 
6.138744091086509e-07, - "loss": 1.0299, - "num_input_tokens_seen": 132567105, - "step": 6249 - }, - { - "epoch": 0.7515180664943185, - "flos": 19627579025400.0, - "grad_norm": 5.901287597846761, - "learning_rate": 6.133129713469183e-07, - "loss": 0.9453, - "num_input_tokens_seen": 132586030, - "step": 6250 - }, - { - "epoch": 0.7516383093849576, - "flos": 24112290312120.0, - "grad_norm": 13.921397122830829, - "learning_rate": 6.127517439445053e-07, - "loss": 0.8652, - "num_input_tokens_seen": 132606595, - "step": 6251 - }, - { - "epoch": 0.7517585522755967, - "flos": 21195637473000.0, - "grad_norm": 5.769027739644785, - "learning_rate": 6.121907269865498e-07, - "loss": 1.0456, - "num_input_tokens_seen": 132625805, - "step": 6252 - }, - { - "epoch": 0.7518787951662358, - "flos": 49992284664000.0, - "grad_norm": 0.9528691307947004, - "learning_rate": 6.116299205581577e-07, - "loss": 0.9567, - "num_input_tokens_seen": 132680355, - "step": 6253 - }, - { - "epoch": 0.7519990380568748, - "flos": 24399619496640.0, - "grad_norm": 5.392791401183592, - "learning_rate": 6.110693247444018e-07, - "loss": 0.9094, - "num_input_tokens_seen": 132701910, - "step": 6254 - }, - { - "epoch": 0.752119280947514, - "flos": 15432343232400.0, - "grad_norm": 5.558863790765799, - "learning_rate": 6.105089396303258e-07, - "loss": 1.0465, - "num_input_tokens_seen": 132720020, - "step": 6255 - }, - { - "epoch": 0.7522395238381531, - "flos": 23349883036200.0, - "grad_norm": 5.158641468678374, - "learning_rate": 6.099487653009383e-07, - "loss": 0.9851, - "num_input_tokens_seen": 132739085, - "step": 6256 - }, - { - "epoch": 0.7523597667287921, - "flos": 16690132100520.0, - "grad_norm": 9.092560680523606, - "learning_rate": 6.093888018412192e-07, - "loss": 1.0532, - "num_input_tokens_seen": 132754995, - "step": 6257 - }, - { - "epoch": 0.7524800096194313, - "flos": 48226108154160.0, - "grad_norm": 0.8099004902761991, - "learning_rate": 6.088290493361125e-07, - "loss": 0.8056, - "num_input_tokens_seen": 132819600, - "step": 6258 - }, - { - "epoch": 0.7526002525100703, - "flos": 9165756717120.0, - "grad_norm": 4.530789897842171, - "learning_rate": 6.082695078705322e-07, - "loss": 0.9274, - "num_input_tokens_seen": 132836800, - "step": 6259 - }, - { - "epoch": 0.7527204954007094, - "flos": 15196260610800.0, - "grad_norm": 6.784678071719044, - "learning_rate": 6.077101775293618e-07, - "loss": 0.9085, - "num_input_tokens_seen": 132855345, - "step": 6260 - }, - { - "epoch": 0.7528407382913486, - "flos": 13434719544600.0, - "grad_norm": 6.439082571515368, - "learning_rate": 6.071510583974504e-07, - "loss": 1.0546, - "num_input_tokens_seen": 132870250, - "step": 6261 - }, - { - "epoch": 0.7529609811819876, - "flos": 10765371458040.0, - "grad_norm": 5.812044843675294, - "learning_rate": 6.065921505596161e-07, - "loss": 0.9412, - "num_input_tokens_seen": 132888250, - "step": 6262 - }, - { - "epoch": 0.7530812240726267, - "flos": 13728242364240.0, - "grad_norm": 3.583135037517079, - "learning_rate": 6.060334541006445e-07, - "loss": 0.997, - "num_input_tokens_seen": 132906465, - "step": 6263 - }, - { - "epoch": 0.7532014669632658, - "flos": 19760953890240.0, - "grad_norm": 2.2775187874949743, - "learning_rate": 6.05474969105289e-07, - "loss": 0.9157, - "num_input_tokens_seen": 132929175, - "step": 6264 - }, - { - "epoch": 0.7533217098539049, - "flos": 9978889309440.0, - "grad_norm": 6.228594937661377, - "learning_rate": 6.049166956582725e-07, - "loss": 0.9624, - "num_input_tokens_seen": 132947160, - "step": 6265 - }, - { - "epoch": 
0.753441952744544, - "flos": 18812116154520.0, - "grad_norm": 3.3391412364225244, - "learning_rate": 6.043586338442841e-07, - "loss": 1.0939, - "num_input_tokens_seen": 132965935, - "step": 6266 - }, - { - "epoch": 0.7535621956351831, - "flos": 16979638255800.0, - "grad_norm": 7.318559793008981, - "learning_rate": 6.038007837479815e-07, - "loss": 0.9574, - "num_input_tokens_seen": 132986760, - "step": 6267 - }, - { - "epoch": 0.7536824385258222, - "flos": 15482393994480.0, - "grad_norm": 3.5725296448531387, - "learning_rate": 6.032431454539897e-07, - "loss": 0.8571, - "num_input_tokens_seen": 133005325, - "step": 6268 - }, - { - "epoch": 0.7538026814164612, - "flos": 20596505631000.0, - "grad_norm": 2.863982277437071, - "learning_rate": 6.026857190469014e-07, - "loss": 1.0392, - "num_input_tokens_seen": 133026800, - "step": 6269 - }, - { - "epoch": 0.7539229243071004, - "flos": 14985939247680.0, - "grad_norm": 6.9131446358443505, - "learning_rate": 6.0212850461128e-07, - "loss": 0.9639, - "num_input_tokens_seen": 133045640, - "step": 6270 - }, - { - "epoch": 0.7540431671977395, - "flos": 10712622478680.0, - "grad_norm": 9.694330772843516, - "learning_rate": 6.015715022316516e-07, - "loss": 0.9741, - "num_input_tokens_seen": 133063340, - "step": 6271 - }, - { - "epoch": 0.7541634100883785, - "flos": 13308580807920.0, - "grad_norm": 8.821506091737538, - "learning_rate": 6.010147119925154e-07, - "loss": 1.0061, - "num_input_tokens_seen": 133080815, - "step": 6272 - }, - { - "epoch": 0.7542836529790176, - "flos": 14619578578800.0, - "grad_norm": 6.145070415493785, - "learning_rate": 6.004581339783348e-07, - "loss": 0.8813, - "num_input_tokens_seen": 133098855, - "step": 6273 - }, - { - "epoch": 0.7544038958696567, - "flos": 13541290657920.0, - "grad_norm": 3.5725678179999583, - "learning_rate": 5.999017682735425e-07, - "loss": 0.8978, - "num_input_tokens_seen": 133114965, - "step": 6274 - }, - { - "epoch": 0.7545241387602958, - "flos": 22617498975600.0, - "grad_norm": 6.233873237170401, - "learning_rate": 5.993456149625387e-07, - "loss": 0.8859, - "num_input_tokens_seen": 133135835, - "step": 6275 - }, - { - "epoch": 0.7546443816509348, - "flos": 14404351366080.0, - "grad_norm": 3.57254352604463, - "learning_rate": 5.987896741296909e-07, - "loss": 1.0506, - "num_input_tokens_seen": 133153295, - "step": 6276 - }, - { - "epoch": 0.754764624541574, - "flos": 16848501684840.0, - "grad_norm": 8.090085647186227, - "learning_rate": 5.982339458593361e-07, - "loss": 1.0058, - "num_input_tokens_seen": 133172955, - "step": 6277 - }, - { - "epoch": 0.7548848674322131, - "flos": 18027780315120.0, - "grad_norm": 3.668887477294026, - "learning_rate": 5.976784302357767e-07, - "loss": 1.0713, - "num_input_tokens_seen": 133193240, - "step": 6278 - }, - { - "epoch": 0.7550051103228521, - "flos": 13885354824600.0, - "grad_norm": 3.1106739493932167, - "learning_rate": 5.971231273432855e-07, - "loss": 0.9497, - "num_input_tokens_seen": 133212445, - "step": 6279 - }, - { - "epoch": 0.7551253532134913, - "flos": 45925701855600.0, - "grad_norm": 0.8250394956209137, - "learning_rate": 5.965680372661e-07, - "loss": 0.8141, - "num_input_tokens_seen": 133269730, - "step": 6280 - }, - { - "epoch": 0.7552455961041303, - "flos": 18553123799520.0, - "grad_norm": 5.958483109447921, - "learning_rate": 5.960131600884266e-07, - "loss": 0.7753, - "num_input_tokens_seen": 133288720, - "step": 6281 - }, - { - "epoch": 0.7553658389947694, - "flos": 17424846439680.0, - "grad_norm": 3.3521981159012593, - "learning_rate": 
5.954584958944413e-07, - "loss": 0.9866, - "num_input_tokens_seen": 133307105, - "step": 6282 - }, - { - "epoch": 0.7554860818854086, - "flos": 15485460150480.0, - "grad_norm": 4.4262311526621465, - "learning_rate": 5.949040447682854e-07, - "loss": 1.0361, - "num_input_tokens_seen": 133326650, - "step": 6283 - }, - { - "epoch": 0.7556063247760476, - "flos": 11577614865120.0, - "grad_norm": 10.68419073971891, - "learning_rate": 5.943498067940686e-07, - "loss": 0.9091, - "num_input_tokens_seen": 133343395, - "step": 6284 - }, - { - "epoch": 0.7557265676666867, - "flos": 19391895004080.0, - "grad_norm": 4.5455676949029495, - "learning_rate": 5.937957820558686e-07, - "loss": 1.0319, - "num_input_tokens_seen": 133362460, - "step": 6285 - }, - { - "epoch": 0.7558468105573258, - "flos": 44517668716560.0, - "grad_norm": 0.831285169043896, - "learning_rate": 5.932419706377296e-07, - "loss": 0.8972, - "num_input_tokens_seen": 133420485, - "step": 6286 - }, - { - "epoch": 0.7559670534479649, - "flos": 23713146887520.0, - "grad_norm": 6.961020249753657, - "learning_rate": 5.92688372623666e-07, - "loss": 0.9647, - "num_input_tokens_seen": 133438910, - "step": 6287 - }, - { - "epoch": 0.7560872963386039, - "flos": 9926385622560.0, - "grad_norm": 3.4973777757274584, - "learning_rate": 5.921349880976574e-07, - "loss": 0.9612, - "num_input_tokens_seen": 133456465, - "step": 6288 - }, - { - "epoch": 0.7562075392292431, - "flos": 14488442007840.0, - "grad_norm": 7.937561275215627, - "learning_rate": 5.915818171436515e-07, - "loss": 1.0366, - "num_input_tokens_seen": 133475520, - "step": 6289 - }, - { - "epoch": 0.7563277821198822, - "flos": 14463048688080.0, - "grad_norm": 11.545992227757539, - "learning_rate": 5.910288598455642e-07, - "loss": 0.968, - "num_input_tokens_seen": 133494590, - "step": 6290 - }, - { - "epoch": 0.7564480250105212, - "flos": 13177566883200.0, - "grad_norm": 5.334174785197621, - "learning_rate": 5.90476116287278e-07, - "loss": 0.9561, - "num_input_tokens_seen": 133511910, - "step": 6291 - }, - { - "epoch": 0.7565682679011604, - "flos": 15065767932600.0, - "grad_norm": 4.020724965481583, - "learning_rate": 5.899235865526456e-07, - "loss": 0.9075, - "num_input_tokens_seen": 133530925, - "step": 6292 - }, - { - "epoch": 0.7566885107917994, - "flos": 14514847159080.0, - "grad_norm": 11.86105523752656, - "learning_rate": 5.893712707254825e-07, - "loss": 1.0415, - "num_input_tokens_seen": 133548105, - "step": 6293 - }, - { - "epoch": 0.7568087536824385, - "flos": 13492006434840.0, - "grad_norm": 8.56799763632634, - "learning_rate": 5.888191688895769e-07, - "loss": 0.8714, - "num_input_tokens_seen": 133565085, - "step": 6294 - }, - { - "epoch": 0.7569289965730777, - "flos": 10759944361920.0, - "grad_norm": 5.006341917644289, - "learning_rate": 5.882672811286813e-07, - "loss": 0.8318, - "num_input_tokens_seen": 133581085, - "step": 6295 - }, - { - "epoch": 0.7570492394637167, - "flos": 14744000268120.0, - "grad_norm": 7.962839365483775, - "learning_rate": 5.877156075265166e-07, - "loss": 0.9227, - "num_input_tokens_seen": 133597070, - "step": 6296 - }, - { - "epoch": 0.7571694823543558, - "flos": 11075303760360.0, - "grad_norm": 6.073097326646101, - "learning_rate": 5.871641481667715e-07, - "loss": 0.9188, - "num_input_tokens_seen": 133611235, - "step": 6297 - }, - { - "epoch": 0.7572897252449949, - "flos": 18079425478320.0, - "grad_norm": 5.491620008688167, - "learning_rate": 5.866129031331011e-07, - "loss": 1.0661, - "num_input_tokens_seen": 133630610, - "step": 6298 - }, - { - "epoch": 
0.757409968135634, - "flos": 17267550009960.0, - "grad_norm": 7.160086457140756, - "learning_rate": 5.8606187250913e-07, - "loss": 1.0473, - "num_input_tokens_seen": 133648380, - "step": 6299 - }, - { - "epoch": 0.757530211026273, - "flos": 17162971898040.0, - "grad_norm": 10.663508742490526, - "learning_rate": 5.855110563784482e-07, - "loss": 1.0593, - "num_input_tokens_seen": 133666635, - "step": 6300 - }, - { - "epoch": 0.7576504539169122, - "flos": 17032264588920.0, - "grad_norm": 3.594578390427957, - "learning_rate": 5.849604548246156e-07, - "loss": 0.8667, - "num_input_tokens_seen": 133687465, - "step": 6301 - }, - { - "epoch": 0.7577706968075513, - "flos": 15091437206400.0, - "grad_norm": 5.323743165032611, - "learning_rate": 5.844100679311565e-07, - "loss": 1.0225, - "num_input_tokens_seen": 133706145, - "step": 6302 - }, - { - "epoch": 0.7578909396981903, - "flos": 12967981397520.0, - "grad_norm": 4.39560750709675, - "learning_rate": 5.838598957815637e-07, - "loss": 1.0006, - "num_input_tokens_seen": 133723095, - "step": 6303 - }, - { - "epoch": 0.7580111825888295, - "flos": 18054154804800.0, - "grad_norm": 2.719804142884755, - "learning_rate": 5.833099384592996e-07, - "loss": 1.0827, - "num_input_tokens_seen": 133743390, - "step": 6304 - }, - { - "epoch": 0.7581314254794685, - "flos": 16900545448320.0, - "grad_norm": 10.059132267137828, - "learning_rate": 5.827601960477913e-07, - "loss": 0.9304, - "num_input_tokens_seen": 133761035, - "step": 6305 - }, - { - "epoch": 0.7582516683701076, - "flos": 15668793792720.0, - "grad_norm": 10.259573582642519, - "learning_rate": 5.822106686304344e-07, - "loss": 0.9425, - "num_input_tokens_seen": 133780045, - "step": 6306 - }, - { - "epoch": 0.7583719112607467, - "flos": 22564535365320.0, - "grad_norm": 3.9548401259409878, - "learning_rate": 5.816613562905919e-07, - "loss": 0.8064, - "num_input_tokens_seen": 133800950, - "step": 6307 - }, - { - "epoch": 0.7584921541513858, - "flos": 23583512733000.0, - "grad_norm": 3.066499395231408, - "learning_rate": 5.811122591115933e-07, - "loss": 0.937, - "num_input_tokens_seen": 133821655, - "step": 6308 - }, - { - "epoch": 0.7586123970420249, - "flos": 16582058570760.0, - "grad_norm": 6.293830078585334, - "learning_rate": 5.805633771767376e-07, - "loss": 0.9258, - "num_input_tokens_seen": 133838770, - "step": 6309 - }, - { - "epoch": 0.7587326399326639, - "flos": 12995183749320.0, - "grad_norm": 10.644187924509113, - "learning_rate": 5.800147105692888e-07, - "loss": 1.0029, - "num_input_tokens_seen": 133858065, - "step": 6310 - }, - { - "epoch": 0.7588528828233031, - "flos": 12233849628000.0, - "grad_norm": 7.920259510374008, - "learning_rate": 5.794662593724795e-07, - "loss": 1.0134, - "num_input_tokens_seen": 133876790, - "step": 6311 - }, - { - "epoch": 0.7589731257139422, - "flos": 12549117041760.0, - "grad_norm": 3.2784890320934625, - "learning_rate": 5.789180236695091e-07, - "loss": 0.9805, - "num_input_tokens_seen": 133893365, - "step": 6312 - }, - { - "epoch": 0.7590933686045812, - "flos": 11289427156920.0, - "grad_norm": 8.393118319172494, - "learning_rate": 5.78370003543544e-07, - "loss": 1.0837, - "num_input_tokens_seen": 133911840, - "step": 6313 - }, - { - "epoch": 0.7592136114952204, - "flos": 14959595419560.0, - "grad_norm": 4.165597829422477, - "learning_rate": 5.778221990777203e-07, - "loss": 1.0599, - "num_input_tokens_seen": 133929300, - "step": 6314 - }, - { - "epoch": 0.7593338543858594, - "flos": 17998952900640.0, - "grad_norm": 4.372955280416378, - "learning_rate": 
5.772746103551372e-07, - "loss": 1.0525, - "num_input_tokens_seen": 133944415, - "step": 6315 - }, - { - "epoch": 0.7594540972764985, - "flos": 22695825244080.0, - "grad_norm": 10.012053282793694, - "learning_rate": 5.767272374588648e-07, - "loss": 0.9539, - "num_input_tokens_seen": 133965540, - "step": 6316 - }, - { - "epoch": 0.7595743401671377, - "flos": 26838955950480.0, - "grad_norm": 6.815734655310575, - "learning_rate": 5.76180080471939e-07, - "loss": 1.0121, - "num_input_tokens_seen": 133988430, - "step": 6317 - }, - { - "epoch": 0.7596945830577767, - "flos": 12960407992200.0, - "grad_norm": 5.897460209808077, - "learning_rate": 5.756331394773631e-07, - "loss": 0.9464, - "num_input_tokens_seen": 134004365, - "step": 6318 - }, - { - "epoch": 0.7598148259484158, - "flos": 15796557592080.0, - "grad_norm": 5.18128723941024, - "learning_rate": 5.750864145581071e-07, - "loss": 0.9948, - "num_input_tokens_seen": 134023305, - "step": 6319 - }, - { - "epoch": 0.7599350688390549, - "flos": 19339054040040.0, - "grad_norm": 5.531729984470839, - "learning_rate": 5.745399057971085e-07, - "loss": 1.0821, - "num_input_tokens_seen": 134044160, - "step": 6320 - }, - { - "epoch": 0.760055311729694, - "flos": 11001822018360.0, - "grad_norm": 4.270602832559684, - "learning_rate": 5.739936132772738e-07, - "loss": 0.9757, - "num_input_tokens_seen": 134062445, - "step": 6321 - }, - { - "epoch": 0.760175554620333, - "flos": 17897287636920.0, - "grad_norm": 6.886871076555568, - "learning_rate": 5.734475370814733e-07, - "loss": 0.9789, - "num_input_tokens_seen": 134081845, - "step": 6322 - }, - { - "epoch": 0.7602957975109722, - "flos": 17321249497680.0, - "grad_norm": 6.354316217715842, - "learning_rate": 5.729016772925483e-07, - "loss": 1.0063, - "num_input_tokens_seen": 134103140, - "step": 6323 - }, - { - "epoch": 0.7604160404016113, - "flos": 17924091388440.0, - "grad_norm": 16.009448122881025, - "learning_rate": 5.723560339933038e-07, - "loss": 0.9382, - "num_input_tokens_seen": 134123195, - "step": 6324 - }, - { - "epoch": 0.7605362832922503, - "flos": 21279605468520.0, - "grad_norm": 5.882874102225605, - "learning_rate": 5.71810607266513e-07, - "loss": 0.8663, - "num_input_tokens_seen": 134141500, - "step": 6325 - }, - { - "epoch": 0.7606565261828895, - "flos": 9821071633200.0, - "grad_norm": 7.710283462320691, - "learning_rate": 5.712653971949184e-07, - "loss": 0.8262, - "num_input_tokens_seen": 134159340, - "step": 6326 - }, - { - "epoch": 0.7607767690735285, - "flos": 13150916439480.0, - "grad_norm": 7.781094101412671, - "learning_rate": 5.707204038612268e-07, - "loss": 0.9866, - "num_input_tokens_seen": 134176490, - "step": 6327 - }, - { - "epoch": 0.7608970119641676, - "flos": 14855446569480.0, - "grad_norm": 7.069647219218171, - "learning_rate": 5.701756273481138e-07, - "loss": 0.9532, - "num_input_tokens_seen": 134193630, - "step": 6328 - }, - { - "epoch": 0.7610172548548068, - "flos": 16928606323800.0, - "grad_norm": 3.3656845101680077, - "learning_rate": 5.696310677382212e-07, - "loss": 0.9606, - "num_input_tokens_seen": 134214745, - "step": 6329 - }, - { - "epoch": 0.7611374977454458, - "flos": 47613638533560.0, - "grad_norm": 0.8243878448735019, - "learning_rate": 5.690867251141576e-07, - "loss": 0.875, - "num_input_tokens_seen": 134281120, - "step": 6330 - }, - { - "epoch": 0.7612577406360849, - "flos": 11053804458720.0, - "grad_norm": 15.230402627132689, - "learning_rate": 5.685425995585013e-07, - "loss": 1.1355, - "num_input_tokens_seen": 134298765, - "step": 6331 - }, - { - "epoch": 
0.761377983526724, - "flos": 43324678820880.0, - "grad_norm": 0.7906578856251848, - "learning_rate": 5.679986911537935e-07, - "loss": 0.8482, - "num_input_tokens_seen": 134366015, - "step": 6332 - }, - { - "epoch": 0.7614982264173631, - "flos": 25527498256200.0, - "grad_norm": 4.807691277137059, - "learning_rate": 5.674549999825462e-07, - "loss": 0.8957, - "num_input_tokens_seen": 134388550, - "step": 6333 - }, - { - "epoch": 0.7616184693080021, - "flos": 48641385107400.0, - "grad_norm": 0.9879420003507944, - "learning_rate": 5.669115261272363e-07, - "loss": 1.0039, - "num_input_tokens_seen": 134448590, - "step": 6334 - }, - { - "epoch": 0.7617387121986413, - "flos": 14567289522840.0, - "grad_norm": 4.0484616030436005, - "learning_rate": 5.663682696703081e-07, - "loss": 0.943, - "num_input_tokens_seen": 134466575, - "step": 6335 - }, - { - "epoch": 0.7618589550892804, - "flos": 13203573434160.0, - "grad_norm": 6.120529100073273, - "learning_rate": 5.658252306941746e-07, - "loss": 1.06, - "num_input_tokens_seen": 134485615, - "step": 6336 - }, - { - "epoch": 0.7619791979799194, - "flos": 12361736073600.0, - "grad_norm": 6.217289585407644, - "learning_rate": 5.65282409281212e-07, - "loss": 0.9804, - "num_input_tokens_seen": 134502800, - "step": 6337 - }, - { - "epoch": 0.7620994408705585, - "flos": 9978889309440.0, - "grad_norm": 4.473669798505971, - "learning_rate": 5.64739805513768e-07, - "loss": 0.9236, - "num_input_tokens_seen": 134520065, - "step": 6338 - }, - { - "epoch": 0.7622196837611976, - "flos": 50638155819600.0, - "grad_norm": 0.8215016057143304, - "learning_rate": 5.641974194741541e-07, - "loss": 0.8151, - "num_input_tokens_seen": 134575470, - "step": 6339 - }, - { - "epoch": 0.7623399266518367, - "flos": 43434842884800.0, - "grad_norm": 0.7377974928509539, - "learning_rate": 5.636552512446502e-07, - "loss": 0.8793, - "num_input_tokens_seen": 134636245, - "step": 6340 - }, - { - "epoch": 0.7624601695424758, - "flos": 18841188861480.0, - "grad_norm": 3.9014692424464736, - "learning_rate": 5.631133009075027e-07, - "loss": 1.0081, - "num_input_tokens_seen": 134655150, - "step": 6341 - }, - { - "epoch": 0.7625804124331149, - "flos": 13571068580760.0, - "grad_norm": 5.772287050253768, - "learning_rate": 5.625715685449242e-07, - "loss": 0.9269, - "num_input_tokens_seen": 134672975, - "step": 6342 - }, - { - "epoch": 0.762700655323754, - "flos": 18657303311160.0, - "grad_norm": 6.01351868055523, - "learning_rate": 5.620300542390966e-07, - "loss": 0.9485, - "num_input_tokens_seen": 134693740, - "step": 6343 - }, - { - "epoch": 0.762820898214393, - "flos": 15904447152480.0, - "grad_norm": 4.07228459415546, - "learning_rate": 5.614887580721659e-07, - "loss": 1.0761, - "num_input_tokens_seen": 134713605, - "step": 6344 - }, - { - "epoch": 0.7629411411050322, - "flos": 11101463619120.0, - "grad_norm": 12.392706639650811, - "learning_rate": 5.609476801262481e-07, - "loss": 0.9577, - "num_input_tokens_seen": 134728185, - "step": 6345 - }, - { - "epoch": 0.7630613839956712, - "flos": 9714776473920.0, - "grad_norm": 6.042733284641678, - "learning_rate": 5.604068204834223e-07, - "loss": 0.8623, - "num_input_tokens_seen": 134744800, - "step": 6346 - }, - { - "epoch": 0.7631816268863103, - "flos": 10288698965520.0, - "grad_norm": 9.129899473274055, - "learning_rate": 5.598661792257367e-07, - "loss": 0.9714, - "num_input_tokens_seen": 134761565, - "step": 6347 - }, - { - "epoch": 0.7633018697769495, - "flos": 13518135632040.0, - "grad_norm": 5.06890445196392, - "learning_rate": 
5.593257564352071e-07, - "loss": 0.9948, - "num_input_tokens_seen": 134779725, - "step": 6348 - }, - { - "epoch": 0.7634221126675885, - "flos": 15667383360960.0, - "grad_norm": 5.35037160278067, - "learning_rate": 5.58785552193815e-07, - "loss": 0.984, - "num_input_tokens_seen": 134799690, - "step": 6349 - }, - { - "epoch": 0.7635423555582276, - "flos": 20934621455040.0, - "grad_norm": 4.379229000287938, - "learning_rate": 5.582455665835086e-07, - "loss": 0.9839, - "num_input_tokens_seen": 134819705, - "step": 6350 - }, - { - "epoch": 0.7636625984488667, - "flos": 12600118312200.0, - "grad_norm": 7.373937497947268, - "learning_rate": 5.577057996862036e-07, - "loss": 0.956, - "num_input_tokens_seen": 134837050, - "step": 6351 - }, - { - "epoch": 0.7637828413395058, - "flos": 16876378590960.0, - "grad_norm": 2.467035784897504, - "learning_rate": 5.571662515837814e-07, - "loss": 0.9814, - "num_input_tokens_seen": 134858730, - "step": 6352 - }, - { - "epoch": 0.7639030842301449, - "flos": 25894564140960.0, - "grad_norm": 3.1588264751347466, - "learning_rate": 5.566269223580926e-07, - "loss": 1.0607, - "num_input_tokens_seen": 134880160, - "step": 6353 - }, - { - "epoch": 0.764023327120784, - "flos": 20572798697040.0, - "grad_norm": 2.653252919127094, - "learning_rate": 5.560878120909511e-07, - "loss": 0.978, - "num_input_tokens_seen": 134902480, - "step": 6354 - }, - { - "epoch": 0.7641435700114231, - "flos": 46388019189960.0, - "grad_norm": 0.894720598861715, - "learning_rate": 5.55548920864141e-07, - "loss": 0.8812, - "num_input_tokens_seen": 134962855, - "step": 6355 - }, - { - "epoch": 0.7642638129020621, - "flos": 11917938321480.0, - "grad_norm": 8.041410083004527, - "learning_rate": 5.550102487594113e-07, - "loss": 1.0009, - "num_input_tokens_seen": 134981245, - "step": 6356 - }, - { - "epoch": 0.7643840557927013, - "flos": 21672769888920.0, - "grad_norm": 3.569504527482254, - "learning_rate": 5.54471795858477e-07, - "loss": 0.9442, - "num_input_tokens_seen": 135001035, - "step": 6357 - }, - { - "epoch": 0.7645042986833404, - "flos": 12024110834520.0, - "grad_norm": 6.645714961067018, - "learning_rate": 5.539335622430235e-07, - "loss": 1.051, - "num_input_tokens_seen": 135019375, - "step": 6358 - }, - { - "epoch": 0.7646245415739794, - "flos": 12260009486760.0, - "grad_norm": 5.024429531833572, - "learning_rate": 5.533955479946975e-07, - "loss": 0.9693, - "num_input_tokens_seen": 135037875, - "step": 6359 - }, - { - "epoch": 0.7647447844646186, - "flos": 50420868734280.0, - "grad_norm": 0.876184513240692, - "learning_rate": 5.528577531951173e-07, - "loss": 0.9017, - "num_input_tokens_seen": 135098000, - "step": 6360 - }, - { - "epoch": 0.7648650273552576, - "flos": 12521853366840.0, - "grad_norm": 8.107454576015115, - "learning_rate": 5.523201779258653e-07, - "loss": 0.9684, - "num_input_tokens_seen": 135116695, - "step": 6361 - }, - { - "epoch": 0.7649852702458967, - "flos": 15745709629440.0, - "grad_norm": 4.7701594558390665, - "learning_rate": 5.517828222684912e-07, - "loss": 1.0725, - "num_input_tokens_seen": 135137070, - "step": 6362 - }, - { - "epoch": 0.7651055131365359, - "flos": 50021817294360.0, - "grad_norm": 0.770967334406618, - "learning_rate": 5.512456863045117e-07, - "loss": 0.8348, - "num_input_tokens_seen": 135197480, - "step": 6363 - }, - { - "epoch": 0.7652257560271749, - "flos": 13806967233000.0, - "grad_norm": 7.690203431691569, - "learning_rate": 5.507087701154089e-07, - "loss": 0.9656, - "num_input_tokens_seen": 135217120, - "step": 6364 - }, - { - "epoch": 
0.765345998917814, - "flos": 11289457818480.0, - "grad_norm": 4.45224043777641, - "learning_rate": 5.50172073782634e-07, - "loss": 0.9716, - "num_input_tokens_seen": 135234820, - "step": 6365 - }, - { - "epoch": 0.7654662418084531, - "flos": 16822433810760.0, - "grad_norm": 5.316133221658381, - "learning_rate": 5.496355973876023e-07, - "loss": 1.1019, - "num_input_tokens_seen": 135253795, - "step": 6366 - }, - { - "epoch": 0.7655864846990922, - "flos": 29617481382960.0, - "grad_norm": 6.892410981077672, - "learning_rate": 5.490993410116984e-07, - "loss": 0.9282, - "num_input_tokens_seen": 135276505, - "step": 6367 - }, - { - "epoch": 0.7657067275897312, - "flos": 30824299642200.0, - "grad_norm": 4.7136562719866895, - "learning_rate": 5.485633047362704e-07, - "loss": 0.9327, - "num_input_tokens_seen": 135298230, - "step": 6368 - }, - { - "epoch": 0.7658269704803703, - "flos": 12260254779240.0, - "grad_norm": 7.049949634994355, - "learning_rate": 5.480274886426341e-07, - "loss": 1.0088, - "num_input_tokens_seen": 135314590, - "step": 6369 - }, - { - "epoch": 0.7659472133710095, - "flos": 8851041211440.0, - "grad_norm": 7.239608157181661, - "learning_rate": 5.474918928120744e-07, - "loss": 1.005, - "num_input_tokens_seen": 135330805, - "step": 6370 - }, - { - "epoch": 0.7660674562616485, - "flos": 16137432956520.0, - "grad_norm": 4.402779152298695, - "learning_rate": 5.469565173258392e-07, - "loss": 1.106, - "num_input_tokens_seen": 135349040, - "step": 6371 - }, - { - "epoch": 0.7661876991522876, - "flos": 12076767829200.0, - "grad_norm": 11.73484376664381, - "learning_rate": 5.464213622651454e-07, - "loss": 0.8633, - "num_input_tokens_seen": 135366575, - "step": 6372 - }, - { - "epoch": 0.7663079420429267, - "flos": 14252696663400.0, - "grad_norm": 9.969216458400007, - "learning_rate": 5.458864277111753e-07, - "loss": 1.0649, - "num_input_tokens_seen": 135384130, - "step": 6373 - }, - { - "epoch": 0.7664281849335658, - "flos": 8929582110840.0, - "grad_norm": 9.037591628247254, - "learning_rate": 5.453517137450769e-07, - "loss": 0.9183, - "num_input_tokens_seen": 135400425, - "step": 6374 - }, - { - "epoch": 0.7665484278242048, - "flos": 15877520754720.0, - "grad_norm": 3.606442007047294, - "learning_rate": 5.448172204479684e-07, - "loss": 0.985, - "num_input_tokens_seen": 135419425, - "step": 6375 - }, - { - "epoch": 0.766668670714844, - "flos": 16791552071760.0, - "grad_norm": 3.2209192762139693, - "learning_rate": 5.442829479009294e-07, - "loss": 0.975, - "num_input_tokens_seen": 135437925, - "step": 6376 - }, - { - "epoch": 0.7667889136054831, - "flos": 13780408773960.0, - "grad_norm": 6.063276732720421, - "learning_rate": 5.437488961850103e-07, - "loss": 0.9415, - "num_input_tokens_seen": 135457445, - "step": 6377 - }, - { - "epoch": 0.7669091564961221, - "flos": 19126310413680.0, - "grad_norm": 5.6036519472460125, - "learning_rate": 5.432150653812258e-07, - "loss": 0.9781, - "num_input_tokens_seen": 135477200, - "step": 6378 - }, - { - "epoch": 0.7670293993867613, - "flos": 8719567363320.0, - "grad_norm": 4.899535425700615, - "learning_rate": 5.42681455570557e-07, - "loss": 1.0537, - "num_input_tokens_seen": 135493450, - "step": 6379 - }, - { - "epoch": 0.7671496422774003, - "flos": 15459208307040.0, - "grad_norm": 5.792294731207471, - "learning_rate": 5.42148066833954e-07, - "loss": 0.8776, - "num_input_tokens_seen": 135512415, - "step": 6380 - }, - { - "epoch": 0.7672698851680394, - "flos": 14960576589480.0, - "grad_norm": 5.192001779353185, - "learning_rate": 5.416148992523289e-07, 
- "loss": 0.9804, - "num_input_tokens_seen": 135530710, - "step": 6381 - }, - { - "epoch": 0.7673901280586786, - "flos": 12018530430600.0, - "grad_norm": 37.72916307173736, - "learning_rate": 5.410819529065644e-07, - "loss": 1.012, - "num_input_tokens_seen": 135548385, - "step": 6382 - }, - { - "epoch": 0.7675103709493176, - "flos": 20834765223360.0, - "grad_norm": 5.947313410624937, - "learning_rate": 5.405492278775079e-07, - "loss": 0.8729, - "num_input_tokens_seen": 135567885, - "step": 6383 - }, - { - "epoch": 0.7676306138399567, - "flos": 20676794239320.0, - "grad_norm": 3.9913596771205118, - "learning_rate": 5.400167242459732e-07, - "loss": 1.0156, - "num_input_tokens_seen": 135586565, - "step": 6384 - }, - { - "epoch": 0.7677508567305958, - "flos": 16035645046560.0, - "grad_norm": 5.53100447460015, - "learning_rate": 5.394844420927405e-07, - "loss": 1.0349, - "num_input_tokens_seen": 135605895, - "step": 6385 - }, - { - "epoch": 0.7678710996212349, - "flos": 18081295833480.0, - "grad_norm": 3.547628314310018, - "learning_rate": 5.389523814985562e-07, - "loss": 0.9489, - "num_input_tokens_seen": 135625035, - "step": 6386 - }, - { - "epoch": 0.767991342511874, - "flos": 19047585544920.0, - "grad_norm": 9.172308370564444, - "learning_rate": 5.384205425441344e-07, - "loss": 0.9842, - "num_input_tokens_seen": 135645665, - "step": 6387 - }, - { - "epoch": 0.7681115854025131, - "flos": 18762586638960.0, - "grad_norm": 6.003084979105112, - "learning_rate": 5.378889253101537e-07, - "loss": 1.0678, - "num_input_tokens_seen": 135665940, - "step": 6388 - }, - { - "epoch": 0.7682318282931522, - "flos": 16532743686120.0, - "grad_norm": 6.509122462508567, - "learning_rate": 5.373575298772617e-07, - "loss": 1.0323, - "num_input_tokens_seen": 135684780, - "step": 6389 - }, - { - "epoch": 0.7683520711837912, - "flos": 50183590311840.0, - "grad_norm": 0.7496605343280206, - "learning_rate": 5.368263563260689e-07, - "loss": 0.8538, - "num_input_tokens_seen": 135749635, - "step": 6390 - }, - { - "epoch": 0.7684723140744304, - "flos": 13203910711320.0, - "grad_norm": 4.9861058784440875, - "learning_rate": 5.362954047371537e-07, - "loss": 0.8549, - "num_input_tokens_seen": 135768465, - "step": 6391 - }, - { - "epoch": 0.7685925569650695, - "flos": 19548302248560.0, - "grad_norm": 4.944526212113783, - "learning_rate": 5.357646751910627e-07, - "loss": 0.9544, - "num_input_tokens_seen": 135789365, - "step": 6392 - }, - { - "epoch": 0.7687127998557085, - "flos": 17451926145240.0, - "grad_norm": 10.703549831897185, - "learning_rate": 5.352341677683061e-07, - "loss": 1.0237, - "num_input_tokens_seen": 135810385, - "step": 6393 - }, - { - "epoch": 0.7688330427463477, - "flos": 17897686237200.0, - "grad_norm": 6.484852086176665, - "learning_rate": 5.347038825493617e-07, - "loss": 1.0126, - "num_input_tokens_seen": 135831635, - "step": 6394 - }, - { - "epoch": 0.7689532856369867, - "flos": 15062149868520.0, - "grad_norm": 13.17834675678962, - "learning_rate": 5.341738196146732e-07, - "loss": 0.8926, - "num_input_tokens_seen": 135849700, - "step": 6395 - }, - { - "epoch": 0.7690735285276258, - "flos": 17871127778160.0, - "grad_norm": 4.3542259735497595, - "learning_rate": 5.336439790446503e-07, - "loss": 0.9475, - "num_input_tokens_seen": 135868520, - "step": 6396 - }, - { - "epoch": 0.769193771418265, - "flos": 39161807617920.0, - "grad_norm": 4.415601613927035, - "learning_rate": 5.331143609196711e-07, - "loss": 0.8372, - "num_input_tokens_seen": 135892055, - "step": 6397 - }, - { - "epoch": 0.769314014308904, - 
"flos": 26655438338880.0, - "grad_norm": 2.946687008311397, - "learning_rate": 5.325849653200758e-07, - "loss": 1.0023, - "num_input_tokens_seen": 135915725, - "step": 6398 - }, - { - "epoch": 0.7694342571995431, - "flos": 14646167699400.0, - "grad_norm": 3.7766645386957256, - "learning_rate": 5.32055792326175e-07, - "loss": 0.9833, - "num_input_tokens_seen": 135933870, - "step": 6399 - }, - { - "epoch": 0.7695545000901821, - "flos": 17216242123920.0, - "grad_norm": 21.17751308692489, - "learning_rate": 5.315268420182437e-07, - "loss": 0.9522, - "num_input_tokens_seen": 135952265, - "step": 6400 - }, - { - "epoch": 0.7696747429808213, - "flos": 19942478500440.0, - "grad_norm": 8.91635639922431, - "learning_rate": 5.309981144765221e-07, - "loss": 0.9835, - "num_input_tokens_seen": 135972130, - "step": 6401 - }, - { - "epoch": 0.7697949858714603, - "flos": 8090167013520.0, - "grad_norm": 4.970197742604004, - "learning_rate": 5.304696097812196e-07, - "loss": 0.9757, - "num_input_tokens_seen": 135988450, - "step": 6402 - }, - { - "epoch": 0.7699152287620994, - "flos": 18998914553040.0, - "grad_norm": 11.329063808081324, - "learning_rate": 5.299413280125078e-07, - "loss": 0.8151, - "num_input_tokens_seen": 136006480, - "step": 6403 - }, - { - "epoch": 0.7700354716527386, - "flos": 11709058051680.0, - "grad_norm": 5.069544847211211, - "learning_rate": 5.294132692505284e-07, - "loss": 0.9564, - "num_input_tokens_seen": 136024610, - "step": 6404 - }, - { - "epoch": 0.7701557145433776, - "flos": 13647616478760.0, - "grad_norm": 6.901821183617753, - "learning_rate": 5.288854335753861e-07, - "loss": 1.0041, - "num_input_tokens_seen": 136042590, - "step": 6405 - }, - { - "epoch": 0.7702759574340167, - "flos": 22590357946920.0, - "grad_norm": 4.026303827683323, - "learning_rate": 5.283578210671551e-07, - "loss": 0.9865, - "num_input_tokens_seen": 136064550, - "step": 6406 - }, - { - "epoch": 0.7703962003246558, - "flos": 11892391693920.0, - "grad_norm": 7.313505445951277, - "learning_rate": 5.278304318058719e-07, - "loss": 0.9858, - "num_input_tokens_seen": 136082125, - "step": 6407 - }, - { - "epoch": 0.7705164432152949, - "flos": 25501307735880.0, - "grad_norm": 5.248830392525743, - "learning_rate": 5.273032658715411e-07, - "loss": 1.0166, - "num_input_tokens_seen": 136104655, - "step": 6408 - }, - { - "epoch": 0.7706366861059339, - "flos": 16610947308360.0, - "grad_norm": 10.308793945218833, - "learning_rate": 5.267763233441347e-07, - "loss": 0.9953, - "num_input_tokens_seen": 136125005, - "step": 6409 - }, - { - "epoch": 0.7707569289965731, - "flos": 16297795542240.0, - "grad_norm": 15.896494333798637, - "learning_rate": 5.26249604303588e-07, - "loss": 0.9189, - "num_input_tokens_seen": 136143230, - "step": 6410 - }, - { - "epoch": 0.7708771718872122, - "flos": 12338427739920.0, - "grad_norm": 4.590756433525744, - "learning_rate": 5.257231088298057e-07, - "loss": 0.998, - "num_input_tokens_seen": 136161360, - "step": 6411 - }, - { - "epoch": 0.7709974147778512, - "flos": 51739568104800.0, - "grad_norm": 0.8333138980188547, - "learning_rate": 5.25196837002655e-07, - "loss": 0.7934, - "num_input_tokens_seen": 136220790, - "step": 6412 - }, - { - "epoch": 0.7711176576684904, - "flos": 28464975842640.0, - "grad_norm": 4.719442556728988, - "learning_rate": 5.24670788901971e-07, - "loss": 0.9192, - "num_input_tokens_seen": 136243600, - "step": 6413 - }, - { - "epoch": 0.7712379005591294, - "flos": 26392674612000.0, - "grad_norm": 4.1800165064727635, - "learning_rate": 5.241449646075557e-07, - "loss": 
0.8951, - "num_input_tokens_seen": 136266545, - "step": 6414 - }, - { - "epoch": 0.7713581434497685, - "flos": 16186778502720.0, - "grad_norm": 5.154874491010077, - "learning_rate": 5.236193641991762e-07, - "loss": 0.9516, - "num_input_tokens_seen": 136284195, - "step": 6415 - }, - { - "epoch": 0.7714783863404077, - "flos": 17136904023960.0, - "grad_norm": 5.359882475290459, - "learning_rate": 5.23093987756565e-07, - "loss": 0.9258, - "num_input_tokens_seen": 136302610, - "step": 6416 - }, - { - "epoch": 0.7715986292310467, - "flos": 14955824047680.0, - "grad_norm": 8.86989048835388, - "learning_rate": 5.225688353594217e-07, - "loss": 0.9831, - "num_input_tokens_seen": 136321960, - "step": 6417 - }, - { - "epoch": 0.7717188721216858, - "flos": 14619670563480.0, - "grad_norm": 7.637242229500798, - "learning_rate": 5.220439070874108e-07, - "loss": 0.9953, - "num_input_tokens_seen": 136340920, - "step": 6418 - }, - { - "epoch": 0.7718391150123249, - "flos": 18684689632320.0, - "grad_norm": 6.133312890115475, - "learning_rate": 5.215192030201652e-07, - "loss": 0.9368, - "num_input_tokens_seen": 136361630, - "step": 6419 - }, - { - "epoch": 0.771959357902964, - "flos": 15664654482120.0, - "grad_norm": 3.250746127825617, - "learning_rate": 5.209947232372798e-07, - "loss": 1.0863, - "num_input_tokens_seen": 136378840, - "step": 6420 - }, - { - "epoch": 0.772079600793603, - "flos": 21699481655760.0, - "grad_norm": 3.769864556791171, - "learning_rate": 5.204704678183196e-07, - "loss": 1.0312, - "num_input_tokens_seen": 136397295, - "step": 6421 - }, - { - "epoch": 0.7721998436842422, - "flos": 9139780827720.0, - "grad_norm": 6.8796964383245935, - "learning_rate": 5.19946436842813e-07, - "loss": 1.0768, - "num_input_tokens_seen": 136414145, - "step": 6422 - }, - { - "epoch": 0.7723200865748813, - "flos": 23273089845720.0, - "grad_norm": 5.310069885943453, - "learning_rate": 5.194226303902546e-07, - "loss": 0.9158, - "num_input_tokens_seen": 136433600, - "step": 6423 - }, - { - "epoch": 0.7724403294655203, - "flos": 14986797771360.0, - "grad_norm": 4.808955881596605, - "learning_rate": 5.188990485401072e-07, - "loss": 0.9281, - "num_input_tokens_seen": 136452525, - "step": 6424 - }, - { - "epoch": 0.7725605723561595, - "flos": 15694708359000.0, - "grad_norm": 23.655312120196307, - "learning_rate": 5.183756913717954e-07, - "loss": 1.0789, - "num_input_tokens_seen": 136472020, - "step": 6425 - }, - { - "epoch": 0.7726808152467985, - "flos": 24609818213520.0, - "grad_norm": 3.7149459231151565, - "learning_rate": 5.178525589647136e-07, - "loss": 0.9618, - "num_input_tokens_seen": 136493380, - "step": 6426 - }, - { - "epoch": 0.7728010581374376, - "flos": 15849214586760.0, - "grad_norm": 7.8735604559913295, - "learning_rate": 5.173296513982197e-07, - "loss": 1.0112, - "num_input_tokens_seen": 136511625, - "step": 6427 - }, - { - "epoch": 0.7729213010280768, - "flos": 19313875351200.0, - "grad_norm": 4.858904947686198, - "learning_rate": 5.168069687516398e-07, - "loss": 0.883, - "num_input_tokens_seen": 136531115, - "step": 6428 - }, - { - "epoch": 0.7730415439187158, - "flos": 12862728731280.0, - "grad_norm": 5.413792811250668, - "learning_rate": 5.16284511104263e-07, - "loss": 0.9345, - "num_input_tokens_seen": 136549970, - "step": 6429 - }, - { - "epoch": 0.7731617868093549, - "flos": 8404913180760.0, - "grad_norm": 12.488540890130164, - "learning_rate": 5.157622785353457e-07, - "loss": 1.007, - "num_input_tokens_seen": 136567805, - "step": 6430 - }, - { - "epoch": 0.7732820296999939, - "flos": 
45964279646160.0, - "grad_norm": 0.688848213905754, - "learning_rate": 5.152402711241113e-07, - "loss": 0.849, - "num_input_tokens_seen": 136635430, - "step": 6431 - }, - { - "epoch": 0.7734022725906331, - "flos": 18002816257200.0, - "grad_norm": 5.4048472442355395, - "learning_rate": 5.147184889497465e-07, - "loss": 1.0624, - "num_input_tokens_seen": 136654620, - "step": 6432 - }, - { - "epoch": 0.7735225154812722, - "flos": 12285770745240.0, - "grad_norm": 4.2365100165538765, - "learning_rate": 5.141969320914072e-07, - "loss": 1.0261, - "num_input_tokens_seen": 136671845, - "step": 6433 - }, - { - "epoch": 0.7736427583719112, - "flos": 23268459950160.0, - "grad_norm": 9.219020619703779, - "learning_rate": 5.136756006282113e-07, - "loss": 0.8409, - "num_input_tokens_seen": 136690230, - "step": 6434 - }, - { - "epoch": 0.7737630012625504, - "flos": 14090892984360.0, - "grad_norm": 4.720929785492555, - "learning_rate": 5.131544946392446e-07, - "loss": 1.0921, - "num_input_tokens_seen": 136705230, - "step": 6435 - }, - { - "epoch": 0.7738832441531894, - "flos": 25706539280040.0, - "grad_norm": 6.4320798110565125, - "learning_rate": 5.126336142035592e-07, - "loss": 0.8668, - "num_input_tokens_seen": 136724985, - "step": 6436 - }, - { - "epoch": 0.7740034870438285, - "flos": 9452380685760.0, - "grad_norm": 8.64771012001583, - "learning_rate": 5.121129594001721e-07, - "loss": 0.9504, - "num_input_tokens_seen": 136738970, - "step": 6437 - }, - { - "epoch": 0.7741237299344677, - "flos": 15691274264280.0, - "grad_norm": 3.713351008945587, - "learning_rate": 5.115925303080661e-07, - "loss": 1.0412, - "num_input_tokens_seen": 136758400, - "step": 6438 - }, - { - "epoch": 0.7742439728251067, - "flos": 14094909648720.0, - "grad_norm": 7.0773715192909235, - "learning_rate": 5.110723270061899e-07, - "loss": 1.0094, - "num_input_tokens_seen": 136774610, - "step": 6439 - }, - { - "epoch": 0.7743642157157458, - "flos": 11813329548000.0, - "grad_norm": 7.664455703152439, - "learning_rate": 5.105523495734572e-07, - "loss": 1.0197, - "num_input_tokens_seen": 136791730, - "step": 6440 - }, - { - "epoch": 0.7744844586063849, - "flos": 14411066247720.0, - "grad_norm": 7.92507225990445, - "learning_rate": 5.100325980887499e-07, - "loss": 0.9656, - "num_input_tokens_seen": 136811375, - "step": 6441 - }, - { - "epoch": 0.774604701497024, - "flos": 16324292678160.0, - "grad_norm": 3.6474776710626866, - "learning_rate": 5.095130726309116e-07, - "loss": 1.053, - "num_input_tokens_seen": 136831270, - "step": 6442 - }, - { - "epoch": 0.774724944387663, - "flos": 43151285072160.0, - "grad_norm": 0.856102486593857, - "learning_rate": 5.089937732787559e-07, - "loss": 0.8932, - "num_input_tokens_seen": 136895550, - "step": 6443 - }, - { - "epoch": 0.7748451872783022, - "flos": 19051172947440.0, - "grad_norm": 10.42623018832509, - "learning_rate": 5.084747001110592e-07, - "loss": 0.883, - "num_input_tokens_seen": 136914895, - "step": 6444 - }, - { - "epoch": 0.7749654301689413, - "flos": 21622596480600.0, - "grad_norm": 11.404815834495093, - "learning_rate": 5.07955853206564e-07, - "loss": 0.928, - "num_input_tokens_seen": 136939320, - "step": 6445 - }, - { - "epoch": 0.7750856730595803, - "flos": 30851103393720.0, - "grad_norm": 7.195986307178847, - "learning_rate": 5.074372326439807e-07, - "loss": 0.9343, - "num_input_tokens_seen": 136962050, - "step": 6446 - }, - { - "epoch": 0.7752059159502195, - "flos": 12496153431480.0, - "grad_norm": 21.43302884199517, - "learning_rate": 5.069188385019814e-07, - "loss": 0.9607, - 
"num_input_tokens_seen": 136979470, - "step": 6447 - }, - { - "epoch": 0.7753261588408585, - "flos": 8929766080200.0, - "grad_norm": 4.152632207172563, - "learning_rate": 5.064006708592077e-07, - "loss": 0.8192, - "num_input_tokens_seen": 136995435, - "step": 6448 - }, - { - "epoch": 0.7754464017314976, - "flos": 11813820132960.0, - "grad_norm": 7.087729163392427, - "learning_rate": 5.058827297942641e-07, - "loss": 0.981, - "num_input_tokens_seen": 137010260, - "step": 6449 - }, - { - "epoch": 0.7755666446221368, - "flos": 13990116905880.0, - "grad_norm": 4.119771475889142, - "learning_rate": 5.053650153857237e-07, - "loss": 0.9563, - "num_input_tokens_seen": 137028990, - "step": 6450 - }, - { - "epoch": 0.7756868875127758, - "flos": 13252735011000.0, - "grad_norm": 3.9800284818847427, - "learning_rate": 5.048475277121214e-07, - "loss": 0.9303, - "num_input_tokens_seen": 137045925, - "step": 6451 - }, - { - "epoch": 0.7758071304034149, - "flos": 20232260609760.0, - "grad_norm": 3.3788368173863312, - "learning_rate": 5.043302668519598e-07, - "loss": 0.9993, - "num_input_tokens_seen": 137064980, - "step": 6452 - }, - { - "epoch": 0.775927373294054, - "flos": 14620069163760.0, - "grad_norm": 3.3005060385914775, - "learning_rate": 5.038132328837079e-07, - "loss": 0.9493, - "num_input_tokens_seen": 137083090, - "step": 6453 - }, - { - "epoch": 0.7760476161846931, - "flos": 16007492186400.0, - "grad_norm": 4.474718283198313, - "learning_rate": 5.032964258857993e-07, - "loss": 0.9669, - "num_input_tokens_seen": 137102905, - "step": 6454 - }, - { - "epoch": 0.7761678590753321, - "flos": 34783299505800.0, - "grad_norm": 32.59479302322018, - "learning_rate": 5.027798459366329e-07, - "loss": 0.9105, - "num_input_tokens_seen": 137127990, - "step": 6455 - }, - { - "epoch": 0.7762881019659713, - "flos": 18631450068000.0, - "grad_norm": 9.645192157260958, - "learning_rate": 5.02263493114573e-07, - "loss": 0.8645, - "num_input_tokens_seen": 137149505, - "step": 6456 - }, - { - "epoch": 0.7764083448566104, - "flos": 14614090159560.0, - "grad_norm": 6.1687151968765574, - "learning_rate": 5.017473674979502e-07, - "loss": 0.9881, - "num_input_tokens_seen": 137165250, - "step": 6457 - }, - { - "epoch": 0.7765285877472494, - "flos": 48185230746600.0, - "grad_norm": 0.7686566257369654, - "learning_rate": 5.01231469165061e-07, - "loss": 0.8286, - "num_input_tokens_seen": 137220795, - "step": 6458 - }, - { - "epoch": 0.7766488306378886, - "flos": 43909890314640.0, - "grad_norm": 0.9462263560900114, - "learning_rate": 5.007157981941663e-07, - "loss": 0.8454, - "num_input_tokens_seen": 137285875, - "step": 6459 - }, - { - "epoch": 0.7767690735285276, - "flos": 45060525499800.0, - "grad_norm": 0.8882692535851822, - "learning_rate": 5.002003546634928e-07, - "loss": 0.9292, - "num_input_tokens_seen": 137341695, - "step": 6460 - }, - { - "epoch": 0.7768893164191667, - "flos": 14829133402920.0, - "grad_norm": 3.299555263028164, - "learning_rate": 4.996851386512331e-07, - "loss": 0.9918, - "num_input_tokens_seen": 137360120, - "step": 6461 - }, - { - "epoch": 0.7770095593098058, - "flos": 14698548740040.0, - "grad_norm": 4.56727204220098, - "learning_rate": 4.991701502355444e-07, - "loss": 1.057, - "num_input_tokens_seen": 137380305, - "step": 6462 - }, - { - "epoch": 0.7771298022004449, - "flos": 17582664115920.0, - "grad_norm": 4.972208223139676, - "learning_rate": 4.986553894945518e-07, - "loss": 0.996, - "num_input_tokens_seen": 137401235, - "step": 6463 - }, - { - "epoch": 0.777250045091084, - "flos": 
17792126955360.0, - "grad_norm": 6.80588539344952, - "learning_rate": 4.981408565063416e-07, - "loss": 1.1051, - "num_input_tokens_seen": 137420900, - "step": 6464 - }, - { - "epoch": 0.777370287981723, - "flos": 14278212629400.0, - "grad_norm": 5.146451019816008, - "learning_rate": 4.976265513489701e-07, - "loss": 0.9772, - "num_input_tokens_seen": 137440590, - "step": 6465 - }, - { - "epoch": 0.7774905308723622, - "flos": 15426456212880.0, - "grad_norm": 3.978052126328613, - "learning_rate": 4.971124741004562e-07, - "loss": 1.0406, - "num_input_tokens_seen": 137459310, - "step": 6466 - }, - { - "epoch": 0.7776107737630013, - "flos": 11341930843800.0, - "grad_norm": 3.8551866298677213, - "learning_rate": 4.965986248387846e-07, - "loss": 0.9902, - "num_input_tokens_seen": 137477345, - "step": 6467 - }, - { - "epoch": 0.7777310166536403, - "flos": 17635290449040.0, - "grad_norm": 3.2033511198292985, - "learning_rate": 4.960850036419073e-07, - "loss": 1.0021, - "num_input_tokens_seen": 137496165, - "step": 6468 - }, - { - "epoch": 0.7778512595442795, - "flos": 12231672657240.0, - "grad_norm": 6.9960272278384, - "learning_rate": 4.955716105877378e-07, - "loss": 1.0156, - "num_input_tokens_seen": 137514655, - "step": 6469 - }, - { - "epoch": 0.7779715024349185, - "flos": 12573897130320.0, - "grad_norm": 4.337696715189843, - "learning_rate": 4.950584457541598e-07, - "loss": 1.0645, - "num_input_tokens_seen": 137532840, - "step": 6470 - }, - { - "epoch": 0.7780917453255576, - "flos": 17294660377080.0, - "grad_norm": 10.569157234886882, - "learning_rate": 4.945455092190183e-07, - "loss": 1.053, - "num_input_tokens_seen": 137553815, - "step": 6471 - }, - { - "epoch": 0.7782119882161967, - "flos": 40333471085160.0, - "grad_norm": 0.7090538978643296, - "learning_rate": 4.940328010601271e-07, - "loss": 0.807, - "num_input_tokens_seen": 137618450, - "step": 6472 - }, - { - "epoch": 0.7783322311068358, - "flos": 33445375337160.0, - "grad_norm": 4.892465458408342, - "learning_rate": 4.935203213552621e-07, - "loss": 0.9869, - "num_input_tokens_seen": 137641910, - "step": 6473 - }, - { - "epoch": 0.7784524739974749, - "flos": 13514425583280.0, - "grad_norm": 7.09325363506393, - "learning_rate": 4.930080701821662e-07, - "loss": 0.8927, - "num_input_tokens_seen": 137659095, - "step": 6474 - }, - { - "epoch": 0.778572716888114, - "flos": 17635474418400.0, - "grad_norm": 5.283023874905108, - "learning_rate": 4.92496047618548e-07, - "loss": 0.998, - "num_input_tokens_seen": 137678575, - "step": 6475 - }, - { - "epoch": 0.7786929597787531, - "flos": 14247453536640.0, - "grad_norm": 6.429164489758911, - "learning_rate": 4.919842537420811e-07, - "loss": 1.0045, - "num_input_tokens_seen": 137695410, - "step": 6476 - }, - { - "epoch": 0.7788132026693921, - "flos": 15537718544880.0, - "grad_norm": 5.737081528239766, - "learning_rate": 4.91472688630404e-07, - "loss": 1.0229, - "num_input_tokens_seen": 137715870, - "step": 6477 - }, - { - "epoch": 0.7789334455600313, - "flos": 7854820269360.0, - "grad_norm": 4.37024037448584, - "learning_rate": 4.909613523611202e-07, - "loss": 0.9694, - "num_input_tokens_seen": 137732470, - "step": 6478 - }, - { - "epoch": 0.7790536884506704, - "flos": 20441478156720.0, - "grad_norm": 10.486547299057545, - "learning_rate": 4.904502450117991e-07, - "loss": 0.9658, - "num_input_tokens_seen": 137753150, - "step": 6479 - }, - { - "epoch": 0.7791739313413094, - "flos": 7776003415920.0, - "grad_norm": 4.5847992142338, - "learning_rate": 4.899393666599762e-07, - "loss": 0.9517, - 
"num_input_tokens_seen": 137769445, - "step": 6480 - }, - { - "epoch": 0.7792941742319486, - "flos": 10367577142080.0, - "grad_norm": 4.995423893168279, - "learning_rate": 4.894287173831506e-07, - "loss": 0.9498, - "num_input_tokens_seen": 137785125, - "step": 6481 - }, - { - "epoch": 0.7794144171225876, - "flos": 16534307425680.0, - "grad_norm": 4.815764273796163, - "learning_rate": 4.889182972587877e-07, - "loss": 1.067, - "num_input_tokens_seen": 137804140, - "step": 6482 - }, - { - "epoch": 0.7795346600132267, - "flos": 15275261433600.0, - "grad_norm": 3.842040263205767, - "learning_rate": 4.884081063643177e-07, - "loss": 0.8949, - "num_input_tokens_seen": 137822520, - "step": 6483 - }, - { - "epoch": 0.7796549029038659, - "flos": 50166358515120.0, - "grad_norm": 0.8917829113943554, - "learning_rate": 4.878981447771353e-07, - "loss": 0.7964, - "num_input_tokens_seen": 137876620, - "step": 6484 - }, - { - "epoch": 0.7797751457945049, - "flos": 17059436279160.0, - "grad_norm": 3.4060314703320183, - "learning_rate": 4.873884125746035e-07, - "loss": 0.9623, - "num_input_tokens_seen": 137898015, - "step": 6485 - }, - { - "epoch": 0.779895388685144, - "flos": 16114461900000.0, - "grad_norm": 4.414812790561793, - "learning_rate": 4.868789098340456e-07, - "loss": 0.9409, - "num_input_tokens_seen": 137915640, - "step": 6486 - }, - { - "epoch": 0.7800156315757831, - "flos": 16900576109880.0, - "grad_norm": 7.361016934670101, - "learning_rate": 4.863696366327543e-07, - "loss": 0.939, - "num_input_tokens_seen": 137934530, - "step": 6487 - }, - { - "epoch": 0.7801358744664222, - "flos": 18812668062600.0, - "grad_norm": 4.2604779500991725, - "learning_rate": 4.85860593047986e-07, - "loss": 1.0177, - "num_input_tokens_seen": 137954315, - "step": 6488 - }, - { - "epoch": 0.7802561173570612, - "flos": 18735629579640.0, - "grad_norm": 14.309570545760678, - "learning_rate": 4.853517791569613e-07, - "loss": 0.9649, - "num_input_tokens_seen": 137976215, - "step": 6489 - }, - { - "epoch": 0.7803763602477004, - "flos": 28805636576160.0, - "grad_norm": 3.612527370679183, - "learning_rate": 4.848431950368684e-07, - "loss": 0.889, - "num_input_tokens_seen": 137998495, - "step": 6490 - }, - { - "epoch": 0.7804966031383395, - "flos": 47975675922480.0, - "grad_norm": 2.021490390049928, - "learning_rate": 4.843348407648569e-07, - "loss": 0.8122, - "num_input_tokens_seen": 138059495, - "step": 6491 - }, - { - "epoch": 0.7806168460289785, - "flos": 12567335556480.0, - "grad_norm": 21.7903370010905, - "learning_rate": 4.838267164180457e-07, - "loss": 1.0552, - "num_input_tokens_seen": 138074885, - "step": 6492 - }, - { - "epoch": 0.7807370889196176, - "flos": 17027512047120.0, - "grad_norm": 6.0637539225549695, - "learning_rate": 4.833188220735156e-07, - "loss": 1.0668, - "num_input_tokens_seen": 138094275, - "step": 6493 - }, - { - "epoch": 0.7808573318102567, - "flos": 13465509298920.0, - "grad_norm": 12.367199838117662, - "learning_rate": 4.828111578083152e-07, - "loss": 0.9669, - "num_input_tokens_seen": 138110900, - "step": 6494 - }, - { - "epoch": 0.7809775747008958, - "flos": 17052046843200.0, - "grad_norm": 5.1467228582671485, - "learning_rate": 4.823037236994556e-07, - "loss": 1.0387, - "num_input_tokens_seen": 138128785, - "step": 6495 - }, - { - "epoch": 0.7810978175915348, - "flos": 49078713270360.0, - "grad_norm": 0.8437711571760559, - "learning_rate": 4.817965198239136e-07, - "loss": 0.8219, - "num_input_tokens_seen": 138194965, - "step": 6496 - }, - { - "epoch": 0.781218060482174, - "flos": 
13934393755200.0, - "grad_norm": 8.365378247434855, - "learning_rate": 4.812895462586331e-07, - "loss": 0.946, - "num_input_tokens_seen": 138212510, - "step": 6497 - }, - { - "epoch": 0.7813383033728131, - "flos": 18236507277120.0, - "grad_norm": 6.706377143045686, - "learning_rate": 4.807828030805207e-07, - "loss": 1.0462, - "num_input_tokens_seen": 138231220, - "step": 6498 - }, - { - "epoch": 0.7814585462634521, - "flos": 14541436279680.0, - "grad_norm": 22.137985987527962, - "learning_rate": 4.802762903664495e-07, - "loss": 0.9097, - "num_input_tokens_seen": 138250120, - "step": 6499 - }, - { - "epoch": 0.7815787891540913, - "flos": 15847466877840.0, - "grad_norm": 36.658332941139555, - "learning_rate": 4.797700081932565e-07, - "loss": 0.9454, - "num_input_tokens_seen": 138267705, - "step": 6500 - }, - { - "epoch": 0.7816990320447303, - "flos": 16061069027880.0, - "grad_norm": 5.100794876087444, - "learning_rate": 4.792639566377442e-07, - "loss": 1.0445, - "num_input_tokens_seen": 138284835, - "step": 6501 - }, - { - "epoch": 0.7818192749353694, - "flos": 17739439299120.0, - "grad_norm": 4.749732562115587, - "learning_rate": 4.78758135776681e-07, - "loss": 1.0049, - "num_input_tokens_seen": 138304410, - "step": 6502 - }, - { - "epoch": 0.7819395178260086, - "flos": 16875060143880.0, - "grad_norm": 3.8670412594665953, - "learning_rate": 4.782525456867989e-07, - "loss": 1.0076, - "num_input_tokens_seen": 138322985, - "step": 6503 - }, - { - "epoch": 0.7820597607166476, - "flos": 16507473012600.0, - "grad_norm": 3.605942187733921, - "learning_rate": 4.777471864447959e-07, - "loss": 1.0596, - "num_input_tokens_seen": 138343445, - "step": 6504 - }, - { - "epoch": 0.7821800036072867, - "flos": 15851943465600.0, - "grad_norm": 5.85316076720919, - "learning_rate": 4.772420581273344e-07, - "loss": 1.0197, - "num_input_tokens_seen": 138360650, - "step": 6505 - }, - { - "epoch": 0.7823002464979258, - "flos": 15302003862000.0, - "grad_norm": 9.999441131233871, - "learning_rate": 4.7673716081104134e-07, - "loss": 0.9999, - "num_input_tokens_seen": 138380545, - "step": 6506 - }, - { - "epoch": 0.7824204893885649, - "flos": 17451864822120.0, - "grad_norm": 10.048588774178146, - "learning_rate": 4.762324945725109e-07, - "loss": 1.0721, - "num_input_tokens_seen": 138399710, - "step": 6507 - }, - { - "epoch": 0.782540732279204, - "flos": 19520977250520.0, - "grad_norm": 16.040938455806405, - "learning_rate": 4.7572805948829844e-07, - "loss": 0.9867, - "num_input_tokens_seen": 138419690, - "step": 6508 - }, - { - "epoch": 0.7826609751698431, - "flos": 17320758912720.0, - "grad_norm": 5.3395885303286965, - "learning_rate": 4.7522385563492795e-07, - "loss": 0.9318, - "num_input_tokens_seen": 138439710, - "step": 6509 - }, - { - "epoch": 0.7827812180604822, - "flos": 17058700401720.0, - "grad_norm": 4.407247443966431, - "learning_rate": 4.747198830888863e-07, - "loss": 0.9126, - "num_input_tokens_seen": 138459300, - "step": 6510 - }, - { - "epoch": 0.7829014609511212, - "flos": 19549436726280.0, - "grad_norm": 5.441046950067771, - "learning_rate": 4.742161419266251e-07, - "loss": 0.9141, - "num_input_tokens_seen": 138478180, - "step": 6511 - }, - { - "epoch": 0.7830217038417604, - "flos": 20807133609720.0, - "grad_norm": 13.571222128767895, - "learning_rate": 4.7371263222456304e-07, - "loss": 0.8654, - "num_input_tokens_seen": 138495220, - "step": 6512 - }, - { - "epoch": 0.7831419467323995, - "flos": 43628080210920.0, - "grad_norm": 0.8394246319763048, - "learning_rate": 4.7320935405908004e-07, - "loss": 
0.867, - "num_input_tokens_seen": 138555810, - "step": 6513 - }, - { - "epoch": 0.7832621896230385, - "flos": 13963957047120.0, - "grad_norm": 4.881313832509165, - "learning_rate": 4.7270630750652475e-07, - "loss": 1.0518, - "num_input_tokens_seen": 138571485, - "step": 6514 - }, - { - "epoch": 0.7833824325136777, - "flos": 17792126955360.0, - "grad_norm": 4.496719860292958, - "learning_rate": 4.7220349264320746e-07, - "loss": 1.0398, - "num_input_tokens_seen": 138590290, - "step": 6515 - }, - { - "epoch": 0.7835026754043167, - "flos": 49269834948840.0, - "grad_norm": 0.8559124636322344, - "learning_rate": 4.71700909545407e-07, - "loss": 0.8201, - "num_input_tokens_seen": 138652955, - "step": 6516 - }, - { - "epoch": 0.7836229182949558, - "flos": 14094756340920.0, - "grad_norm": 4.9702973265717585, - "learning_rate": 4.711985582893627e-07, - "loss": 0.9895, - "num_input_tokens_seen": 138671195, - "step": 6517 - }, - { - "epoch": 0.783743161185595, - "flos": 16323556800720.0, - "grad_norm": 5.682208943194635, - "learning_rate": 4.706964389512811e-07, - "loss": 0.9468, - "num_input_tokens_seen": 138690950, - "step": 6518 - }, - { - "epoch": 0.783863404076234, - "flos": 8772193696440.0, - "grad_norm": 3.100634226986167, - "learning_rate": 4.701945516073345e-07, - "loss": 1.1042, - "num_input_tokens_seen": 138708145, - "step": 6519 - }, - { - "epoch": 0.7839836469668731, - "flos": 17242248674880.0, - "grad_norm": 4.163352691100426, - "learning_rate": 4.696928963336577e-07, - "loss": 0.9825, - "num_input_tokens_seen": 138727295, - "step": 6520 - }, - { - "epoch": 0.7841038898575122, - "flos": 44470193525520.0, - "grad_norm": 0.858537647472719, - "learning_rate": 4.6919147320635224e-07, - "loss": 0.861, - "num_input_tokens_seen": 138789725, - "step": 6521 - }, - { - "epoch": 0.7842241327481513, - "flos": 14331544178400.0, - "grad_norm": 6.437094666395751, - "learning_rate": 4.6869028230148286e-07, - "loss": 0.9443, - "num_input_tokens_seen": 138807240, - "step": 6522 - }, - { - "epoch": 0.7843443756387903, - "flos": 19995166156680.0, - "grad_norm": 5.9551015060831585, - "learning_rate": 4.6818932369507957e-07, - "loss": 0.8233, - "num_input_tokens_seen": 138826460, - "step": 6523 - }, - { - "epoch": 0.7844646185294295, - "flos": 15143082369600.0, - "grad_norm": 5.1779651568829195, - "learning_rate": 4.676885974631386e-07, - "loss": 1.1071, - "num_input_tokens_seen": 138844540, - "step": 6524 - }, - { - "epoch": 0.7845848614200686, - "flos": 16819643608800.0, - "grad_norm": 6.334421900842341, - "learning_rate": 4.67188103681619e-07, - "loss": 1.0347, - "num_input_tokens_seen": 138864045, - "step": 6525 - }, - { - "epoch": 0.7847051043107076, - "flos": 16636953859320.0, - "grad_norm": 9.250953418981, - "learning_rate": 4.666878424264453e-07, - "loss": 0.9254, - "num_input_tokens_seen": 138883720, - "step": 6526 - }, - { - "epoch": 0.7848253472013467, - "flos": 13488970940400.0, - "grad_norm": 2.8912626645692754, - "learning_rate": 4.661878137735069e-07, - "loss": 0.9542, - "num_input_tokens_seen": 138901630, - "step": 6527 - }, - { - "epoch": 0.7849455900919858, - "flos": 15039669396960.0, - "grad_norm": 12.109196067226398, - "learning_rate": 4.656880177986571e-07, - "loss": 0.9763, - "num_input_tokens_seen": 138919895, - "step": 6528 - }, - { - "epoch": 0.7850658329826249, - "flos": 13858980334920.0, - "grad_norm": 4.3242469793568175, - "learning_rate": 4.6518845457771607e-07, - "loss": 1.0372, - "num_input_tokens_seen": 138938475, - "step": 6529 - }, - { - "epoch": 0.7851860758732639, - "flos": 
8798721493920.0, - "grad_norm": 4.207023769361386, - "learning_rate": 4.646891241864652e-07, - "loss": 1.0087, - "num_input_tokens_seen": 138956760, - "step": 6530 - }, - { - "epoch": 0.7853063187639031, - "flos": 16319386828560.0, - "grad_norm": 6.204954862345898, - "learning_rate": 4.6419002670065397e-07, - "loss": 0.9533, - "num_input_tokens_seen": 138976060, - "step": 6531 - }, - { - "epoch": 0.7854265616545422, - "flos": 12285617437440.0, - "grad_norm": 14.442537295950524, - "learning_rate": 4.6369116219599445e-07, - "loss": 1.07, - "num_input_tokens_seen": 138991765, - "step": 6532 - }, - { - "epoch": 0.7855468045451812, - "flos": 16950136287000.0, - "grad_norm": 3.153858903175622, - "learning_rate": 4.631925307481637e-07, - "loss": 1.0047, - "num_input_tokens_seen": 139011300, - "step": 6533 - }, - { - "epoch": 0.7856670474358204, - "flos": 18264813445080.0, - "grad_norm": 4.666523704155516, - "learning_rate": 4.6269413243280533e-07, - "loss": 0.965, - "num_input_tokens_seen": 139030440, - "step": 6534 - }, - { - "epoch": 0.7857872903264594, - "flos": 12857976189480.0, - "grad_norm": 6.481735390831518, - "learning_rate": 4.621959673255236e-07, - "loss": 0.9481, - "num_input_tokens_seen": 139046460, - "step": 6535 - }, - { - "epoch": 0.7859075332170985, - "flos": 10156734532440.0, - "grad_norm": 3.4504502928618956, - "learning_rate": 4.6169803550189135e-07, - "loss": 1.1233, - "num_input_tokens_seen": 139061875, - "step": 6536 - }, - { - "epoch": 0.7860277761077377, - "flos": 14094296417520.0, - "grad_norm": 4.088233317351701, - "learning_rate": 4.6120033703744355e-07, - "loss": 0.9923, - "num_input_tokens_seen": 139080490, - "step": 6537 - }, - { - "epoch": 0.7861480189983767, - "flos": 18788961128640.0, - "grad_norm": 5.044152437815294, - "learning_rate": 4.607028720076822e-07, - "loss": 1.0072, - "num_input_tokens_seen": 139096890, - "step": 6538 - }, - { - "epoch": 0.7862682618890158, - "flos": 17235963055080.0, - "grad_norm": 4.32883290585144, - "learning_rate": 4.6020564048807074e-07, - "loss": 0.9508, - "num_input_tokens_seen": 139114285, - "step": 6539 - }, - { - "epoch": 0.7863885047796549, - "flos": 33992034153840.0, - "grad_norm": 17.388681417953407, - "learning_rate": 4.5970864255403883e-07, - "loss": 0.9402, - "num_input_tokens_seen": 139135530, - "step": 6540 - }, - { - "epoch": 0.786508747670294, - "flos": 17346581494320.0, - "grad_norm": 4.096340379087506, - "learning_rate": 4.59211878280982e-07, - "loss": 1.0495, - "num_input_tokens_seen": 139154765, - "step": 6541 - }, - { - "epoch": 0.786628990560933, - "flos": 12784463785920.0, - "grad_norm": 8.653807808554703, - "learning_rate": 4.587153477442578e-07, - "loss": 0.921, - "num_input_tokens_seen": 139170800, - "step": 6542 - }, - { - "epoch": 0.7867492334515722, - "flos": 18395612738880.0, - "grad_norm": 74.27284541746229, - "learning_rate": 4.582190510191899e-07, - "loss": 1.0331, - "num_input_tokens_seen": 139189180, - "step": 6543 - }, - { - "epoch": 0.7868694763422113, - "flos": 11734757987040.0, - "grad_norm": 6.484400590593329, - "learning_rate": 4.5772298818106625e-07, - "loss": 1.0973, - "num_input_tokens_seen": 139204690, - "step": 6544 - }, - { - "epoch": 0.7869897192328503, - "flos": 20937656949480.0, - "grad_norm": 12.98370690124592, - "learning_rate": 4.572271593051384e-07, - "loss": 0.9414, - "num_input_tokens_seen": 139221765, - "step": 6545 - }, - { - "epoch": 0.7871099621234895, - "flos": 12128780931120.0, - "grad_norm": 3.5582023262963616, - "learning_rate": 4.567315644666245e-07, - "loss": 0.9985, 
- "num_input_tokens_seen": 139240280, - "step": 6546 - }, - { - "epoch": 0.7872302050141285, - "flos": 16664462826720.0, - "grad_norm": 7.661642861148457, - "learning_rate": 4.5623620374070507e-07, - "loss": 1.0754, - "num_input_tokens_seen": 139259315, - "step": 6547 - }, - { - "epoch": 0.7873504479047676, - "flos": 47229243319320.0, - "grad_norm": 0.8237525380722099, - "learning_rate": 4.557410772025263e-07, - "loss": 0.8565, - "num_input_tokens_seen": 139320985, - "step": 6548 - }, - { - "epoch": 0.7874706907954068, - "flos": 16533847502280.0, - "grad_norm": 4.412326599690788, - "learning_rate": 4.5524618492719803e-07, - "loss": 0.8761, - "num_input_tokens_seen": 139339925, - "step": 6549 - }, - { - "epoch": 0.7875909336860458, - "flos": 20493399273960.0, - "grad_norm": 3.190126571057593, - "learning_rate": 4.54751526989795e-07, - "loss": 1.0082, - "num_input_tokens_seen": 139361485, - "step": 6550 - }, - { - "epoch": 0.7877111765766849, - "flos": 13255862490120.0, - "grad_norm": 4.049266447345331, - "learning_rate": 4.5425710346535775e-07, - "loss": 1.0322, - "num_input_tokens_seen": 139379150, - "step": 6551 - }, - { - "epoch": 0.787831419467324, - "flos": 19648097157120.0, - "grad_norm": 15.686548698123298, - "learning_rate": 4.537629144288877e-07, - "loss": 1.0469, - "num_input_tokens_seen": 139396325, - "step": 6552 - }, - { - "epoch": 0.7879516623579631, - "flos": 12862698069720.0, - "grad_norm": 6.520815165761516, - "learning_rate": 4.5326895995535477e-07, - "loss": 0.9689, - "num_input_tokens_seen": 139414945, - "step": 6553 - }, - { - "epoch": 0.7880719052486022, - "flos": 14435876997840.0, - "grad_norm": 20.56795570789555, - "learning_rate": 4.527752401196907e-07, - "loss": 1.0682, - "num_input_tokens_seen": 139432680, - "step": 6554 - }, - { - "epoch": 0.7881921481392413, - "flos": 15380330130480.0, - "grad_norm": 6.098222705011035, - "learning_rate": 4.5228175499679254e-07, - "loss": 0.898, - "num_input_tokens_seen": 139451985, - "step": 6555 - }, - { - "epoch": 0.7883123910298804, - "flos": 49099384709880.0, - "grad_norm": 0.8627888237379211, - "learning_rate": 4.5178850466152174e-07, - "loss": 0.7929, - "num_input_tokens_seen": 139510535, - "step": 6556 - }, - { - "epoch": 0.7884326339205194, - "flos": 13702205151720.0, - "grad_norm": 7.14629247615749, - "learning_rate": 4.512954891887031e-07, - "loss": 1.0414, - "num_input_tokens_seen": 139528555, - "step": 6557 - }, - { - "epoch": 0.7885528768111585, - "flos": 12600118312200.0, - "grad_norm": 6.5020651837745165, - "learning_rate": 4.5080270865312806e-07, - "loss": 1.0625, - "num_input_tokens_seen": 139545470, - "step": 6558 - }, - { - "epoch": 0.7886731197017977, - "flos": 13335292574760.0, - "grad_norm": 5.4969727247788756, - "learning_rate": 4.5031016312954985e-07, - "loss": 0.9324, - "num_input_tokens_seen": 139563505, - "step": 6559 - }, - { - "epoch": 0.7887933625924367, - "flos": 23797636129560.0, - "grad_norm": 5.252818849482487, - "learning_rate": 4.498178526926886e-07, - "loss": 0.9718, - "num_input_tokens_seen": 139584090, - "step": 6560 - }, - { - "epoch": 0.7889136054830758, - "flos": 12050025400800.0, - "grad_norm": 5.286416680934534, - "learning_rate": 4.4932577741722635e-07, - "loss": 0.9618, - "num_input_tokens_seen": 139602340, - "step": 6561 - }, - { - "epoch": 0.7890338483737149, - "flos": 20965472532480.0, - "grad_norm": 7.188390792710194, - "learning_rate": 4.4883393737780985e-07, - "loss": 0.9778, - "num_input_tokens_seen": 139623010, - "step": 6562 - }, - { - "epoch": 0.789154091264354, - "flos": 
14171304238920.0, - "grad_norm": 5.270401144088037, - "learning_rate": 4.4834233264905254e-07, - "loss": 1.0082, - "num_input_tokens_seen": 139639745, - "step": 6563 - }, - { - "epoch": 0.789274334154993, - "flos": 10266525109560.0, - "grad_norm": 16.410172522159275, - "learning_rate": 4.478509633055294e-07, - "loss": 0.9302, - "num_input_tokens_seen": 139657175, - "step": 6564 - }, - { - "epoch": 0.7893945770456322, - "flos": 15504383881080.0, - "grad_norm": 9.355116854681512, - "learning_rate": 4.473598294217813e-07, - "loss": 1.0107, - "num_input_tokens_seen": 139672320, - "step": 6565 - }, - { - "epoch": 0.7895148199362713, - "flos": 14724401983200.0, - "grad_norm": 3.81397493942501, - "learning_rate": 4.468689310723124e-07, - "loss": 0.9468, - "num_input_tokens_seen": 139689855, - "step": 6566 - }, - { - "epoch": 0.7896350628269103, - "flos": 11813758809840.0, - "grad_norm": 5.725070816335007, - "learning_rate": 4.463782683315913e-07, - "loss": 1.0103, - "num_input_tokens_seen": 139708580, - "step": 6567 - }, - { - "epoch": 0.7897553057175495, - "flos": 16088332702800.0, - "grad_norm": 3.4819846400273264, - "learning_rate": 4.458878412740523e-07, - "loss": 0.9535, - "num_input_tokens_seen": 139727080, - "step": 6568 - }, - { - "epoch": 0.7898755486081885, - "flos": 10266310478640.0, - "grad_norm": 9.568674923672654, - "learning_rate": 4.453976499740919e-07, - "loss": 0.9862, - "num_input_tokens_seen": 139744445, - "step": 6569 - }, - { - "epoch": 0.7899957914988276, - "flos": 12207536461440.0, - "grad_norm": 5.036806059717122, - "learning_rate": 4.4490769450607215e-07, - "loss": 1.0098, - "num_input_tokens_seen": 139761790, - "step": 6570 - }, - { - "epoch": 0.7901160343894668, - "flos": 29484811734000.0, - "grad_norm": 4.740621845644139, - "learning_rate": 4.4441797494431845e-07, - "loss": 0.9606, - "num_input_tokens_seen": 139783315, - "step": 6571 - }, - { - "epoch": 0.7902362772801058, - "flos": 11918980814520.0, - "grad_norm": 5.495713731198451, - "learning_rate": 4.439284913631207e-07, - "loss": 1.0033, - "num_input_tokens_seen": 139800245, - "step": 6572 - }, - { - "epoch": 0.7903565201707449, - "flos": 19313476750920.0, - "grad_norm": 4.1249368836169165, - "learning_rate": 4.434392438367347e-07, - "loss": 1.0613, - "num_input_tokens_seen": 139819390, - "step": 6573 - }, - { - "epoch": 0.790476763061384, - "flos": 22115095886160.0, - "grad_norm": 3.331092049180314, - "learning_rate": 4.4295023243937677e-07, - "loss": 0.961, - "num_input_tokens_seen": 139839315, - "step": 6574 - }, - { - "epoch": 0.7905970059520231, - "flos": 15693297927240.0, - "grad_norm": 4.626782048203917, - "learning_rate": 4.4246145724523123e-07, - "loss": 1.0376, - "num_input_tokens_seen": 139856780, - "step": 6575 - }, - { - "epoch": 0.7907172488426621, - "flos": 14593234750680.0, - "grad_norm": 3.66091282535432, - "learning_rate": 4.41972918328444e-07, - "loss": 0.9848, - "num_input_tokens_seen": 139873935, - "step": 6576 - }, - { - "epoch": 0.7908374917333013, - "flos": 21437607114120.0, - "grad_norm": 4.239314615167418, - "learning_rate": 4.4148461576312646e-07, - "loss": 1.018, - "num_input_tokens_seen": 139893320, - "step": 6577 - }, - { - "epoch": 0.7909577346239404, - "flos": 14908226210400.0, - "grad_norm": 2.901426188235925, - "learning_rate": 4.4099654962335343e-07, - "loss": 0.9729, - "num_input_tokens_seen": 139913490, - "step": 6578 - }, - { - "epoch": 0.7910779775145794, - "flos": 18682114061280.0, - "grad_norm": 9.725658843564597, - "learning_rate": 4.405087199831636e-07, - "loss": 0.9746, 
- "num_input_tokens_seen": 139933450, - "step": 6579 - }, - { - "epoch": 0.7911982204052186, - "flos": 16035062476920.0, - "grad_norm": 4.639001016283263, - "learning_rate": 4.400211269165619e-07, - "loss": 0.9038, - "num_input_tokens_seen": 139949625, - "step": 6580 - }, - { - "epoch": 0.7913184632958576, - "flos": 16429085421000.0, - "grad_norm": 3.1768157512776716, - "learning_rate": 4.3953377049751416e-07, - "loss": 0.9997, - "num_input_tokens_seen": 139969770, - "step": 6581 - }, - { - "epoch": 0.7914387061864967, - "flos": 8666634414600.0, - "grad_norm": 8.674808150486898, - "learning_rate": 4.390466507999537e-07, - "loss": 1.0005, - "num_input_tokens_seen": 139985240, - "step": 6582 - }, - { - "epoch": 0.7915589490771359, - "flos": 12469686957120.0, - "grad_norm": 8.073528937635384, - "learning_rate": 4.385597678977748e-07, - "loss": 0.9813, - "num_input_tokens_seen": 140003795, - "step": 6583 - }, - { - "epoch": 0.7916791919677749, - "flos": 18210194110560.0, - "grad_norm": 4.713480451267146, - "learning_rate": 4.3807312186483726e-07, - "loss": 0.9721, - "num_input_tokens_seen": 140024235, - "step": 6584 - }, - { - "epoch": 0.791799434858414, - "flos": 13361697726000.0, - "grad_norm": 3.830760562369574, - "learning_rate": 4.375867127749655e-07, - "loss": 1.0083, - "num_input_tokens_seen": 140042230, - "step": 6585 - }, - { - "epoch": 0.7919196777490531, - "flos": 18369084941400.0, - "grad_norm": 22.54074002639443, - "learning_rate": 4.3710054070194744e-07, - "loss": 0.9034, - "num_input_tokens_seen": 140061645, - "step": 6586 - }, - { - "epoch": 0.7920399206396922, - "flos": 8405035827000.0, - "grad_norm": 8.868622973703811, - "learning_rate": 4.3661460571953455e-07, - "loss": 0.8793, - "num_input_tokens_seen": 140078100, - "step": 6587 - }, - { - "epoch": 0.7921601635303313, - "flos": 15327581151120.0, - "grad_norm": 4.496183260267769, - "learning_rate": 4.36128907901443e-07, - "loss": 0.9103, - "num_input_tokens_seen": 140097415, - "step": 6588 - }, - { - "epoch": 0.7922804064209703, - "flos": 12836722180320.0, - "grad_norm": 5.94472281179552, - "learning_rate": 4.356434473213519e-07, - "loss": 0.9472, - "num_input_tokens_seen": 140114585, - "step": 6589 - }, - { - "epoch": 0.7924006493116095, - "flos": 15379962191760.0, - "grad_norm": 3.749933242203638, - "learning_rate": 4.351582240529068e-07, - "loss": 1.0109, - "num_input_tokens_seen": 140135135, - "step": 6590 - }, - { - "epoch": 0.7925208922022485, - "flos": 45994977415800.0, - "grad_norm": 0.6859773333199617, - "learning_rate": 4.346732381697149e-07, - "loss": 0.8381, - "num_input_tokens_seen": 140198985, - "step": 6591 - }, - { - "epoch": 0.7926411350928876, - "flos": 11993443726440.0, - "grad_norm": 4.433162449767691, - "learning_rate": 4.3418848974534825e-07, - "loss": 1.0363, - "num_input_tokens_seen": 140215645, - "step": 6592 - }, - { - "epoch": 0.7927613779835267, - "flos": 24584332909080.0, - "grad_norm": 5.642047734353082, - "learning_rate": 4.3370397885334276e-07, - "loss": 0.908, - "num_input_tokens_seen": 140235995, - "step": 6593 - }, - { - "epoch": 0.7928816208741658, - "flos": 13438122977760.0, - "grad_norm": 6.104880784828861, - "learning_rate": 4.3321970556719777e-07, - "loss": 0.9843, - "num_input_tokens_seen": 140254010, - "step": 6594 - }, - { - "epoch": 0.7930018637648049, - "flos": 13202316310200.0, - "grad_norm": 6.176217630159084, - "learning_rate": 4.3273566996037856e-07, - "loss": 0.9269, - "num_input_tokens_seen": 140270425, - "step": 6595 - }, - { - "epoch": 0.793122106655444, - "flos": 
17447602865280.0, - "grad_norm": 6.225698298618391, - "learning_rate": 4.322518721063113e-07, - "loss": 1.0305, - "num_input_tokens_seen": 140288695, - "step": 6596 - }, - { - "epoch": 0.7932423495460831, - "flos": 24556149387360.0, - "grad_norm": 7.900677673632606, - "learning_rate": 4.3176831207838906e-07, - "loss": 0.9151, - "num_input_tokens_seen": 140311825, - "step": 6597 - }, - { - "epoch": 0.7933625924367221, - "flos": 19208837315880.0, - "grad_norm": 6.964255487285594, - "learning_rate": 4.3128498994996685e-07, - "loss": 0.9679, - "num_input_tokens_seen": 140331020, - "step": 6598 - }, - { - "epoch": 0.7934828353273613, - "flos": 21068701535760.0, - "grad_norm": 10.177114985770464, - "learning_rate": 4.308019057943646e-07, - "loss": 0.9305, - "num_input_tokens_seen": 140352465, - "step": 6599 - }, - { - "epoch": 0.7936030782180004, - "flos": 20382995465640.0, - "grad_norm": 5.885109502378055, - "learning_rate": 4.3031905968486535e-07, - "loss": 0.9707, - "num_input_tokens_seen": 140373015, - "step": 6600 - }, - { - "epoch": 0.7937233211086394, - "flos": 11598684904920.0, - "grad_norm": 4.583159194874907, - "learning_rate": 4.298364516947162e-07, - "loss": 0.9062, - "num_input_tokens_seen": 140389965, - "step": 6601 - }, - { - "epoch": 0.7938435639992786, - "flos": 15638555946480.0, - "grad_norm": 3.9445106220215975, - "learning_rate": 4.293540818971295e-07, - "loss": 0.8869, - "num_input_tokens_seen": 140407490, - "step": 6602 - }, - { - "epoch": 0.7939638068899176, - "flos": 15770673687360.0, - "grad_norm": 3.7786114527029695, - "learning_rate": 4.2887195036527934e-07, - "loss": 0.99, - "num_input_tokens_seen": 140426015, - "step": 6603 - }, - { - "epoch": 0.7940840497805567, - "flos": 12571137589920.0, - "grad_norm": 4.88249998999936, - "learning_rate": 4.28390057172306e-07, - "loss": 0.9433, - "num_input_tokens_seen": 140442240, - "step": 6604 - }, - { - "epoch": 0.7942042926711959, - "flos": 16927011922680.0, - "grad_norm": 7.738879932119361, - "learning_rate": 4.279084023913111e-07, - "loss": 0.9422, - "num_input_tokens_seen": 140459835, - "step": 6605 - }, - { - "epoch": 0.7943245355618349, - "flos": 13649517495480.0, - "grad_norm": 4.364488016451867, - "learning_rate": 4.2742698609536096e-07, - "loss": 0.9174, - "num_input_tokens_seen": 140477865, - "step": 6606 - }, - { - "epoch": 0.794444778452474, - "flos": 17790747185160.0, - "grad_norm": 25.117684433264593, - "learning_rate": 4.2694580835748706e-07, - "loss": 1.0037, - "num_input_tokens_seen": 140497445, - "step": 6607 - }, - { - "epoch": 0.7945650213431131, - "flos": 16507319704800.0, - "grad_norm": 4.4154029093813, - "learning_rate": 4.264648692506836e-07, - "loss": 0.9628, - "num_input_tokens_seen": 140515955, - "step": 6608 - }, - { - "epoch": 0.7946852642337522, - "flos": 18548769858000.0, - "grad_norm": 3.9141639802915535, - "learning_rate": 4.2598416884790824e-07, - "loss": 0.9388, - "num_input_tokens_seen": 140534725, - "step": 6609 - }, - { - "epoch": 0.7948055071243912, - "flos": 16901434633560.0, - "grad_norm": 11.36216330118576, - "learning_rate": 4.255037072220828e-07, - "loss": 1.0303, - "num_input_tokens_seen": 140555815, - "step": 6610 - }, - { - "epoch": 0.7949257500150304, - "flos": 15615646213080.0, - "grad_norm": 4.955779029433638, - "learning_rate": 4.2502348444609293e-07, - "loss": 0.9428, - "num_input_tokens_seen": 140575155, - "step": 6611 - }, - { - "epoch": 0.7950459929056695, - "flos": 18341667958680.0, - "grad_norm": 3.2116422227906885, - "learning_rate": 4.2454350059278844e-07, - "loss": 
0.9101, - "num_input_tokens_seen": 140595935, - "step": 6612 - }, - { - "epoch": 0.7951662357963085, - "flos": 15742336857840.0, - "grad_norm": 6.898075565162037, - "learning_rate": 4.240637557349824e-07, - "loss": 1.0713, - "num_input_tokens_seen": 140612870, - "step": 6613 - }, - { - "epoch": 0.7952864786869477, - "flos": 17527830150480.0, - "grad_norm": 3.2596921918219217, - "learning_rate": 4.235842499454516e-07, - "loss": 0.8796, - "num_input_tokens_seen": 140632505, - "step": 6614 - }, - { - "epoch": 0.7954067215775867, - "flos": 15507388713960.0, - "grad_norm": 6.131859499999521, - "learning_rate": 4.2310498329693687e-07, - "loss": 1.0435, - "num_input_tokens_seen": 140653125, - "step": 6615 - }, - { - "epoch": 0.7955269644682258, - "flos": 17110406888040.0, - "grad_norm": 3.2534308298020784, - "learning_rate": 4.2262595586214164e-07, - "loss": 1.0364, - "num_input_tokens_seen": 140673940, - "step": 6616 - }, - { - "epoch": 0.795647207358865, - "flos": 17793414740880.0, - "grad_norm": 11.874726302355747, - "learning_rate": 4.221471677137358e-07, - "loss": 0.991, - "num_input_tokens_seen": 140694475, - "step": 6617 - }, - { - "epoch": 0.795767450249504, - "flos": 10346108502000.0, - "grad_norm": 4.249098064983288, - "learning_rate": 4.216686189243492e-07, - "loss": 0.9382, - "num_input_tokens_seen": 140712985, - "step": 6618 - }, - { - "epoch": 0.7958876931401431, - "flos": 13148034252840.0, - "grad_norm": 5.101149724878207, - "learning_rate": 4.211903095665785e-07, - "loss": 0.9563, - "num_input_tokens_seen": 140732090, - "step": 6619 - }, - { - "epoch": 0.7960079360307821, - "flos": 15301819892640.0, - "grad_norm": 3.832993381740351, - "learning_rate": 4.2071223971298277e-07, - "loss": 0.9811, - "num_input_tokens_seen": 140748995, - "step": 6620 - }, - { - "epoch": 0.7961281789214213, - "flos": 18131469241800.0, - "grad_norm": 11.166686062772238, - "learning_rate": 4.2023440943608433e-07, - "loss": 0.8308, - "num_input_tokens_seen": 140768680, - "step": 6621 - }, - { - "epoch": 0.7962484218120603, - "flos": 15589517015880.0, - "grad_norm": 4.199913414562178, - "learning_rate": 4.1975681880837023e-07, - "loss": 1.0075, - "num_input_tokens_seen": 140788405, - "step": 6622 - }, - { - "epoch": 0.7963686647026994, - "flos": 13384024889760.0, - "grad_norm": 3.5514508949396273, - "learning_rate": 4.192794679022895e-07, - "loss": 1.058, - "num_input_tokens_seen": 140806450, - "step": 6623 - }, - { - "epoch": 0.7964889075933386, - "flos": 21175303310640.0, - "grad_norm": 3.58895142333116, - "learning_rate": 4.1880235679025743e-07, - "loss": 0.9486, - "num_input_tokens_seen": 140826265, - "step": 6624 - }, - { - "epoch": 0.7966091504839776, - "flos": 21012671769480.0, - "grad_norm": 4.841040462546225, - "learning_rate": 4.1832548554464986e-07, - "loss": 0.8516, - "num_input_tokens_seen": 140844280, - "step": 6625 - }, - { - "epoch": 0.7967293933746167, - "flos": 48181490036280.0, - "grad_norm": 0.765483882121426, - "learning_rate": 4.178488542378098e-07, - "loss": 0.8338, - "num_input_tokens_seen": 140901580, - "step": 6626 - }, - { - "epoch": 0.7968496362652558, - "flos": 18183880944000.0, - "grad_norm": 5.037926077039524, - "learning_rate": 4.173724629420401e-07, - "loss": 1.1189, - "num_input_tokens_seen": 140922660, - "step": 6627 - }, - { - "epoch": 0.7969698791558949, - "flos": 10240089296760.0, - "grad_norm": 8.809837426061437, - "learning_rate": 4.168963117296087e-07, - "loss": 0.9152, - "num_input_tokens_seen": 140939715, - "step": 6628 - }, - { - "epoch": 0.797090122046534, - 
"flos": 15721205494920.0, - "grad_norm": 9.863378325636146, - "learning_rate": 4.1642040067274876e-07, - "loss": 0.9722, - "num_input_tokens_seen": 140959105, - "step": 6629 - }, - { - "epoch": 0.7972103649371731, - "flos": 14118064674600.0, - "grad_norm": 6.10569398626848, - "learning_rate": 4.1594472984365493e-07, - "loss": 0.9528, - "num_input_tokens_seen": 140977510, - "step": 6630 - }, - { - "epoch": 0.7973306078278122, - "flos": 25732331200080.0, - "grad_norm": 5.126014609040315, - "learning_rate": 4.154692993144862e-07, - "loss": 1.0037, - "num_input_tokens_seen": 140997000, - "step": 6631 - }, - { - "epoch": 0.7974508507184512, - "flos": 15169916782680.0, - "grad_norm": 6.08231756360847, - "learning_rate": 4.1499410915736476e-07, - "loss": 0.9371, - "num_input_tokens_seen": 141015650, - "step": 6632 - }, - { - "epoch": 0.7975710936090904, - "flos": 48876731851560.0, - "grad_norm": 0.808325976570971, - "learning_rate": 4.145191594443762e-07, - "loss": 0.9373, - "num_input_tokens_seen": 141079725, - "step": 6633 - }, - { - "epoch": 0.7976913364997295, - "flos": 15983447975280.0, - "grad_norm": 7.5052921697278725, - "learning_rate": 4.140444502475713e-07, - "loss": 0.9381, - "num_input_tokens_seen": 141098995, - "step": 6634 - }, - { - "epoch": 0.7978115793903685, - "flos": 10787269359960.0, - "grad_norm": 5.608142600517071, - "learning_rate": 4.1356998163896216e-07, - "loss": 0.9294, - "num_input_tokens_seen": 141115765, - "step": 6635 - }, - { - "epoch": 0.7979318222810077, - "flos": 13990362198360.0, - "grad_norm": 4.810050836560147, - "learning_rate": 4.130957536905255e-07, - "loss": 0.9674, - "num_input_tokens_seen": 141133500, - "step": 6636 - }, - { - "epoch": 0.7980520651716467, - "flos": 11001454079640.0, - "grad_norm": 5.495637550751814, - "learning_rate": 4.1262176647420134e-07, - "loss": 0.9281, - "num_input_tokens_seen": 141151385, - "step": 6637 - }, - { - "epoch": 0.7981723080622858, - "flos": 15851851480920.0, - "grad_norm": 10.483168053549692, - "learning_rate": 4.121480200618923e-07, - "loss": 1.0258, - "num_input_tokens_seen": 141170760, - "step": 6638 - }, - { - "epoch": 0.798292550952925, - "flos": 16297519588200.0, - "grad_norm": 3.7104302953133876, - "learning_rate": 4.116745145254674e-07, - "loss": 1.0281, - "num_input_tokens_seen": 141190015, - "step": 6639 - }, - { - "epoch": 0.798412793843564, - "flos": 46178341719600.0, - "grad_norm": 0.7905939178991349, - "learning_rate": 4.1120124993675476e-07, - "loss": 0.8316, - "num_input_tokens_seen": 141254165, - "step": 6640 - }, - { - "epoch": 0.7985330367342031, - "flos": 9582812040840.0, - "grad_norm": 6.890484936041047, - "learning_rate": 4.107282263675498e-07, - "loss": 0.8408, - "num_input_tokens_seen": 141271555, - "step": 6641 - }, - { - "epoch": 0.7986532796248422, - "flos": 48477404457600.0, - "grad_norm": 0.7416083234348698, - "learning_rate": 4.1025544388960907e-07, - "loss": 0.7689, - "num_input_tokens_seen": 141332315, - "step": 6642 - }, - { - "epoch": 0.7987735225154813, - "flos": 15956736208440.0, - "grad_norm": 4.04226410110848, - "learning_rate": 4.097829025746538e-07, - "loss": 0.9357, - "num_input_tokens_seen": 141353580, - "step": 6643 - }, - { - "epoch": 0.7988937654061203, - "flos": 49314581261040.0, - "grad_norm": 0.7488351756894731, - "learning_rate": 4.0931060249436757e-07, - "loss": 0.853, - "num_input_tokens_seen": 141417140, - "step": 6644 - }, - { - "epoch": 0.7990140082967595, - "flos": 14771110635240.0, - "grad_norm": 4.158514338180703, - "learning_rate": 4.088385437203978e-07, - 
"loss": 0.9313, - "num_input_tokens_seen": 141433870, - "step": 6645 - }, - { - "epoch": 0.7991342511873986, - "flos": 13462320496680.0, - "grad_norm": 4.18376129413285, - "learning_rate": 4.083667263243564e-07, - "loss": 0.9917, - "num_input_tokens_seen": 141451935, - "step": 6646 - }, - { - "epoch": 0.7992544940780376, - "flos": 14779573225800.0, - "grad_norm": 3.36151764427955, - "learning_rate": 4.0789515037781653e-07, - "loss": 0.9458, - "num_input_tokens_seen": 141472380, - "step": 6647 - }, - { - "epoch": 0.7993747369686768, - "flos": 8903176959600.0, - "grad_norm": 6.596762308073465, - "learning_rate": 4.0742381595231755e-07, - "loss": 1.0454, - "num_input_tokens_seen": 141488825, - "step": 6648 - }, - { - "epoch": 0.7994949798593158, - "flos": 14248036106280.0, - "grad_norm": 26.874959120608445, - "learning_rate": 4.06952723119359e-07, - "loss": 1.0165, - "num_input_tokens_seen": 141508420, - "step": 6649 - }, - { - "epoch": 0.7996152227499549, - "flos": 27598419716640.0, - "grad_norm": 4.363045627170932, - "learning_rate": 4.0648187195040504e-07, - "loss": 0.8948, - "num_input_tokens_seen": 141530345, - "step": 6650 - }, - { - "epoch": 0.799735465640594, - "flos": 50305252460760.0, - "grad_norm": 0.95707428504565, - "learning_rate": 4.060112625168848e-07, - "loss": 0.9553, - "num_input_tokens_seen": 141595175, - "step": 6651 - }, - { - "epoch": 0.7998557085312331, - "flos": 17239734426960.0, - "grad_norm": 13.068564143562753, - "learning_rate": 4.055408948901886e-07, - "loss": 0.9612, - "num_input_tokens_seen": 141616295, - "step": 6652 - }, - { - "epoch": 0.7999759514218722, - "flos": 19628314902840.0, - "grad_norm": 6.210610435079499, - "learning_rate": 4.050707691416708e-07, - "loss": 0.9416, - "num_input_tokens_seen": 141637325, - "step": 6653 - }, - { - "epoch": 0.8000961943125112, - "flos": 48218166810120.0, - "grad_norm": 0.7216846842715422, - "learning_rate": 4.046008853426495e-07, - "loss": 0.8517, - "num_input_tokens_seen": 141700360, - "step": 6654 - }, - { - "epoch": 0.8002164372031504, - "flos": 20469324401280.0, - "grad_norm": 4.524009444425053, - "learning_rate": 4.0413124356440464e-07, - "loss": 0.8567, - "num_input_tokens_seen": 141724125, - "step": 6655 - }, - { - "epoch": 0.8003366800937894, - "flos": 12495632184960.0, - "grad_norm": 4.735950371533771, - "learning_rate": 4.0366184387818223e-07, - "loss": 1.046, - "num_input_tokens_seen": 141742305, - "step": 6656 - }, - { - "epoch": 0.8004569229844285, - "flos": 18500926728240.0, - "grad_norm": 11.377077688427535, - "learning_rate": 4.0319268635518797e-07, - "loss": 1.0664, - "num_input_tokens_seen": 141762600, - "step": 6657 - }, - { - "epoch": 0.8005771658750677, - "flos": 14775771192360.0, - "grad_norm": 15.851160607429952, - "learning_rate": 4.027237710665943e-07, - "loss": 0.9831, - "num_input_tokens_seen": 141780785, - "step": 6658 - }, - { - "epoch": 0.8006974087657067, - "flos": 18369606187920.0, - "grad_norm": 3.781048099392473, - "learning_rate": 4.022550980835344e-07, - "loss": 0.9183, - "num_input_tokens_seen": 141802750, - "step": 6659 - }, - { - "epoch": 0.8008176516563458, - "flos": 12154388881800.0, - "grad_norm": 5.466874329569204, - "learning_rate": 4.017866674771051e-07, - "loss": 1.0316, - "num_input_tokens_seen": 141819955, - "step": 6660 - }, - { - "epoch": 0.8009378945469849, - "flos": 17215904846760.0, - "grad_norm": 4.990720912558704, - "learning_rate": 4.013184793183688e-07, - "loss": 0.9693, - "num_input_tokens_seen": 141841770, - "step": 6661 - }, - { - "epoch": 0.801058137437624, - 
"flos": 14038634589960.0, - "grad_norm": 7.836924223611014, - "learning_rate": 4.008505336783472e-07, - "loss": 0.9535, - "num_input_tokens_seen": 141859215, - "step": 6662 - }, - { - "epoch": 0.801178380328263, - "flos": 13228445507400.0, - "grad_norm": 8.971878197382747, - "learning_rate": 4.003828306280284e-07, - "loss": 1.0497, - "num_input_tokens_seen": 141876610, - "step": 6663 - }, - { - "epoch": 0.8012986232189022, - "flos": 11106645422760.0, - "grad_norm": 3.5869722888685436, - "learning_rate": 3.999153702383626e-07, - "loss": 1.0061, - "num_input_tokens_seen": 141894220, - "step": 6664 - }, - { - "epoch": 0.8014188661095413, - "flos": 20361250871520.0, - "grad_norm": 8.571512421697381, - "learning_rate": 3.9944815258026263e-07, - "loss": 0.957, - "num_input_tokens_seen": 141915760, - "step": 6665 - }, - { - "epoch": 0.8015391090001803, - "flos": 20882700337800.0, - "grad_norm": 6.027412735549804, - "learning_rate": 3.989811777246057e-07, - "loss": 1.0582, - "num_input_tokens_seen": 141935650, - "step": 6666 - }, - { - "epoch": 0.8016593518908195, - "flos": 50416606777440.0, - "grad_norm": 0.8947842835612818, - "learning_rate": 3.985144457422305e-07, - "loss": 0.9302, - "num_input_tokens_seen": 141989655, - "step": 6667 - }, - { - "epoch": 0.8017795947814585, - "flos": 18522303383640.0, - "grad_norm": 5.847999047964204, - "learning_rate": 3.9804795670394096e-07, - "loss": 0.9793, - "num_input_tokens_seen": 142009500, - "step": 6668 - }, - { - "epoch": 0.8018998376720976, - "flos": 15691672864560.0, - "grad_norm": 4.722083726734421, - "learning_rate": 3.975817106805022e-07, - "loss": 0.9391, - "num_input_tokens_seen": 142027920, - "step": 6669 - }, - { - "epoch": 0.8020200805627368, - "flos": 24662229915720.0, - "grad_norm": 5.323756958490514, - "learning_rate": 3.97115707742645e-07, - "loss": 0.8631, - "num_input_tokens_seen": 142048315, - "step": 6670 - }, - { - "epoch": 0.8021403234533758, - "flos": 14278611229680.0, - "grad_norm": 9.342522684079448, - "learning_rate": 3.966499479610599e-07, - "loss": 0.8723, - "num_input_tokens_seen": 142066130, - "step": 6671 - }, - { - "epoch": 0.8022605663440149, - "flos": 19759206181320.0, - "grad_norm": 5.123809676221837, - "learning_rate": 3.9618443140640225e-07, - "loss": 0.8764, - "num_input_tokens_seen": 142084760, - "step": 6672 - }, - { - "epoch": 0.802380809234654, - "flos": 43119054224520.0, - "grad_norm": 0.7893750544266369, - "learning_rate": 3.957191581492918e-07, - "loss": 0.7742, - "num_input_tokens_seen": 142145240, - "step": 6673 - }, - { - "epoch": 0.8025010521252931, - "flos": 10656102127440.0, - "grad_norm": 7.649690768904537, - "learning_rate": 3.952541282603097e-07, - "loss": 0.9235, - "num_input_tokens_seen": 142160065, - "step": 6674 - }, - { - "epoch": 0.8026212950159322, - "flos": 15746997414960.0, - "grad_norm": 8.197329146689192, - "learning_rate": 3.9478934181000013e-07, - "loss": 1.0634, - "num_input_tokens_seen": 142179810, - "step": 6675 - }, - { - "epoch": 0.8027415379065713, - "flos": 12521914689960.0, - "grad_norm": 4.4676352224180995, - "learning_rate": 3.943247988688714e-07, - "loss": 1.07, - "num_input_tokens_seen": 142198225, - "step": 6676 - }, - { - "epoch": 0.8028617807972104, - "flos": 15614603720040.0, - "grad_norm": 4.57183968875205, - "learning_rate": 3.938604995073933e-07, - "loss": 0.9471, - "num_input_tokens_seen": 142216415, - "step": 6677 - }, - { - "epoch": 0.8029820236878494, - "flos": 18811594908000.0, - "grad_norm": 4.199902061047102, - "learning_rate": 3.9339644379600157e-07, - "loss": 
0.891, - "num_input_tokens_seen": 142235965, - "step": 6678 - }, - { - "epoch": 0.8031022665784886, - "flos": 12522343951800.0, - "grad_norm": 4.165390633059109, - "learning_rate": 3.929326318050907e-07, - "loss": 0.9407, - "num_input_tokens_seen": 142253355, - "step": 6679 - }, - { - "epoch": 0.8032225094691277, - "flos": 10922330610600.0, - "grad_norm": 3.6924965837652666, - "learning_rate": 3.924690636050225e-07, - "loss": 1.0123, - "num_input_tokens_seen": 142270485, - "step": 6680 - }, - { - "epoch": 0.8033427523597667, - "flos": 18633289761600.0, - "grad_norm": 4.268243789259895, - "learning_rate": 3.9200573926611915e-07, - "loss": 0.9462, - "num_input_tokens_seen": 142291620, - "step": 6681 - }, - { - "epoch": 0.8034629952504058, - "flos": 15144370155120.0, - "grad_norm": 5.254258744564639, - "learning_rate": 3.9154265885866613e-07, - "loss": 0.954, - "num_input_tokens_seen": 142310650, - "step": 6682 - }, - { - "epoch": 0.8035832381410449, - "flos": 15378919698720.0, - "grad_norm": 112.50861078157529, - "learning_rate": 3.9107982245291394e-07, - "loss": 0.9594, - "num_input_tokens_seen": 142328495, - "step": 6683 - }, - { - "epoch": 0.803703481031684, - "flos": 14564591305560.0, - "grad_norm": 6.059635704849614, - "learning_rate": 3.9061723011907245e-07, - "loss": 0.9872, - "num_input_tokens_seen": 142347570, - "step": 6684 - }, - { - "epoch": 0.803823723922323, - "flos": 16243544146440.0, - "grad_norm": 5.9450147757254355, - "learning_rate": 3.901548819273179e-07, - "loss": 1.0057, - "num_input_tokens_seen": 142367305, - "step": 6685 - }, - { - "epoch": 0.8039439668129622, - "flos": 15171235229760.0, - "grad_norm": 4.5609504472131235, - "learning_rate": 3.896927779477881e-07, - "loss": 0.9202, - "num_input_tokens_seen": 142386285, - "step": 6686 - }, - { - "epoch": 0.8040642097036013, - "flos": 16924405690080.0, - "grad_norm": 8.137367577947602, - "learning_rate": 3.892309182505833e-07, - "loss": 0.9075, - "num_input_tokens_seen": 142403820, - "step": 6687 - }, - { - "epoch": 0.8041844525942403, - "flos": 18448239072000.0, - "grad_norm": 3.9137463470510165, - "learning_rate": 3.887693029057675e-07, - "loss": 1.0909, - "num_input_tokens_seen": 142423050, - "step": 6688 - }, - { - "epoch": 0.8043046954848795, - "flos": 17922772941360.0, - "grad_norm": 14.344091473672291, - "learning_rate": 3.8830793198336684e-07, - "loss": 1.045, - "num_input_tokens_seen": 142442360, - "step": 6689 - }, - { - "epoch": 0.8044249383755185, - "flos": 29801182963920.0, - "grad_norm": 6.639164591251414, - "learning_rate": 3.878468055533721e-07, - "loss": 0.9328, - "num_input_tokens_seen": 142464620, - "step": 6690 - }, - { - "epoch": 0.8045451812661576, - "flos": 14645953068480.0, - "grad_norm": 6.517402827186333, - "learning_rate": 3.8738592368573464e-07, - "loss": 1.0625, - "num_input_tokens_seen": 142481895, - "step": 6691 - }, - { - "epoch": 0.8046654241567968, - "flos": 21171225323160.0, - "grad_norm": 6.337949007820465, - "learning_rate": 3.8692528645037137e-07, - "loss": 1.1006, - "num_input_tokens_seen": 142500795, - "step": 6692 - }, - { - "epoch": 0.8047856670474358, - "flos": 12517990010280.0, - "grad_norm": 5.8395556732876335, - "learning_rate": 3.8646489391715907e-07, - "loss": 1.0155, - "num_input_tokens_seen": 142514810, - "step": 6693 - }, - { - "epoch": 0.8049059099380749, - "flos": 12121759433880.0, - "grad_norm": 12.077366971685095, - "learning_rate": 3.8600474615593903e-07, - "loss": 1.1034, - "num_input_tokens_seen": 142529145, - "step": 6694 - }, - { - "epoch": 0.805026152828714, - 
"flos": 44533035706200.0, - "grad_norm": 0.8616433118376561, - "learning_rate": 3.8554484323651605e-07, - "loss": 0.8878, - "num_input_tokens_seen": 142590735, - "step": 6695 - }, - { - "epoch": 0.8051463957193531, - "flos": 15405600804000.0, - "grad_norm": 4.822445389837387, - "learning_rate": 3.85085185228657e-07, - "loss": 1.0136, - "num_input_tokens_seen": 142609425, - "step": 6696 - }, - { - "epoch": 0.8052666386099921, - "flos": 22853305643160.0, - "grad_norm": 5.937948109632278, - "learning_rate": 3.8462577220209114e-07, - "loss": 0.9661, - "num_input_tokens_seen": 142629520, - "step": 6697 - }, - { - "epoch": 0.8053868815006313, - "flos": 48089513825520.0, - "grad_norm": 0.6775298015965555, - "learning_rate": 3.8416660422651127e-07, - "loss": 0.837, - "num_input_tokens_seen": 142698890, - "step": 6698 - }, - { - "epoch": 0.8055071243912704, - "flos": 16949032470840.0, - "grad_norm": 4.144024989697139, - "learning_rate": 3.837076813715723e-07, - "loss": 0.9127, - "num_input_tokens_seen": 142718495, - "step": 6699 - }, - { - "epoch": 0.8056273672819094, - "flos": 15144032877960.0, - "grad_norm": 6.012513146019282, - "learning_rate": 3.832490037068941e-07, - "loss": 0.9806, - "num_input_tokens_seen": 142737005, - "step": 6700 - }, - { - "epoch": 0.8057476101725486, - "flos": 18336762109080.0, - "grad_norm": 3.5823206394654887, - "learning_rate": 3.827905713020554e-07, - "loss": 0.9845, - "num_input_tokens_seen": 142754370, - "step": 6701 - }, - { - "epoch": 0.8058678530631876, - "flos": 17451558206520.0, - "grad_norm": 3.890701844708919, - "learning_rate": 3.823323842266017e-07, - "loss": 0.908, - "num_input_tokens_seen": 142773485, - "step": 6702 - }, - { - "epoch": 0.8059880959538267, - "flos": 17608486697520.0, - "grad_norm": 6.13804576179795, - "learning_rate": 3.818744425500393e-07, - "loss": 0.9519, - "num_input_tokens_seen": 142791220, - "step": 6703 - }, - { - "epoch": 0.8061083388444659, - "flos": 15770091117720.0, - "grad_norm": 2.879076182760313, - "learning_rate": 3.8141674634183675e-07, - "loss": 1.0375, - "num_input_tokens_seen": 142809970, - "step": 6704 - }, - { - "epoch": 0.8062285817351049, - "flos": 21410650054800.0, - "grad_norm": 3.082998068512872, - "learning_rate": 3.809592956714278e-07, - "loss": 0.8708, - "num_input_tokens_seen": 142832925, - "step": 6705 - }, - { - "epoch": 0.806348824625744, - "flos": 16191745675440.0, - "grad_norm": 3.459816641386737, - "learning_rate": 3.805020906082057e-07, - "loss": 0.9655, - "num_input_tokens_seen": 142851220, - "step": 6706 - }, - { - "epoch": 0.8064690675163831, - "flos": 16639008183840.0, - "grad_norm": 8.814275468032934, - "learning_rate": 3.8004513122152917e-07, - "loss": 1.0454, - "num_input_tokens_seen": 142869250, - "step": 6707 - }, - { - "epoch": 0.8065893104070222, - "flos": 17110621518960.0, - "grad_norm": 5.447969376823859, - "learning_rate": 3.79588417580718e-07, - "loss": 0.9057, - "num_input_tokens_seen": 142887080, - "step": 6708 - }, - { - "epoch": 0.8067095532976613, - "flos": 15848417386200.0, - "grad_norm": 5.21715078006788, - "learning_rate": 3.791319497550558e-07, - "loss": 0.9792, - "num_input_tokens_seen": 142904630, - "step": 6709 - }, - { - "epoch": 0.8068297961883004, - "flos": 12129271516080.0, - "grad_norm": 4.508335446811446, - "learning_rate": 3.78675727813788e-07, - "loss": 0.9458, - "num_input_tokens_seen": 142921915, - "step": 6710 - }, - { - "epoch": 0.8069500390789395, - "flos": 15643185842040.0, - "grad_norm": 3.7634716921010702, - "learning_rate": 3.782197518261225e-07, - "loss": 
0.9518,
-      "num_input_tokens_seen": 142941075,
-      "step": 6711
-    },
-    {
-      "epoch": 0.8070702819695785,
-      "flos": 13648904264280.0,
-      "grad_norm": 6.148267188615621,
-      "learning_rate": 3.777640218612319e-07,
-      "loss": 1.1975,
-      "num_input_tokens_seen": 142958780,
-      "step": 6712
-    },
[... deleted per-step log entries for steps 6713 through 7040 elided; each record follows the same schema as above: "epoch", "flos", "grad_norm", "learning_rate", "loss", "num_input_tokens_seen", "step" ...]
-    {
-      "epoch": 0.8466301929898394,
-      "flos":
9690149693160.0, - "grad_norm": 7.358645652055786, - "learning_rate": 2.415796215659134e-07, - "loss": 0.8492, - "num_input_tokens_seen": 150267675, - "step": 7041 - }, - { - "epoch": 0.8467504358804786, - "flos": 13646359354800.0, - "grad_norm": 5.7944268252059254, - "learning_rate": 2.412086263019939e-07, - "loss": 1.0024, - "num_input_tokens_seen": 150285420, - "step": 7042 - }, - { - "epoch": 0.8468706787711177, - "flos": 15143695600800.0, - "grad_norm": 4.924820082423142, - "learning_rate": 2.408378978453276e-07, - "loss": 1.0301, - "num_input_tokens_seen": 150305260, - "step": 7043 - }, - { - "epoch": 0.8469909216617567, - "flos": 46448132491800.0, - "grad_norm": 0.7949748103631576, - "learning_rate": 2.404674362521533e-07, - "loss": 0.89, - "num_input_tokens_seen": 150363475, - "step": 7044 - }, - { - "epoch": 0.8471111645523959, - "flos": 13675002799920.0, - "grad_norm": 4.23938795134703, - "learning_rate": 2.4009724157866997e-07, - "loss": 0.9688, - "num_input_tokens_seen": 150380255, - "step": 7045 - }, - { - "epoch": 0.8472314074430349, - "flos": 15640610271000.0, - "grad_norm": 5.4940460229479795, - "learning_rate": 2.3972731388103564e-07, - "loss": 0.9884, - "num_input_tokens_seen": 150398455, - "step": 7046 - }, - { - "epoch": 0.847351650333674, - "flos": 41422465438560.0, - "grad_norm": 0.80387411396237, - "learning_rate": 2.393576532153687e-07, - "loss": 0.8785, - "num_input_tokens_seen": 150461960, - "step": 7047 - }, - { - "epoch": 0.8474718932243132, - "flos": 29493912669240.0, - "grad_norm": 0.9515589057841956, - "learning_rate": 2.389882596377453e-07, - "loss": 0.8534, - "num_input_tokens_seen": 150515945, - "step": 7048 - }, - { - "epoch": 0.8475921361149522, - "flos": 27258402875880.0, - "grad_norm": 5.5782275791513465, - "learning_rate": 2.386191332042031e-07, - "loss": 0.9926, - "num_input_tokens_seen": 150537560, - "step": 7049 - }, - { - "epoch": 0.8477123790055913, - "flos": 18055166636280.0, - "grad_norm": 7.7708288061488435, - "learning_rate": 2.3825027397073794e-07, - "loss": 0.9571, - "num_input_tokens_seen": 150557755, - "step": 7050 - }, - { - "epoch": 0.8478326218962304, - "flos": 21540529501800.0, - "grad_norm": 37.62537959214664, - "learning_rate": 2.3788168199330515e-07, - "loss": 0.9039, - "num_input_tokens_seen": 150579035, - "step": 7051 - }, - { - "epoch": 0.8479528647868695, - "flos": 27281557901760.0, - "grad_norm": 2.986287248330444, - "learning_rate": 2.3751335732782074e-07, - "loss": 0.9624, - "num_input_tokens_seen": 150600015, - "step": 7052 - }, - { - "epoch": 0.8480731076775085, - "flos": 14880226658040.0, - "grad_norm": 4.061288506256935, - "learning_rate": 2.371453000301582e-07, - "loss": 1.0243, - "num_input_tokens_seen": 150618420, - "step": 7053 - }, - { - "epoch": 0.8481933505681477, - "flos": 23244722354640.0, - "grad_norm": 5.078041428098376, - "learning_rate": 2.3677751015615222e-07, - "loss": 0.9702, - "num_input_tokens_seen": 150640215, - "step": 7054 - }, - { - "epoch": 0.8483135934587868, - "flos": 14724861906600.0, - "grad_norm": 3.590661222509239, - "learning_rate": 2.3640998776159593e-07, - "loss": 1.0743, - "num_input_tokens_seen": 150657440, - "step": 7055 - }, - { - "epoch": 0.8484338363494258, - "flos": 15380176822680.0, - "grad_norm": 4.399124335805365, - "learning_rate": 2.3604273290224253e-07, - "loss": 1.0383, - "num_input_tokens_seen": 150677875, - "step": 7056 - }, - { - "epoch": 0.848554079240065, - "flos": 10608964213560.0, - "grad_norm": 11.61716779798262, - "learning_rate": 2.356757456338039e-07, - "loss": 
0.9691, - "num_input_tokens_seen": 150695080, - "step": 7057 - }, - { - "epoch": 0.848674322130704, - "flos": 48736887397560.0, - "grad_norm": 1.7661575594480614, - "learning_rate": 2.3530902601195147e-07, - "loss": 0.8796, - "num_input_tokens_seen": 150763290, - "step": 7058 - }, - { - "epoch": 0.8487945650213431, - "flos": 13096327766520.0, - "grad_norm": 5.055542485892108, - "learning_rate": 2.34942574092317e-07, - "loss": 1.016, - "num_input_tokens_seen": 150778260, - "step": 7059 - }, - { - "epoch": 0.8489148079119821, - "flos": 16687709837280.0, - "grad_norm": 6.178683077546048, - "learning_rate": 2.3457638993049045e-07, - "loss": 0.9845, - "num_input_tokens_seen": 150795970, - "step": 7060 - }, - { - "epoch": 0.8490350508026213, - "flos": 14147229366240.0, - "grad_norm": 8.149054031236012, - "learning_rate": 2.3421047358202252e-07, - "loss": 0.8642, - "num_input_tokens_seen": 150814540, - "step": 7061 - }, - { - "epoch": 0.8491552936932604, - "flos": 17268071256480.0, - "grad_norm": 6.171488380394611, - "learning_rate": 2.3384482510242144e-07, - "loss": 1.0391, - "num_input_tokens_seen": 150832120, - "step": 7062 - }, - { - "epoch": 0.8492755365838994, - "flos": 16004211399480.0, - "grad_norm": 9.249772352561914, - "learning_rate": 2.3347944454715575e-07, - "loss": 0.9969, - "num_input_tokens_seen": 150848230, - "step": 7063 - }, - { - "epoch": 0.8493957794745386, - "flos": 19208929300560.0, - "grad_norm": 4.259099979155959, - "learning_rate": 2.331143319716542e-07, - "loss": 0.8915, - "num_input_tokens_seen": 150867480, - "step": 7064 - }, - { - "epoch": 0.8495160223651776, - "flos": 21279728114760.0, - "grad_norm": 4.359817755853771, - "learning_rate": 2.3274948743130363e-07, - "loss": 0.8711, - "num_input_tokens_seen": 150887035, - "step": 7065 - }, - { - "epoch": 0.8496362652558167, - "flos": 16402342992600.0, - "grad_norm": 8.249993988959694, - "learning_rate": 2.3238491098145085e-07, - "loss": 1.0273, - "num_input_tokens_seen": 150906285, - "step": 7066 - }, - { - "epoch": 0.8497565081464559, - "flos": 10317526380000.0, - "grad_norm": 9.668841895546448, - "learning_rate": 2.3202060267740141e-07, - "loss": 0.9442, - "num_input_tokens_seen": 150923530, - "step": 7067 - }, - { - "epoch": 0.8498767510370949, - "flos": 15008113103640.0, - "grad_norm": 8.635157698984317, - "learning_rate": 2.3165656257442044e-07, - "loss": 0.9947, - "num_input_tokens_seen": 150941770, - "step": 7068 - }, - { - "epoch": 0.849996993927734, - "flos": 16817895899880.0, - "grad_norm": 5.7492217490904665, - "learning_rate": 2.31292790727734e-07, - "loss": 1.1333, - "num_input_tokens_seen": 150959055, - "step": 7069 - }, - { - "epoch": 0.8501172368183731, - "flos": 14593173427560.0, - "grad_norm": 4.8777495235434865, - "learning_rate": 2.3092928719252392e-07, - "loss": 1.0262, - "num_input_tokens_seen": 150977175, - "step": 7070 - }, - { - "epoch": 0.8502374797090122, - "flos": 15825354345000.0, - "grad_norm": 10.392860206599806, - "learning_rate": 2.3056605202393475e-07, - "loss": 1.0014, - "num_input_tokens_seen": 150994455, - "step": 7071 - }, - { - "epoch": 0.8503577225996513, - "flos": 16822403149200.0, - "grad_norm": 8.042046677961265, - "learning_rate": 2.3020308527706888e-07, - "loss": 0.908, - "num_input_tokens_seen": 151013590, - "step": 7072 - }, - { - "epoch": 0.8504779654902904, - "flos": 19049026638240.0, - "grad_norm": 2.955048434252694, - "learning_rate": 2.2984038700698715e-07, - "loss": 1.1107, - "num_input_tokens_seen": 151032620, - "step": 7073 - }, - { - "epoch": 0.8505982083809295, - 
"flos": 18840636953400.0, - "grad_norm": 4.281229951909036, - "learning_rate": 2.2947795726871222e-07, - "loss": 1.0224, - "num_input_tokens_seen": 151053365, - "step": 7074 - }, - { - "epoch": 0.8507184512715685, - "flos": 14410238385600.0, - "grad_norm": 4.252681335135282, - "learning_rate": 2.2911579611722253e-07, - "loss": 1.0839, - "num_input_tokens_seen": 151072230, - "step": 7075 - }, - { - "epoch": 0.8508386941622077, - "flos": 13491454526760.0, - "grad_norm": 2.826379226638895, - "learning_rate": 2.2875390360745905e-07, - "loss": 1.099, - "num_input_tokens_seen": 151091355, - "step": 7076 - }, - { - "epoch": 0.8509589370528468, - "flos": 11628616135560.0, - "grad_norm": 7.621839524953319, - "learning_rate": 2.2839227979432008e-07, - "loss": 1.0025, - "num_input_tokens_seen": 151108725, - "step": 7077 - }, - { - "epoch": 0.8510791799434858, - "flos": 12941422938480.0, - "grad_norm": 7.025071069758387, - "learning_rate": 2.2803092473266373e-07, - "loss": 1.0753, - "num_input_tokens_seen": 151125970, - "step": 7078 - }, - { - "epoch": 0.851199422834125, - "flos": 16665474658200.0, - "grad_norm": 5.967924210708915, - "learning_rate": 2.2766983847730724e-07, - "loss": 1.0787, - "num_input_tokens_seen": 151145360, - "step": 7079 - }, - { - "epoch": 0.851319665724764, - "flos": 11525387132280.0, - "grad_norm": 3.970937171264164, - "learning_rate": 2.2730902108302663e-07, - "loss": 0.8941, - "num_input_tokens_seen": 151161995, - "step": 7080 - }, - { - "epoch": 0.8514399086154031, - "flos": 13465785252960.0, - "grad_norm": 4.150677379799789, - "learning_rate": 2.269484726045583e-07, - "loss": 0.9197, - "num_input_tokens_seen": 151180630, - "step": 7081 - }, - { - "epoch": 0.8515601515060423, - "flos": 17477288803440.0, - "grad_norm": 8.175628675266498, - "learning_rate": 2.2658819309659672e-07, - "loss": 1.0169, - "num_input_tokens_seen": 151200550, - "step": 7082 - }, - { - "epoch": 0.8516803943966813, - "flos": 13852970669160.0, - "grad_norm": 4.49274262025225, - "learning_rate": 2.2622818261379706e-07, - "loss": 1.0701, - "num_input_tokens_seen": 151217290, - "step": 7083 - }, - { - "epoch": 0.8518006372873204, - "flos": 14383066695360.0, - "grad_norm": 5.089168992771541, - "learning_rate": 2.2586844121077142e-07, - "loss": 0.9832, - "num_input_tokens_seen": 151235520, - "step": 7084 - }, - { - "epoch": 0.8519208801779595, - "flos": 17162511974640.0, - "grad_norm": 26.334163346871065, - "learning_rate": 2.2550896894209215e-07, - "loss": 0.9461, - "num_input_tokens_seen": 151254755, - "step": 7085 - }, - { - "epoch": 0.8520411230685986, - "flos": 45126673580880.0, - "grad_norm": 0.6945146633780823, - "learning_rate": 2.2514976586229184e-07, - "loss": 0.8189, - "num_input_tokens_seen": 151322420, - "step": 7086 - }, - { - "epoch": 0.8521613659592376, - "flos": 47139112350240.0, - "grad_norm": 0.8722033084214446, - "learning_rate": 2.247908320258609e-07, - "loss": 0.854, - "num_input_tokens_seen": 151382230, - "step": 7087 - }, - { - "epoch": 0.8522816088498768, - "flos": 16428901451640.0, - "grad_norm": 12.50875181903904, - "learning_rate": 2.2443216748724914e-07, - "loss": 1.0045, - "num_input_tokens_seen": 151402660, - "step": 7088 - }, - { - "epoch": 0.8524018517405159, - "flos": 22642309725720.0, - "grad_norm": 4.903074172116946, - "learning_rate": 2.2407377230086588e-07, - "loss": 0.9726, - "num_input_tokens_seen": 151424735, - "step": 7089 - }, - { - "epoch": 0.8525220946311549, - "flos": 13249914147480.0, - "grad_norm": 10.835886453894727, - "learning_rate": 2.23715646521079e-07, 
- "loss": 1.0633, - "num_input_tokens_seen": 151441975, - "step": 7090 - }, - { - "epoch": 0.852642337521794, - "flos": 15480125039040.0, - "grad_norm": 4.383518318556579, - "learning_rate": 2.2335779020221724e-07, - "loss": 1.0626, - "num_input_tokens_seen": 151458315, - "step": 7091 - }, - { - "epoch": 0.8527625804124331, - "flos": 49440229412760.0, - "grad_norm": 0.8473428243427683, - "learning_rate": 2.2300020339856497e-07, - "loss": 0.8526, - "num_input_tokens_seen": 151520720, - "step": 7092 - }, - { - "epoch": 0.8528828233030722, - "flos": 19208040115320.0, - "grad_norm": 5.146582400928397, - "learning_rate": 2.2264288616436966e-07, - "loss": 1.0084, - "num_input_tokens_seen": 151540695, - "step": 7093 - }, - { - "epoch": 0.8530030661937112, - "flos": 12385718961600.0, - "grad_norm": 4.682883876171293, - "learning_rate": 2.222858385538351e-07, - "loss": 0.9631, - "num_input_tokens_seen": 151557215, - "step": 7094 - }, - { - "epoch": 0.8531233090843504, - "flos": 15744667136400.0, - "grad_norm": 21.065083964968473, - "learning_rate": 2.2192906062112527e-07, - "loss": 0.9036, - "num_input_tokens_seen": 151576810, - "step": 7095 - }, - { - "epoch": 0.8532435519749895, - "flos": 26866587564120.0, - "grad_norm": 3.64694948095838, - "learning_rate": 2.2157255242036377e-07, - "loss": 0.9308, - "num_input_tokens_seen": 151600195, - "step": 7096 - }, - { - "epoch": 0.8533637948656285, - "flos": 15197057811360.0, - "grad_norm": 4.090715754400541, - "learning_rate": 2.2121631400563135e-07, - "loss": 0.9626, - "num_input_tokens_seen": 151619745, - "step": 7097 - }, - { - "epoch": 0.8534840377562677, - "flos": 38159694108240.0, - "grad_norm": 0.834965006500502, - "learning_rate": 2.208603454309701e-07, - "loss": 0.8328, - "num_input_tokens_seen": 151677555, - "step": 7098 - }, - { - "epoch": 0.8536042806469067, - "flos": 14777304270360.0, - "grad_norm": 9.557903306949699, - "learning_rate": 2.2050464675037994e-07, - "loss": 0.9285, - "num_input_tokens_seen": 151695900, - "step": 7099 - }, - { - "epoch": 0.8537245235375458, - "flos": 17556565580280.0, - "grad_norm": 7.378361598563084, - "learning_rate": 2.2014921801782016e-07, - "loss": 0.9448, - "num_input_tokens_seen": 151715110, - "step": 7100 - }, - { - "epoch": 0.853844766428185, - "flos": 17342810122440.0, - "grad_norm": 8.085783701737826, - "learning_rate": 2.1979405928720872e-07, - "loss": 0.9703, - "num_input_tokens_seen": 151734485, - "step": 7101 - }, - { - "epoch": 0.853965009318824, - "flos": 14878448287560.0, - "grad_norm": 3.9799355101991587, - "learning_rate": 2.1943917061242257e-07, - "loss": 1.0225, - "num_input_tokens_seen": 151754060, - "step": 7102 - }, - { - "epoch": 0.8540852522094631, - "flos": 17211213628080.0, - "grad_norm": 7.067761916929878, - "learning_rate": 2.1908455204729903e-07, - "loss": 0.8859, - "num_input_tokens_seen": 151772930, - "step": 7103 - }, - { - "epoch": 0.8542054951001022, - "flos": 17845182550320.0, - "grad_norm": 6.615311934425019, - "learning_rate": 2.1873020364563265e-07, - "loss": 1.0009, - "num_input_tokens_seen": 151791715, - "step": 7104 - }, - { - "epoch": 0.8543257379907413, - "flos": 17294384423040.0, - "grad_norm": 9.018667147461382, - "learning_rate": 2.183761254611789e-07, - "loss": 0.9778, - "num_input_tokens_seen": 151811760, - "step": 7105 - }, - { - "epoch": 0.8544459808813804, - "flos": 39551139343320.0, - "grad_norm": 5.345581989915886, - "learning_rate": 2.1802231754764987e-07, - "loss": 0.9254, - "num_input_tokens_seen": 151836920, - "step": 7106 - }, - { - "epoch": 
0.8545662237720195, - "flos": 18343476990720.0, - "grad_norm": 8.844544068397875, - "learning_rate": 2.17668779958718e-07, - "loss": 0.9915, - "num_input_tokens_seen": 151859220, - "step": 7107 - }, - { - "epoch": 0.8546864666626586, - "flos": 7801825997520.0, - "grad_norm": 6.538408508776177, - "learning_rate": 2.1731551274801553e-07, - "loss": 1.0163, - "num_input_tokens_seen": 151875380, - "step": 7108 - }, - { - "epoch": 0.8548067095532976, - "flos": 18159346147920.0, - "grad_norm": 10.403948371826573, - "learning_rate": 2.169625159691324e-07, - "loss": 0.8247, - "num_input_tokens_seen": 151894975, - "step": 7109 - }, - { - "epoch": 0.8549269524439368, - "flos": 17582786762160.0, - "grad_norm": 16.12500851653118, - "learning_rate": 2.1660978967561784e-07, - "loss": 0.9614, - "num_input_tokens_seen": 151914030, - "step": 7110 - }, - { - "epoch": 0.8550471953345758, - "flos": 14066388849840.0, - "grad_norm": 26.430300622876484, - "learning_rate": 2.1625733392098035e-07, - "loss": 1.0179, - "num_input_tokens_seen": 151929360, - "step": 7111 - }, - { - "epoch": 0.8551674382252149, - "flos": 16219131996600.0, - "grad_norm": 4.683251044657978, - "learning_rate": 2.159051487586867e-07, - "loss": 1.0228, - "num_input_tokens_seen": 151949210, - "step": 7112 - }, - { - "epoch": 0.8552876811158541, - "flos": 14645799760680.0, - "grad_norm": 5.453820651477854, - "learning_rate": 2.155532342421642e-07, - "loss": 0.9514, - "num_input_tokens_seen": 151966930, - "step": 7113 - }, - { - "epoch": 0.8554079240064931, - "flos": 16428932113200.0, - "grad_norm": 4.206289695337047, - "learning_rate": 2.1520159042479636e-07, - "loss": 1.0133, - "num_input_tokens_seen": 151984940, - "step": 7114 - }, - { - "epoch": 0.8555281668971322, - "flos": 15721634756760.0, - "grad_norm": 3.531848957949303, - "learning_rate": 2.148502173599287e-07, - "loss": 0.938, - "num_input_tokens_seen": 152002800, - "step": 7115 - }, - { - "epoch": 0.8556484097877713, - "flos": 22197530803680.0, - "grad_norm": 4.343903682926564, - "learning_rate": 2.1449911510086372e-07, - "loss": 0.8826, - "num_input_tokens_seen": 152021990, - "step": 7116 - }, - { - "epoch": 0.8557686526784104, - "flos": 17294384423040.0, - "grad_norm": 4.242678675308672, - "learning_rate": 2.141482837008628e-07, - "loss": 0.998, - "num_input_tokens_seen": 152042250, - "step": 7117 - }, - { - "epoch": 0.8558888955690495, - "flos": 12548350502760.0, - "grad_norm": 7.940884086528627, - "learning_rate": 2.1379772321314826e-07, - "loss": 0.9428, - "num_input_tokens_seen": 152060015, - "step": 7118 - }, - { - "epoch": 0.8560091384596886, - "flos": 13596829839240.0, - "grad_norm": 4.544790937359676, - "learning_rate": 2.1344743369089802e-07, - "loss": 1.047, - "num_input_tokens_seen": 152075515, - "step": 7119 - }, - { - "epoch": 0.8561293813503277, - "flos": 17005644806760.0, - "grad_norm": 8.096101515001584, - "learning_rate": 2.130974151872522e-07, - "loss": 1.0542, - "num_input_tokens_seen": 152095570, - "step": 7120 - }, - { - "epoch": 0.8562496242409667, - "flos": 16009914449640.0, - "grad_norm": 6.9128243065029595, - "learning_rate": 2.1274766775530773e-07, - "loss": 1.0092, - "num_input_tokens_seen": 152115155, - "step": 7121 - }, - { - "epoch": 0.8563698671316058, - "flos": 10391222752920.0, - "grad_norm": 3.888884618544883, - "learning_rate": 2.1239819144812077e-07, - "loss": 1.0231, - "num_input_tokens_seen": 152129335, - "step": 7122 - }, - { - "epoch": 0.856490110022245, - "flos": 27967877203080.0, - "grad_norm": 3.223240506933907, - "learning_rate": 
2.1204898631870716e-07, - "loss": 0.9271, - "num_input_tokens_seen": 152153945, - "step": 7123 - }, - { - "epoch": 0.856610352912884, - "flos": 20702892774960.0, - "grad_norm": 5.55743125828397, - "learning_rate": 2.1170005242004006e-07, - "loss": 0.9763, - "num_input_tokens_seen": 152175015, - "step": 7124 - }, - { - "epoch": 0.8567305958035231, - "flos": 16979392963320.0, - "grad_norm": 6.621766362867562, - "learning_rate": 2.1135138980505384e-07, - "loss": 0.998, - "num_input_tokens_seen": 152195405, - "step": 7125 - }, - { - "epoch": 0.8568508386941622, - "flos": 15773310581520.0, - "grad_norm": 3.7767213547313996, - "learning_rate": 2.110029985266395e-07, - "loss": 0.9485, - "num_input_tokens_seen": 152214830, - "step": 7126 - }, - { - "epoch": 0.8569710815848013, - "flos": 12256606053600.0, - "grad_norm": 9.402061366241464, - "learning_rate": 2.1065487863764787e-07, - "loss": 0.9672, - "num_input_tokens_seen": 152232895, - "step": 7127 - }, - { - "epoch": 0.8570913244754403, - "flos": 16874416251120.0, - "grad_norm": 8.04496810688646, - "learning_rate": 2.1030703019088846e-07, - "loss": 1.0873, - "num_input_tokens_seen": 152253245, - "step": 7128 - }, - { - "epoch": 0.8572115673660795, - "flos": 14227334005200.0, - "grad_norm": 4.996589260269434, - "learning_rate": 2.099594532391291e-07, - "loss": 0.9301, - "num_input_tokens_seen": 152271650, - "step": 7129 - }, - { - "epoch": 0.8573318102567186, - "flos": 19337919562320.0, - "grad_norm": 7.876886535408468, - "learning_rate": 2.0961214783509806e-07, - "loss": 1.0173, - "num_input_tokens_seen": 152294250, - "step": 7130 - }, - { - "epoch": 0.8574520531473576, - "flos": 17739960545640.0, - "grad_norm": 6.445668195677585, - "learning_rate": 2.0926511403148051e-07, - "loss": 0.9797, - "num_input_tokens_seen": 152312935, - "step": 7131 - }, - { - "epoch": 0.8575722960379968, - "flos": 13309071392880.0, - "grad_norm": 4.31197541957778, - "learning_rate": 2.0891835188092143e-07, - "loss": 0.9728, - "num_input_tokens_seen": 152329655, - "step": 7132 - }, - { - "epoch": 0.8576925389286358, - "flos": 15773463889320.0, - "grad_norm": 5.535339740088951, - "learning_rate": 2.0857186143602434e-07, - "loss": 1.0449, - "num_input_tokens_seen": 152348020, - "step": 7133 - }, - { - "epoch": 0.8578127818192749, - "flos": 16272371560920.0, - "grad_norm": 6.060917598635637, - "learning_rate": 2.0822564274935094e-07, - "loss": 0.9036, - "num_input_tokens_seen": 152367165, - "step": 7134 - }, - { - "epoch": 0.8579330247099141, - "flos": 24846544727880.0, - "grad_norm": 7.015340616626192, - "learning_rate": 2.078796958734239e-07, - "loss": 0.8924, - "num_input_tokens_seen": 152389605, - "step": 7135 - }, - { - "epoch": 0.8580532676005531, - "flos": 14016552718680.0, - "grad_norm": 6.407058888052357, - "learning_rate": 2.0753402086072124e-07, - "loss": 0.9679, - "num_input_tokens_seen": 152407955, - "step": 7136 - }, - { - "epoch": 0.8581735104911922, - "flos": 16159729458720.0, - "grad_norm": 18.032853556025096, - "learning_rate": 2.071886177636828e-07, - "loss": 0.9878, - "num_input_tokens_seen": 152424460, - "step": 7137 - }, - { - "epoch": 0.8582937533818313, - "flos": 16455398587560.0, - "grad_norm": 6.329821097932858, - "learning_rate": 2.0684348663470575e-07, - "loss": 1.0517, - "num_input_tokens_seen": 152444360, - "step": 7138 - }, - { - "epoch": 0.8584139962724704, - "flos": 13831624675320.0, - "grad_norm": 3.998313548293674, - "learning_rate": 2.0649862752614555e-07, - "loss": 0.8442, - "num_input_tokens_seen": 152462790, - "step": 7139 - }, - { - 
"epoch": 0.8585342391631094, - "flos": 51048981298560.0, - "grad_norm": 0.7754820570379506, - "learning_rate": 2.0615404049031838e-07, - "loss": 0.8234, - "num_input_tokens_seen": 152519480, - "step": 7140 - }, - { - "epoch": 0.8586544820537486, - "flos": 7591198018800.0, - "grad_norm": 17.01145997578728, - "learning_rate": 2.0580972557949616e-07, - "loss": 1.001, - "num_input_tokens_seen": 152534290, - "step": 7141 - }, - { - "epoch": 0.8587747249443877, - "flos": 46401423839760.0, - "grad_norm": 0.7969236172075765, - "learning_rate": 2.054656828459125e-07, - "loss": 0.7786, - "num_input_tokens_seen": 152598120, - "step": 7142 - }, - { - "epoch": 0.8588949678350267, - "flos": 19104136557720.0, - "grad_norm": 3.587421450934637, - "learning_rate": 2.051219123417578e-07, - "loss": 0.9896, - "num_input_tokens_seen": 152617900, - "step": 7143 - }, - { - "epoch": 0.8590152107256659, - "flos": 18579161012040.0, - "grad_norm": 3.875462904322311, - "learning_rate": 2.0477841411918196e-07, - "loss": 0.8215, - "num_input_tokens_seen": 152637145, - "step": 7144 - }, - { - "epoch": 0.859135453616305, - "flos": 18605504840160.0, - "grad_norm": 4.503251490513983, - "learning_rate": 2.0443518823029326e-07, - "loss": 0.9533, - "num_input_tokens_seen": 152657405, - "step": 7145 - }, - { - "epoch": 0.859255696506944, - "flos": 9139320904320.0, - "grad_norm": 5.1259954462704655, - "learning_rate": 2.0409223472715854e-07, - "loss": 0.9791, - "num_input_tokens_seen": 152674270, - "step": 7146 - }, - { - "epoch": 0.8593759393975832, - "flos": 13096297104960.0, - "grad_norm": 5.632672650721463, - "learning_rate": 2.0374955366180434e-07, - "loss": 0.9654, - "num_input_tokens_seen": 152691630, - "step": 7147 - }, - { - "epoch": 0.8594961822882222, - "flos": 15773341243080.0, - "grad_norm": 5.50126356002291, - "learning_rate": 2.034071450862147e-07, - "loss": 0.946, - "num_input_tokens_seen": 152708820, - "step": 7148 - }, - { - "epoch": 0.8596164251788613, - "flos": 16560007361040.0, - "grad_norm": 4.767898567554503, - "learning_rate": 2.030650090523327e-07, - "loss": 0.9949, - "num_input_tokens_seen": 152727730, - "step": 7149 - }, - { - "epoch": 0.8597366680695004, - "flos": 22564688673120.0, - "grad_norm": 3.981317281828212, - "learning_rate": 2.0272314561205995e-07, - "loss": 0.8333, - "num_input_tokens_seen": 152747845, - "step": 7150 - }, - { - "epoch": 0.8598569109601395, - "flos": 15117566403600.0, - "grad_norm": 4.387553705392128, - "learning_rate": 2.023815548172567e-07, - "loss": 0.9558, - "num_input_tokens_seen": 152767635, - "step": 7151 - }, - { - "epoch": 0.8599771538507786, - "flos": 18107425030680.0, - "grad_norm": 2.7432949514319014, - "learning_rate": 2.0204023671974267e-07, - "loss": 0.8937, - "num_input_tokens_seen": 152786740, - "step": 7152 - }, - { - "epoch": 0.8600973967414177, - "flos": 11837251112880.0, - "grad_norm": 4.663233697757112, - "learning_rate": 2.0169919137129532e-07, - "loss": 1.0434, - "num_input_tokens_seen": 152804900, - "step": 7153 - }, - { - "epoch": 0.8602176396320568, - "flos": 17949270077280.0, - "grad_norm": 15.564762303405839, - "learning_rate": 2.013584188236508e-07, - "loss": 0.923, - "num_input_tokens_seen": 152822525, - "step": 7154 - }, - { - "epoch": 0.8603378825226958, - "flos": 14488380684720.0, - "grad_norm": 7.49256528472715, - "learning_rate": 2.0101791912850396e-07, - "loss": 1.019, - "num_input_tokens_seen": 152841785, - "step": 7155 - }, - { - "epoch": 0.8604581254133349, - "flos": 24921712855680.0, - "grad_norm": 4.47330994472463, - "learning_rate": 
2.006776923375082e-07, - "loss": 0.8625, - "num_input_tokens_seen": 152863160, - "step": 7156 - }, - { - "epoch": 0.860578368303974, - "flos": 16057542948480.0, - "grad_norm": 7.297323535799466, - "learning_rate": 2.003377385022764e-07, - "loss": 0.9369, - "num_input_tokens_seen": 152881705, - "step": 7157 - }, - { - "epoch": 0.8606986111946131, - "flos": 15143879570160.0, - "grad_norm": 8.685816759566237, - "learning_rate": 1.9999805767437826e-07, - "loss": 0.9985, - "num_input_tokens_seen": 152900315, - "step": 7158 - }, - { - "epoch": 0.8608188540852522, - "flos": 20572032158040.0, - "grad_norm": 5.528549310887094, - "learning_rate": 1.9965864990534386e-07, - "loss": 0.9417, - "num_input_tokens_seen": 152920560, - "step": 7159 - }, - { - "epoch": 0.8609390969758913, - "flos": 21174966033480.0, - "grad_norm": 4.461703206367383, - "learning_rate": 1.9931951524666092e-07, - "loss": 1.0034, - "num_input_tokens_seen": 152941370, - "step": 7160 - }, - { - "epoch": 0.8610593398665304, - "flos": 15089720159040.0, - "grad_norm": 7.74634170327746, - "learning_rate": 1.9898065374977534e-07, - "loss": 1.0343, - "num_input_tokens_seen": 152961295, - "step": 7161 - }, - { - "epoch": 0.8611795827571694, - "flos": 10476631841760.0, - "grad_norm": 4.882748046449604, - "learning_rate": 1.9864206546609342e-07, - "loss": 0.9614, - "num_input_tokens_seen": 152979855, - "step": 7162 - }, - { - "epoch": 0.8612998256478086, - "flos": 17241666105240.0, - "grad_norm": 4.218505400171878, - "learning_rate": 1.983037504469771e-07, - "loss": 1.0678, - "num_input_tokens_seen": 152998285, - "step": 7163 - }, - { - "epoch": 0.8614200685384477, - "flos": 15092541022560.0, - "grad_norm": 2.583953465098809, - "learning_rate": 1.9796570874374984e-07, - "loss": 0.8972, - "num_input_tokens_seen": 153018110, - "step": 7164 - }, - { - "epoch": 0.8615403114290867, - "flos": 14197249466760.0, - "grad_norm": 6.484381765339092, - "learning_rate": 1.976279404076917e-07, - "loss": 1.0046, - "num_input_tokens_seen": 153037230, - "step": 7165 - }, - { - "epoch": 0.8616605543197259, - "flos": 21227592366600.0, - "grad_norm": 4.763241231111105, - "learning_rate": 1.9729044549004193e-07, - "loss": 0.9851, - "num_input_tokens_seen": 153058335, - "step": 7166 - }, - { - "epoch": 0.8617807972103649, - "flos": 20596474969440.0, - "grad_norm": 4.350123059516198, - "learning_rate": 1.9695322404199822e-07, - "loss": 0.9339, - "num_input_tokens_seen": 153080100, - "step": 7167 - }, - { - "epoch": 0.861901040101004, - "flos": 19707254402520.0, - "grad_norm": 7.766953659758247, - "learning_rate": 1.9661627611471654e-07, - "loss": 1.0539, - "num_input_tokens_seen": 153099615, - "step": 7168 - }, - { - "epoch": 0.8620212829916432, - "flos": 35571652009560.0, - "grad_norm": 6.412117703885849, - "learning_rate": 1.9627960175931246e-07, - "loss": 0.9217, - "num_input_tokens_seen": 153124035, - "step": 7169 - }, - { - "epoch": 0.8621415258822822, - "flos": 15010014120360.0, - "grad_norm": 4.720818476753168, - "learning_rate": 1.9594320102685847e-07, - "loss": 0.9664, - "num_input_tokens_seen": 153143025, - "step": 7170 - }, - { - "epoch": 0.8622617687729213, - "flos": 15405968742720.0, - "grad_norm": 4.33890378371214, - "learning_rate": 1.956070739683864e-07, - "loss": 0.8578, - "num_input_tokens_seen": 153162080, - "step": 7171 - }, - { - "epoch": 0.8623820116635604, - "flos": 18684383016720.0, - "grad_norm": 2.9312045225995584, - "learning_rate": 1.9527122063488678e-07, - "loss": 0.9623, - "num_input_tokens_seen": 153182915, - "step": 7172 - }, - { - 
"epoch": 0.8625022545541995, - "flos": 13939023650760.0, - "grad_norm": 12.035582874416724, - "learning_rate": 1.9493564107730755e-07, - "loss": 1.0319, - "num_input_tokens_seen": 153202635, - "step": 7173 - }, - { - "epoch": 0.8626224974448385, - "flos": 15563663772720.0, - "grad_norm": 6.560557632005918, - "learning_rate": 1.9460033534655684e-07, - "loss": 0.8449, - "num_input_tokens_seen": 153221715, - "step": 7174 - }, - { - "epoch": 0.8627427403354777, - "flos": 16586197881360.0, - "grad_norm": 3.3109528869300173, - "learning_rate": 1.9426530349349978e-07, - "loss": 1.0608, - "num_input_tokens_seen": 153241885, - "step": 7175 - }, - { - "epoch": 0.8628629832261168, - "flos": 11578258757880.0, - "grad_norm": 3.730864466904441, - "learning_rate": 1.9393054556896038e-07, - "loss": 0.8766, - "num_input_tokens_seen": 153259305, - "step": 7176 - }, - { - "epoch": 0.8629832261167558, - "flos": 20015347011240.0, - "grad_norm": 5.702670144532625, - "learning_rate": 1.9359606162372133e-07, - "loss": 0.9149, - "num_input_tokens_seen": 153280630, - "step": 7177 - }, - { - "epoch": 0.863103469007395, - "flos": 14357489406240.0, - "grad_norm": 4.727200854361962, - "learning_rate": 1.9326185170852293e-07, - "loss": 0.9375, - "num_input_tokens_seen": 153299315, - "step": 7178 - }, - { - "epoch": 0.863223711898034, - "flos": 17425122393720.0, - "grad_norm": 3.4452882796147666, - "learning_rate": 1.9292791587406598e-07, - "loss": 0.9495, - "num_input_tokens_seen": 153317895, - "step": 7179 - }, - { - "epoch": 0.8633439547886731, - "flos": 12521393443440.0, - "grad_norm": 6.260910886360955, - "learning_rate": 1.9259425417100661e-07, - "loss": 1.0907, - "num_input_tokens_seen": 153333730, - "step": 7180 - }, - { - "epoch": 0.8634641976793123, - "flos": 9085774724400.0, - "grad_norm": 7.526258151802802, - "learning_rate": 1.9226086664996234e-07, - "loss": 0.9682, - "num_input_tokens_seen": 153351695, - "step": 7181 - }, - { - "epoch": 0.8635844405699513, - "flos": 16979944871400.0, - "grad_norm": 14.943078950487674, - "learning_rate": 1.9192775336150712e-07, - "loss": 0.9705, - "num_input_tokens_seen": 153371715, - "step": 7182 - }, - { - "epoch": 0.8637046834605904, - "flos": 43267397478720.0, - "grad_norm": 0.7854053614550693, - "learning_rate": 1.915949143561739e-07, - "loss": 0.8028, - "num_input_tokens_seen": 153426110, - "step": 7183 - }, - { - "epoch": 0.8638249263512295, - "flos": 14593755997200.0, - "grad_norm": 3.1720149568395124, - "learning_rate": 1.9126234968445498e-07, - "loss": 0.991, - "num_input_tokens_seen": 153445520, - "step": 7184 - }, - { - "epoch": 0.8639451692418686, - "flos": 18946870789560.0, - "grad_norm": 3.1237314319707497, - "learning_rate": 1.9093005939679884e-07, - "loss": 0.9016, - "num_input_tokens_seen": 153467195, - "step": 7185 - }, - { - "epoch": 0.8640654121325076, - "flos": 10686799897080.0, - "grad_norm": 9.463677284446074, - "learning_rate": 1.9059804354361452e-07, - "loss": 0.989, - "num_input_tokens_seen": 153484690, - "step": 7186 - }, - { - "epoch": 0.8641856550231467, - "flos": 22722230395320.0, - "grad_norm": 2.8849732279733633, - "learning_rate": 1.902663021752684e-07, - "loss": 0.939, - "num_input_tokens_seen": 153505840, - "step": 7187 - }, - { - "epoch": 0.8643058979137859, - "flos": 10581945831120.0, - "grad_norm": 7.297108419797749, - "learning_rate": 1.8993483534208556e-07, - "loss": 1.0521, - "num_input_tokens_seen": 153524470, - "step": 7188 - }, - { - "epoch": 0.8644261408044249, - "flos": 9244604232120.0, - "grad_norm": 7.234096579415601, - 
"learning_rate": 1.8960364309434884e-07, - "loss": 0.9666, - "num_input_tokens_seen": 153541685, - "step": 7189 - }, - { - "epoch": 0.864546383695064, - "flos": 14803770744720.0, - "grad_norm": 24.719342597195016, - "learning_rate": 1.8927272548229967e-07, - "loss": 1.0081, - "num_input_tokens_seen": 153561095, - "step": 7190 - }, - { - "epoch": 0.8646666265857031, - "flos": 15144094201080.0, - "grad_norm": 4.6386324011478335, - "learning_rate": 1.8894208255613876e-07, - "loss": 1.0559, - "num_input_tokens_seen": 153580130, - "step": 7191 - }, - { - "epoch": 0.8647868694763422, - "flos": 14012413408080.0, - "grad_norm": 3.0011430787881324, - "learning_rate": 1.8861171436602397e-07, - "loss": 0.9919, - "num_input_tokens_seen": 153596965, - "step": 7192 - }, - { - "epoch": 0.8649071123669813, - "flos": 18631204775520.0, - "grad_norm": 6.788737720583053, - "learning_rate": 1.882816209620719e-07, - "loss": 1.0365, - "num_input_tokens_seen": 153613395, - "step": 7193 - }, - { - "epoch": 0.8650273552576204, - "flos": 14409809123760.0, - "grad_norm": 3.757006140339646, - "learning_rate": 1.8795180239435738e-07, - "loss": 0.9892, - "num_input_tokens_seen": 153631970, - "step": 7194 - }, - { - "epoch": 0.8651475981482595, - "flos": 17031651357720.0, - "grad_norm": 5.266620847446381, - "learning_rate": 1.8762225871291348e-07, - "loss": 0.9814, - "num_input_tokens_seen": 153647565, - "step": 7195 - }, - { - "epoch": 0.8652678410388985, - "flos": 15402197370840.0, - "grad_norm": 3.043295454671838, - "learning_rate": 1.8729298996773201e-07, - "loss": 1.0368, - "num_input_tokens_seen": 153666035, - "step": 7196 - }, - { - "epoch": 0.8653880839295377, - "flos": 46700220447720.0, - "grad_norm": 0.849363725600036, - "learning_rate": 1.8696399620876301e-07, - "loss": 0.8616, - "num_input_tokens_seen": 153722785, - "step": 7197 - }, - { - "epoch": 0.8655083268201768, - "flos": 12574541023080.0, - "grad_norm": 6.804871093515676, - "learning_rate": 1.866352774859141e-07, - "loss": 1.0112, - "num_input_tokens_seen": 153737730, - "step": 7198 - }, - { - "epoch": 0.8656285697108158, - "flos": 14698027493520.0, - "grad_norm": 4.7144058137928715, - "learning_rate": 1.8630683384905188e-07, - "loss": 0.9065, - "num_input_tokens_seen": 153756780, - "step": 7199 - }, - { - "epoch": 0.865748812601455, - "flos": 13223692965600.0, - "grad_norm": 3.598994930686677, - "learning_rate": 1.8597866534800045e-07, - "loss": 1.1215, - "num_input_tokens_seen": 153771615, - "step": 7200 - }, - { - "epoch": 0.865869055492094, - "flos": 50595070779720.0, - "grad_norm": 6.271206393714718, - "learning_rate": 1.8565077203254398e-07, - "loss": 0.9687, - "num_input_tokens_seen": 153796795, - "step": 7201 - }, - { - "epoch": 0.8659892983827331, - "flos": 12312022588680.0, - "grad_norm": 5.120150621106033, - "learning_rate": 1.8532315395242203e-07, - "loss": 0.9468, - "num_input_tokens_seen": 153812965, - "step": 7202 - }, - { - "epoch": 0.8661095412733723, - "flos": 12679517735280.0, - "grad_norm": 3.9732571937852588, - "learning_rate": 1.849958111573353e-07, - "loss": 0.9392, - "num_input_tokens_seen": 153831290, - "step": 7203 - }, - { - "epoch": 0.8662297841640113, - "flos": 12915998957160.0, - "grad_norm": 3.3127326163901776, - "learning_rate": 1.8466874369694074e-07, - "loss": 0.8703, - "num_input_tokens_seen": 153848705, - "step": 7204 - }, - { - "epoch": 0.8663500270546504, - "flos": 11577645526680.0, - "grad_norm": 8.654481782963128, - "learning_rate": 1.843419516208542e-07, - "loss": 0.9302, - "num_input_tokens_seen": 153865350, - 
"step": 7205 - }, - { - "epoch": 0.8664702699452895, - "flos": 12678843180960.0, - "grad_norm": 5.197154976098082, - "learning_rate": 1.8401543497865047e-07, - "loss": 1.0076, - "num_input_tokens_seen": 153883070, - "step": 7206 - }, - { - "epoch": 0.8665905128359286, - "flos": 21907963325280.0, - "grad_norm": 10.597033146651494, - "learning_rate": 1.836891938198608e-07, - "loss": 0.8619, - "num_input_tokens_seen": 153903215, - "step": 7207 - }, - { - "epoch": 0.8667107557265676, - "flos": 13225900597920.0, - "grad_norm": 11.341870028662395, - "learning_rate": 1.8336322819397677e-07, - "loss": 0.9364, - "num_input_tokens_seen": 153920470, - "step": 7208 - }, - { - "epoch": 0.8668309986172068, - "flos": 14252267401560.0, - "grad_norm": 4.949963062803612, - "learning_rate": 1.8303753815044654e-07, - "loss": 0.8364, - "num_input_tokens_seen": 153939495, - "step": 7209 - }, - { - "epoch": 0.8669512415078459, - "flos": 15352698516840.0, - "grad_norm": 7.2671542619442535, - "learning_rate": 1.827121237386773e-07, - "loss": 0.9237, - "num_input_tokens_seen": 153956660, - "step": 7210 - }, - { - "epoch": 0.8670714843984849, - "flos": 12540562466520.0, - "grad_norm": 8.023445582548266, - "learning_rate": 1.8238698500803374e-07, - "loss": 0.9796, - "num_input_tokens_seen": 153969145, - "step": 7211 - }, - { - "epoch": 0.8671917272891241, - "flos": 43453055851440.0, - "grad_norm": 0.8369934329273189, - "learning_rate": 1.820621220078391e-07, - "loss": 0.8483, - "num_input_tokens_seen": 154032775, - "step": 7212 - }, - { - "epoch": 0.8673119701797631, - "flos": 14517300083880.0, - "grad_norm": 5.256568069904085, - "learning_rate": 1.8173753478737553e-07, - "loss": 0.896, - "num_input_tokens_seen": 154052930, - "step": 7213 - }, - { - "epoch": 0.8674322130704022, - "flos": 13938901004520.0, - "grad_norm": 3.8995065083425566, - "learning_rate": 1.8141322339588205e-07, - "loss": 1.0153, - "num_input_tokens_seen": 154069990, - "step": 7214 - }, - { - "epoch": 0.8675524559610414, - "flos": 18521352875280.0, - "grad_norm": 5.283575685801036, - "learning_rate": 1.810891878825569e-07, - "loss": 0.9251, - "num_input_tokens_seen": 154089685, - "step": 7215 - }, - { - "epoch": 0.8676726988516804, - "flos": 10634326871760.0, - "grad_norm": 5.555085630675349, - "learning_rate": 1.8076542829655561e-07, - "loss": 0.9325, - "num_input_tokens_seen": 154108210, - "step": 7216 - }, - { - "epoch": 0.8677929417423195, - "flos": 11524283316120.0, - "grad_norm": 9.419971077585062, - "learning_rate": 1.8044194468699203e-07, - "loss": 1.0358, - "num_input_tokens_seen": 154125240, - "step": 7217 - }, - { - "epoch": 0.8679131846329585, - "flos": 13361544418200.0, - "grad_norm": 4.44408218708832, - "learning_rate": 1.8011873710293912e-07, - "loss": 0.9594, - "num_input_tokens_seen": 154143465, - "step": 7218 - }, - { - "epoch": 0.8680334275235977, - "flos": 23981153741160.0, - "grad_norm": 5.8147427733742285, - "learning_rate": 1.7979580559342677e-07, - "loss": 0.9144, - "num_input_tokens_seen": 154163915, - "step": 7219 - }, - { - "epoch": 0.8681536704142367, - "flos": 17556381610920.0, - "grad_norm": 4.599327801227537, - "learning_rate": 1.7947315020744358e-07, - "loss": 0.8964, - "num_input_tokens_seen": 154184730, - "step": 7220 - }, - { - "epoch": 0.8682739133048758, - "flos": 14200070330280.0, - "grad_norm": 4.3853937119753805, - "learning_rate": 1.7915077099393594e-07, - "loss": 1.0216, - "num_input_tokens_seen": 154201050, - "step": 7221 - }, - { - "epoch": 0.868394156195515, - "flos": 11788028212920.0, - "grad_norm": 
3.887251528928597, - "learning_rate": 1.788286680018083e-07, - "loss": 0.9667, - "num_input_tokens_seen": 154219480, - "step": 7222 - }, - { - "epoch": 0.868514399086154, - "flos": 19942570485120.0, - "grad_norm": 9.400473611141596, - "learning_rate": 1.7850684127992443e-07, - "loss": 0.9478, - "num_input_tokens_seen": 154238945, - "step": 7223 - }, - { - "epoch": 0.8686346419767931, - "flos": 14253003279000.0, - "grad_norm": 12.1785392489126, - "learning_rate": 1.7818529087710378e-07, - "loss": 0.931, - "num_input_tokens_seen": 154259020, - "step": 7224 - }, - { - "epoch": 0.8687548848674322, - "flos": 12915447049080.0, - "grad_norm": 6.084067766282423, - "learning_rate": 1.7786401684212637e-07, - "loss": 1.0694, - "num_input_tokens_seen": 154277570, - "step": 7225 - }, - { - "epoch": 0.8688751277580713, - "flos": 50458440241440.0, - "grad_norm": 0.73931832109998, - "learning_rate": 1.7754301922372883e-07, - "loss": 0.8012, - "num_input_tokens_seen": 154326935, - "step": 7226 - }, - { - "epoch": 0.8689953706487104, - "flos": 19155904367160.0, - "grad_norm": 9.084084949252421, - "learning_rate": 1.7722229807060617e-07, - "loss": 1.0377, - "num_input_tokens_seen": 154345235, - "step": 7227 - }, - { - "epoch": 0.8691156135393495, - "flos": 24711146200080.0, - "grad_norm": 6.018340372345381, - "learning_rate": 1.7690185343141172e-07, - "loss": 1.0486, - "num_input_tokens_seen": 154364870, - "step": 7228 - }, - { - "epoch": 0.8692358564299886, - "flos": 13466183853240.0, - "grad_norm": 7.141010966815121, - "learning_rate": 1.7658168535475615e-07, - "loss": 0.9302, - "num_input_tokens_seen": 154382375, - "step": 7229 - }, - { - "epoch": 0.8693560993206276, - "flos": 21645168936840.0, - "grad_norm": 3.04915013589172, - "learning_rate": 1.7626179388920948e-07, - "loss": 0.8722, - "num_input_tokens_seen": 154403375, - "step": 7230 - }, - { - "epoch": 0.8694763422112668, - "flos": 19367176238640.0, - "grad_norm": 54.08356480874643, - "learning_rate": 1.7594217908329866e-07, - "loss": 1.0318, - "num_input_tokens_seen": 154425280, - "step": 7231 - }, - { - "epoch": 0.8695965851019059, - "flos": 18604033085280.0, - "grad_norm": 4.7298058345371725, - "learning_rate": 1.7562284098550895e-07, - "loss": 0.9564, - "num_input_tokens_seen": 154444710, - "step": 7232 - }, - { - "epoch": 0.8697168279925449, - "flos": 48214058161080.0, - "grad_norm": 0.8803182491917777, - "learning_rate": 1.753037796442838e-07, - "loss": 0.8886, - "num_input_tokens_seen": 154503870, - "step": 7233 - }, - { - "epoch": 0.8698370708831841, - "flos": 13989902274960.0, - "grad_norm": 5.7958473466840426, - "learning_rate": 1.74984995108024e-07, - "loss": 0.9766, - "num_input_tokens_seen": 154521520, - "step": 7234 - }, - { - "epoch": 0.8699573137738231, - "flos": 9060749343360.0, - "grad_norm": 4.751861408319067, - "learning_rate": 1.7466648742508981e-07, - "loss": 1.0548, - "num_input_tokens_seen": 154537425, - "step": 7235 - }, - { - "epoch": 0.8700775566644622, - "flos": 12338489063040.0, - "grad_norm": 6.593634292527434, - "learning_rate": 1.7434825664379837e-07, - "loss": 1.0626, - "num_input_tokens_seen": 154555650, - "step": 7236 - }, - { - "epoch": 0.8701977995551013, - "flos": 9689720431320.0, - "grad_norm": 10.56870065200447, - "learning_rate": 1.740303028124246e-07, - "loss": 1.0751, - "num_input_tokens_seen": 154571430, - "step": 7237 - }, - { - "epoch": 0.8703180424457404, - "flos": 21778359832320.0, - "grad_norm": 4.022634600182686, - "learning_rate": 1.7371262597920212e-07, - "loss": 0.9827, - "num_input_tokens_seen": 
154593210, - "step": 7238 - }, - { - "epoch": 0.8704382853363795, - "flos": 13911024098400.0, - "grad_norm": 3.5050060710772777, - "learning_rate": 1.7339522619232195e-07, - "loss": 0.9825, - "num_input_tokens_seen": 154611310, - "step": 7239 - }, - { - "epoch": 0.8705585282270186, - "flos": 18945276388440.0, - "grad_norm": 7.099639464344766, - "learning_rate": 1.730781034999338e-07, - "loss": 0.9628, - "num_input_tokens_seen": 154632610, - "step": 7240 - }, - { - "epoch": 0.8706787711176577, - "flos": 24318135087480.0, - "grad_norm": 6.722162843692433, - "learning_rate": 1.7276125795014497e-07, - "loss": 0.9624, - "num_input_tokens_seen": 154650780, - "step": 7241 - }, - { - "epoch": 0.8707990140082967, - "flos": 10319580704520.0, - "grad_norm": 4.394376950416137, - "learning_rate": 1.7244468959102054e-07, - "loss": 0.8989, - "num_input_tokens_seen": 154667555, - "step": 7242 - }, - { - "epoch": 0.8709192568989359, - "flos": 14252635340280.0, - "grad_norm": 5.668337556919108, - "learning_rate": 1.7212839847058348e-07, - "loss": 1.0784, - "num_input_tokens_seen": 154682405, - "step": 7243 - }, - { - "epoch": 0.871039499789575, - "flos": 11839918668600.0, - "grad_norm": 4.047620549989737, - "learning_rate": 1.718123846368147e-07, - "loss": 0.9698, - "num_input_tokens_seen": 154701170, - "step": 7244 - }, - { - "epoch": 0.871159742680214, - "flos": 14960208650760.0, - "grad_norm": 5.264718993713118, - "learning_rate": 1.714966481376543e-07, - "loss": 0.9466, - "num_input_tokens_seen": 154717570, - "step": 7245 - }, - { - "epoch": 0.8712799855708532, - "flos": 20125904127360.0, - "grad_norm": 3.5441996025123546, - "learning_rate": 1.7118118902099797e-07, - "loss": 1.0294, - "num_input_tokens_seen": 154735375, - "step": 7246 - }, - { - "epoch": 0.8714002284614922, - "flos": 15799593086520.0, - "grad_norm": 75.28818655776116, - "learning_rate": 1.7086600733470146e-07, - "loss": 1.0252, - "num_input_tokens_seen": 154755765, - "step": 7247 - }, - { - "epoch": 0.8715204713521313, - "flos": 15220948714680.0, - "grad_norm": 11.429865055439057, - "learning_rate": 1.7055110312657738e-07, - "loss": 0.9902, - "num_input_tokens_seen": 154774980, - "step": 7248 - }, - { - "epoch": 0.8716407142427703, - "flos": 16664677457640.0, - "grad_norm": 4.628627900715949, - "learning_rate": 1.702364764443962e-07, - "loss": 0.975, - "num_input_tokens_seen": 154793775, - "step": 7249 - }, - { - "epoch": 0.8717609571334095, - "flos": 19911044853360.0, - "grad_norm": 5.312943204455927, - "learning_rate": 1.6992212733588685e-07, - "loss": 0.949, - "num_input_tokens_seen": 154813160, - "step": 7250 - }, - { - "epoch": 0.8718812000240486, - "flos": 18129537563520.0, - "grad_norm": 3.0037282982787192, - "learning_rate": 1.6960805584873538e-07, - "loss": 0.9745, - "num_input_tokens_seen": 154830880, - "step": 7251 - }, - { - "epoch": 0.8720014429146876, - "flos": 16637689736760.0, - "grad_norm": 4.212992716875534, - "learning_rate": 1.6929426203058684e-07, - "loss": 1.0114, - "num_input_tokens_seen": 154851025, - "step": 7252 - }, - { - "epoch": 0.8721216858053268, - "flos": 17320421635560.0, - "grad_norm": 12.462788239324384, - "learning_rate": 1.689807459290431e-07, - "loss": 1.039, - "num_input_tokens_seen": 154869400, - "step": 7253 - }, - { - "epoch": 0.8722419286959658, - "flos": 24159213595080.0, - "grad_norm": 8.451281785525754, - "learning_rate": 1.6866750759166437e-07, - "loss": 0.934, - "num_input_tokens_seen": 154889100, - "step": 7254 - }, - { - "epoch": 0.8723621715866049, - "flos": 13020270453480.0, - 
"grad_norm": 6.357952886809454, - "learning_rate": 1.6835454706596865e-07, - "loss": 0.9961, - "num_input_tokens_seen": 154906650, - "step": 7255 - }, - { - "epoch": 0.8724824144772441, - "flos": 15638494623360.0, - "grad_norm": 7.184155291316848, - "learning_rate": 1.680418643994317e-07, - "loss": 0.9709, - "num_input_tokens_seen": 154924940, - "step": 7256 - }, - { - "epoch": 0.8726026573678831, - "flos": 47756646676320.0, - "grad_norm": 0.9328947762605484, - "learning_rate": 1.6772945963948738e-07, - "loss": 0.913, - "num_input_tokens_seen": 154982825, - "step": 7257 - }, - { - "epoch": 0.8727229002585222, - "flos": 9427968535920.0, - "grad_norm": 9.71945766804094, - "learning_rate": 1.6741733283352733e-07, - "loss": 0.987, - "num_input_tokens_seen": 155000150, - "step": 7258 - }, - { - "epoch": 0.8728431431491613, - "flos": 15483344502840.0, - "grad_norm": 3.3553020538028195, - "learning_rate": 1.6710548402890102e-07, - "loss": 1.0675, - "num_input_tokens_seen": 155020395, - "step": 7259 - }, - { - "epoch": 0.8729633860398004, - "flos": 25815900595320.0, - "grad_norm": 5.428470348908566, - "learning_rate": 1.6679391327291527e-07, - "loss": 0.887, - "num_input_tokens_seen": 155041320, - "step": 7260 - }, - { - "epoch": 0.8730836289304394, - "flos": 11575529879040.0, - "grad_norm": 4.6331072708710765, - "learning_rate": 1.6648262061283492e-07, - "loss": 0.8919, - "num_input_tokens_seen": 155056340, - "step": 7261 - }, - { - "epoch": 0.8732038718210786, - "flos": 15065430655440.0, - "grad_norm": 9.918472211153185, - "learning_rate": 1.6617160609588353e-07, - "loss": 0.9676, - "num_input_tokens_seen": 155075235, - "step": 7262 - }, - { - "epoch": 0.8733241147117177, - "flos": 11755337441880.0, - "grad_norm": 13.575562545894737, - "learning_rate": 1.6586086976924163e-07, - "loss": 0.9362, - "num_input_tokens_seen": 155090455, - "step": 7263 - }, - { - "epoch": 0.8734443576023567, - "flos": 14331574839960.0, - "grad_norm": 5.92080235894853, - "learning_rate": 1.6555041168004747e-07, - "loss": 1.0141, - "num_input_tokens_seen": 155109495, - "step": 7264 - }, - { - "epoch": 0.8735646004929959, - "flos": 12784402462800.0, - "grad_norm": 5.97688705460672, - "learning_rate": 1.6524023187539715e-07, - "loss": 0.9112, - "num_input_tokens_seen": 155127500, - "step": 7265 - }, - { - "epoch": 0.873684843383635, - "flos": 14380123185600.0, - "grad_norm": 4.542846573083034, - "learning_rate": 1.649303304023446e-07, - "loss": 0.9652, - "num_input_tokens_seen": 155146975, - "step": 7266 - }, - { - "epoch": 0.873805086274274, - "flos": 11998349576040.0, - "grad_norm": 4.720078439222394, - "learning_rate": 1.6462070730790246e-07, - "loss": 1.014, - "num_input_tokens_seen": 155165855, - "step": 7267 - }, - { - "epoch": 0.8739253291649132, - "flos": 12784494447480.0, - "grad_norm": 8.063927812344014, - "learning_rate": 1.6431136263903912e-07, - "loss": 1.0047, - "num_input_tokens_seen": 155184575, - "step": 7268 - }, - { - "epoch": 0.8740455720555522, - "flos": 15144523462920.0, - "grad_norm": 3.9384550117188715, - "learning_rate": 1.6400229644268282e-07, - "loss": 0.9621, - "num_input_tokens_seen": 155202650, - "step": 7269 - }, - { - "epoch": 0.8741658149461913, - "flos": 10712806448040.0, - "grad_norm": 3.5101472353145566, - "learning_rate": 1.6369350876571852e-07, - "loss": 1.0356, - "num_input_tokens_seen": 155220525, - "step": 7270 - }, - { - "epoch": 0.8742860578368304, - "flos": 16376888349720.0, - "grad_norm": 6.49410214979204, - "learning_rate": 1.6338499965498874e-07, - "loss": 1.0193, - 
"num_input_tokens_seen": 155238975, - "step": 7271 - }, - { - "epoch": 0.8744063007274695, - "flos": 20045768826840.0, - "grad_norm": 2.7962999658464613, - "learning_rate": 1.630767691572943e-07, - "loss": 1.0008, - "num_input_tokens_seen": 155258715, - "step": 7272 - }, - { - "epoch": 0.8745265436181086, - "flos": 45841457906040.0, - "grad_norm": 0.745309147537337, - "learning_rate": 1.6276881731939306e-07, - "loss": 0.7884, - "num_input_tokens_seen": 155320325, - "step": 7273 - }, - { - "epoch": 0.8746467865087477, - "flos": 20414981020800.0, - "grad_norm": 5.715549261176356, - "learning_rate": 1.6246114418800193e-07, - "loss": 0.9821, - "num_input_tokens_seen": 155340325, - "step": 7274 - }, - { - "epoch": 0.8747670293993868, - "flos": 17056400784720.0, - "grad_norm": 10.71248022943406, - "learning_rate": 1.6215374980979423e-07, - "loss": 0.9912, - "num_input_tokens_seen": 155360455, - "step": 7275 - }, - { - "epoch": 0.8748872722900258, - "flos": 32318293778160.0, - "grad_norm": 4.96444419615709, - "learning_rate": 1.6184663423140133e-07, - "loss": 0.907, - "num_input_tokens_seen": 155380475, - "step": 7276 - }, - { - "epoch": 0.875007515180665, - "flos": 14015356917840.0, - "grad_norm": 3.4095043070439104, - "learning_rate": 1.615397974994126e-07, - "loss": 0.8695, - "num_input_tokens_seen": 155398000, - "step": 7277 - }, - { - "epoch": 0.875127758071304, - "flos": 16140253820040.0, - "grad_norm": 5.2149419539559485, - "learning_rate": 1.6123323966037438e-07, - "loss": 1.0326, - "num_input_tokens_seen": 155416240, - "step": 7278 - }, - { - "epoch": 0.8752480009619431, - "flos": 16638364291080.0, - "grad_norm": 6.888442336071545, - "learning_rate": 1.6092696076079216e-07, - "loss": 1.0128, - "num_input_tokens_seen": 155434335, - "step": 7279 - }, - { - "epoch": 0.8753682438525822, - "flos": 18657303311160.0, - "grad_norm": 3.4298519712040867, - "learning_rate": 1.6062096084712785e-07, - "loss": 0.9646, - "num_input_tokens_seen": 155455405, - "step": 7280 - }, - { - "epoch": 0.8754884867432213, - "flos": 16582395847920.0, - "grad_norm": 8.414027136938222, - "learning_rate": 1.6031523996580098e-07, - "loss": 0.9344, - "num_input_tokens_seen": 155472685, - "step": 7281 - }, - { - "epoch": 0.8756087296338604, - "flos": 8798629509240.0, - "grad_norm": 14.209826140229405, - "learning_rate": 1.6000979816318981e-07, - "loss": 0.8841, - "num_input_tokens_seen": 155490870, - "step": 7282 - }, - { - "epoch": 0.8757289725244994, - "flos": 13439594732640.0, - "grad_norm": 13.41332536559377, - "learning_rate": 1.5970463548562886e-07, - "loss": 0.9757, - "num_input_tokens_seen": 155509745, - "step": 7283 - }, - { - "epoch": 0.8758492154151386, - "flos": 17976809706240.0, - "grad_norm": 14.61895470510535, - "learning_rate": 1.5939975197941192e-07, - "loss": 0.938, - "num_input_tokens_seen": 155531120, - "step": 7284 - }, - { - "epoch": 0.8759694583057777, - "flos": 46947193471200.0, - "grad_norm": 1.3204942837684952, - "learning_rate": 1.5909514769078892e-07, - "loss": 0.7888, - "num_input_tokens_seen": 155595945, - "step": 7285 - }, - { - "epoch": 0.8760897011964167, - "flos": 18106045260480.0, - "grad_norm": 3.7992336654919403, - "learning_rate": 1.5879082266596867e-07, - "loss": 0.9952, - "num_input_tokens_seen": 155617005, - "step": 7286 - }, - { - "epoch": 0.8762099440870559, - "flos": 20649131964120.0, - "grad_norm": 5.598216058779036, - "learning_rate": 1.5848677695111645e-07, - "loss": 0.9615, - "num_input_tokens_seen": 155638325, - "step": 7287 - }, - { - "epoch": 0.8763301869776949, - 
"flos": 15347884651920.0, - "grad_norm": 6.2273095806324825, - "learning_rate": 1.5818301059235562e-07, - "loss": 0.9262, - "num_input_tokens_seen": 155653220, - "step": 7288 - }, - { - "epoch": 0.876450429868334, - "flos": 17529945798120.0, - "grad_norm": 3.8586017281850857, - "learning_rate": 1.578795236357684e-07, - "loss": 1.0413, - "num_input_tokens_seen": 155674405, - "step": 7289 - }, - { - "epoch": 0.8765706727589732, - "flos": 14378160845760.0, - "grad_norm": 4.839380274418377, - "learning_rate": 1.5757631612739218e-07, - "loss": 1.086, - "num_input_tokens_seen": 155687670, - "step": 7290 - }, - { - "epoch": 0.8766909156496122, - "flos": 51114950958360.0, - "grad_norm": 0.8517877123438823, - "learning_rate": 1.572733881132242e-07, - "loss": 0.8991, - "num_input_tokens_seen": 155748035, - "step": 7291 - }, - { - "epoch": 0.8768111585402513, - "flos": 49788831490320.0, - "grad_norm": 0.7552184312518037, - "learning_rate": 1.5697073963921814e-07, - "loss": 0.8443, - "num_input_tokens_seen": 155806995, - "step": 7292 - }, - { - "epoch": 0.8769314014308904, - "flos": 13356761214840.0, - "grad_norm": 5.746802062691381, - "learning_rate": 1.566683707512857e-07, - "loss": 1.0699, - "num_input_tokens_seen": 155824390, - "step": 7293 - }, - { - "epoch": 0.8770516443215295, - "flos": 10581823184880.0, - "grad_norm": 4.206203085197016, - "learning_rate": 1.5636628149529553e-07, - "loss": 1.0205, - "num_input_tokens_seen": 155841900, - "step": 7294 - }, - { - "epoch": 0.8771718872121685, - "flos": 22564474042200.0, - "grad_norm": 36.49726084007025, - "learning_rate": 1.560644719170743e-07, - "loss": 1.0244, - "num_input_tokens_seen": 155862490, - "step": 7295 - }, - { - "epoch": 0.8772921301028077, - "flos": 25758858997560.0, - "grad_norm": 6.989984977389498, - "learning_rate": 1.5576294206240692e-07, - "loss": 0.9457, - "num_input_tokens_seen": 155881735, - "step": 7296 - }, - { - "epoch": 0.8774123729934468, - "flos": 40867380240960.0, - "grad_norm": 6.222265735352972, - "learning_rate": 1.5546169197703507e-07, - "loss": 0.9112, - "num_input_tokens_seen": 155907730, - "step": 7297 - }, - { - "epoch": 0.8775326158840858, - "flos": 17005338191160.0, - "grad_norm": 7.679013910566374, - "learning_rate": 1.5516072170665774e-07, - "loss": 0.9927, - "num_input_tokens_seen": 155925420, - "step": 7298 - }, - { - "epoch": 0.877652858774725, - "flos": 12123967066200.0, - "grad_norm": 23.620889295977246, - "learning_rate": 1.5486003129693214e-07, - "loss": 1.086, - "num_input_tokens_seen": 155942505, - "step": 7299 - }, - { - "epoch": 0.877773101665364, - "flos": 11683266131640.0, - "grad_norm": 4.436286008016376, - "learning_rate": 1.545596207934725e-07, - "loss": 0.999, - "num_input_tokens_seen": 155960710, - "step": 7300 - }, - { - "epoch": 0.8778933445560031, - "flos": 15667597991880.0, - "grad_norm": 4.0253074670722695, - "learning_rate": 1.5425949024185147e-07, - "loss": 0.9977, - "num_input_tokens_seen": 155980455, - "step": 7301 - }, - { - "epoch": 0.8780135874466423, - "flos": 16035338430960.0, - "grad_norm": 6.182720457620584, - "learning_rate": 1.5395963968759818e-07, - "loss": 0.9016, - "num_input_tokens_seen": 156000450, - "step": 7302 - }, - { - "epoch": 0.8781338303372813, - "flos": 44040419202480.0, - "grad_norm": 5.225951747818978, - "learning_rate": 1.536600691761998e-07, - "loss": 0.8701, - "num_input_tokens_seen": 156026000, - "step": 7303 - }, - { - "epoch": 0.8782540732279204, - "flos": 16114461900000.0, - "grad_norm": 10.05442267886493, - "learning_rate": 1.5336077875310084e-07, - 
"loss": 0.9351, - "num_input_tokens_seen": 156044945, - "step": 7304 - }, - { - "epoch": 0.8783743161185595, - "flos": 11367937394760.0, - "grad_norm": 5.152179197701638, - "learning_rate": 1.5306176846370321e-07, - "loss": 0.966, - "num_input_tokens_seen": 156062810, - "step": 7305 - }, - { - "epoch": 0.8784945590091986, - "flos": 18553031814840.0, - "grad_norm": 6.1419816011024695, - "learning_rate": 1.5276303835336712e-07, - "loss": 0.9541, - "num_input_tokens_seen": 156083070, - "step": 7306 - }, - { - "epoch": 0.8786148018998376, - "flos": 44897526019920.0, - "grad_norm": 0.7769665210119462, - "learning_rate": 1.524645884674094e-07, - "loss": 0.7869, - "num_input_tokens_seen": 156139720, - "step": 7307 - }, - { - "epoch": 0.8787350447904768, - "flos": 15379900868640.0, - "grad_norm": 4.497127251777156, - "learning_rate": 1.521664188511047e-07, - "loss": 1.018, - "num_input_tokens_seen": 156159465, - "step": 7308 - }, - { - "epoch": 0.8788552876811159, - "flos": 18128679039840.0, - "grad_norm": 6.150669104921222, - "learning_rate": 1.518685295496851e-07, - "loss": 1.0335, - "num_input_tokens_seen": 156177045, - "step": 7309 - }, - { - "epoch": 0.8789755305717549, - "flos": 15852526035240.0, - "grad_norm": 3.6508557557548906, - "learning_rate": 1.5157092060833975e-07, - "loss": 1.0784, - "num_input_tokens_seen": 156196415, - "step": 7310 - }, - { - "epoch": 0.879095773462394, - "flos": 20883221584320.0, - "grad_norm": 6.411486165171957, - "learning_rate": 1.5127359207221658e-07, - "loss": 0.8788, - "num_input_tokens_seen": 156215615, - "step": 7311 - }, - { - "epoch": 0.8792160163530331, - "flos": 11813206901760.0, - "grad_norm": 9.031951137900485, - "learning_rate": 1.5097654398641923e-07, - "loss": 0.9499, - "num_input_tokens_seen": 156233240, - "step": 7312 - }, - { - "epoch": 0.8793362592436722, - "flos": 17425766286480.0, - "grad_norm": 3.585500786145664, - "learning_rate": 1.5067977639601014e-07, - "loss": 0.9574, - "num_input_tokens_seen": 156255720, - "step": 7313 - }, - { - "epoch": 0.8794565021343113, - "flos": 10267015694520.0, - "grad_norm": 5.315546934708403, - "learning_rate": 1.5038328934600864e-07, - "loss": 0.9318, - "num_input_tokens_seen": 156272075, - "step": 7314 - }, - { - "epoch": 0.8795767450249504, - "flos": 28228096020480.0, - "grad_norm": 3.53968977263876, - "learning_rate": 1.5008708288139161e-07, - "loss": 0.9251, - "num_input_tokens_seen": 156294155, - "step": 7315 - }, - { - "epoch": 0.8796969879155895, - "flos": 16319141536080.0, - "grad_norm": 8.859196306206028, - "learning_rate": 1.497911570470931e-07, - "loss": 0.9577, - "num_input_tokens_seen": 156313880, - "step": 7316 - }, - { - "epoch": 0.8798172308062285, - "flos": 20201532178560.0, - "grad_norm": 5.424393038921336, - "learning_rate": 1.494955118880048e-07, - "loss": 1.0801, - "num_input_tokens_seen": 156334585, - "step": 7317 - }, - { - "epoch": 0.8799374736968677, - "flos": 17059068340440.0, - "grad_norm": 2.7721679360683367, - "learning_rate": 1.4920014744897634e-07, - "loss": 0.9588, - "num_input_tokens_seen": 156353720, - "step": 7318 - }, - { - "epoch": 0.8800577165875068, - "flos": 18238561601640.0, - "grad_norm": 8.331748303036779, - "learning_rate": 1.4890506377481392e-07, - "loss": 1.0843, - "num_input_tokens_seen": 156372530, - "step": 7319 - }, - { - "epoch": 0.8801779594781458, - "flos": 16664585472960.0, - "grad_norm": 8.08638991412495, - "learning_rate": 1.486102609102815e-07, - "loss": 0.8596, - "num_input_tokens_seen": 156392800, - "step": 7320 - }, - { - "epoch": 
0.880298202368785, - "flos": 8089339151400.0, - "grad_norm": 6.875340956122791, - "learning_rate": 1.483157389001004e-07, - "loss": 1.0736, - "num_input_tokens_seen": 156410080, - "step": 7321 - }, - { - "epoch": 0.880418445259424, - "flos": 16111641036480.0, - "grad_norm": 4.316524231277867, - "learning_rate": 1.4802149778894933e-07, - "loss": 1.0006, - "num_input_tokens_seen": 156428590, - "step": 7322 - }, - { - "epoch": 0.8805386881500631, - "flos": 14567442830640.0, - "grad_norm": 3.72847789747315, - "learning_rate": 1.4772753762146484e-07, - "loss": 1.099, - "num_input_tokens_seen": 156447565, - "step": 7323 - }, - { - "epoch": 0.8806589310407023, - "flos": 26078296383480.0, - "grad_norm": 4.30272683910484, - "learning_rate": 1.474338584422401e-07, - "loss": 0.9246, - "num_input_tokens_seen": 156472495, - "step": 7324 - }, - { - "epoch": 0.8807791739313413, - "flos": 16664462826720.0, - "grad_norm": 4.60204122284319, - "learning_rate": 1.4714046029582595e-07, - "loss": 0.9793, - "num_input_tokens_seen": 156491280, - "step": 7325 - }, - { - "epoch": 0.8808994168219804, - "flos": 18473019160560.0, - "grad_norm": 3.8667253314424026, - "learning_rate": 1.46847343226731e-07, - "loss": 0.992, - "num_input_tokens_seen": 156512040, - "step": 7326 - }, - { - "epoch": 0.8810196597126195, - "flos": 12102253133640.0, - "grad_norm": 11.8370919146181, - "learning_rate": 1.465545072794203e-07, - "loss": 0.9207, - "num_input_tokens_seen": 156529635, - "step": 7327 - }, - { - "epoch": 0.8811399026032586, - "flos": 16350207244440.0, - "grad_norm": 4.241109412460602, - "learning_rate": 1.4626195249831774e-07, - "loss": 0.9831, - "num_input_tokens_seen": 156550255, - "step": 7328 - }, - { - "epoch": 0.8812601454938976, - "flos": 10213193560560.0, - "grad_norm": 4.235175138092566, - "learning_rate": 1.4596967892780244e-07, - "loss": 0.9474, - "num_input_tokens_seen": 156566305, - "step": 7329 - }, - { - "epoch": 0.8813803883845368, - "flos": 15984122529600.0, - "grad_norm": 4.021333785199546, - "learning_rate": 1.4567768661221314e-07, - "loss": 0.9747, - "num_input_tokens_seen": 156586595, - "step": 7330 - }, - { - "epoch": 0.8815006312751759, - "flos": 15275108125800.0, - "grad_norm": 5.041686234028994, - "learning_rate": 1.4538597559584442e-07, - "loss": 0.9635, - "num_input_tokens_seen": 156604105, - "step": 7331 - }, - { - "epoch": 0.8816208741658149, - "flos": 16193432061240.0, - "grad_norm": 12.24577352053397, - "learning_rate": 1.4509454592294823e-07, - "loss": 1.0058, - "num_input_tokens_seen": 156624310, - "step": 7332 - }, - { - "epoch": 0.8817411170564541, - "flos": 12595427093520.0, - "grad_norm": 5.568782387584689, - "learning_rate": 1.448033976377354e-07, - "loss": 1.0186, - "num_input_tokens_seen": 156639015, - "step": 7333 - }, - { - "epoch": 0.8818613599470931, - "flos": 13151376362880.0, - "grad_norm": 10.484689722013453, - "learning_rate": 1.445125307843713e-07, - "loss": 0.9642, - "num_input_tokens_seen": 156656960, - "step": 7334 - }, - { - "epoch": 0.8819816028377322, - "flos": 19653922853520.0, - "grad_norm": 6.8069026245307604, - "learning_rate": 1.442219454069813e-07, - "loss": 0.971, - "num_input_tokens_seen": 156677705, - "step": 7335 - }, - { - "epoch": 0.8821018457283714, - "flos": 16639008183840.0, - "grad_norm": 5.178294827445256, - "learning_rate": 1.4393164154964676e-07, - "loss": 0.8926, - "num_input_tokens_seen": 156696955, - "step": 7336 - }, - { - "epoch": 0.8822220886190104, - "flos": 20755457784960.0, - "grad_norm": 11.484102622195143, - "learning_rate": 
1.4364161925640649e-07, - "loss": 1.1637, - "num_input_tokens_seen": 156718075, - "step": 7337 - }, - { - "epoch": 0.8823423315096495, - "flos": 14541007017840.0, - "grad_norm": 59.308546282212774, - "learning_rate": 1.4335187857125663e-07, - "loss": 1.0739, - "num_input_tokens_seen": 156736495, - "step": 7338 - }, - { - "epoch": 0.8824625744002886, - "flos": 17215107646200.0, - "grad_norm": 5.055196509461089, - "learning_rate": 1.4306241953815023e-07, - "loss": 0.9663, - "num_input_tokens_seen": 156757275, - "step": 7339 - }, - { - "epoch": 0.8825828172909277, - "flos": 17554971179160.0, - "grad_norm": 7.819145610994688, - "learning_rate": 1.4277324220099862e-07, - "loss": 0.928, - "num_input_tokens_seen": 156778905, - "step": 7340 - }, - { - "epoch": 0.8827030601815667, - "flos": 15957410762760.0, - "grad_norm": 4.519525552678453, - "learning_rate": 1.4248434660366938e-07, - "loss": 0.9674, - "num_input_tokens_seen": 156798100, - "step": 7341 - }, - { - "epoch": 0.8828233030722058, - "flos": 14095645526160.0, - "grad_norm": 3.6156544412981995, - "learning_rate": 1.4219573278998808e-07, - "loss": 0.9283, - "num_input_tokens_seen": 156816280, - "step": 7342 - }, - { - "epoch": 0.882943545962845, - "flos": 28096438203000.0, - "grad_norm": 4.054060871015436, - "learning_rate": 1.4190740080373685e-07, - "loss": 0.8882, - "num_input_tokens_seen": 156836280, - "step": 7343 - }, - { - "epoch": 0.883063788853484, - "flos": 13511574058200.0, - "grad_norm": 3.5504921101937885, - "learning_rate": 1.4161935068865538e-07, - "loss": 1.076, - "num_input_tokens_seen": 156851145, - "step": 7344 - }, - { - "epoch": 0.8831840317441231, - "flos": 13281593087040.0, - "grad_norm": 4.785499429658285, - "learning_rate": 1.4133158248844113e-07, - "loss": 0.9848, - "num_input_tokens_seen": 156869770, - "step": 7345 - }, - { - "epoch": 0.8833042746347622, - "flos": 19098249538200.0, - "grad_norm": 15.536774357049632, - "learning_rate": 1.4104409624674785e-07, - "loss": 0.9548, - "num_input_tokens_seen": 156889275, - "step": 7346 - }, - { - "epoch": 0.8834245175254013, - "flos": 18579007704240.0, - "grad_norm": 4.093876494571815, - "learning_rate": 1.407568920071873e-07, - "loss": 1.0083, - "num_input_tokens_seen": 156907860, - "step": 7347 - }, - { - "epoch": 0.8835447604160404, - "flos": 21831108811680.0, - "grad_norm": 5.103173648487879, - "learning_rate": 1.4046996981332782e-07, - "loss": 0.8926, - "num_input_tokens_seen": 156927465, - "step": 7348 - }, - { - "epoch": 0.8836650033066795, - "flos": 17583216024000.0, - "grad_norm": 3.1905281791978988, - "learning_rate": 1.4018332970869516e-07, - "loss": 1.0034, - "num_input_tokens_seen": 156945125, - "step": 7349 - }, - { - "epoch": 0.8837852461973186, - "flos": 18082951557720.0, - "grad_norm": 3.5536629007127076, - "learning_rate": 1.398969717367733e-07, - "loss": 1.0683, - "num_input_tokens_seen": 156966170, - "step": 7350 - }, - { - "epoch": 0.8839054890879576, - "flos": 12627351325560.0, - "grad_norm": 13.684675926912167, - "learning_rate": 1.396108959410014e-07, - "loss": 0.9903, - "num_input_tokens_seen": 156985105, - "step": 7351 - }, - { - "epoch": 0.8840257319785968, - "flos": 16900852063920.0, - "grad_norm": 6.772842750705294, - "learning_rate": 1.3932510236477745e-07, - "loss": 1.0258, - "num_input_tokens_seen": 157005495, - "step": 7352 - }, - { - "epoch": 0.8841459748692359, - "flos": 20703260713680.0, - "grad_norm": 5.716500595542512, - "learning_rate": 1.3903959105145636e-07, - "loss": 0.777, - "num_input_tokens_seen": 157025705, - "step": 7353 - }, 
- { - "epoch": 0.8842662177598749, - "flos": 17290091804640.0, - "grad_norm": 5.999872842077051, - "learning_rate": 1.387543620443492e-07, - "loss": 1.0536, - "num_input_tokens_seen": 157042270, - "step": 7354 - }, - { - "epoch": 0.8843864606505141, - "flos": 17790961816080.0, - "grad_norm": 4.574541062646382, - "learning_rate": 1.3846941538672606e-07, - "loss": 1.067, - "num_input_tokens_seen": 157060695, - "step": 7355 - }, - { - "epoch": 0.8845067035411531, - "flos": 20073768379200.0, - "grad_norm": 5.116800960472451, - "learning_rate": 1.3818475112181193e-07, - "loss": 1.0347, - "num_input_tokens_seen": 157079210, - "step": 7356 - }, - { - "epoch": 0.8846269464317922, - "flos": 9054984970080.0, - "grad_norm": 4.420799313245645, - "learning_rate": 1.3790036929279091e-07, - "loss": 1.0158, - "num_input_tokens_seen": 157096085, - "step": 7357 - }, - { - "epoch": 0.8847471893224313, - "flos": 13203450787920.0, - "grad_norm": 4.803553005351323, - "learning_rate": 1.3761626994280363e-07, - "loss": 0.8135, - "num_input_tokens_seen": 157113275, - "step": 7358 - }, - { - "epoch": 0.8848674322130704, - "flos": 25524861362040.0, - "grad_norm": 3.6800930828263843, - "learning_rate": 1.3733245311494735e-07, - "loss": 0.9581, - "num_input_tokens_seen": 157135650, - "step": 7359 - }, - { - "epoch": 0.8849876751037095, - "flos": 17242095367080.0, - "grad_norm": 5.26560344748232, - "learning_rate": 1.3704891885227676e-07, - "loss": 0.937, - "num_input_tokens_seen": 157155415, - "step": 7360 - }, - { - "epoch": 0.8851079179943486, - "flos": 15269711691240.0, - "grad_norm": 5.787393800490262, - "learning_rate": 1.367656671978037e-07, - "loss": 1.0, - "num_input_tokens_seen": 157172600, - "step": 7361 - }, - { - "epoch": 0.8852281608849877, - "flos": 10814349065520.0, - "grad_norm": 10.983944706743602, - "learning_rate": 1.36482698194498e-07, - "loss": 0.9706, - "num_input_tokens_seen": 157188865, - "step": 7362 - }, - { - "epoch": 0.8853484037756267, - "flos": 16560835223160.0, - "grad_norm": 5.187088180189585, - "learning_rate": 1.3620001188528506e-07, - "loss": 0.9367, - "num_input_tokens_seen": 157209305, - "step": 7363 - }, - { - "epoch": 0.8854686466662659, - "flos": 17867448390960.0, - "grad_norm": 10.856965995724783, - "learning_rate": 1.3591760831304865e-07, - "loss": 0.9534, - "num_input_tokens_seen": 157226715, - "step": 7364 - }, - { - "epoch": 0.885588889556905, - "flos": 15190833514680.0, - "grad_norm": 5.351860093801317, - "learning_rate": 1.356354875206287e-07, - "loss": 1.027, - "num_input_tokens_seen": 157244270, - "step": 7365 - }, - { - "epoch": 0.885709132447544, - "flos": 19155873705600.0, - "grad_norm": 3.4515035973941424, - "learning_rate": 1.3535364955082296e-07, - "loss": 0.9103, - "num_input_tokens_seen": 157263840, - "step": 7366 - }, - { - "epoch": 0.8858293753381832, - "flos": 18578425134600.0, - "grad_norm": 4.525968854674801, - "learning_rate": 1.3507209444638613e-07, - "loss": 0.8695, - "num_input_tokens_seen": 157285560, - "step": 7367 - }, - { - "epoch": 0.8859496182288222, - "flos": 16558351636800.0, - "grad_norm": 3.470654214096836, - "learning_rate": 1.347908222500298e-07, - "loss": 0.9656, - "num_input_tokens_seen": 157305355, - "step": 7368 - }, - { - "epoch": 0.8860698611194613, - "flos": 11944711411440.0, - "grad_norm": 4.16005973883064, - "learning_rate": 1.3450983300442276e-07, - "loss": 0.9113, - "num_input_tokens_seen": 157324305, - "step": 7369 - }, - { - "epoch": 0.8861901040101005, - "flos": 17556596241840.0, - "grad_norm": 16.649162459286146, - 
"learning_rate": 1.3422912675219068e-07, - "loss": 0.9601, - "num_input_tokens_seen": 157343780, - "step": 7370 - }, - { - "epoch": 0.8863103469007395, - "flos": 17370503059200.0, - "grad_norm": 6.251959836766157, - "learning_rate": 1.339487035359166e-07, - "loss": 1.0089, - "num_input_tokens_seen": 157363870, - "step": 7371 - }, - { - "epoch": 0.8864305897913786, - "flos": 15668241884640.0, - "grad_norm": 2.8887258528488786, - "learning_rate": 1.336685633981409e-07, - "loss": 1.0787, - "num_input_tokens_seen": 157384675, - "step": 7372 - }, - { - "epoch": 0.8865508326820177, - "flos": 13545000706680.0, - "grad_norm": 5.1211268978951985, - "learning_rate": 1.333887063813597e-07, - "loss": 0.9661, - "num_input_tokens_seen": 157402500, - "step": 7373 - }, - { - "epoch": 0.8866710755726568, - "flos": 10896446705880.0, - "grad_norm": 12.1365666227994, - "learning_rate": 1.331091325280278e-07, - "loss": 0.887, - "num_input_tokens_seen": 157421190, - "step": 7374 - }, - { - "epoch": 0.8867913184632958, - "flos": 14252236740000.0, - "grad_norm": 4.760944607074918, - "learning_rate": 1.3282984188055625e-07, - "loss": 1.0125, - "num_input_tokens_seen": 157440700, - "step": 7375 - }, - { - "epoch": 0.8869115613539349, - "flos": 16610855323680.0, - "grad_norm": 6.791037251457512, - "learning_rate": 1.3255083448131288e-07, - "loss": 1.0195, - "num_input_tokens_seen": 157459465, - "step": 7376 - }, - { - "epoch": 0.8870318042445741, - "flos": 15116799864600.0, - "grad_norm": 5.328613079906129, - "learning_rate": 1.3227211037262365e-07, - "loss": 1.0072, - "num_input_tokens_seen": 157476425, - "step": 7377 - }, - { - "epoch": 0.8871520471352131, - "flos": 14199917022480.0, - "grad_norm": 8.021800378408525, - "learning_rate": 1.319936695967696e-07, - "loss": 1.0843, - "num_input_tokens_seen": 157493970, - "step": 7378 - }, - { - "epoch": 0.8872722900258522, - "flos": 16061712920640.0, - "grad_norm": 9.5266522433806, - "learning_rate": 1.3171551219599097e-07, - "loss": 1.0458, - "num_input_tokens_seen": 157512215, - "step": 7379 - }, - { - "epoch": 0.8873925329164913, - "flos": 15746782784040.0, - "grad_norm": 6.149016337309888, - "learning_rate": 1.3143763821248377e-07, - "loss": 0.9961, - "num_input_tokens_seen": 157529020, - "step": 7380 - }, - { - "epoch": 0.8875127758071304, - "flos": 13623081682680.0, - "grad_norm": 4.402698140405193, - "learning_rate": 1.3116004768840118e-07, - "loss": 0.9509, - "num_input_tokens_seen": 157547115, - "step": 7381 - }, - { - "epoch": 0.8876330186977694, - "flos": 12834085286160.0, - "grad_norm": 3.5068523902687705, - "learning_rate": 1.3088274066585348e-07, - "loss": 0.9592, - "num_input_tokens_seen": 157564445, - "step": 7382 - }, - { - "epoch": 0.8877532615884086, - "flos": 15635152513320.0, - "grad_norm": 4.463736756496605, - "learning_rate": 1.3060571718690749e-07, - "loss": 1.1341, - "num_input_tokens_seen": 157581660, - "step": 7383 - }, - { - "epoch": 0.8878735044790477, - "flos": 51665227839120.0, - "grad_norm": 0.7899077392375744, - "learning_rate": 1.3032897729358805e-07, - "loss": 0.8289, - "num_input_tokens_seen": 157642335, - "step": 7384 - }, - { - "epoch": 0.8879937473696867, - "flos": 19601081889480.0, - "grad_norm": 10.74596666477925, - "learning_rate": 1.3005252102787645e-07, - "loss": 1.0238, - "num_input_tokens_seen": 157660995, - "step": 7385 - }, - { - "epoch": 0.8881139902603259, - "flos": 15797385454200.0, - "grad_norm": 3.0735549694859308, - "learning_rate": 1.297763484317105e-07, - "loss": 0.9561, - "num_input_tokens_seen": 157679010, - 
"step": 7386 - }, - { - "epoch": 0.888234233150965, - "flos": 14406957598680.0, - "grad_norm": 6.45604190989603, - "learning_rate": 1.2950045954698551e-07, - "loss": 0.9221, - "num_input_tokens_seen": 157696565, - "step": 7387 - }, - { - "epoch": 0.888354476041604, - "flos": 12860827714560.0, - "grad_norm": 3.7439038635121484, - "learning_rate": 1.2922485441555343e-07, - "loss": 0.9846, - "num_input_tokens_seen": 157715365, - "step": 7388 - }, - { - "epoch": 0.8884747189322432, - "flos": 15639291823920.0, - "grad_norm": 5.3103507687218965, - "learning_rate": 1.2894953307922363e-07, - "loss": 1.0498, - "num_input_tokens_seen": 157734045, - "step": 7389 - }, - { - "epoch": 0.8885949618228822, - "flos": 14038174666560.0, - "grad_norm": 4.352139282353888, - "learning_rate": 1.2867449557976208e-07, - "loss": 1.0708, - "num_input_tokens_seen": 157751865, - "step": 7390 - }, - { - "epoch": 0.8887152047135213, - "flos": 14226352835280.0, - "grad_norm": 5.629533424354787, - "learning_rate": 1.283997419588916e-07, - "loss": 0.9788, - "num_input_tokens_seen": 157771055, - "step": 7391 - }, - { - "epoch": 0.8888354476041604, - "flos": 13177444236960.0, - "grad_norm": 53.48663448338636, - "learning_rate": 1.2812527225829216e-07, - "loss": 0.8465, - "num_input_tokens_seen": 157789000, - "step": 7392 - }, - { - "epoch": 0.8889556904947995, - "flos": 15406612635480.0, - "grad_norm": 5.95443270680451, - "learning_rate": 1.2785108651960052e-07, - "loss": 0.9854, - "num_input_tokens_seen": 157810355, - "step": 7393 - }, - { - "epoch": 0.8890759333854386, - "flos": 19496871716280.0, - "grad_norm": 5.432319014405135, - "learning_rate": 1.2757718478441094e-07, - "loss": 1.0277, - "num_input_tokens_seen": 157830820, - "step": 7394 - }, - { - "epoch": 0.8891961762760777, - "flos": 17425367686200.0, - "grad_norm": 5.809455648583723, - "learning_rate": 1.2730356709427302e-07, - "loss": 0.9922, - "num_input_tokens_seen": 157849220, - "step": 7395 - }, - { - "epoch": 0.8893164191667168, - "flos": 29643426610800.0, - "grad_norm": 4.937987870848458, - "learning_rate": 1.2703023349069542e-07, - "loss": 0.8155, - "num_input_tokens_seen": 157873790, - "step": 7396 - }, - { - "epoch": 0.8894366620573558, - "flos": 23954411312760.0, - "grad_norm": 6.799014401534636, - "learning_rate": 1.2675718401514223e-07, - "loss": 0.8417, - "num_input_tokens_seen": 157897690, - "step": 7397 - }, - { - "epoch": 0.889556904947995, - "flos": 11971300532040.0, - "grad_norm": 10.338128443738658, - "learning_rate": 1.264844187090346e-07, - "loss": 0.9684, - "num_input_tokens_seen": 157914535, - "step": 7398 - }, - { - "epoch": 0.889677147838634, - "flos": 18526350709560.0, - "grad_norm": 3.3874634800592833, - "learning_rate": 1.262119376137516e-07, - "loss": 0.98, - "num_input_tokens_seen": 157935315, - "step": 7399 - }, - { - "epoch": 0.8897973907292731, - "flos": 18840667614960.0, - "grad_norm": 3.5628759620782176, - "learning_rate": 1.2593974077062707e-07, - "loss": 1.0651, - "num_input_tokens_seen": 157956655, - "step": 7400 - }, - { - "epoch": 0.8899176336199123, - "flos": 18683616477720.0, - "grad_norm": 10.923635883846163, - "learning_rate": 1.2566782822095423e-07, - "loss": 0.862, - "num_input_tokens_seen": 157976630, - "step": 7401 - }, - { - "epoch": 0.8900378765105513, - "flos": 14775249945840.0, - "grad_norm": 6.5672293923761975, - "learning_rate": 1.2539620000598162e-07, - "loss": 0.9339, - "num_input_tokens_seen": 157995685, - "step": 7402 - }, - { - "epoch": 0.8901581194011904, - "flos": 11656922303520.0, - "grad_norm": 
4.2475805688752875, - "learning_rate": 1.2512485616691492e-07, - "loss": 1.02, - "num_input_tokens_seen": 158012460, - "step": 7403 - }, - { - "epoch": 0.8902783622918296, - "flos": 25084283073720.0, - "grad_norm": 3.4927926104660956, - "learning_rate": 1.2485379674491681e-07, - "loss": 1.0357, - "num_input_tokens_seen": 158038375, - "step": 7404 - }, - { - "epoch": 0.8903986051824686, - "flos": 12180518079000.0, - "grad_norm": 14.407099965605337, - "learning_rate": 1.2458302178110657e-07, - "loss": 1.0046, - "num_input_tokens_seen": 158056460, - "step": 7405 - }, - { - "epoch": 0.8905188480731077, - "flos": 18133278273840.0, - "grad_norm": 4.291469878095337, - "learning_rate": 1.2431253131656118e-07, - "loss": 1.0492, - "num_input_tokens_seen": 158075655, - "step": 7406 - }, - { - "epoch": 0.8906390909637467, - "flos": 16610885985240.0, - "grad_norm": 3.040489038483719, - "learning_rate": 1.240423253923133e-07, - "loss": 0.9951, - "num_input_tokens_seen": 158094980, - "step": 7407 - }, - { - "epoch": 0.8907593338543859, - "flos": 14960208650760.0, - "grad_norm": 6.020714605760156, - "learning_rate": 1.237724040493533e-07, - "loss": 0.9178, - "num_input_tokens_seen": 158113325, - "step": 7408 - }, - { - "epoch": 0.8908795767450249, - "flos": 15535449589440.0, - "grad_norm": 9.289616060566463, - "learning_rate": 1.2350276732862773e-07, - "loss": 0.9375, - "num_input_tokens_seen": 158134070, - "step": 7409 - }, - { - "epoch": 0.890999819635664, - "flos": 47477166851160.0, - "grad_norm": 0.8819304929882997, - "learning_rate": 1.2323341527103993e-07, - "loss": 0.8485, - "num_input_tokens_seen": 158188990, - "step": 7410 - }, - { - "epoch": 0.8911200625263032, - "flos": 19129345908120.0, - "grad_norm": 12.262559330259121, - "learning_rate": 1.2296434791745135e-07, - "loss": 1.0626, - "num_input_tokens_seen": 158207160, - "step": 7411 - }, - { - "epoch": 0.8912403054169422, - "flos": 14828642817960.0, - "grad_norm": 3.8605351113712683, - "learning_rate": 1.2269556530867875e-07, - "loss": 0.988, - "num_input_tokens_seen": 158225435, - "step": 7412 - }, - { - "epoch": 0.8913605483075813, - "flos": 19234843866840.0, - "grad_norm": 5.627679144415594, - "learning_rate": 1.2242706748549614e-07, - "loss": 1.0454, - "num_input_tokens_seen": 158243150, - "step": 7413 - }, - { - "epoch": 0.8914807911982204, - "flos": 16794618227760.0, - "grad_norm": 4.570925105844788, - "learning_rate": 1.2215885448863473e-07, - "loss": 1.0402, - "num_input_tokens_seen": 158263745, - "step": 7414 - }, - { - "epoch": 0.8916010340888595, - "flos": 17399085181200.0, - "grad_norm": 3.4070976672470303, - "learning_rate": 1.2189092635878152e-07, - "loss": 1.0235, - "num_input_tokens_seen": 158284915, - "step": 7415 - }, - { - "epoch": 0.8917212769794985, - "flos": 15065982563520.0, - "grad_norm": 8.236090754597642, - "learning_rate": 1.216232831365822e-07, - "loss": 1.003, - "num_input_tokens_seen": 158303580, - "step": 7416 - }, - { - "epoch": 0.8918415198701377, - "flos": 18154072359600.0, - "grad_norm": 3.7040113201256744, - "learning_rate": 1.2135592486263678e-07, - "loss": 1.0342, - "num_input_tokens_seen": 158322550, - "step": 7417 - }, - { - "epoch": 0.8919617627607768, - "flos": 27022810839240.0, - "grad_norm": 3.503030963217461, - "learning_rate": 1.2108885157750415e-07, - "loss": 0.836, - "num_input_tokens_seen": 158344630, - "step": 7418 - }, - { - "epoch": 0.8920820056514158, - "flos": 18657793896120.0, - "grad_norm": 5.889695961946586, - "learning_rate": 1.2082206332169897e-07, - "loss": 1.0234, - 
"num_input_tokens_seen": 158364445, - "step": 7419 - }, - { - "epoch": 0.892202248542055, - "flos": 12311317372800.0, - "grad_norm": 6.333508873899342, - "learning_rate": 1.2055556013569225e-07, - "loss": 0.9614, - "num_input_tokens_seen": 158379675, - "step": 7420 - }, - { - "epoch": 0.892322491432694, - "flos": 15143787585480.0, - "grad_norm": 12.30568758336624, - "learning_rate": 1.2028934205991315e-07, - "loss": 1.0466, - "num_input_tokens_seen": 158398715, - "step": 7421 - }, - { - "epoch": 0.8924427343233331, - "flos": 17088631632360.0, - "grad_norm": 2.415614545831599, - "learning_rate": 1.2002340913474607e-07, - "loss": 0.9972, - "num_input_tokens_seen": 158422070, - "step": 7422 - }, - { - "epoch": 0.8925629772139723, - "flos": 21384735488520.0, - "grad_norm": 5.693214312581635, - "learning_rate": 1.1975776140053317e-07, - "loss": 0.9689, - "num_input_tokens_seen": 158441760, - "step": 7423 - }, - { - "epoch": 0.8926832201046113, - "flos": 16061958213120.0, - "grad_norm": 15.311314128187405, - "learning_rate": 1.194923988975729e-07, - "loss": 0.9492, - "num_input_tokens_seen": 158461080, - "step": 7424 - }, - { - "epoch": 0.8928034629952504, - "flos": 9375342202800.0, - "grad_norm": 6.316167691266534, - "learning_rate": 1.192273216661206e-07, - "loss": 0.951, - "num_input_tokens_seen": 158478890, - "step": 7425 - }, - { - "epoch": 0.8929237058858895, - "flos": 39246321973440.0, - "grad_norm": 0.7514211779543575, - "learning_rate": 1.189625297463881e-07, - "loss": 0.8371, - "num_input_tokens_seen": 158540300, - "step": 7426 - }, - { - "epoch": 0.8930439487765286, - "flos": 20577704546640.0, - "grad_norm": 3.1560570686218505, - "learning_rate": 1.1869802317854394e-07, - "loss": 1.0144, - "num_input_tokens_seen": 158563805, - "step": 7427 - }, - { - "epoch": 0.8931641916671677, - "flos": 15930852303720.0, - "grad_norm": 5.860137971158667, - "learning_rate": 1.1843380200271425e-07, - "loss": 0.9505, - "num_input_tokens_seen": 158582725, - "step": 7428 - }, - { - "epoch": 0.8932844345578068, - "flos": 18390737550840.0, - "grad_norm": 4.9237946172873155, - "learning_rate": 1.181698662589805e-07, - "loss": 1.0275, - "num_input_tokens_seen": 158602030, - "step": 7429 - }, - { - "epoch": 0.8934046774484459, - "flos": 16294300124400.0, - "grad_norm": 41.08417931563271, - "learning_rate": 1.1790621598738249e-07, - "loss": 0.9877, - "num_input_tokens_seen": 158620065, - "step": 7430 - }, - { - "epoch": 0.8935249203390849, - "flos": 17398931873400.0, - "grad_norm": 4.416220564475543, - "learning_rate": 1.1764285122791461e-07, - "loss": 0.9813, - "num_input_tokens_seen": 158640505, - "step": 7431 - }, - { - "epoch": 0.8936451632297241, - "flos": 11132590650600.0, - "grad_norm": 6.037202891745772, - "learning_rate": 1.173797720205294e-07, - "loss": 1.0006, - "num_input_tokens_seen": 158658260, - "step": 7432 - }, - { - "epoch": 0.8937654061203631, - "flos": 25055271689880.0, - "grad_norm": 6.093683643468841, - "learning_rate": 1.1711697840513602e-07, - "loss": 0.9446, - "num_input_tokens_seen": 158677415, - "step": 7433 - }, - { - "epoch": 0.8938856490110022, - "flos": 11394679823160.0, - "grad_norm": 15.015928838952467, - "learning_rate": 1.1685447042160012e-07, - "loss": 0.9322, - "num_input_tokens_seen": 158695170, - "step": 7434 - }, - { - "epoch": 0.8940058919016414, - "flos": 14698211462880.0, - "grad_norm": 2.8796660664133844, - "learning_rate": 1.1659224810974367e-07, - "loss": 0.9385, - "num_input_tokens_seen": 158714850, - "step": 7435 - }, - { - "epoch": 0.8941261347922804, - "flos": 
17950220585640.0, - "grad_norm": 3.331002294552785, - "learning_rate": 1.1633031150934591e-07, - "loss": 0.9088, - "num_input_tokens_seen": 158737600, - "step": 7436 - }, - { - "epoch": 0.8942463776829195, - "flos": 13859624227680.0, - "grad_norm": 6.076815034970069, - "learning_rate": 1.1606866066014176e-07, - "loss": 1.0167, - "num_input_tokens_seen": 158756370, - "step": 7437 - }, - { - "epoch": 0.8943666205735585, - "flos": 15845136599280.0, - "grad_norm": 4.0564020019640665, - "learning_rate": 1.1580729560182434e-07, - "loss": 0.9797, - "num_input_tokens_seen": 158771945, - "step": 7438 - }, - { - "epoch": 0.8944868634641977, - "flos": 13410368717880.0, - "grad_norm": 3.8235883962819703, - "learning_rate": 1.1554621637404171e-07, - "loss": 0.9476, - "num_input_tokens_seen": 158789755, - "step": 7439 - }, - { - "epoch": 0.8946071063548368, - "flos": 10210709974200.0, - "grad_norm": 6.424993718363144, - "learning_rate": 1.1528542301639999e-07, - "loss": 0.8267, - "num_input_tokens_seen": 158806265, - "step": 7440 - }, - { - "epoch": 0.8947273492454758, - "flos": 14252574017160.0, - "grad_norm": 8.195700695346025, - "learning_rate": 1.1502491556846105e-07, - "loss": 1.0443, - "num_input_tokens_seen": 158824480, - "step": 7441 - }, - { - "epoch": 0.894847592136115, - "flos": 13150303208280.0, - "grad_norm": 6.690378300529741, - "learning_rate": 1.1476469406974331e-07, - "loss": 1.0406, - "num_input_tokens_seen": 158839800, - "step": 7442 - }, - { - "epoch": 0.894967835026754, - "flos": 16692217086600.0, - "grad_norm": 3.307845822663328, - "learning_rate": 1.1450475855972341e-07, - "loss": 0.9981, - "num_input_tokens_seen": 158860310, - "step": 7443 - }, - { - "epoch": 0.8950880779173931, - "flos": 10733171271960.0, - "grad_norm": 6.212015617414838, - "learning_rate": 1.1424510907783158e-07, - "loss": 0.93, - "num_input_tokens_seen": 158877310, - "step": 7444 - }, - { - "epoch": 0.8952083208080323, - "flos": 15695750852040.0, - "grad_norm": 8.017052120001729, - "learning_rate": 1.1398574566345787e-07, - "loss": 1.0525, - "num_input_tokens_seen": 158897665, - "step": 7445 - }, - { - "epoch": 0.8953285636986713, - "flos": 16530168115080.0, - "grad_norm": 4.543251507685323, - "learning_rate": 1.1372666835594702e-07, - "loss": 1.0468, - "num_input_tokens_seen": 158915710, - "step": 7446 - }, - { - "epoch": 0.8954488065893104, - "flos": 11578074788520.0, - "grad_norm": 5.384464359261488, - "learning_rate": 1.1346787719460071e-07, - "loss": 0.9324, - "num_input_tokens_seen": 158934315, - "step": 7447 - }, - { - "epoch": 0.8955690494799495, - "flos": 12939859198920.0, - "grad_norm": 4.031738724765705, - "learning_rate": 1.1320937221867732e-07, - "loss": 0.9583, - "num_input_tokens_seen": 158951615, - "step": 7448 - }, - { - "epoch": 0.8956892923705886, - "flos": 18107149076640.0, - "grad_norm": 3.8547087966831635, - "learning_rate": 1.1295115346739192e-07, - "loss": 1.0274, - "num_input_tokens_seen": 158971335, - "step": 7449 - }, - { - "epoch": 0.8958095352612276, - "flos": 37665084603120.0, - "grad_norm": 6.683418240408175, - "learning_rate": 1.1269322097991629e-07, - "loss": 0.9581, - "num_input_tokens_seen": 158994340, - "step": 7450 - }, - { - "epoch": 0.8959297781518668, - "flos": 16482233000640.0, - "grad_norm": 10.76669434660984, - "learning_rate": 1.1243557479537846e-07, - "loss": 0.9111, - "num_input_tokens_seen": 159013950, - "step": 7451 - }, - { - "epoch": 0.8960500210425059, - "flos": 14431707025680.0, - "grad_norm": 5.8337793815466314, - "learning_rate": 1.121782149528634e-07, - 
"loss": 0.9246, - "num_input_tokens_seen": 159030770, - "step": 7452 - }, - { - "epoch": 0.8961702639331449, - "flos": 14121498769320.0, - "grad_norm": 5.958818571521586, - "learning_rate": 1.1192114149141208e-07, - "loss": 1.0039, - "num_input_tokens_seen": 159050125, - "step": 7453 - }, - { - "epoch": 0.8962905068237841, - "flos": 9086878540560.0, - "grad_norm": 4.03331567639566, - "learning_rate": 1.1166435445002197e-07, - "loss": 0.8779, - "num_input_tokens_seen": 159067515, - "step": 7454 - }, - { - "epoch": 0.8964107497144231, - "flos": 16664585472960.0, - "grad_norm": 9.152748453893722, - "learning_rate": 1.1140785386764818e-07, - "loss": 0.9118, - "num_input_tokens_seen": 159085935, - "step": 7455 - }, - { - "epoch": 0.8965309926050622, - "flos": 13832912460840.0, - "grad_norm": 3.6491139211688046, - "learning_rate": 1.1115163978320153e-07, - "loss": 0.9149, - "num_input_tokens_seen": 159104385, - "step": 7456 - }, - { - "epoch": 0.8966512354957014, - "flos": 20414459774280.0, - "grad_norm": 3.8922004420778533, - "learning_rate": 1.1089571223554917e-07, - "loss": 1.0406, - "num_input_tokens_seen": 159124990, - "step": 7457 - }, - { - "epoch": 0.8967714783863404, - "flos": 16615178603640.0, - "grad_norm": 6.0067123378043386, - "learning_rate": 1.1064007126351537e-07, - "loss": 1.0746, - "num_input_tokens_seen": 159145425, - "step": 7458 - }, - { - "epoch": 0.8968917212769795, - "flos": 17451466221840.0, - "grad_norm": 3.6274266506484953, - "learning_rate": 1.1038471690588003e-07, - "loss": 0.9817, - "num_input_tokens_seen": 159164290, - "step": 7459 - }, - { - "epoch": 0.8970119641676186, - "flos": 16689610854000.0, - "grad_norm": 5.764681110809701, - "learning_rate": 1.1012964920138145e-07, - "loss": 1.0206, - "num_input_tokens_seen": 159183595, - "step": 7460 - }, - { - "epoch": 0.8971322070582577, - "flos": 17454041792880.0, - "grad_norm": 3.4797998335700098, - "learning_rate": 1.0987486818871205e-07, - "loss": 0.9836, - "num_input_tokens_seen": 159206905, - "step": 7461 - }, - { - "epoch": 0.8972524499488967, - "flos": 15483743103120.0, - "grad_norm": 6.325681614995108, - "learning_rate": 1.0962037390652245e-07, - "loss": 0.9441, - "num_input_tokens_seen": 159225645, - "step": 7462 - }, - { - "epoch": 0.8973726928395359, - "flos": 15432803155800.0, - "grad_norm": 3.9591634716443114, - "learning_rate": 1.0936616639341911e-07, - "loss": 0.9417, - "num_input_tokens_seen": 159245655, - "step": 7463 - }, - { - "epoch": 0.897492935730175, - "flos": 38515654960200.0, - "grad_norm": 0.7460399029059406, - "learning_rate": 1.0911224568796473e-07, - "loss": 0.7929, - "num_input_tokens_seen": 159303570, - "step": 7464 - }, - { - "epoch": 0.897613178620814, - "flos": 12962646286080.0, - "grad_norm": 6.995006687687368, - "learning_rate": 1.0885861182867984e-07, - "loss": 0.9273, - "num_input_tokens_seen": 159321395, - "step": 7465 - }, - { - "epoch": 0.8977334215114532, - "flos": 23529629275920.0, - "grad_norm": 3.9843788296550224, - "learning_rate": 1.0860526485403942e-07, - "loss": 0.935, - "num_input_tokens_seen": 159342390, - "step": 7466 - }, - { - "epoch": 0.8978536644020922, - "flos": 10739426230200.0, - "grad_norm": 18.439528324607014, - "learning_rate": 1.0835220480247675e-07, - "loss": 1.004, - "num_input_tokens_seen": 159360605, - "step": 7467 - }, - { - "epoch": 0.8979739072927313, - "flos": 12757935988440.0, - "grad_norm": 6.082870396109009, - "learning_rate": 1.0809943171238067e-07, - "loss": 1.0577, - "num_input_tokens_seen": 159378250, - "step": 7468 - }, - { - "epoch": 
0.8980941501833704, - "flos": 15824434498200.0, - "grad_norm": 16.292084177168295, - "learning_rate": 1.078469456220965e-07, - "loss": 0.8621, - "num_input_tokens_seen": 159398125, - "step": 7469 - }, - { - "epoch": 0.8982143930740095, - "flos": 26812704107040.0, - "grad_norm": 5.632448816926166, - "learning_rate": 1.0759474656992606e-07, - "loss": 0.9139, - "num_input_tokens_seen": 159420615, - "step": 7470 - }, - { - "epoch": 0.8983346359646486, - "flos": 12809979751920.0, - "grad_norm": 7.686215425178757, - "learning_rate": 1.0734283459412785e-07, - "loss": 1.0033, - "num_input_tokens_seen": 159437185, - "step": 7471 - }, - { - "epoch": 0.8984548788552876, - "flos": 14593878643440.0, - "grad_norm": 6.628183427739294, - "learning_rate": 1.0709120973291707e-07, - "loss": 1.0233, - "num_input_tokens_seen": 159456685, - "step": 7472 - }, - { - "epoch": 0.8985751217459268, - "flos": 12600578235600.0, - "grad_norm": 6.243226615345357, - "learning_rate": 1.0683987202446475e-07, - "loss": 0.9888, - "num_input_tokens_seen": 159474590, - "step": 7473 - }, - { - "epoch": 0.8986953646365659, - "flos": 15354323579520.0, - "grad_norm": 9.09157302859507, - "learning_rate": 1.0658882150689862e-07, - "loss": 0.9373, - "num_input_tokens_seen": 159493170, - "step": 7474 - }, - { - "epoch": 0.8988156075272049, - "flos": 9899704517280.0, - "grad_norm": 6.019704886772119, - "learning_rate": 1.0633805821830288e-07, - "loss": 0.9985, - "num_input_tokens_seen": 159509575, - "step": 7475 - }, - { - "epoch": 0.8989358504178441, - "flos": 20702770128720.0, - "grad_norm": 4.3947055086977915, - "learning_rate": 1.0608758219671753e-07, - "loss": 1.0558, - "num_input_tokens_seen": 159528335, - "step": 7476 - }, - { - "epoch": 0.8990560933084831, - "flos": 14357796021840.0, - "grad_norm": 12.166719062030445, - "learning_rate": 1.0583739348014065e-07, - "loss": 0.9262, - "num_input_tokens_seen": 159549140, - "step": 7477 - }, - { - "epoch": 0.8991763361991222, - "flos": 18159775409760.0, - "grad_norm": 8.041495471245538, - "learning_rate": 1.0558749210652518e-07, - "loss": 1.0785, - "num_input_tokens_seen": 159568790, - "step": 7478 - }, - { - "epoch": 0.8992965790897613, - "flos": 17871219762840.0, - "grad_norm": 2.6136360326774297, - "learning_rate": 1.053378781137808e-07, - "loss": 1.0781, - "num_input_tokens_seen": 159589430, - "step": 7479 - }, - { - "epoch": 0.8994168219804004, - "flos": 11368335995040.0, - "grad_norm": 28.984882492187847, - "learning_rate": 1.0508855153977392e-07, - "loss": 1.003, - "num_input_tokens_seen": 159605615, - "step": 7480 - }, - { - "epoch": 0.8995370648710395, - "flos": 17660131860720.0, - "grad_norm": 5.449977720722542, - "learning_rate": 1.0483951242232669e-07, - "loss": 0.8815, - "num_input_tokens_seen": 159625810, - "step": 7481 - }, - { - "epoch": 0.8996573077616786, - "flos": 45185984134080.0, - "grad_norm": 1.0413022103105476, - "learning_rate": 1.0459076079921936e-07, - "loss": 0.8373, - "num_input_tokens_seen": 159678190, - "step": 7482 - }, - { - "epoch": 0.8997775506523177, - "flos": 12912166262160.0, - "grad_norm": 8.109928648760622, - "learning_rate": 1.0434229670818618e-07, - "loss": 1.0721, - "num_input_tokens_seen": 159694585, - "step": 7483 - }, - { - "epoch": 0.8998977935429567, - "flos": 17186218908600.0, - "grad_norm": 3.7954706805306215, - "learning_rate": 1.0409412018691944e-07, - "loss": 1.0285, - "num_input_tokens_seen": 159714770, - "step": 7484 - }, - { - "epoch": 0.9000180364335959, - "flos": 14747127747240.0, - "grad_norm": 13.727271142600081, - 
"learning_rate": 1.0384623127306724e-07, - "loss": 0.9834, - "num_input_tokens_seen": 159731835, - "step": 7485 - }, - { - "epoch": 0.900138279324235, - "flos": 13620966035040.0, - "grad_norm": 4.26269072612153, - "learning_rate": 1.0359863000423397e-07, - "loss": 1.0174, - "num_input_tokens_seen": 159749690, - "step": 7486 - }, - { - "epoch": 0.900258522214874, - "flos": 20467055445840.0, - "grad_norm": 7.076471872522186, - "learning_rate": 1.0335131641798112e-07, - "loss": 0.9407, - "num_input_tokens_seen": 159771370, - "step": 7487 - }, - { - "epoch": 0.9003787651055132, - "flos": 41708138898840.0, - "grad_norm": 0.8642874674908759, - "learning_rate": 1.0310429055182512e-07, - "loss": 0.8541, - "num_input_tokens_seen": 159825410, - "step": 7488 - }, - { - "epoch": 0.9004990079961522, - "flos": 18185414022000.0, - "grad_norm": 2.9909074002250873, - "learning_rate": 1.0285755244324024e-07, - "loss": 0.9635, - "num_input_tokens_seen": 159845875, - "step": 7489 - }, - { - "epoch": 0.9006192508867913, - "flos": 16589601314520.0, - "grad_norm": 3.2982482942628666, - "learning_rate": 1.0261110212965629e-07, - "loss": 0.9197, - "num_input_tokens_seen": 159867390, - "step": 7490 - }, - { - "epoch": 0.9007394937774305, - "flos": 12783666585360.0, - "grad_norm": 10.727511636927277, - "learning_rate": 1.023649396484596e-07, - "loss": 1.0094, - "num_input_tokens_seen": 159886165, - "step": 7491 - }, - { - "epoch": 0.9008597366680695, - "flos": 30770784123840.0, - "grad_norm": 24.7856053569437, - "learning_rate": 1.0211906503699275e-07, - "loss": 0.9027, - "num_input_tokens_seen": 159908860, - "step": 7492 - }, - { - "epoch": 0.9009799795587086, - "flos": 10554866125560.0, - "grad_norm": 8.042262975954843, - "learning_rate": 1.0187347833255455e-07, - "loss": 1.0406, - "num_input_tokens_seen": 159924485, - "step": 7493 - }, - { - "epoch": 0.9011002224493477, - "flos": 15327305197080.0, - "grad_norm": 3.5359910853864744, - "learning_rate": 1.0162817957240056e-07, - "loss": 1.0222, - "num_input_tokens_seen": 159944100, - "step": 7494 - }, - { - "epoch": 0.9012204653399868, - "flos": 50969673860160.0, - "grad_norm": 0.9859420473988367, - "learning_rate": 1.0138316879374253e-07, - "loss": 0.9108, - "num_input_tokens_seen": 160013110, - "step": 7495 - }, - { - "epoch": 0.9013407082306258, - "flos": 11026142183520.0, - "grad_norm": 7.767672171058151, - "learning_rate": 1.0113844603374833e-07, - "loss": 0.965, - "num_input_tokens_seen": 160029355, - "step": 7496 - }, - { - "epoch": 0.901460951121265, - "flos": 10634081579280.0, - "grad_norm": 6.554698266621819, - "learning_rate": 1.0089401132954178e-07, - "loss": 0.9526, - "num_input_tokens_seen": 160047055, - "step": 7497 - }, - { - "epoch": 0.9015811940119041, - "flos": 15799746394320.0, - "grad_norm": 4.544420346603194, - "learning_rate": 1.006498647182037e-07, - "loss": 0.9438, - "num_input_tokens_seen": 160065430, - "step": 7498 - }, - { - "epoch": 0.9017014369025431, - "flos": 17766825620280.0, - "grad_norm": 4.53489885412977, - "learning_rate": 1.004060062367713e-07, - "loss": 0.9468, - "num_input_tokens_seen": 160086245, - "step": 7499 - }, - { - "epoch": 0.9018216797931822, - "flos": 12836875488120.0, - "grad_norm": 3.649576470872757, - "learning_rate": 1.0016243592223728e-07, - "loss": 0.9195, - "num_input_tokens_seen": 160106365, - "step": 7500 - }, - { - "epoch": 0.9019419226838213, - "flos": 26602597374840.0, - "grad_norm": 4.0329704457701, - "learning_rate": 9.991915381155114e-08, - "loss": 0.8858, - "num_input_tokens_seen": 160129065, - 
"step": 7501 - }, - { - "epoch": 0.9020621655744604, - "flos": 16665260027280.0, - "grad_norm": 8.932766601273764, - "learning_rate": 9.967615994161871e-08, - "loss": 0.9751, - "num_input_tokens_seen": 160148445, - "step": 7502 - }, - { - "epoch": 0.9021824084650995, - "flos": 16245813101880.0, - "grad_norm": 3.5219482522571437, - "learning_rate": 9.943345434930161e-08, - "loss": 1.0079, - "num_input_tokens_seen": 160168415, - "step": 7503 - }, - { - "epoch": 0.9023026513557386, - "flos": 15721236156480.0, - "grad_norm": 7.5019381244154895, - "learning_rate": 9.919103707141885e-08, - "loss": 0.9134, - "num_input_tokens_seen": 160187015, - "step": 7504 - }, - { - "epoch": 0.9024228942463777, - "flos": 17208883349520.0, - "grad_norm": 19.820787495540113, - "learning_rate": 9.89489081447441e-08, - "loss": 0.9922, - "num_input_tokens_seen": 160203935, - "step": 7505 - }, - { - "epoch": 0.9025431371370167, - "flos": 17791299093240.0, - "grad_norm": 5.31905095720347, - "learning_rate": 9.870706760600844e-08, - "loss": 1.0493, - "num_input_tokens_seen": 160223605, - "step": 7506 - }, - { - "epoch": 0.9026633800276559, - "flos": 13439564071080.0, - "grad_norm": 4.2103343524590855, - "learning_rate": 9.846551549189918e-08, - "loss": 0.9456, - "num_input_tokens_seen": 160242930, - "step": 7507 - }, - { - "epoch": 0.902783622918295, - "flos": 23115302831040.0, - "grad_norm": 9.500325247567597, - "learning_rate": 9.822425183905902e-08, - "loss": 0.8954, - "num_input_tokens_seen": 160263175, - "step": 7508 - }, - { - "epoch": 0.902903865808934, - "flos": 45615493599240.0, - "grad_norm": 0.9395976124007647, - "learning_rate": 9.798327668408823e-08, - "loss": 0.9951, - "num_input_tokens_seen": 160324530, - "step": 7509 - }, - { - "epoch": 0.9030241086995732, - "flos": 16925816121840.0, - "grad_norm": 3.9101380796515253, - "learning_rate": 9.774259006354158e-08, - "loss": 0.9075, - "num_input_tokens_seen": 160344320, - "step": 7510 - }, - { - "epoch": 0.9031443515902122, - "flos": 18788715836160.0, - "grad_norm": 5.928122091418303, - "learning_rate": 9.750219201393184e-08, - "loss": 0.9835, - "num_input_tokens_seen": 160364005, - "step": 7511 - }, - { - "epoch": 0.9032645944808513, - "flos": 17742321485760.0, - "grad_norm": 2.840879899308473, - "learning_rate": 9.726208257172697e-08, - "loss": 1.0083, - "num_input_tokens_seen": 160385420, - "step": 7512 - }, - { - "epoch": 0.9033848373714904, - "flos": 15039362781360.0, - "grad_norm": 3.6114276812433315, - "learning_rate": 9.702226177335115e-08, - "loss": 0.9791, - "num_input_tokens_seen": 160403635, - "step": 7513 - }, - { - "epoch": 0.9035050802621295, - "flos": 18709990967400.0, - "grad_norm": 6.977875760005355, - "learning_rate": 9.67827296551853e-08, - "loss": 0.9549, - "num_input_tokens_seen": 160424640, - "step": 7514 - }, - { - "epoch": 0.9036253231527686, - "flos": 17213543906640.0, - "grad_norm": 8.390801852331972, - "learning_rate": 9.65434862535659e-08, - "loss": 0.8941, - "num_input_tokens_seen": 160443730, - "step": 7515 - }, - { - "epoch": 0.9037455660434077, - "flos": 12806361687840.0, - "grad_norm": 11.74137854311722, - "learning_rate": 9.630453160478635e-08, - "loss": 0.8702, - "num_input_tokens_seen": 160458805, - "step": 7516 - }, - { - "epoch": 0.9038658089340468, - "flos": 17110529534280.0, - "grad_norm": 5.7939955971029224, - "learning_rate": 9.60658657450959e-08, - "loss": 1.0347, - "num_input_tokens_seen": 160478825, - "step": 7517 - }, - { - "epoch": 0.9039860518246858, - "flos": 15510270900600.0, - "grad_norm": 
4.838170733021946, - "learning_rate": 9.582748871069979e-08, - "loss": 1.015, - "num_input_tokens_seen": 160497985, - "step": 7518 - }, - { - "epoch": 0.904106294715325, - "flos": 18946104250560.0, - "grad_norm": 7.517236612144052, - "learning_rate": 9.558940053775954e-08, - "loss": 1.0537, - "num_input_tokens_seen": 160516345, - "step": 7519 - }, - { - "epoch": 0.904226537605964, - "flos": 12600792866520.0, - "grad_norm": 4.856038508971109, - "learning_rate": 9.535160126239294e-08, - "loss": 0.9123, - "num_input_tokens_seen": 160532690, - "step": 7520 - }, - { - "epoch": 0.9043467804966031, - "flos": 17634891848760.0, - "grad_norm": 2.466809246307062, - "learning_rate": 9.511409092067424e-08, - "loss": 0.9377, - "num_input_tokens_seen": 160552765, - "step": 7521 - }, - { - "epoch": 0.9044670233872423, - "flos": 16082997591360.0, - "grad_norm": 7.220869273429955, - "learning_rate": 9.487686954863327e-08, - "loss": 0.904, - "num_input_tokens_seen": 160572205, - "step": 7522 - }, - { - "epoch": 0.9045872662778813, - "flos": 16902814403760.0, - "grad_norm": 5.346800886896864, - "learning_rate": 9.46399371822566e-08, - "loss": 0.992, - "num_input_tokens_seen": 160591700, - "step": 7523 - }, - { - "epoch": 0.9047075091685204, - "flos": 10735501550520.0, - "grad_norm": 4.658634266604373, - "learning_rate": 9.440329385748657e-08, - "loss": 0.9361, - "num_input_tokens_seen": 160608490, - "step": 7524 - }, - { - "epoch": 0.9048277520591596, - "flos": 12757353418800.0, - "grad_norm": 5.982087098860197, - "learning_rate": 9.416693961022137e-08, - "loss": 0.9378, - "num_input_tokens_seen": 160626460, - "step": 7525 - }, - { - "epoch": 0.9049479949497986, - "flos": 15537933175800.0, - "grad_norm": 22.145035976674393, - "learning_rate": 9.393087447631654e-08, - "loss": 0.9988, - "num_input_tokens_seen": 160644460, - "step": 7526 - }, - { - "epoch": 0.9050682378404377, - "flos": 14747894286240.0, - "grad_norm": 3.5273461832944264, - "learning_rate": 9.36950984915823e-08, - "loss": 0.9496, - "num_input_tokens_seen": 160662535, - "step": 7527 - }, - { - "epoch": 0.9051884807310768, - "flos": 15327673135800.0, - "grad_norm": 10.091987196107922, - "learning_rate": 9.345961169178607e-08, - "loss": 0.9262, - "num_input_tokens_seen": 160681940, - "step": 7528 - }, - { - "epoch": 0.9053087236217159, - "flos": 15563387818680.0, - "grad_norm": 2.4701202066800754, - "learning_rate": 9.322441411265081e-08, - "loss": 0.9533, - "num_input_tokens_seen": 160702645, - "step": 7529 - }, - { - "epoch": 0.9054289665123549, - "flos": 12076215921120.0, - "grad_norm": 4.467756254236082, - "learning_rate": 9.298950578985554e-08, - "loss": 0.9513, - "num_input_tokens_seen": 160719440, - "step": 7530 - }, - { - "epoch": 0.905549209402994, - "flos": 14750745811320.0, - "grad_norm": 2.81611511164796, - "learning_rate": 9.275488675903665e-08, - "loss": 0.9322, - "num_input_tokens_seen": 160738105, - "step": 7531 - }, - { - "epoch": 0.9056694522936332, - "flos": 15404067726000.0, - "grad_norm": 3.4549384555220826, - "learning_rate": 9.252055705578454e-08, - "loss": 0.9618, - "num_input_tokens_seen": 160757325, - "step": 7532 - }, - { - "epoch": 0.9057896951842722, - "flos": 21069130797600.0, - "grad_norm": 4.761424330136029, - "learning_rate": 9.228651671564747e-08, - "loss": 0.9404, - "num_input_tokens_seen": 160779075, - "step": 7533 - }, - { - "epoch": 0.9059099380749113, - "flos": 19864182893520.0, - "grad_norm": 13.488235999417292, - "learning_rate": 9.205276577412901e-08, - "loss": 1.012, - "num_input_tokens_seen": 160801575, - 
"step": 7534 - }, - { - "epoch": 0.9060301809655504, - "flos": 12574387715280.0, - "grad_norm": 13.288354602507873, - "learning_rate": 9.181930426668905e-08, - "loss": 0.9946, - "num_input_tokens_seen": 160818090, - "step": 7535 - }, - { - "epoch": 0.9061504238561895, - "flos": 22643413541880.0, - "grad_norm": 3.931985899635094, - "learning_rate": 9.158613222874346e-08, - "loss": 0.9177, - "num_input_tokens_seen": 160839435, - "step": 7536 - }, - { - "epoch": 0.9062706667468285, - "flos": 14226751435560.0, - "grad_norm": 4.240005859514902, - "learning_rate": 9.135324969566394e-08, - "loss": 1.0454, - "num_input_tokens_seen": 160858655, - "step": 7537 - }, - { - "epoch": 0.9063909096374677, - "flos": 13068389537280.0, - "grad_norm": 7.4631226859693305, - "learning_rate": 9.112065670277913e-08, - "loss": 0.9747, - "num_input_tokens_seen": 160874740, - "step": 7538 - }, - { - "epoch": 0.9065111525281068, - "flos": 23928374100240.0, - "grad_norm": 4.272065048622632, - "learning_rate": 9.088835328537303e-08, - "loss": 0.9499, - "num_input_tokens_seen": 160896050, - "step": 7539 - }, - { - "epoch": 0.9066313954187458, - "flos": 16612572371040.0, - "grad_norm": 4.862062551242761, - "learning_rate": 9.065633947868568e-08, - "loss": 0.9305, - "num_input_tokens_seen": 160915375, - "step": 7540 - }, - { - "epoch": 0.906751638309385, - "flos": 18683555154600.0, - "grad_norm": 5.658898845086031, - "learning_rate": 9.042461531791379e-08, - "loss": 1.0127, - "num_input_tokens_seen": 160933515, - "step": 7541 - }, - { - "epoch": 0.906871881200024, - "flos": 11708659451400.0, - "grad_norm": 3.4724516491857185, - "learning_rate": 9.019318083820903e-08, - "loss": 0.9921, - "num_input_tokens_seen": 160951815, - "step": 7542 - }, - { - "epoch": 0.9069921240906631, - "flos": 17501333014560.0, - "grad_norm": 4.100523957058672, - "learning_rate": 8.996203607468045e-08, - "loss": 1.0744, - "num_input_tokens_seen": 160970535, - "step": 7543 - }, - { - "epoch": 0.9071123669813023, - "flos": 18054890682240.0, - "grad_norm": 3.457186420643602, - "learning_rate": 8.973118106239241e-08, - "loss": 0.9831, - "num_input_tokens_seen": 160992860, - "step": 7544 - }, - { - "epoch": 0.9072326098719413, - "flos": 19026024920160.0, - "grad_norm": 16.25954778851827, - "learning_rate": 8.95006158363656e-08, - "loss": 1.165, - "num_input_tokens_seen": 161012765, - "step": 7545 - }, - { - "epoch": 0.9073528527625804, - "flos": 16979791563600.0, - "grad_norm": 7.386958626211556, - "learning_rate": 8.9270340431576e-08, - "loss": 0.9959, - "num_input_tokens_seen": 161031575, - "step": 7546 - }, - { - "epoch": 0.9074730956532195, - "flos": 27022074961800.0, - "grad_norm": 5.077228361345583, - "learning_rate": 8.904035488295658e-08, - "loss": 0.9543, - "num_input_tokens_seen": 161050795, - "step": 7547 - }, - { - "epoch": 0.9075933385438586, - "flos": 46661305380000.0, - "grad_norm": 0.69150710582599, - "learning_rate": 8.881065922539632e-08, - "loss": 0.7939, - "num_input_tokens_seen": 161110955, - "step": 7548 - }, - { - "epoch": 0.9077135814344977, - "flos": 14142078224160.0, - "grad_norm": 7.738405229556421, - "learning_rate": 8.85812534937389e-08, - "loss": 0.9669, - "num_input_tokens_seen": 161128775, - "step": 7549 - }, - { - "epoch": 0.9078338243251368, - "flos": 12416692685280.0, - "grad_norm": 5.217057553272741, - "learning_rate": 8.835213772278583e-08, - "loss": 0.8897, - "num_input_tokens_seen": 161145350, - "step": 7550 - }, - { - "epoch": 0.9079540672157759, - "flos": 20518884578400.0, - "grad_norm": 5.520150465089386, - 
"learning_rate": 8.812331194729373e-08, - "loss": 1.0182, - "num_input_tokens_seen": 161164715, - "step": 7551 - }, - { - "epoch": 0.9080743101064149, - "flos": 16717487760120.0, - "grad_norm": 3.913835408519432, - "learning_rate": 8.789477620197461e-08, - "loss": 0.9464, - "num_input_tokens_seen": 161183960, - "step": 7552 - }, - { - "epoch": 0.9081945529970541, - "flos": 16187912980440.0, - "grad_norm": 5.814537757613824, - "learning_rate": 8.766653052149831e-08, - "loss": 1.0187, - "num_input_tokens_seen": 161198865, - "step": 7553 - }, - { - "epoch": 0.9083147958876931, - "flos": 13381847919000.0, - "grad_norm": 15.920616631762316, - "learning_rate": 8.743857494048823e-08, - "loss": 0.9765, - "num_input_tokens_seen": 161215400, - "step": 7554 - }, - { - "epoch": 0.9084350387783322, - "flos": 13407363885000.0, - "grad_norm": 4.021389515940481, - "learning_rate": 8.721090949352605e-08, - "loss": 0.8702, - "num_input_tokens_seen": 161231360, - "step": 7555 - }, - { - "epoch": 0.9085552816689714, - "flos": 14620191810000.0, - "grad_norm": 3.784331398168579, - "learning_rate": 8.698353421514793e-08, - "loss": 0.947, - "num_input_tokens_seen": 161249455, - "step": 7556 - }, - { - "epoch": 0.9086755245596104, - "flos": 12836016964440.0, - "grad_norm": 3.9310874189711504, - "learning_rate": 8.67564491398467e-08, - "loss": 1.0216, - "num_input_tokens_seen": 161266180, - "step": 7557 - }, - { - "epoch": 0.9087957674502495, - "flos": 13565978761800.0, - "grad_norm": 4.471026074372062, - "learning_rate": 8.652965430207104e-08, - "loss": 0.9708, - "num_input_tokens_seen": 161283805, - "step": 7558 - }, - { - "epoch": 0.9089160103408886, - "flos": 12833257424040.0, - "grad_norm": 4.86581710210803, - "learning_rate": 8.630314973622521e-08, - "loss": 0.8981, - "num_input_tokens_seen": 161301070, - "step": 7559 - }, - { - "epoch": 0.9090362532315277, - "flos": 23797206867720.0, - "grad_norm": 4.914976486817286, - "learning_rate": 8.607693547666995e-08, - "loss": 0.947, - "num_input_tokens_seen": 161323330, - "step": 7560 - }, - { - "epoch": 0.9091564961221668, - "flos": 51193583842440.0, - "grad_norm": 0.9291396891237115, - "learning_rate": 8.585101155772201e-08, - "loss": 0.8558, - "num_input_tokens_seen": 161385170, - "step": 7561 - }, - { - "epoch": 0.9092767390128058, - "flos": 17578279512840.0, - "grad_norm": 3.490379326216758, - "learning_rate": 8.562537801365377e-08, - "loss": 0.9151, - "num_input_tokens_seen": 161404625, - "step": 7562 - }, - { - "epoch": 0.909396981903445, - "flos": 16769654169840.0, - "grad_norm": 6.2437096030271615, - "learning_rate": 8.540003487869362e-08, - "loss": 0.9142, - "num_input_tokens_seen": 161426015, - "step": 7563 - }, - { - "epoch": 0.909517224794084, - "flos": 16638640245120.0, - "grad_norm": 7.2681381637213915, - "learning_rate": 8.517498218702557e-08, - "loss": 1.0235, - "num_input_tokens_seen": 161443665, - "step": 7564 - }, - { - "epoch": 0.9096374676847231, - "flos": 13623418959840.0, - "grad_norm": 4.470444151384125, - "learning_rate": 8.49502199727905e-08, - "loss": 0.9331, - "num_input_tokens_seen": 161461410, - "step": 7565 - }, - { - "epoch": 0.9097577105753623, - "flos": 23745285750480.0, - "grad_norm": 4.545596857444056, - "learning_rate": 8.472574827008428e-08, - "loss": 0.8696, - "num_input_tokens_seen": 161482015, - "step": 7566 - }, - { - "epoch": 0.9098779534660013, - "flos": 15562008048480.0, - "grad_norm": 2.970049443202648, - "learning_rate": 8.450156711295942e-08, - "loss": 1.0648, - "num_input_tokens_seen": 161501905, - "step": 7567 - }, 
- { - "epoch": 0.9099981963566404, - "flos": 18309222480120.0, - "grad_norm": 5.036084429153614, - "learning_rate": 8.427767653542383e-08, - "loss": 1.0838, - "num_input_tokens_seen": 161516795, - "step": 7568 - }, - { - "epoch": 0.9101184392472795, - "flos": 14960760558840.0, - "grad_norm": 4.085808425527597, - "learning_rate": 8.405407657144125e-08, - "loss": 0.9293, - "num_input_tokens_seen": 161535675, - "step": 7569 - }, - { - "epoch": 0.9102386821379186, - "flos": 17607536189160.0, - "grad_norm": 3.720835637814163, - "learning_rate": 8.383076725493232e-08, - "loss": 0.9436, - "num_input_tokens_seen": 161552715, - "step": 7570 - }, - { - "epoch": 0.9103589250285576, - "flos": 16033682706720.0, - "grad_norm": 7.77970754510121, - "learning_rate": 8.360774861977216e-08, - "loss": 0.909, - "num_input_tokens_seen": 161571555, - "step": 7571 - }, - { - "epoch": 0.9104791679191968, - "flos": 18053817527640.0, - "grad_norm": 3.8743049244297922, - "learning_rate": 8.338502069979281e-08, - "loss": 0.9722, - "num_input_tokens_seen": 161591585, - "step": 7572 - }, - { - "epoch": 0.9105994108098359, - "flos": 10186420470600.0, - "grad_norm": 10.672123427173442, - "learning_rate": 8.316258352878214e-08, - "loss": 1.0091, - "num_input_tokens_seen": 161607725, - "step": 7573 - }, - { - "epoch": 0.9107196537004749, - "flos": 19019616654120.0, - "grad_norm": 3.4099392292055586, - "learning_rate": 8.294043714048338e-08, - "loss": 0.9334, - "num_input_tokens_seen": 161626525, - "step": 7574 - }, - { - "epoch": 0.9108398965911141, - "flos": 47639491776720.0, - "grad_norm": 0.8008880544009056, - "learning_rate": 8.271858156859624e-08, - "loss": 0.872, - "num_input_tokens_seen": 161691615, - "step": 7575 - }, - { - "epoch": 0.9109601394817531, - "flos": 18080989217880.0, - "grad_norm": 2.9590686546887723, - "learning_rate": 8.249701684677557e-08, - "loss": 0.9621, - "num_input_tokens_seen": 161712660, - "step": 7576 - }, - { - "epoch": 0.9110803823723922, - "flos": 16166965586880.0, - "grad_norm": 3.1063558211233238, - "learning_rate": 8.227574300863294e-08, - "loss": 1.033, - "num_input_tokens_seen": 161732550, - "step": 7577 - }, - { - "epoch": 0.9112006252630314, - "flos": 34652929473840.0, - "grad_norm": 5.523350478893704, - "learning_rate": 8.205476008773548e-08, - "loss": 0.9241, - "num_input_tokens_seen": 161756270, - "step": 7578 - }, - { - "epoch": 0.9113208681536704, - "flos": 21386084597160.0, - "grad_norm": 19.044725479867534, - "learning_rate": 8.183406811760596e-08, - "loss": 1.0528, - "num_input_tokens_seen": 161775720, - "step": 7579 - }, - { - "epoch": 0.9114411110443095, - "flos": 18213352251240.0, - "grad_norm": 4.864856241264375, - "learning_rate": 8.161366713172313e-08, - "loss": 0.9723, - "num_input_tokens_seen": 161797830, - "step": 7580 - }, - { - "epoch": 0.9115613539349486, - "flos": 13041248508600.0, - "grad_norm": 4.143693815975277, - "learning_rate": 8.139355716352137e-08, - "loss": 1.0748, - "num_input_tokens_seen": 161812390, - "step": 7581 - }, - { - "epoch": 0.9116815968255877, - "flos": 15432741832680.0, - "grad_norm": 5.530623233435787, - "learning_rate": 8.117373824639196e-08, - "loss": 0.9281, - "num_input_tokens_seen": 161832375, - "step": 7582 - }, - { - "epoch": 0.9118018397162267, - "flos": 46799064847920.0, - "grad_norm": 0.7599331394073273, - "learning_rate": 8.095421041368067e-08, - "loss": 0.8393, - "num_input_tokens_seen": 161891510, - "step": 7583 - }, - { - "epoch": 0.9119220826068659, - "flos": 14855354584800.0, - "grad_norm": 6.8351754597576955, - 
"learning_rate": 8.073497369868999e-08, - "loss": 0.9377, - "num_input_tokens_seen": 161909690, - "step": 7584 - }, - { - "epoch": 0.912042325497505, - "flos": 20204966273280.0, - "grad_norm": 7.8028507305154235, - "learning_rate": 8.051602813467772e-08, - "loss": 0.9789, - "num_input_tokens_seen": 161931265, - "step": 7585 - }, - { - "epoch": 0.912162568388144, - "flos": 12443465775240.0, - "grad_norm": 4.22595075033568, - "learning_rate": 8.029737375485756e-08, - "loss": 0.9376, - "num_input_tokens_seen": 161950215, - "step": 7586 - }, - { - "epoch": 0.9122828112787832, - "flos": 14068351189680.0, - "grad_norm": 5.099144074664831, - "learning_rate": 8.007901059239986e-08, - "loss": 0.9587, - "num_input_tokens_seen": 161969215, - "step": 7587 - }, - { - "epoch": 0.9124030541694222, - "flos": 14776997654760.0, - "grad_norm": 2.99626642597456, - "learning_rate": 7.986093868042964e-08, - "loss": 1.0322, - "num_input_tokens_seen": 161989180, - "step": 7588 - }, - { - "epoch": 0.9125232970600613, - "flos": 17923784772840.0, - "grad_norm": 3.654610421100598, - "learning_rate": 7.964315805202826e-08, - "loss": 0.9045, - "num_input_tokens_seen": 162009480, - "step": 7589 - }, - { - "epoch": 0.9126435399507005, - "flos": 13990668813960.0, - "grad_norm": 15.083961487786922, - "learning_rate": 7.942566874023304e-08, - "loss": 0.9624, - "num_input_tokens_seen": 162028385, - "step": 7590 - }, - { - "epoch": 0.9127637828413395, - "flos": 13885753424880.0, - "grad_norm": 8.944506479789267, - "learning_rate": 7.920847077803649e-08, - "loss": 0.9255, - "num_input_tokens_seen": 162045895, - "step": 7591 - }, - { - "epoch": 0.9128840257319786, - "flos": 14357979991200.0, - "grad_norm": 6.609035618502946, - "learning_rate": 7.899156419838826e-08, - "loss": 1.0296, - "num_input_tokens_seen": 162064585, - "step": 7592 - }, - { - "epoch": 0.9130042686226177, - "flos": 17683286886600.0, - "grad_norm": 6.126317096709376, - "learning_rate": 7.87749490341918e-08, - "loss": 0.883, - "num_input_tokens_seen": 162084580, - "step": 7593 - }, - { - "epoch": 0.9131245115132568, - "flos": 16765606843920.0, - "grad_norm": 7.172589301812873, - "learning_rate": 7.855862531830836e-08, - "loss": 1.0713, - "num_input_tokens_seen": 162100410, - "step": 7594 - }, - { - "epoch": 0.9132447544038959, - "flos": 14142108885720.0, - "grad_norm": 4.351622329382803, - "learning_rate": 7.834259308355373e-08, - "loss": 0.9533, - "num_input_tokens_seen": 162118895, - "step": 7595 - }, - { - "epoch": 0.9133649972945349, - "flos": 15616443413640.0, - "grad_norm": 4.881088758240349, - "learning_rate": 7.812685236269989e-08, - "loss": 0.9689, - "num_input_tokens_seen": 162137275, - "step": 7596 - }, - { - "epoch": 0.9134852401851741, - "flos": 51023128055400.0, - "grad_norm": 0.8423531590528673, - "learning_rate": 7.791140318847445e-08, - "loss": 0.8459, - "num_input_tokens_seen": 162195130, - "step": 7597 - }, - { - "epoch": 0.9136054830758131, - "flos": 16799248123320.0, - "grad_norm": 10.891217296371298, - "learning_rate": 7.769624559356081e-08, - "loss": 1.0236, - "num_input_tokens_seen": 162218245, - "step": 7598 - }, - { - "epoch": 0.9137257259664522, - "flos": 16663328349000.0, - "grad_norm": 4.2821400406381755, - "learning_rate": 7.748137961059842e-08, - "loss": 0.9376, - "num_input_tokens_seen": 162231945, - "step": 7599 - }, - { - "epoch": 0.9138459688570914, - "flos": 13564323037560.0, - "grad_norm": 7.871905006269726, - "learning_rate": 7.726680527218211e-08, - "loss": 0.8852, - "num_input_tokens_seen": 162248705, - "step": 7600 - }, 
- { - "epoch": 0.9139662117477304, - "flos": 33080026499760.0, - "grad_norm": 4.480201400589459, - "learning_rate": 7.70525226108627e-08, - "loss": 0.9732, - "num_input_tokens_seen": 162272095, - "step": 7601 - }, - { - "epoch": 0.9140864546383695, - "flos": 15825844929960.0, - "grad_norm": 3.3762672305806127, - "learning_rate": 7.683853165914666e-08, - "loss": 1.0272, - "num_input_tokens_seen": 162289585, - "step": 7602 - }, - { - "epoch": 0.9142066975290086, - "flos": 12416999300880.0, - "grad_norm": 3.593360149965266, - "learning_rate": 7.662483244949602e-08, - "loss": 0.9962, - "num_input_tokens_seen": 162306565, - "step": 7603 - }, - { - "epoch": 0.9143269404196477, - "flos": 12547859917800.0, - "grad_norm": 7.773040730687556, - "learning_rate": 7.641142501432951e-08, - "loss": 1.0386, - "num_input_tokens_seen": 162322480, - "step": 7604 - }, - { - "epoch": 0.9144471833102867, - "flos": 23766754390560.0, - "grad_norm": 4.267092050765035, - "learning_rate": 7.619830938602013e-08, - "loss": 0.9708, - "num_input_tokens_seen": 162343425, - "step": 7605 - }, - { - "epoch": 0.9145674262009259, - "flos": 14960361958560.0, - "grad_norm": 6.92109624128666, - "learning_rate": 7.598548559689777e-08, - "loss": 1.0338, - "num_input_tokens_seen": 162361545, - "step": 7606 - }, - { - "epoch": 0.914687669091565, - "flos": 11892545001720.0, - "grad_norm": 6.874976695628116, - "learning_rate": 7.577295367924751e-08, - "loss": 1.0306, - "num_input_tokens_seen": 162377665, - "step": 7607 - }, - { - "epoch": 0.914807911982204, - "flos": 18341238696840.0, - "grad_norm": 3.9826672054231236, - "learning_rate": 7.556071366531002e-08, - "loss": 1.063, - "num_input_tokens_seen": 162398355, - "step": 7608 - }, - { - "epoch": 0.9149281548728432, - "flos": 13622928374880.0, - "grad_norm": 5.592987008510583, - "learning_rate": 7.53487655872822e-08, - "loss": 1.0026, - "num_input_tokens_seen": 162417245, - "step": 7609 - }, - { - "epoch": 0.9150483977634822, - "flos": 19129897816200.0, - "grad_norm": 48.59350130563371, - "learning_rate": 7.513710947731656e-08, - "loss": 0.9726, - "num_input_tokens_seen": 162438175, - "step": 7610 - }, - { - "epoch": 0.9151686406541213, - "flos": 15564154357680.0, - "grad_norm": 8.166504942661556, - "learning_rate": 7.492574536752095e-08, - "loss": 1.07, - "num_input_tokens_seen": 162457885, - "step": 7611 - }, - { - "epoch": 0.9152888835447605, - "flos": 19444705306560.0, - "grad_norm": 3.584644358723447, - "learning_rate": 7.471467328995907e-08, - "loss": 1.0131, - "num_input_tokens_seen": 162476415, - "step": 7612 - }, - { - "epoch": 0.9154091264353995, - "flos": 9427815228120.0, - "grad_norm": 5.718080908817674, - "learning_rate": 7.450389327665018e-08, - "loss": 0.8241, - "num_input_tokens_seen": 162493970, - "step": 7613 - }, - { - "epoch": 0.9155293693260386, - "flos": 14330808300960.0, - "grad_norm": 6.4808687221167, - "learning_rate": 7.429340535957029e-08, - "loss": 0.881, - "num_input_tokens_seen": 162508885, - "step": 7614 - }, - { - "epoch": 0.9156496122166777, - "flos": 13728671626080.0, - "grad_norm": 6.054568421208058, - "learning_rate": 7.40832095706494e-08, - "loss": 0.9349, - "num_input_tokens_seen": 162525300, - "step": 7615 - }, - { - "epoch": 0.9157698551073168, - "flos": 14988146880000.0, - "grad_norm": 4.141094828887155, - "learning_rate": 7.387330594177443e-08, - "loss": 1.0251, - "num_input_tokens_seen": 162547095, - "step": 7616 - }, - { - "epoch": 0.9158900979979558, - "flos": 17924520650280.0, - "grad_norm": 4.693134024643511, - "learning_rate": 
7.366369450478749e-08, - "loss": 1.0244, - "num_input_tokens_seen": 162567925, - "step": 7617 - }, - { - "epoch": 0.916010340888595, - "flos": 21483334596240.0, - "grad_norm": 5.181710194948536, - "learning_rate": 7.345437529148646e-08, - "loss": 0.8858, - "num_input_tokens_seen": 162586655, - "step": 7618 - }, - { - "epoch": 0.9161305837792341, - "flos": 12102038502720.0, - "grad_norm": 4.371406387180204, - "learning_rate": 7.324534833362483e-08, - "loss": 0.9594, - "num_input_tokens_seen": 162603950, - "step": 7619 - }, - { - "epoch": 0.9162508266698731, - "flos": 16271819652840.0, - "grad_norm": 4.450497143010687, - "learning_rate": 7.303661366291192e-08, - "loss": 0.9101, - "num_input_tokens_seen": 162624340, - "step": 7620 - }, - { - "epoch": 0.9163710695605123, - "flos": 14174063779320.0, - "grad_norm": 3.5408032580330087, - "learning_rate": 7.28281713110126e-08, - "loss": 1.0479, - "num_input_tokens_seen": 162642135, - "step": 7621 - }, - { - "epoch": 0.9164913124511513, - "flos": 16192788168480.0, - "grad_norm": 5.81349583780716, - "learning_rate": 7.262002130954759e-08, - "loss": 0.9979, - "num_input_tokens_seen": 162660310, - "step": 7622 - }, - { - "epoch": 0.9166115553417904, - "flos": 17714383256520.0, - "grad_norm": 4.77469200219452, - "learning_rate": 7.241216369009296e-08, - "loss": 1.0145, - "num_input_tokens_seen": 162680215, - "step": 7623 - }, - { - "epoch": 0.9167317982324296, - "flos": 18290728011360.0, - "grad_norm": 9.254412037016664, - "learning_rate": 7.220459848418037e-08, - "loss": 0.8955, - "num_input_tokens_seen": 162700010, - "step": 7624 - }, - { - "epoch": 0.9168520411230686, - "flos": 11053528504680.0, - "grad_norm": 4.212060442441011, - "learning_rate": 7.199732572329708e-08, - "loss": 1.0224, - "num_input_tokens_seen": 162717630, - "step": 7625 - }, - { - "epoch": 0.9169722840137077, - "flos": 21563745850800.0, - "grad_norm": 6.982246228122918, - "learning_rate": 7.179034543888684e-08, - "loss": 0.9771, - "num_input_tokens_seen": 162736855, - "step": 7626 - }, - { - "epoch": 0.9170925269043467, - "flos": 15982773420960.0, - "grad_norm": 4.536818456826885, - "learning_rate": 7.158365766234808e-08, - "loss": 1.0044, - "num_input_tokens_seen": 162755425, - "step": 7627 - }, - { - "epoch": 0.9172127697949859, - "flos": 16272003622200.0, - "grad_norm": 4.041441344177562, - "learning_rate": 7.137726242503527e-08, - "loss": 0.9472, - "num_input_tokens_seen": 162774065, - "step": 7628 - }, - { - "epoch": 0.917333012685625, - "flos": 12360141672480.0, - "grad_norm": 5.070918113148141, - "learning_rate": 7.11711597582585e-08, - "loss": 1.0078, - "num_input_tokens_seen": 162791145, - "step": 7629 - }, - { - "epoch": 0.917453255576264, - "flos": 10109228679840.0, - "grad_norm": 5.914491423721023, - "learning_rate": 7.096534969328271e-08, - "loss": 1.0285, - "num_input_tokens_seen": 162808310, - "step": 7630 - }, - { - "epoch": 0.9175734984669032, - "flos": 14724861906600.0, - "grad_norm": 7.30663905874081, - "learning_rate": 7.075983226132987e-08, - "loss": 1.0628, - "num_input_tokens_seen": 162826960, - "step": 7631 - }, - { - "epoch": 0.9176937413575422, - "flos": 10476969118920.0, - "grad_norm": 7.123639696334547, - "learning_rate": 7.055460749357656e-08, - "loss": 1.01, - "num_input_tokens_seen": 162842960, - "step": 7632 - }, - { - "epoch": 0.9178139842481813, - "flos": 13095315935040.0, - "grad_norm": 6.632996537493914, - "learning_rate": 7.034967542115521e-08, - "loss": 0.9335, - "num_input_tokens_seen": 162860945, - "step": 7633 - }, - { - "epoch": 
0.9179342271388204, - "flos": 14226230189040.0, - "grad_norm": 12.903019018054396, - "learning_rate": 7.014503607515388e-08, - "loss": 0.9762, - "num_input_tokens_seen": 162879970, - "step": 7634 - }, - { - "epoch": 0.9180544700294595, - "flos": 17552180977200.0, - "grad_norm": 4.030525793036804, - "learning_rate": 6.994068948661592e-08, - "loss": 0.9018, - "num_input_tokens_seen": 162897845, - "step": 7635 - }, - { - "epoch": 0.9181747129200986, - "flos": 11840255945760.0, - "grad_norm": 5.422743142402797, - "learning_rate": 6.973663568654142e-08, - "loss": 0.9812, - "num_input_tokens_seen": 162915270, - "step": 7636 - }, - { - "epoch": 0.9182949558107377, - "flos": 17260957774560.0, - "grad_norm": 5.443727982451349, - "learning_rate": 6.953287470588386e-08, - "loss": 0.8771, - "num_input_tokens_seen": 162932945, - "step": 7637 - }, - { - "epoch": 0.9184151987013768, - "flos": 15687349584600.0, - "grad_norm": 4.581270458805465, - "learning_rate": 6.932940657555452e-08, - "loss": 1.0749, - "num_input_tokens_seen": 162948795, - "step": 7638 - }, - { - "epoch": 0.9185354415920158, - "flos": 22936936361520.0, - "grad_norm": 3.3377039071989816, - "learning_rate": 6.912623132641938e-08, - "loss": 0.9884, - "num_input_tokens_seen": 162973605, - "step": 7639 - }, - { - "epoch": 0.918655684482655, - "flos": 14909483334360.0, - "grad_norm": 12.1217861045416, - "learning_rate": 6.892334898929952e-08, - "loss": 0.9848, - "num_input_tokens_seen": 162993570, - "step": 7640 - }, - { - "epoch": 0.918775927373294, - "flos": 11001822018360.0, - "grad_norm": 4.741835152169628, - "learning_rate": 6.872075959497236e-08, - "loss": 1.0655, - "num_input_tokens_seen": 163012065, - "step": 7641 - }, - { - "epoch": 0.9188961702639331, - "flos": 21331403939520.0, - "grad_norm": 3.9807599591019986, - "learning_rate": 6.85184631741702e-08, - "loss": 1.0507, - "num_input_tokens_seen": 163032350, - "step": 7642 - }, - { - "epoch": 0.9190164131545723, - "flos": 14695697214960.0, - "grad_norm": 10.738477697480093, - "learning_rate": 6.831645975758161e-08, - "loss": 1.0061, - "num_input_tokens_seen": 163050010, - "step": 7643 - }, - { - "epoch": 0.9191366560452113, - "flos": 18238254986040.0, - "grad_norm": 4.62364486224167, - "learning_rate": 6.811474937585026e-08, - "loss": 0.9033, - "num_input_tokens_seen": 163069520, - "step": 7644 - }, - { - "epoch": 0.9192568989358504, - "flos": 15223003039200.0, - "grad_norm": 4.911582624279273, - "learning_rate": 6.79133320595755e-08, - "loss": 1.0075, - "num_input_tokens_seen": 163089160, - "step": 7645 - }, - { - "epoch": 0.9193771418264896, - "flos": 16481159846040.0, - "grad_norm": 4.258123152013098, - "learning_rate": 6.771220783931198e-08, - "loss": 0.9631, - "num_input_tokens_seen": 163109040, - "step": 7646 - }, - { - "epoch": 0.9194973847171286, - "flos": 50550441565680.0, - "grad_norm": 0.8587123049662795, - "learning_rate": 6.751137674556994e-08, - "loss": 0.9034, - "num_input_tokens_seen": 163169145, - "step": 7647 - }, - { - "epoch": 0.9196176276077677, - "flos": 10397998957680.0, - "grad_norm": 8.451257411240926, - "learning_rate": 6.731083880881572e-08, - "loss": 1.006, - "num_input_tokens_seen": 163185085, - "step": 7648 - }, - { - "epoch": 0.9197378704984068, - "flos": 16560130007280.0, - "grad_norm": 5.014036693607632, - "learning_rate": 6.711059405947072e-08, - "loss": 1.0356, - "num_input_tokens_seen": 163202995, - "step": 7649 - }, - { - "epoch": 0.9198581133890459, - "flos": 14409533169720.0, - "grad_norm": 6.697808422521125, - "learning_rate": 
6.691064252791156e-08, - "loss": 0.9941, - "num_input_tokens_seen": 163222190, - "step": 7650 - }, - { - "epoch": 0.9199783562796849, - "flos": 12521638735920.0, - "grad_norm": 5.399187005619946, - "learning_rate": 6.67109842444713e-08, - "loss": 1.0047, - "num_input_tokens_seen": 163240840, - "step": 7651 - }, - { - "epoch": 0.9200985991703241, - "flos": 12521945351520.0, - "grad_norm": 24.855445114812746, - "learning_rate": 6.651161923943704e-08, - "loss": 0.9927, - "num_input_tokens_seen": 163258465, - "step": 7652 - }, - { - "epoch": 0.9202188420609632, - "flos": 14908410179760.0, - "grad_norm": 5.207415853434933, - "learning_rate": 6.631254754305326e-08, - "loss": 0.9924, - "num_input_tokens_seen": 163277645, - "step": 7653 - }, - { - "epoch": 0.9203390849516022, - "flos": 9821316925680.0, - "grad_norm": 7.871283729096882, - "learning_rate": 6.611376918551848e-08, - "loss": 1.014, - "num_input_tokens_seen": 163296150, - "step": 7654 - }, - { - "epoch": 0.9204593278422414, - "flos": 15036879195000.0, - "grad_norm": 6.052406639441074, - "learning_rate": 6.591528419698744e-08, - "loss": 1.0149, - "num_input_tokens_seen": 163315655, - "step": 7655 - }, - { - "epoch": 0.9205795707328804, - "flos": 10240334589240.0, - "grad_norm": 4.588936288284527, - "learning_rate": 6.571709260756986e-08, - "loss": 1.0445, - "num_input_tokens_seen": 163332020, - "step": 7656 - }, - { - "epoch": 0.9206998136235195, - "flos": 15929380548840.0, - "grad_norm": 8.772377391707396, - "learning_rate": 6.551919444733122e-08, - "loss": 0.9708, - "num_input_tokens_seen": 163349555, - "step": 7657 - }, - { - "epoch": 0.9208200565141585, - "flos": 38166107922360.0, - "grad_norm": 3.217390643723775, - "learning_rate": 6.53215897462931e-08, - "loss": 0.8876, - "num_input_tokens_seen": 163373030, - "step": 7658 - }, - { - "epoch": 0.9209402994047977, - "flos": 21801974781600.0, - "grad_norm": 6.155419618013716, - "learning_rate": 6.512427853443103e-08, - "loss": 0.9765, - "num_input_tokens_seen": 163394830, - "step": 7659 - }, - { - "epoch": 0.9210605422954368, - "flos": 20755672415880.0, - "grad_norm": 3.170793019931156, - "learning_rate": 6.492726084167799e-08, - "loss": 0.9863, - "num_input_tokens_seen": 163416665, - "step": 7660 - }, - { - "epoch": 0.9211807851860758, - "flos": 39245708742240.0, - "grad_norm": 0.7983869906161584, - "learning_rate": 6.473053669792072e-08, - "loss": 0.8103, - "num_input_tokens_seen": 163471075, - "step": 7661 - }, - { - "epoch": 0.921301028076715, - "flos": 13617746571240.0, - "grad_norm": 4.372923439945078, - "learning_rate": 6.453410613300248e-08, - "loss": 0.9524, - "num_input_tokens_seen": 163488725, - "step": 7662 - }, - { - "epoch": 0.921421270967354, - "flos": 19601541812880.0, - "grad_norm": 5.096779700440204, - "learning_rate": 6.43379691767214e-08, - "loss": 0.8029, - "num_input_tokens_seen": 163507650, - "step": 7663 - }, - { - "epoch": 0.9215415138579931, - "flos": 50280717664680.0, - "grad_norm": 0.7617591651562949, - "learning_rate": 6.414212585883105e-08, - "loss": 0.8348, - "num_input_tokens_seen": 163570000, - "step": 7664 - }, - { - "epoch": 0.9216617567486323, - "flos": 25369741903080.0, - "grad_norm": 3.477161004486713, - "learning_rate": 6.394657620904143e-08, - "loss": 0.9177, - "num_input_tokens_seen": 163592830, - "step": 7665 - }, - { - "epoch": 0.9217819996392713, - "flos": 21043492185360.0, - "grad_norm": 6.37510172444882, - "learning_rate": 6.375132025701657e-08, - "loss": 0.9446, - "num_input_tokens_seen": 163614850, - "step": 7666 - }, - { - "epoch": 
0.9219022425299104, - "flos": 10293052907040.0, - "grad_norm": 7.586140403037609, - "learning_rate": 6.355635803237724e-08, - "loss": 0.9069, - "num_input_tokens_seen": 163630270, - "step": 7667 - }, - { - "epoch": 0.9220224854205495, - "flos": 12810500998440.0, - "grad_norm": 5.331972763243791, - "learning_rate": 6.336168956469867e-08, - "loss": 1.0226, - "num_input_tokens_seen": 163648465, - "step": 7668 - }, - { - "epoch": 0.9221427283111886, - "flos": 17634922510320.0, - "grad_norm": 3.2734358976446707, - "learning_rate": 6.316731488351168e-08, - "loss": 0.947, - "num_input_tokens_seen": 163669375, - "step": 7669 - }, - { - "epoch": 0.9222629712018277, - "flos": 9768690592560.0, - "grad_norm": 12.821117647820639, - "learning_rate": 6.297323401830334e-08, - "loss": 0.8636, - "num_input_tokens_seen": 163687880, - "step": 7670 - }, - { - "epoch": 0.9223832140924668, - "flos": 15353618363640.0, - "grad_norm": 7.968742580036374, - "learning_rate": 6.277944699851523e-08, - "loss": 0.9056, - "num_input_tokens_seen": 163707120, - "step": 7671 - }, - { - "epoch": 0.9225034569831059, - "flos": 15012926968560.0, - "grad_norm": 5.410336047328255, - "learning_rate": 6.25859538535447e-08, - "loss": 0.9654, - "num_input_tokens_seen": 163727635, - "step": 7672 - }, - { - "epoch": 0.9226236998737449, - "flos": 8798476201440.0, - "grad_norm": 8.982542696988569, - "learning_rate": 6.239275461274474e-08, - "loss": 1.0029, - "num_input_tokens_seen": 163743730, - "step": 7673 - }, - { - "epoch": 0.9227439427643841, - "flos": 18658131173280.0, - "grad_norm": 3.0473640245077007, - "learning_rate": 6.219984930542299e-08, - "loss": 1.0907, - "num_input_tokens_seen": 163764190, - "step": 7674 - }, - { - "epoch": 0.9228641856550232, - "flos": 12731438852520.0, - "grad_norm": 4.663787271375752, - "learning_rate": 6.200723796084383e-08, - "loss": 0.9643, - "num_input_tokens_seen": 163782005, - "step": 7675 - }, - { - "epoch": 0.9229844285456622, - "flos": 50431544505240.0, - "grad_norm": 0.7421283497579754, - "learning_rate": 6.181492060822546e-08, - "loss": 0.8728, - "num_input_tokens_seen": 163841900, - "step": 7676 - }, - { - "epoch": 0.9231046714363014, - "flos": 12731653483440.0, - "grad_norm": 3.858299568857934, - "learning_rate": 6.162289727674274e-08, - "loss": 1.0354, - "num_input_tokens_seen": 163859300, - "step": 7677 - }, - { - "epoch": 0.9232249143269404, - "flos": 12652867291560.0, - "grad_norm": 5.72895710879767, - "learning_rate": 6.143116799552527e-08, - "loss": 1.1118, - "num_input_tokens_seen": 163875265, - "step": 7678 - }, - { - "epoch": 0.9233451572175795, - "flos": 16638824214480.0, - "grad_norm": 5.54238615408615, - "learning_rate": 6.123973279365802e-08, - "loss": 0.7792, - "num_input_tokens_seen": 163893960, - "step": 7679 - }, - { - "epoch": 0.9234654001082186, - "flos": 12753612708480.0, - "grad_norm": 3.501235743762256, - "learning_rate": 6.10485917001824e-08, - "loss": 1.0015, - "num_input_tokens_seen": 163911535, - "step": 7680 - }, - { - "epoch": 0.9235856429988577, - "flos": 17605941788040.0, - "grad_norm": 6.689584282473348, - "learning_rate": 6.085774474409322e-08, - "loss": 1.0364, - "num_input_tokens_seen": 163931815, - "step": 7681 - }, - { - "epoch": 0.9237058858894968, - "flos": 9951778942320.0, - "grad_norm": 8.822920029484552, - "learning_rate": 6.066719195434267e-08, - "loss": 0.9349, - "num_input_tokens_seen": 163949335, - "step": 7682 - }, - { - "epoch": 0.9238261287801359, - "flos": 20440711617720.0, - "grad_norm": 5.726351329454022, - "learning_rate": 
6.047693335983717e-08, - "loss": 0.882, - "num_input_tokens_seen": 163971400, - "step": 7683 - }, - { - "epoch": 0.923946371670775, - "flos": 16428502851360.0, - "grad_norm": 7.150249514194199, - "learning_rate": 6.028696898943853e-08, - "loss": 1.0319, - "num_input_tokens_seen": 163990180, - "step": 7684 - }, - { - "epoch": 0.924066614561414, - "flos": 15533364603360.0, - "grad_norm": 10.209107121834782, - "learning_rate": 6.00972988719648e-08, - "loss": 0.9265, - "num_input_tokens_seen": 164008135, - "step": 7685 - }, - { - "epoch": 0.9241868574520532, - "flos": 20309421738960.0, - "grad_norm": 8.549658285529102, - "learning_rate": 5.990792303618807e-08, - "loss": 0.9359, - "num_input_tokens_seen": 164027435, - "step": 7686 - }, - { - "epoch": 0.9243071003426923, - "flos": 21751372111440.0, - "grad_norm": 3.190567036997989, - "learning_rate": 5.971884151083695e-08, - "loss": 0.9181, - "num_input_tokens_seen": 164049565, - "step": 7687 - }, - { - "epoch": 0.9244273432333313, - "flos": 20414735728320.0, - "grad_norm": 5.595970693319281, - "learning_rate": 5.9530054324595124e-08, - "loss": 0.9685, - "num_input_tokens_seen": 164069400, - "step": 7688 - }, - { - "epoch": 0.9245475861239704, - "flos": 47421045100200.0, - "grad_norm": 0.9235042786697878, - "learning_rate": 5.934156150610103e-08, - "loss": 0.8258, - "num_input_tokens_seen": 164126485, - "step": 7689 - }, - { - "epoch": 0.9246678290146095, - "flos": 17238814580160.0, - "grad_norm": 6.90502836029615, - "learning_rate": 5.915336308394914e-08, - "loss": 0.997, - "num_input_tokens_seen": 164145040, - "step": 7690 - }, - { - "epoch": 0.9247880719052486, - "flos": 13465325329560.0, - "grad_norm": 4.000524248100936, - "learning_rate": 5.89654590866886e-08, - "loss": 0.9992, - "num_input_tokens_seen": 164164260, - "step": 7691 - }, - { - "epoch": 0.9249083147958876, - "flos": 17084124383040.0, - "grad_norm": 3.945524485718879, - "learning_rate": 5.877784954282483e-08, - "loss": 1.1093, - "num_input_tokens_seen": 164183320, - "step": 7692 - }, - { - "epoch": 0.9250285576865268, - "flos": 21935135015520.0, - "grad_norm": 9.198557732355043, - "learning_rate": 5.8590534480817963e-08, - "loss": 0.9522, - "num_input_tokens_seen": 164204765, - "step": 7693 - }, - { - "epoch": 0.9251488005771659, - "flos": 7460582694360.0, - "grad_norm": 5.753893984945346, - "learning_rate": 5.840351392908349e-08, - "loss": 0.9496, - "num_input_tokens_seen": 164220205, - "step": 7694 - }, - { - "epoch": 0.9252690434678049, - "flos": 16769592846720.0, - "grad_norm": 9.11664732024972, - "learning_rate": 5.821678791599205e-08, - "loss": 0.9318, - "num_input_tokens_seen": 164239370, - "step": 7695 - }, - { - "epoch": 0.9253892863584441, - "flos": 15247813789320.0, - "grad_norm": 5.3237499721981205, - "learning_rate": 5.803035646986965e-08, - "loss": 1.027, - "num_input_tokens_seen": 164258425, - "step": 7696 - }, - { - "epoch": 0.9255095292490831, - "flos": 12364250321520.0, - "grad_norm": 3.975366798607605, - "learning_rate": 5.7844219618998766e-08, - "loss": 0.9011, - "num_input_tokens_seen": 164272470, - "step": 7697 - }, - { - "epoch": 0.9256297721397222, - "flos": 17605880464920.0, - "grad_norm": 4.355840747496826, - "learning_rate": 5.765837739161505e-08, - "loss": 0.9253, - "num_input_tokens_seen": 164291310, - "step": 7698 - }, - { - "epoch": 0.9257500150303614, - "flos": 16428809466960.0, - "grad_norm": 3.344601816788691, - "learning_rate": 5.7472829815911504e-08, - "loss": 0.9772, - "num_input_tokens_seen": 164309855, - "step": 7699 - }, - { - "epoch": 
0.9258702579210004, - "flos": 16035277107840.0, - "grad_norm": 3.8434038704197127, - "learning_rate": 5.7287576920035164e-08, - "loss": 1.041, - "num_input_tokens_seen": 164328590, - "step": 7700 - }, - { - "epoch": 0.9259905008116395, - "flos": 21381516024720.0, - "grad_norm": 10.296219615310234, - "learning_rate": 5.7102618732088435e-08, - "loss": 0.9927, - "num_input_tokens_seen": 164347640, - "step": 7701 - }, - { - "epoch": 0.9261107437022786, - "flos": 17478086004000.0, - "grad_norm": 2.952185910384896, - "learning_rate": 5.6917955280130216e-08, - "loss": 0.9743, - "num_input_tokens_seen": 164368840, - "step": 7702 - }, - { - "epoch": 0.9262309865929177, - "flos": 15642848564880.0, - "grad_norm": 5.806676757371604, - "learning_rate": 5.6733586592172755e-08, - "loss": 0.9389, - "num_input_tokens_seen": 164388055, - "step": 7703 - }, - { - "epoch": 0.9263512294835567, - "flos": 14436122290320.0, - "grad_norm": 7.191479186807615, - "learning_rate": 5.6549512696185244e-08, - "loss": 1.0375, - "num_input_tokens_seen": 164406275, - "step": 7704 - }, - { - "epoch": 0.9264714723741959, - "flos": 15065645286360.0, - "grad_norm": 3.947603976599843, - "learning_rate": 5.636573362009156e-08, - "loss": 0.9017, - "num_input_tokens_seen": 164426055, - "step": 7705 - }, - { - "epoch": 0.926591715264835, - "flos": 12758426573400.0, - "grad_norm": 3.591974632589912, - "learning_rate": 5.618224939177074e-08, - "loss": 0.9816, - "num_input_tokens_seen": 164443680, - "step": 7706 - }, - { - "epoch": 0.926711958155474, - "flos": 25810902761040.0, - "grad_norm": 6.575024379206377, - "learning_rate": 5.599906003905719e-08, - "loss": 0.932, - "num_input_tokens_seen": 164465945, - "step": 7707 - }, - { - "epoch": 0.9268322010461132, - "flos": 14934355407600.0, - "grad_norm": 141.35396478123633, - "learning_rate": 5.581616558974023e-08, - "loss": 1.0407, - "num_input_tokens_seen": 164484230, - "step": 7708 - }, - { - "epoch": 0.9269524439367522, - "flos": 16322698277040.0, - "grad_norm": 9.574147828429693, - "learning_rate": 5.5633566071565444e-08, - "loss": 1.0189, - "num_input_tokens_seen": 164503190, - "step": 7709 - }, - { - "epoch": 0.9270726868273913, - "flos": 29985559099200.0, - "grad_norm": 3.6529708895062494, - "learning_rate": 5.5451261512232896e-08, - "loss": 0.9241, - "num_input_tokens_seen": 164525590, - "step": 7710 - }, - { - "epoch": 0.9271929297180305, - "flos": 14042436623400.0, - "grad_norm": 11.17186953671195, - "learning_rate": 5.5269251939397576e-08, - "loss": 0.8544, - "num_input_tokens_seen": 164544825, - "step": 7711 - }, - { - "epoch": 0.9273131726086695, - "flos": 14168544698520.0, - "grad_norm": 3.640448701052539, - "learning_rate": 5.508753738067073e-08, - "loss": 0.9957, - "num_input_tokens_seen": 164564085, - "step": 7712 - }, - { - "epoch": 0.9274334154993086, - "flos": 16534062133200.0, - "grad_norm": 5.230804045160008, - "learning_rate": 5.4906117863617875e-08, - "loss": 1.0118, - "num_input_tokens_seen": 164583190, - "step": 7713 - }, - { - "epoch": 0.9275536583899477, - "flos": 22669144138800.0, - "grad_norm": 5.420952534324067, - "learning_rate": 5.4724993415760533e-08, - "loss": 1.0009, - "num_input_tokens_seen": 164601265, - "step": 7714 - }, - { - "epoch": 0.9276739012805868, - "flos": 13255617197640.0, - "grad_norm": 6.304819098591711, - "learning_rate": 5.454416406457496e-08, - "loss": 0.963, - "num_input_tokens_seen": 164620080, - "step": 7715 - }, - { - "epoch": 0.9277941441712259, - "flos": 9792826788360.0, - "grad_norm": 6.3328330528364365, - "learning_rate": 
5.436362983749299e-08, - "loss": 0.9625, - "num_input_tokens_seen": 164634970, - "step": 7716 - }, - { - "epoch": 0.927914387061865, - "flos": 16534123456320.0, - "grad_norm": 9.465130709124956, - "learning_rate": 5.418339076190137e-08, - "loss": 0.8642, - "num_input_tokens_seen": 164654200, - "step": 7717 - }, - { - "epoch": 0.9280346299525041, - "flos": 12807281534640.0, - "grad_norm": 3.496698457217609, - "learning_rate": 5.400344686514202e-08, - "loss": 1.118, - "num_input_tokens_seen": 164671505, - "step": 7718 - }, - { - "epoch": 0.9281548728431431, - "flos": 15875497091760.0, - "grad_norm": 7.773667896632752, - "learning_rate": 5.38237981745131e-08, - "loss": 0.8945, - "num_input_tokens_seen": 164689340, - "step": 7719 - }, - { - "epoch": 0.9282751157337822, - "flos": 13361207141040.0, - "grad_norm": 7.22388727931966, - "learning_rate": 5.364444471726592e-08, - "loss": 1.0314, - "num_input_tokens_seen": 164708265, - "step": 7720 - }, - { - "epoch": 0.9283953586244214, - "flos": 18185720637600.0, - "grad_norm": 6.1579949863731835, - "learning_rate": 5.346538652060939e-08, - "loss": 1.0185, - "num_input_tokens_seen": 164729340, - "step": 7721 - }, - { - "epoch": 0.9285156015150604, - "flos": 12915416387520.0, - "grad_norm": 18.907270924139954, - "learning_rate": 5.3286623611705994e-08, - "loss": 0.9332, - "num_input_tokens_seen": 164747105, - "step": 7722 - }, - { - "epoch": 0.9286358444056995, - "flos": 47545982487960.0, - "grad_norm": 0.8913691573988651, - "learning_rate": 5.3108156017673824e-08, - "loss": 0.8797, - "num_input_tokens_seen": 164808585, - "step": 7723 - }, - { - "epoch": 0.9287560872963386, - "flos": 15877827370320.0, - "grad_norm": 3.5985587413933913, - "learning_rate": 5.2929983765586775e-08, - "loss": 0.9459, - "num_input_tokens_seen": 164827085, - "step": 7724 - }, - { - "epoch": 0.9288763301869777, - "flos": 18288428394360.0, - "grad_norm": 12.116554071532029, - "learning_rate": 5.275210688247278e-08, - "loss": 0.8534, - "num_input_tokens_seen": 164847130, - "step": 7725 - }, - { - "epoch": 0.9289965730776167, - "flos": 8666879707080.0, - "grad_norm": 7.755196090767145, - "learning_rate": 5.257452539531604e-08, - "loss": 1.0649, - "num_input_tokens_seen": 164863920, - "step": 7726 - }, - { - "epoch": 0.9291168159682559, - "flos": 18996952213200.0, - "grad_norm": 3.3025560652358164, - "learning_rate": 5.2397239331055445e-08, - "loss": 0.9201, - "num_input_tokens_seen": 164885640, - "step": 7727 - }, - { - "epoch": 0.929237058858895, - "flos": 10266893048280.0, - "grad_norm": 8.072872143694429, - "learning_rate": 5.2220248716585036e-08, - "loss": 1.0349, - "num_input_tokens_seen": 164903040, - "step": 7728 - }, - { - "epoch": 0.929357301749534, - "flos": 16947714023760.0, - "grad_norm": 5.856724009673691, - "learning_rate": 5.204355357875445e-08, - "loss": 0.9759, - "num_input_tokens_seen": 164921105, - "step": 7729 - }, - { - "epoch": 0.9294775446401732, - "flos": 9139320904320.0, - "grad_norm": 3.13800136618162, - "learning_rate": 5.1867153944367584e-08, - "loss": 0.9339, - "num_input_tokens_seen": 164937215, - "step": 7730 - }, - { - "epoch": 0.9295977875308122, - "flos": 18656199495000.0, - "grad_norm": 5.6982929819126396, - "learning_rate": 5.16910498401848e-08, - "loss": 0.9697, - "num_input_tokens_seen": 164956385, - "step": 7731 - }, - { - "epoch": 0.9297180304214513, - "flos": 11656891641960.0, - "grad_norm": 5.734962703513717, - "learning_rate": 5.151524129292073e-08, - "loss": 1.0419, - "num_input_tokens_seen": 164974000, - "step": 7732 - }, - { - 
"epoch": 0.9298382733120905, - "flos": 17110222918680.0, - "grad_norm": 4.888561069209689, - "learning_rate": 5.1339728329245155e-08, - "loss": 0.8992, - "num_input_tokens_seen": 164994285, - "step": 7733 - }, - { - "epoch": 0.9299585162027295, - "flos": 15721297479600.0, - "grad_norm": 5.693269590844369, - "learning_rate": 5.116451097578367e-08, - "loss": 1.0051, - "num_input_tokens_seen": 165013045, - "step": 7734 - }, - { - "epoch": 0.9300787590933686, - "flos": 15250052083200.0, - "grad_norm": 2.812561288801611, - "learning_rate": 5.0989589259115895e-08, - "loss": 0.9701, - "num_input_tokens_seen": 165033650, - "step": 7735 - }, - { - "epoch": 0.9301990019840077, - "flos": 12595580401320.0, - "grad_norm": 8.581281348808133, - "learning_rate": 5.081496320577816e-08, - "loss": 0.9242, - "num_input_tokens_seen": 165050490, - "step": 7736 - }, - { - "epoch": 0.9303192448746468, - "flos": 42149790341760.0, - "grad_norm": 0.9556741239547057, - "learning_rate": 5.0640632842260835e-08, - "loss": 0.8875, - "num_input_tokens_seen": 165110470, - "step": 7737 - }, - { - "epoch": 0.9304394877652858, - "flos": 41260084737960.0, - "grad_norm": 7.059236295636853, - "learning_rate": 5.0466598195009426e-08, - "loss": 0.9522, - "num_input_tokens_seen": 165137060, - "step": 7738 - }, - { - "epoch": 0.930559730655925, - "flos": 14908042241040.0, - "grad_norm": 4.4583876181651805, - "learning_rate": 5.0292859290425036e-08, - "loss": 0.9226, - "num_input_tokens_seen": 165154650, - "step": 7739 - }, - { - "epoch": 0.9306799735465641, - "flos": 16533786179160.0, - "grad_norm": 5.992427497842133, - "learning_rate": 5.011941615486348e-08, - "loss": 1.0011, - "num_input_tokens_seen": 165173485, - "step": 7740 - }, - { - "epoch": 0.9308002164372031, - "flos": 10765156827120.0, - "grad_norm": 6.613426896376848, - "learning_rate": 4.994626881463659e-08, - "loss": 1.084, - "num_input_tokens_seen": 165189460, - "step": 7741 - }, - { - "epoch": 0.9309204593278423, - "flos": 21988037302680.0, - "grad_norm": 5.333343724399617, - "learning_rate": 4.9773417296009814e-08, - "loss": 0.9385, - "num_input_tokens_seen": 165210700, - "step": 7742 - }, - { - "epoch": 0.9310407022184813, - "flos": 16374680717400.0, - "grad_norm": 5.6180122205113125, - "learning_rate": 4.960086162520527e-08, - "loss": 0.8794, - "num_input_tokens_seen": 165230510, - "step": 7743 - }, - { - "epoch": 0.9311609451091204, - "flos": 15721573433640.0, - "grad_norm": 4.936204764534894, - "learning_rate": 4.942860182839936e-08, - "loss": 1.0593, - "num_input_tokens_seen": 165248575, - "step": 7744 - }, - { - "epoch": 0.9312811879997596, - "flos": 14981248029000.0, - "grad_norm": 4.176925516674984, - "learning_rate": 4.925663793172341e-08, - "loss": 1.0198, - "num_input_tokens_seen": 165266255, - "step": 7745 - }, - { - "epoch": 0.9314014308903986, - "flos": 48081235204320.0, - "grad_norm": 0.8425151124045948, - "learning_rate": 4.908496996126477e-08, - "loss": 0.8411, - "num_input_tokens_seen": 165329435, - "step": 7746 - }, - { - "epoch": 0.9315216737810377, - "flos": 16036043646840.0, - "grad_norm": 4.634684141192692, - "learning_rate": 4.89135979430646e-08, - "loss": 0.9911, - "num_input_tokens_seen": 165349200, - "step": 7747 - }, - { - "epoch": 0.9316419166716768, - "flos": 17054867706720.0, - "grad_norm": 7.572299125780291, - "learning_rate": 4.874252190312078e-08, - "loss": 1.0669, - "num_input_tokens_seen": 165369305, - "step": 7748 - }, - { - "epoch": 0.9317621595623159, - "flos": 21545036751120.0, - "grad_norm": 3.0664501016967955, - 
"learning_rate": 4.857174186738477e-08, - "loss": 0.8737, - "num_input_tokens_seen": 165392375, - "step": 7749 - }, - { - "epoch": 0.931882402452955, - "flos": 11132590650600.0, - "grad_norm": 5.8332366208733255, - "learning_rate": 4.840125786176408e-08, - "loss": 0.9618, - "num_input_tokens_seen": 165408300, - "step": 7750 - }, - { - "epoch": 0.932002645343594, - "flos": 20205027596400.0, - "grad_norm": 5.264749788169543, - "learning_rate": 4.823106991212067e-08, - "loss": 0.9997, - "num_input_tokens_seen": 165427260, - "step": 7751 - }, - { - "epoch": 0.9321228882342332, - "flos": 10843943019000.0, - "grad_norm": 4.114866119839636, - "learning_rate": 4.806117804427212e-08, - "loss": 1.0622, - "num_input_tokens_seen": 165444915, - "step": 7752 - }, - { - "epoch": 0.9322431311248722, - "flos": 12679640381520.0, - "grad_norm": 9.78862915541229, - "learning_rate": 4.7891582283990926e-08, - "loss": 0.8765, - "num_input_tokens_seen": 165463360, - "step": 7753 - }, - { - "epoch": 0.9323633740155113, - "flos": 17188733156520.0, - "grad_norm": 11.422935676640057, - "learning_rate": 4.772228265700473e-08, - "loss": 0.95, - "num_input_tokens_seen": 165483940, - "step": 7754 - }, - { - "epoch": 0.9324836169061504, - "flos": 10629237052800.0, - "grad_norm": 5.575025707236922, - "learning_rate": 4.75532791889961e-08, - "loss": 0.9781, - "num_input_tokens_seen": 165500360, - "step": 7755 - }, - { - "epoch": 0.9326038597967895, - "flos": 13203910711320.0, - "grad_norm": 4.11107806172418, - "learning_rate": 4.738457190560252e-08, - "loss": 0.8745, - "num_input_tokens_seen": 165519190, - "step": 7756 - }, - { - "epoch": 0.9327241026874286, - "flos": 13439380101720.0, - "grad_norm": 73.3672669476995, - "learning_rate": 4.721616083241664e-08, - "loss": 1.0152, - "num_input_tokens_seen": 165537165, - "step": 7757 - }, - { - "epoch": 0.9328443455780677, - "flos": 21070111967520.0, - "grad_norm": 3.286305919242552, - "learning_rate": 4.7048045994986684e-08, - "loss": 0.9985, - "num_input_tokens_seen": 165557745, - "step": 7758 - }, - { - "epoch": 0.9329645884687068, - "flos": 21437760421920.0, - "grad_norm": 4.404909186926373, - "learning_rate": 4.688022741881559e-08, - "loss": 1.1309, - "num_input_tokens_seen": 165577990, - "step": 7759 - }, - { - "epoch": 0.9330848313593458, - "flos": 15534100480800.0, - "grad_norm": 2.9554148681956405, - "learning_rate": 4.671270512936076e-08, - "loss": 0.987, - "num_input_tokens_seen": 165596870, - "step": 7760 - }, - { - "epoch": 0.933205074249985, - "flos": 15720929540880.0, - "grad_norm": 6.404972898141548, - "learning_rate": 4.6545479152035884e-08, - "loss": 1.0541, - "num_input_tokens_seen": 165615760, - "step": 7761 - }, - { - "epoch": 0.9333253171406241, - "flos": 10844035003680.0, - "grad_norm": 8.000204560525557, - "learning_rate": 4.637854951220821e-08, - "loss": 1.0003, - "num_input_tokens_seen": 165632265, - "step": 7762 - }, - { - "epoch": 0.9334455600312631, - "flos": 11106277484040.0, - "grad_norm": 4.598873058434291, - "learning_rate": 4.621191623520171e-08, - "loss": 0.9752, - "num_input_tokens_seen": 165650415, - "step": 7763 - }, - { - "epoch": 0.9335658029219023, - "flos": 15747028076520.0, - "grad_norm": 5.507994303847881, - "learning_rate": 4.604557934629372e-08, - "loss": 1.0738, - "num_input_tokens_seen": 165669210, - "step": 7764 - }, - { - "epoch": 0.9336860458125413, - "flos": 14383955880600.0, - "grad_norm": 5.361260174318933, - "learning_rate": 4.587953887071805e-08, - "loss": 1.0389, - "num_input_tokens_seen": 165688750, - "step": 7765 - }, - 
{ - "epoch": 0.9338062887031804, - "flos": 14852625705960.0, - "grad_norm": 6.655193979372515, - "learning_rate": 4.5713794833662554e-08, - "loss": 1.0877, - "num_input_tokens_seen": 165707685, - "step": 7766 - }, - { - "epoch": 0.9339265315938196, - "flos": 16507442351040.0, - "grad_norm": 6.201204478260749, - "learning_rate": 4.5548347260270236e-08, - "loss": 0.8567, - "num_input_tokens_seen": 165726695, - "step": 7767 - }, - { - "epoch": 0.9340467744844586, - "flos": 16218978688800.0, - "grad_norm": 3.9626855848878213, - "learning_rate": 4.538319617564012e-08, - "loss": 0.9216, - "num_input_tokens_seen": 165745435, - "step": 7768 - }, - { - "epoch": 0.9341670173750977, - "flos": 16822924395720.0, - "grad_norm": 3.986486496272242, - "learning_rate": 4.521834160482485e-08, - "loss": 0.9762, - "num_input_tokens_seen": 165763895, - "step": 7769 - }, - { - "epoch": 0.9342872602657368, - "flos": 17658844075200.0, - "grad_norm": 3.5069385960764112, - "learning_rate": 4.5053783572832846e-08, - "loss": 1.0477, - "num_input_tokens_seen": 165783795, - "step": 7770 - }, - { - "epoch": 0.9344075031563759, - "flos": 18339705618840.0, - "grad_norm": 10.173526096675925, - "learning_rate": 4.488952210462771e-08, - "loss": 0.9922, - "num_input_tokens_seen": 165803720, - "step": 7771 - }, - { - "epoch": 0.9345277460470149, - "flos": 18180998757360.0, - "grad_norm": 3.132686469760781, - "learning_rate": 4.4725557225127495e-08, - "loss": 1.08, - "num_input_tokens_seen": 165821780, - "step": 7772 - }, - { - "epoch": 0.9346479889376541, - "flos": 24478650981000.0, - "grad_norm": 3.4521458130736136, - "learning_rate": 4.456188895920565e-08, - "loss": 1.0284, - "num_input_tokens_seen": 165843255, - "step": 7773 - }, - { - "epoch": 0.9347682318282932, - "flos": 13540125518640.0, - "grad_norm": 8.127867676776695, - "learning_rate": 4.439851733169031e-08, - "loss": 1.0655, - "num_input_tokens_seen": 165860765, - "step": 7774 - }, - { - "epoch": 0.9348884747189322, - "flos": 18683524493040.0, - "grad_norm": 4.655848953077941, - "learning_rate": 4.4235442367365204e-08, - "loss": 0.9074, - "num_input_tokens_seen": 165880795, - "step": 7775 - }, - { - "epoch": 0.9350087176095714, - "flos": 12994202579400.0, - "grad_norm": 6.000355074230666, - "learning_rate": 4.4072664090968545e-08, - "loss": 1.0026, - "num_input_tokens_seen": 165898900, - "step": 7776 - }, - { - "epoch": 0.9351289605002104, - "flos": 13702235813280.0, - "grad_norm": 5.277768403875939, - "learning_rate": 4.391018252719347e-08, - "loss": 1.0629, - "num_input_tokens_seen": 165918415, - "step": 7777 - }, - { - "epoch": 0.9352492033908495, - "flos": 13328761662480.0, - "grad_norm": 6.092160374594973, - "learning_rate": 4.374799770068849e-08, - "loss": 0.9111, - "num_input_tokens_seen": 165934810, - "step": 7778 - }, - { - "epoch": 0.9353694462814887, - "flos": 21040763306520.0, - "grad_norm": 6.264747006361146, - "learning_rate": 4.358610963605658e-08, - "loss": 0.9838, - "num_input_tokens_seen": 165954980, - "step": 7779 - }, - { - "epoch": 0.9354896891721277, - "flos": 21857268670440.0, - "grad_norm": 5.896835655319691, - "learning_rate": 4.342451835785677e-08, - "loss": 0.9027, - "num_input_tokens_seen": 165975610, - "step": 7780 - }, - { - "epoch": 0.9356099320627668, - "flos": 13806875248320.0, - "grad_norm": 3.737708355763807, - "learning_rate": 4.3263223890601665e-08, - "loss": 0.9794, - "num_input_tokens_seen": 165994040, - "step": 7781 - }, - { - "epoch": 0.9357301749534058, - "flos": 13543896890520.0, - "grad_norm": 5.819032013611839, - 
"learning_rate": 4.31022262587597e-08, - "loss": 1.0175, - "num_input_tokens_seen": 166012435, - "step": 7782 - }, - { - "epoch": 0.935850417844045, - "flos": 16743739603560.0, - "grad_norm": 8.30388544898138, - "learning_rate": 4.2941525486754225e-08, - "loss": 0.883, - "num_input_tokens_seen": 166032475, - "step": 7783 - }, - { - "epoch": 0.935970660734684, - "flos": 13177628206320.0, - "grad_norm": 7.121986153284639, - "learning_rate": 4.278112159896286e-08, - "loss": 1.0174, - "num_input_tokens_seen": 166050035, - "step": 7784 - }, - { - "epoch": 0.9360909036253231, - "flos": 14645799760680.0, - "grad_norm": 2.8973890192140423, - "learning_rate": 4.2621014619719896e-08, - "loss": 0.9069, - "num_input_tokens_seen": 166067520, - "step": 7785 - }, - { - "epoch": 0.9362111465159623, - "flos": 44232332533440.0, - "grad_norm": 0.7910714588946423, - "learning_rate": 4.246120457331215e-08, - "loss": 0.8576, - "num_input_tokens_seen": 166129415, - "step": 7786 - }, - { - "epoch": 0.9363313894066013, - "flos": 17189315726160.0, - "grad_norm": 6.476607852864427, - "learning_rate": 4.2301691483983325e-08, - "loss": 0.9432, - "num_input_tokens_seen": 166149255, - "step": 7787 - }, - { - "epoch": 0.9364516322972404, - "flos": 14278611229680.0, - "grad_norm": 6.7361522904892865, - "learning_rate": 4.214247537593163e-08, - "loss": 0.9837, - "num_input_tokens_seen": 166168225, - "step": 7788 - }, - { - "epoch": 0.9365718751878795, - "flos": 14697843524160.0, - "grad_norm": 5.885439424273847, - "learning_rate": 4.1983556273309293e-08, - "loss": 1.0396, - "num_input_tokens_seen": 166186695, - "step": 7789 - }, - { - "epoch": 0.9366921180785186, - "flos": 13224919428000.0, - "grad_norm": 4.716652309184694, - "learning_rate": 4.182493420022526e-08, - "loss": 0.9238, - "num_input_tokens_seen": 166202085, - "step": 7790 - }, - { - "epoch": 0.9368123609691577, - "flos": 18342373174560.0, - "grad_norm": 3.86263628475402, - "learning_rate": 4.166660918074139e-08, - "loss": 1.0134, - "num_input_tokens_seen": 166221710, - "step": 7791 - }, - { - "epoch": 0.9369326038597968, - "flos": 18182777127840.0, - "grad_norm": 3.1328030631287107, - "learning_rate": 4.15085812388758e-08, - "loss": 0.9603, - "num_input_tokens_seen": 166243650, - "step": 7792 - }, - { - "epoch": 0.9370528467504359, - "flos": 16506062580840.0, - "grad_norm": 5.2220099769009005, - "learning_rate": 4.135085039860153e-08, - "loss": 1.0221, - "num_input_tokens_seen": 166262770, - "step": 7793 - }, - { - "epoch": 0.9371730896410749, - "flos": 17761398524160.0, - "grad_norm": 4.068564480772562, - "learning_rate": 4.1193416683845906e-08, - "loss": 1.0137, - "num_input_tokens_seen": 166281420, - "step": 7794 - }, - { - "epoch": 0.9372933325317141, - "flos": 10995321767640.0, - "grad_norm": 5.8117780339596266, - "learning_rate": 4.103628011849136e-08, - "loss": 1.056, - "num_input_tokens_seen": 166296500, - "step": 7795 - }, - { - "epoch": 0.9374135754223532, - "flos": 15537411929280.0, - "grad_norm": 4.708891416349679, - "learning_rate": 4.0879440726375506e-08, - "loss": 0.9755, - "num_input_tokens_seen": 166314005, - "step": 7796 - }, - { - "epoch": 0.9375338183129922, - "flos": 16082077744560.0, - "grad_norm": 6.397438800454945, - "learning_rate": 4.0722898531291074e-08, - "loss": 0.783, - "num_input_tokens_seen": 166330965, - "step": 7797 - }, - { - "epoch": 0.9376540612036314, - "flos": 18579191673600.0, - "grad_norm": 8.584120137222769, - "learning_rate": 4.0566653556985295e-08, - "loss": 0.9905, - "num_input_tokens_seen": 166351230, - "step": 
7798 - }, - { - "epoch": 0.9377743040942704, - "flos": 13988951766600.0, - "grad_norm": 7.807005392012866, - "learning_rate": 4.0410705827159886e-08, - "loss": 1.0168, - "num_input_tokens_seen": 166368245, - "step": 7799 - }, - { - "epoch": 0.9378945469849095, - "flos": 10790948747160.0, - "grad_norm": 7.651549348202527, - "learning_rate": 4.0255055365472356e-08, - "loss": 0.9329, - "num_input_tokens_seen": 166386060, - "step": 7800 - }, - { - "epoch": 0.9380147898755486, - "flos": 14616696392160.0, - "grad_norm": 10.141559763470072, - "learning_rate": 4.009970219553471e-08, - "loss": 0.9693, - "num_input_tokens_seen": 166402730, - "step": 7801 - }, - { - "epoch": 0.9381350327661877, - "flos": 18705514379640.0, - "grad_norm": 5.326332842440652, - "learning_rate": 3.99446463409141e-08, - "loss": 0.9815, - "num_input_tokens_seen": 166420305, - "step": 7802 - }, - { - "epoch": 0.9382552756568268, - "flos": 16769531523600.0, - "grad_norm": 16.431131615979485, - "learning_rate": 3.978988782513215e-08, - "loss": 0.9072, - "num_input_tokens_seen": 166437520, - "step": 7803 - }, - { - "epoch": 0.9383755185474659, - "flos": 20256887390520.0, - "grad_norm": 4.6266620845570445, - "learning_rate": 3.963542667166586e-08, - "loss": 0.9856, - "num_input_tokens_seen": 166457345, - "step": 7804 - }, - { - "epoch": 0.938495761438105, - "flos": 14515337744040.0, - "grad_norm": 3.0605029387202096, - "learning_rate": 3.9481262903946486e-08, - "loss": 0.9088, - "num_input_tokens_seen": 166476510, - "step": 7805 - }, - { - "epoch": 0.938616004328744, - "flos": 49629909997920.0, - "grad_norm": 0.7848782403025149, - "learning_rate": 3.932739654536066e-08, - "loss": 0.8009, - "num_input_tokens_seen": 166538930, - "step": 7806 - }, - { - "epoch": 0.9387362472193832, - "flos": 13409295563280.0, - "grad_norm": 7.474837499785086, - "learning_rate": 3.917382761925014e-08, - "loss": 0.9707, - "num_input_tokens_seen": 166554485, - "step": 7807 - }, - { - "epoch": 0.9388564901100223, - "flos": 18864497195160.0, - "grad_norm": 9.76062371934422, - "learning_rate": 3.9020556148910754e-08, - "loss": 1.0154, - "num_input_tokens_seen": 166573560, - "step": 7808 - }, - { - "epoch": 0.9389767330006613, - "flos": 42183247651800.0, - "grad_norm": 0.7291362665287892, - "learning_rate": 3.8867582157593895e-08, - "loss": 0.8172, - "num_input_tokens_seen": 166627485, - "step": 7809 - }, - { - "epoch": 0.9390969758913005, - "flos": 22171309621800.0, - "grad_norm": 3.0096033883107656, - "learning_rate": 3.871490566850544e-08, - "loss": 0.9835, - "num_input_tokens_seen": 166651415, - "step": 7810 - }, - { - "epoch": 0.9392172187819395, - "flos": 15930944288400.0, - "grad_norm": 5.544173837520737, - "learning_rate": 3.856252670480642e-08, - "loss": 0.9342, - "num_input_tokens_seen": 166669795, - "step": 7811 - }, - { - "epoch": 0.9393374616725786, - "flos": 13990576829280.0, - "grad_norm": 8.90757682687934, - "learning_rate": 3.841044528961279e-08, - "loss": 1.036, - "num_input_tokens_seen": 166687310, - "step": 7812 - }, - { - "epoch": 0.9394577045632178, - "flos": 17189162418360.0, - "grad_norm": 12.986040690203353, - "learning_rate": 3.825866144599477e-08, - "loss": 1.0126, - "num_input_tokens_seen": 166706085, - "step": 7813 - }, - { - "epoch": 0.9395779474538568, - "flos": 13490381372160.0, - "grad_norm": 14.411765674897856, - "learning_rate": 3.8107175196978145e-08, - "loss": 0.9853, - "num_input_tokens_seen": 166722110, - "step": 7814 - }, - { - "epoch": 0.9396981903444959, - "flos": 10109381987640.0, - "grad_norm": 
5.623427785921729, - "learning_rate": 3.7955986565542996e-08, - "loss": 0.9751, - "num_input_tokens_seen": 166739910, - "step": 7815 - }, - { - "epoch": 0.9398184332351349, - "flos": 24819648991680.0, - "grad_norm": 7.409021044272492, - "learning_rate": 3.780509557462497e-08, - "loss": 0.9167, - "num_input_tokens_seen": 166759830, - "step": 7816 - }, - { - "epoch": 0.9399386761257741, - "flos": 18052775034600.0, - "grad_norm": 5.893325793634025, - "learning_rate": 3.765450224711375e-08, - "loss": 0.978, - "num_input_tokens_seen": 166780055, - "step": 7817 - }, - { - "epoch": 0.9400589190164131, - "flos": 19441639150560.0, - "grad_norm": 10.21924237572858, - "learning_rate": 3.750420660585396e-08, - "loss": 1.0186, - "num_input_tokens_seen": 166801715, - "step": 7818 - }, - { - "epoch": 0.9401791619070522, - "flos": 16634960857920.0, - "grad_norm": 9.919192935353639, - "learning_rate": 3.735420867364603e-08, - "loss": 1.0282, - "num_input_tokens_seen": 166822415, - "step": 7819 - }, - { - "epoch": 0.9402994047976914, - "flos": 25605671216880.0, - "grad_norm": 9.495043214335038, - "learning_rate": 3.7204508473244186e-08, - "loss": 0.8495, - "num_input_tokens_seen": 166845760, - "step": 7820 - }, - { - "epoch": 0.9404196476883304, - "flos": 15799715732760.0, - "grad_norm": 6.1716939006262574, - "learning_rate": 3.7055106027357395e-08, - "loss": 0.9235, - "num_input_tokens_seen": 166865345, - "step": 7821 - }, - { - "epoch": 0.9405398905789695, - "flos": 13412852304240.0, - "grad_norm": 6.721743746176123, - "learning_rate": 3.690600135865063e-08, - "loss": 0.949, - "num_input_tokens_seen": 166881990, - "step": 7822 - }, - { - "epoch": 0.9406601334696086, - "flos": 50326199854320.0, - "grad_norm": 0.7584335692327024, - "learning_rate": 3.675719448974246e-08, - "loss": 0.8288, - "num_input_tokens_seen": 166946800, - "step": 7823 - }, - { - "epoch": 0.9407803763602477, - "flos": 15748315862040.0, - "grad_norm": 19.46624494818691, - "learning_rate": 3.6608685443207054e-08, - "loss": 0.8162, - "num_input_tokens_seen": 166965670, - "step": 7824 - }, - { - "epoch": 0.9409006192508867, - "flos": 13386784430160.0, - "grad_norm": 5.0975686953687935, - "learning_rate": 3.646047424157306e-08, - "loss": 0.9028, - "num_input_tokens_seen": 166982365, - "step": 7825 - }, - { - "epoch": 0.9410208621415259, - "flos": 16612909648200.0, - "grad_norm": 9.153436951223268, - "learning_rate": 3.631256090732382e-08, - "loss": 0.893, - "num_input_tokens_seen": 167002545, - "step": 7826 - }, - { - "epoch": 0.941141105032165, - "flos": 16162213045080.0, - "grad_norm": 4.117477725257681, - "learning_rate": 3.6164945462897833e-08, - "loss": 1.0559, - "num_input_tokens_seen": 167021555, - "step": 7827 - }, - { - "epoch": 0.941261347922804, - "flos": 14698824694080.0, - "grad_norm": 4.582517007899212, - "learning_rate": 3.6017627930687856e-08, - "loss": 0.9822, - "num_input_tokens_seen": 167041100, - "step": 7828 - }, - { - "epoch": 0.9413815908134432, - "flos": 13776024170880.0, - "grad_norm": 7.268585848738289, - "learning_rate": 3.587060833304267e-08, - "loss": 0.9928, - "num_input_tokens_seen": 167059010, - "step": 7829 - }, - { - "epoch": 0.9415018337040822, - "flos": 12390716795880.0, - "grad_norm": 9.571147524158178, - "learning_rate": 3.5723886692264225e-08, - "loss": 0.8679, - "num_input_tokens_seen": 167076270, - "step": 7830 - }, - { - "epoch": 0.9416220765947213, - "flos": 22695181351320.0, - "grad_norm": 6.473332795058967, - "learning_rate": 3.557746303061071e-08, - "loss": 0.8451, - "num_input_tokens_seen": 
167097745, - "step": 7831 - }, - { - "epoch": 0.9417423194853605, - "flos": 16715893359000.0, - "grad_norm": 3.793439653199431, - "learning_rate": 3.543133737029391e-08, - "loss": 0.952, - "num_input_tokens_seen": 167117975, - "step": 7832 - }, - { - "epoch": 0.9418625623759995, - "flos": 17005859437680.0, - "grad_norm": 6.1891095158448355, - "learning_rate": 3.5285509733481214e-08, - "loss": 0.9077, - "num_input_tokens_seen": 167137420, - "step": 7833 - }, - { - "epoch": 0.9419828052666386, - "flos": 12810041075040.0, - "grad_norm": 3.2509495741783043, - "learning_rate": 3.513998014229469e-08, - "loss": 0.9861, - "num_input_tokens_seen": 167156090, - "step": 7834 - }, - { - "epoch": 0.9421030481572777, - "flos": 12548013225600.0, - "grad_norm": 5.558326956055666, - "learning_rate": 3.499474861881069e-08, - "loss": 1.0882, - "num_input_tokens_seen": 167173035, - "step": 7835 - }, - { - "epoch": 0.9422232910479168, - "flos": 14331728147760.0, - "grad_norm": 9.576889481130555, - "learning_rate": 3.4849815185061136e-08, - "loss": 0.8937, - "num_input_tokens_seen": 167192645, - "step": 7836 - }, - { - "epoch": 0.9423435339385559, - "flos": 13072896786600.0, - "grad_norm": 7.453905492578046, - "learning_rate": 3.470517986303223e-08, - "loss": 1.0026, - "num_input_tokens_seen": 167211350, - "step": 7837 - }, - { - "epoch": 0.942463776829195, - "flos": 14249630507400.0, - "grad_norm": 13.81468107514639, - "learning_rate": 3.4560842674664856e-08, - "loss": 1.0143, - "num_input_tokens_seen": 167229585, - "step": 7838 - }, - { - "epoch": 0.9425840197198341, - "flos": 16086523670760.0, - "grad_norm": 12.186105188614151, - "learning_rate": 3.441680364185506e-08, - "loss": 0.98, - "num_input_tokens_seen": 167249175, - "step": 7839 - }, - { - "epoch": 0.9427042626104731, - "flos": 14147474658720.0, - "grad_norm": 12.693361628952331, - "learning_rate": 3.427306278645314e-08, - "loss": 0.9848, - "num_input_tokens_seen": 167267350, - "step": 7840 - }, - { - "epoch": 0.9428245055011123, - "flos": 16245414501600.0, - "grad_norm": 6.372920276765112, - "learning_rate": 3.4129620130264767e-08, - "loss": 0.947, - "num_input_tokens_seen": 167285430, - "step": 7841 - }, - { - "epoch": 0.9429447483917514, - "flos": 14875075515960.0, - "grad_norm": 4.75408559067533, - "learning_rate": 3.398647569505009e-08, - "loss": 1.0098, - "num_input_tokens_seen": 167302575, - "step": 7842 - }, - { - "epoch": 0.9430649912823904, - "flos": 13361115156360.0, - "grad_norm": 5.789322120106253, - "learning_rate": 3.384362950252373e-08, - "loss": 0.9578, - "num_input_tokens_seen": 167319265, - "step": 7843 - }, - { - "epoch": 0.9431852341730296, - "flos": 23215097739600.0, - "grad_norm": 5.792787329108777, - "learning_rate": 3.3701081574355473e-08, - "loss": 0.7972, - "num_input_tokens_seen": 167340945, - "step": 7844 - }, - { - "epoch": 0.9433054770636686, - "flos": 47907682599720.0, - "grad_norm": 0.6509885130482631, - "learning_rate": 3.3558831932169796e-08, - "loss": 0.7644, - "num_input_tokens_seen": 167409335, - "step": 7845 - }, - { - "epoch": 0.9434257199543077, - "flos": 18605044916760.0, - "grad_norm": 4.698212044079408, - "learning_rate": 3.341688059754588e-08, - "loss": 1.1101, - "num_input_tokens_seen": 167424710, - "step": 7846 - }, - { - "epoch": 0.9435459628449467, - "flos": 17787221105760.0, - "grad_norm": 5.139701783550104, - "learning_rate": 3.327522759201762e-08, - "loss": 0.9966, - "num_input_tokens_seen": 167444300, - "step": 7847 - }, - { - "epoch": 0.9436662057355859, - "flos": 15746752122480.0, - "grad_norm": 
5.571149979776884, - "learning_rate": 3.313387293707359e-08, - "loss": 0.8906, - "num_input_tokens_seen": 167462725, - "step": 7848 - }, - { - "epoch": 0.943786448626225, - "flos": 14277047490120.0, - "grad_norm": 4.036149943066417, - "learning_rate": 3.29928166541571e-08, - "loss": 0.902, - "num_input_tokens_seen": 167481400, - "step": 7849 - }, - { - "epoch": 0.943906691516864, - "flos": 15694769682120.0, - "grad_norm": 15.589947009336056, - "learning_rate": 3.2852058764666346e-08, - "loss": 1.0228, - "num_input_tokens_seen": 167500220, - "step": 7850 - }, - { - "epoch": 0.9440269344075032, - "flos": 25186868184240.0, - "grad_norm": 4.042862128726941, - "learning_rate": 3.2711599289954264e-08, - "loss": 0.9175, - "num_input_tokens_seen": 167523975, - "step": 7851 - }, - { - "epoch": 0.9441471772981422, - "flos": 13643691799080.0, - "grad_norm": 5.283854367209315, - "learning_rate": 3.257143825132847e-08, - "loss": 0.9975, - "num_input_tokens_seen": 167541865, - "step": 7852 - }, - { - "epoch": 0.9442674201887813, - "flos": 18317286470400.0, - "grad_norm": 6.525759492717082, - "learning_rate": 3.243157567005106e-08, - "loss": 0.9815, - "num_input_tokens_seen": 167559765, - "step": 7853 - }, - { - "epoch": 0.9443876630794205, - "flos": 10974834297480.0, - "grad_norm": 7.979402729523174, - "learning_rate": 3.2292011567339296e-08, - "loss": 0.8609, - "num_input_tokens_seen": 167577290, - "step": 7854 - }, - { - "epoch": 0.9445079059700595, - "flos": 9448578652320.0, - "grad_norm": 7.3511515227845425, - "learning_rate": 3.21527459643649e-08, - "loss": 0.7637, - "num_input_tokens_seen": 167593895, - "step": 7855 - }, - { - "epoch": 0.9446281488606986, - "flos": 16821912564240.0, - "grad_norm": 3.68862742629817, - "learning_rate": 3.2013778882254536e-08, - "loss": 0.9681, - "num_input_tokens_seen": 167612410, - "step": 7856 - }, - { - "epoch": 0.9447483917513377, - "flos": 18186149899440.0, - "grad_norm": 9.004592995095571, - "learning_rate": 3.1875110342088676e-08, - "loss": 0.9844, - "num_input_tokens_seen": 167633580, - "step": 7857 - }, - { - "epoch": 0.9448686346419768, - "flos": 17451711514320.0, - "grad_norm": 6.642225220796798, - "learning_rate": 3.1736740364904035e-08, - "loss": 0.8765, - "num_input_tokens_seen": 167653830, - "step": 7858 - }, - { - "epoch": 0.9449888775326158, - "flos": 10398489542640.0, - "grad_norm": 3.7821610039018747, - "learning_rate": 3.159866897169094e-08, - "loss": 0.9797, - "num_input_tokens_seen": 167671750, - "step": 7859 - }, - { - "epoch": 0.945109120423255, - "flos": 10920276286080.0, - "grad_norm": 8.304361360305744, - "learning_rate": 3.146089618339487e-08, - "loss": 0.9814, - "num_input_tokens_seen": 167688325, - "step": 7860 - }, - { - "epoch": 0.9452293633138941, - "flos": 18107670323160.0, - "grad_norm": 18.158879468856362, - "learning_rate": 3.132342202091554e-08, - "loss": 0.9034, - "num_input_tokens_seen": 167708270, - "step": 7861 - }, - { - "epoch": 0.9453496062045331, - "flos": 15065614624800.0, - "grad_norm": 8.557751752140835, - "learning_rate": 3.1186246505107595e-08, - "loss": 0.8976, - "num_input_tokens_seen": 167727130, - "step": 7862 - }, - { - "epoch": 0.9454698490951723, - "flos": 14199947684040.0, - "grad_norm": 4.453661434589544, - "learning_rate": 3.104936965678084e-08, - "loss": 1.0573, - "num_input_tokens_seen": 167745180, - "step": 7863 - }, - { - "epoch": 0.9455900919858113, - "flos": 14960944528200.0, - "grad_norm": 3.25034286451098, - "learning_rate": 3.091279149669956e-08, - "loss": 1.0243, - "num_input_tokens_seen": 
167763690, - "step": 7864 - }, - { - "epoch": 0.9457103348764504, - "flos": 14724585952560.0, - "grad_norm": 4.287658572809559, - "learning_rate": 3.0776512045581624e-08, - "loss": 0.9653, - "num_input_tokens_seen": 167782200, - "step": 7865 - }, - { - "epoch": 0.9458305777670896, - "flos": 15217913220240.0, - "grad_norm": 4.7754800924645675, - "learning_rate": 3.0640531324101384e-08, - "loss": 0.9975, - "num_input_tokens_seen": 167799685, - "step": 7866 - }, - { - "epoch": 0.9459508206577286, - "flos": 14200192976520.0, - "grad_norm": 3.964130025430612, - "learning_rate": 3.0504849352886554e-08, - "loss": 0.9785, - "num_input_tokens_seen": 167817550, - "step": 7867 - }, - { - "epoch": 0.9460710635483677, - "flos": 8562270933600.0, - "grad_norm": 5.405774222369753, - "learning_rate": 3.036946615252023e-08, - "loss": 0.9417, - "num_input_tokens_seen": 167832800, - "step": 7868 - }, - { - "epoch": 0.9461913064390068, - "flos": 24452061860400.0, - "grad_norm": 6.84320567853548, - "learning_rate": 3.0234381743539984e-08, - "loss": 0.8968, - "num_input_tokens_seen": 167850135, - "step": 7869 - }, - { - "epoch": 0.9463115493296459, - "flos": 13806752602080.0, - "grad_norm": 171.0021015952728, - "learning_rate": 3.0099596146437863e-08, - "loss": 1.026, - "num_input_tokens_seen": 167866960, - "step": 7870 - }, - { - "epoch": 0.946431792220285, - "flos": 50540077958400.0, - "grad_norm": 0.7887127544961596, - "learning_rate": 2.996510938166086e-08, - "loss": 0.8572, - "num_input_tokens_seen": 167929655, - "step": 7871 - }, - { - "epoch": 0.9465520351109241, - "flos": 13435332775800.0, - "grad_norm": 11.40615970105524, - "learning_rate": 2.983092146960997e-08, - "loss": 0.9704, - "num_input_tokens_seen": 167946720, - "step": 7872 - }, - { - "epoch": 0.9466722780015632, - "flos": 13570976596080.0, - "grad_norm": 6.349229071970046, - "learning_rate": 2.9697032430642256e-08, - "loss": 1.0178, - "num_input_tokens_seen": 167964655, - "step": 7873 - }, - { - "epoch": 0.9467925208922022, - "flos": 12207045876480.0, - "grad_norm": 5.223816017451197, - "learning_rate": 2.9563442285067906e-08, - "loss": 0.9525, - "num_input_tokens_seen": 167981420, - "step": 7874 - }, - { - "epoch": 0.9469127637828414, - "flos": 20781740289960.0, - "grad_norm": 5.328012593893111, - "learning_rate": 2.943015105315294e-08, - "loss": 1.0254, - "num_input_tokens_seen": 168001335, - "step": 7875 - }, - { - "epoch": 0.9470330066734804, - "flos": 18526013432400.0, - "grad_norm": 14.68295152725629, - "learning_rate": 2.929715875511718e-08, - "loss": 0.8937, - "num_input_tokens_seen": 168020090, - "step": 7876 - }, - { - "epoch": 0.9471532495641195, - "flos": 16665076057920.0, - "grad_norm": 11.284827344095042, - "learning_rate": 2.9164465411135375e-08, - "loss": 0.9264, - "num_input_tokens_seen": 168039580, - "step": 7877 - }, - { - "epoch": 0.9472734924547586, - "flos": 11185155660600.0, - "grad_norm": 3.6441224943272843, - "learning_rate": 2.9032071041337426e-08, - "loss": 1.0277, - "num_input_tokens_seen": 168057535, - "step": 7878 - }, - { - "epoch": 0.9473937353453977, - "flos": 7854697623120.0, - "grad_norm": 3.9796003867576015, - "learning_rate": 2.889997566580704e-08, - "loss": 0.9541, - "num_input_tokens_seen": 168075410, - "step": 7879 - }, - { - "epoch": 0.9475139782360368, - "flos": 18342863759520.0, - "grad_norm": 9.081458315261704, - "learning_rate": 2.8768179304583086e-08, - "loss": 0.9299, - "num_input_tokens_seen": 168097185, - "step": 7880 - }, - { - "epoch": 0.9476342211266758, - "flos": 16218794719440.0, - 
"grad_norm": 13.201148035237019, - "learning_rate": 2.8636681977659117e-08, - "loss": 0.9643, - "num_input_tokens_seen": 168116555, - "step": 7881 - }, - { - "epoch": 0.947754464017315, - "flos": 14331298885920.0, - "grad_norm": 10.152721742915173, - "learning_rate": 2.850548370498318e-08, - "loss": 1.0076, - "num_input_tokens_seen": 168134115, - "step": 7882 - }, - { - "epoch": 0.9478747069079541, - "flos": 17582848085280.0, - "grad_norm": 7.379243308871407, - "learning_rate": 2.8374584506457798e-08, - "loss": 0.9287, - "num_input_tokens_seen": 168155110, - "step": 7883 - }, - { - "epoch": 0.9479949497985931, - "flos": 15170560675440.0, - "grad_norm": 4.894208381397546, - "learning_rate": 2.824398440193998e-08, - "loss": 0.8874, - "num_input_tokens_seen": 168173630, - "step": 7884 - }, - { - "epoch": 0.9481151926892323, - "flos": 12861348961080.0, - "grad_norm": 11.01016528595386, - "learning_rate": 2.811368341124232e-08, - "loss": 0.9263, - "num_input_tokens_seen": 168192420, - "step": 7885 - }, - { - "epoch": 0.9482354355798713, - "flos": 15721604095200.0, - "grad_norm": 20.024892193793377, - "learning_rate": 2.7983681554131222e-08, - "loss": 0.9052, - "num_input_tokens_seen": 168212400, - "step": 7886 - }, - { - "epoch": 0.9483556784705104, - "flos": 13518871509480.0, - "grad_norm": 10.969326254115987, - "learning_rate": 2.7853978850327365e-08, - "loss": 0.9034, - "num_input_tokens_seen": 168231290, - "step": 7887 - }, - { - "epoch": 0.9484759213611496, - "flos": 18344212868160.0, - "grad_norm": 4.0678037794233175, - "learning_rate": 2.7724575319507225e-08, - "loss": 1.1004, - "num_input_tokens_seen": 168250720, - "step": 7888 - }, - { - "epoch": 0.9485961642517886, - "flos": 14672020942560.0, - "grad_norm": 6.009982705496026, - "learning_rate": 2.759547098130044e-08, - "loss": 1.0107, - "num_input_tokens_seen": 168269170, - "step": 7889 - }, - { - "epoch": 0.9487164071424277, - "flos": 16113879330360.0, - "grad_norm": 4.492773611560174, - "learning_rate": 2.746666585529267e-08, - "loss": 0.988, - "num_input_tokens_seen": 168289165, - "step": 7890 - }, - { - "epoch": 0.9488366500330668, - "flos": 27650248849200.0, - "grad_norm": 4.0945391877069595, - "learning_rate": 2.73381599610234e-08, - "loss": 0.9588, - "num_input_tokens_seen": 168309285, - "step": 7891 - }, - { - "epoch": 0.9489568929237059, - "flos": 19863048415800.0, - "grad_norm": 3.9739775584277894, - "learning_rate": 2.7209953317987033e-08, - "loss": 0.9389, - "num_input_tokens_seen": 168330045, - "step": 7892 - }, - { - "epoch": 0.9490771358143449, - "flos": 23951866403280.0, - "grad_norm": 3.5752001139512966, - "learning_rate": 2.7082045945631793e-08, - "loss": 1.0039, - "num_input_tokens_seen": 168351980, - "step": 7893 - }, - { - "epoch": 0.9491973787049841, - "flos": 10450563967680.0, - "grad_norm": 7.373317203239655, - "learning_rate": 2.6954437863361712e-08, - "loss": 0.9121, - "num_input_tokens_seen": 168369615, - "step": 7894 - }, - { - "epoch": 0.9493176215956232, - "flos": 18023794312320.0, - "grad_norm": 3.9624979831437717, - "learning_rate": 2.6827129090534862e-08, - "loss": 0.9372, - "num_input_tokens_seen": 168389635, - "step": 7895 - }, - { - "epoch": 0.9494378644862622, - "flos": 14931994467480.0, - "grad_norm": 16.370973980264584, - "learning_rate": 2.670011964646335e-08, - "loss": 1.0088, - "num_input_tokens_seen": 168408035, - "step": 7896 - }, - { - "epoch": 0.9495581073769014, - "flos": 10738996968360.0, - "grad_norm": 11.005955211151065, - "learning_rate": 2.657340955041487e-08, - "loss": 0.9021, - 
"num_input_tokens_seen": 168426530, - "step": 7897 - }, - { - "epoch": 0.9496783502675404, - "flos": 20384007297120.0, - "grad_norm": 7.575033095888664, - "learning_rate": 2.6446998821611167e-08, - "loss": 0.935, - "num_input_tokens_seen": 168446445, - "step": 7898 - }, - { - "epoch": 0.9497985931581795, - "flos": 10503098316120.0, - "grad_norm": 5.317541804182962, - "learning_rate": 2.6320887479228228e-08, - "loss": 0.9356, - "num_input_tokens_seen": 168462765, - "step": 7899 - }, - { - "epoch": 0.9499188360488187, - "flos": 19361105249760.0, - "grad_norm": 5.027692781201774, - "learning_rate": 2.619507554239786e-08, - "loss": 0.9415, - "num_input_tokens_seen": 168481045, - "step": 7900 - }, - { - "epoch": 0.9500390789394577, - "flos": 17294323099920.0, - "grad_norm": 3.50897727608029, - "learning_rate": 2.606956303020502e-08, - "loss": 0.9382, - "num_input_tokens_seen": 168501570, - "step": 7901 - }, - { - "epoch": 0.9501593218300968, - "flos": 10134867292080.0, - "grad_norm": 4.876429030459823, - "learning_rate": 2.5944349961690036e-08, - "loss": 1.0651, - "num_input_tokens_seen": 168518310, - "step": 7902 - }, - { - "epoch": 0.9502795647207359, - "flos": 27651659280960.0, - "grad_norm": 3.717458292272444, - "learning_rate": 2.581943635584749e-08, - "loss": 0.9489, - "num_input_tokens_seen": 168540860, - "step": 7903 - }, - { - "epoch": 0.950399807611375, - "flos": 29146481279040.0, - "grad_norm": 4.84848676847651, - "learning_rate": 2.569482223162689e-08, - "loss": 0.8806, - "num_input_tokens_seen": 168564555, - "step": 7904 - }, - { - "epoch": 0.950520050502014, - "flos": 16664646796080.0, - "grad_norm": 9.912464009973677, - "learning_rate": 2.5570507607932e-08, - "loss": 0.9556, - "num_input_tokens_seen": 168584190, - "step": 7905 - }, - { - "epoch": 0.9506402933926532, - "flos": 12599106480720.0, - "grad_norm": 10.75880603731812, - "learning_rate": 2.54464925036213e-08, - "loss": 0.8379, - "num_input_tokens_seen": 168601200, - "step": 7906 - }, - { - "epoch": 0.9507605362832923, - "flos": 23220126235440.0, - "grad_norm": 13.144575942531747, - "learning_rate": 2.532277693750773e-08, - "loss": 0.8266, - "num_input_tokens_seen": 168621845, - "step": 7907 - }, - { - "epoch": 0.9508807791739313, - "flos": 13905106417320.0, - "grad_norm": 4.554987745383702, - "learning_rate": 2.5199360928358948e-08, - "loss": 0.9821, - "num_input_tokens_seen": 168638800, - "step": 7908 - }, - { - "epoch": 0.9510010220645704, - "flos": 15250113406320.0, - "grad_norm": 4.575454922763871, - "learning_rate": 2.507624449489665e-08, - "loss": 1.0995, - "num_input_tokens_seen": 168657150, - "step": 7909 - }, - { - "epoch": 0.9511212649552095, - "flos": 13387213692000.0, - "grad_norm": 5.742941130923419, - "learning_rate": 2.495342765579811e-08, - "loss": 0.8764, - "num_input_tokens_seen": 168675530, - "step": 7910 - }, - { - "epoch": 0.9512415078458486, - "flos": 14774207452800.0, - "grad_norm": 5.466453898561448, - "learning_rate": 2.4830910429693984e-08, - "loss": 0.9363, - "num_input_tokens_seen": 168695210, - "step": 7911 - }, - { - "epoch": 0.9513617507364877, - "flos": 13020423761280.0, - "grad_norm": 3.8499217533235486, - "learning_rate": 2.470869283517052e-08, - "loss": 1.0227, - "num_input_tokens_seen": 168712965, - "step": 7912 - }, - { - "epoch": 0.9514819936271268, - "flos": 17792924155920.0, - "grad_norm": 4.110316411668124, - "learning_rate": 2.458677489076777e-08, - "loss": 0.9979, - "num_input_tokens_seen": 168733695, - "step": 7913 - }, - { - "epoch": 0.9516022365177659, - "flos": 
13230775785960.0, - "grad_norm": 3.836800099518101, - "learning_rate": 2.446515661498072e-08, - "loss": 1.0586, - "num_input_tokens_seen": 168752745, - "step": 7914 - }, - { - "epoch": 0.9517224794084049, - "flos": 18052652388360.0, - "grad_norm": 3.6617590978314998, - "learning_rate": 2.434383802625861e-08, - "loss": 0.9635, - "num_input_tokens_seen": 168771420, - "step": 7915 - }, - { - "epoch": 0.9518427222990441, - "flos": 15249316205760.0, - "grad_norm": 7.157663530566164, - "learning_rate": 2.4222819143005168e-08, - "loss": 0.974, - "num_input_tokens_seen": 168790735, - "step": 7916 - }, - { - "epoch": 0.9519629651896832, - "flos": 14935121946600.0, - "grad_norm": 8.57318562255303, - "learning_rate": 2.4102099983579706e-08, - "loss": 1.0369, - "num_input_tokens_seen": 168809605, - "step": 7917 - }, - { - "epoch": 0.9520832080803222, - "flos": 15406122050520.0, - "grad_norm": 5.177077520458046, - "learning_rate": 2.3981680566294236e-08, - "loss": 0.9906, - "num_input_tokens_seen": 168828925, - "step": 7918 - }, - { - "epoch": 0.9522034509709614, - "flos": 16452117800640.0, - "grad_norm": 4.267633545120698, - "learning_rate": 2.3861560909416822e-08, - "loss": 0.9773, - "num_input_tokens_seen": 168848195, - "step": 7919 - }, - { - "epoch": 0.9523236938616004, - "flos": 17554909856040.0, - "grad_norm": 3.823199033919933, - "learning_rate": 2.3741741031169325e-08, - "loss": 1.0566, - "num_input_tokens_seen": 168867485, - "step": 7920 - }, - { - "epoch": 0.9524439367522395, - "flos": 16112376913920.0, - "grad_norm": 8.470821338450554, - "learning_rate": 2.3622220949728544e-08, - "loss": 0.9193, - "num_input_tokens_seen": 168886090, - "step": 7921 - }, - { - "epoch": 0.9525641796428787, - "flos": 24293232352680.0, - "grad_norm": 6.262038576712097, - "learning_rate": 2.3503000683225526e-08, - "loss": 0.8427, - "num_input_tokens_seen": 168903525, - "step": 7922 - }, - { - "epoch": 0.9526844225335177, - "flos": 11839918668600.0, - "grad_norm": 8.406625278420428, - "learning_rate": 2.3384080249745585e-08, - "loss": 1.0604, - "num_input_tokens_seen": 168921135, - "step": 7923 - }, - { - "epoch": 0.9528046654241568, - "flos": 26365993506720.0, - "grad_norm": 5.627994842125672, - "learning_rate": 2.3265459667329178e-08, - "loss": 1.0535, - "num_input_tokens_seen": 168940345, - "step": 7924 - }, - { - "epoch": 0.9529249083147959, - "flos": 12937406274120.0, - "grad_norm": 6.975263485324211, - "learning_rate": 2.31471389539708e-08, - "loss": 1.0804, - "num_input_tokens_seen": 168957190, - "step": 7925 - }, - { - "epoch": 0.953045151205435, - "flos": 20414827713000.0, - "grad_norm": 4.974833477051232, - "learning_rate": 2.3029118127619872e-08, - "loss": 0.9464, - "num_input_tokens_seen": 168976625, - "step": 7926 - }, - { - "epoch": 0.953165394096074, - "flos": 15511068101160.0, - "grad_norm": 5.402292203071145, - "learning_rate": 2.2911397206179628e-08, - "loss": 1.0903, - "num_input_tokens_seen": 168993095, - "step": 7927 - }, - { - "epoch": 0.9532856369867132, - "flos": 14174278410240.0, - "grad_norm": 3.6943991824813738, - "learning_rate": 2.279397620750845e-08, - "loss": 0.8467, - "num_input_tokens_seen": 169011860, - "step": 7928 - }, - { - "epoch": 0.9534058798773523, - "flos": 10634449518000.0, - "grad_norm": 4.009859094213204, - "learning_rate": 2.2676855149419195e-08, - "loss": 1.0024, - "num_input_tokens_seen": 169028750, - "step": 7929 - }, - { - "epoch": 0.9535261227679913, - "flos": 12469288356840.0, - "grad_norm": 6.339785083774313, - "learning_rate": 2.2560034049678988e-08, - "loss": 
0.968, - "num_input_tokens_seen": 169042820, - "step": 7930 - }, - { - "epoch": 0.9536463656586305, - "flos": 16450124799240.0, - "grad_norm": 3.6584344842666496, - "learning_rate": 2.2443512926008988e-08, - "loss": 0.9815, - "num_input_tokens_seen": 169061870, - "step": 7931 - }, - { - "epoch": 0.9537666085492695, - "flos": 13204033357560.0, - "grad_norm": 6.663185768548078, - "learning_rate": 2.2327291796085946e-08, - "loss": 0.9276, - "num_input_tokens_seen": 169079950, - "step": 7932 - }, - { - "epoch": 0.9538868514399086, - "flos": 13465785252960.0, - "grad_norm": 5.880395238139889, - "learning_rate": 2.2211370677540197e-08, - "loss": 0.9947, - "num_input_tokens_seen": 169096195, - "step": 7933 - }, - { - "epoch": 0.9540070943305478, - "flos": 11892790294200.0, - "grad_norm": 9.98811129536186, - "learning_rate": 2.2095749587957012e-08, - "loss": 0.9952, - "num_input_tokens_seen": 169113820, - "step": 7934 - }, - { - "epoch": 0.9541273372211868, - "flos": 14304893734680.0, - "grad_norm": 5.196860451371134, - "learning_rate": 2.1980428544876138e-08, - "loss": 0.9164, - "num_input_tokens_seen": 169132180, - "step": 7935 - }, - { - "epoch": 0.9542475801118259, - "flos": 19025779627680.0, - "grad_norm": 4.188665427114313, - "learning_rate": 2.1865407565791584e-08, - "loss": 0.9638, - "num_input_tokens_seen": 169153470, - "step": 7936 - }, - { - "epoch": 0.954367823002465, - "flos": 16585952588880.0, - "grad_norm": 3.68658216012833, - "learning_rate": 2.175068666815183e-08, - "loss": 0.9945, - "num_input_tokens_seen": 169174030, - "step": 7937 - }, - { - "epoch": 0.9544880658931041, - "flos": 10528767589920.0, - "grad_norm": 5.494552341932182, - "learning_rate": 2.163626586935985e-08, - "loss": 1.0132, - "num_input_tokens_seen": 169190290, - "step": 7938 - }, - { - "epoch": 0.9546083087837431, - "flos": 20729175279960.0, - "grad_norm": 3.7264639723446433, - "learning_rate": 2.1522145186773755e-08, - "loss": 0.8435, - "num_input_tokens_seen": 169208930, - "step": 7939 - }, - { - "epoch": 0.9547285516743822, - "flos": 15013141599480.0, - "grad_norm": 5.686902025248491, - "learning_rate": 2.140832463770481e-08, - "loss": 1.0808, - "num_input_tokens_seen": 169227845, - "step": 7940 - }, - { - "epoch": 0.9548487945650214, - "flos": 19575106000080.0, - "grad_norm": 4.737281234642266, - "learning_rate": 2.129480423941987e-08, - "loss": 0.9814, - "num_input_tokens_seen": 169244235, - "step": 7941 - }, - { - "epoch": 0.9549690374556604, - "flos": 15825844929960.0, - "grad_norm": 4.374637915750388, - "learning_rate": 2.1181584009140052e-08, - "loss": 1.0346, - "num_input_tokens_seen": 169263495, - "step": 7942 - }, - { - "epoch": 0.9550892803462995, - "flos": 12462880090800.0, - "grad_norm": 17.070367920390584, - "learning_rate": 2.10686639640405e-08, - "loss": 1.0577, - "num_input_tokens_seen": 169277305, - "step": 7943 - }, - { - "epoch": 0.9552095232369386, - "flos": 17320697589600.0, - "grad_norm": 10.724492528331323, - "learning_rate": 2.0956044121251294e-08, - "loss": 1.041, - "num_input_tokens_seen": 169295810, - "step": 7944 - }, - { - "epoch": 0.9553297661275777, - "flos": 16166168386320.0, - "grad_norm": 5.279159041682485, - "learning_rate": 2.084372449785654e-08, - "loss": 1.0357, - "num_input_tokens_seen": 169315365, - "step": 7945 - }, - { - "epoch": 0.9554500090182168, - "flos": 10896170751840.0, - "grad_norm": 5.403270098633703, - "learning_rate": 2.0731705110895282e-08, - "loss": 0.9149, - "num_input_tokens_seen": 169332575, - "step": 7946 - }, - { - "epoch": 0.9555702519088559, - 
"flos": 16717211806080.0, - "grad_norm": 5.253489833056251, - "learning_rate": 2.0619985977360587e-08, - "loss": 1.0891, - "num_input_tokens_seen": 169350615, - "step": 7947 - }, - { - "epoch": 0.955690494799495, - "flos": 16320337336920.0, - "grad_norm": 5.597532389429114, - "learning_rate": 2.0508567114200237e-08, - "loss": 1.0112, - "num_input_tokens_seen": 169370250, - "step": 7948 - }, - { - "epoch": 0.955810737690134, - "flos": 18526687986720.0, - "grad_norm": 4.726038476568598, - "learning_rate": 2.0397448538316485e-08, - "loss": 1.0093, - "num_input_tokens_seen": 169391010, - "step": 7949 - }, - { - "epoch": 0.9559309805807732, - "flos": 14802942882600.0, - "grad_norm": 4.293992507949356, - "learning_rate": 2.028663026656563e-08, - "loss": 0.8878, - "num_input_tokens_seen": 169409585, - "step": 7950 - }, - { - "epoch": 0.9560512234714122, - "flos": 15326140057800.0, - "grad_norm": 3.620981159126423, - "learning_rate": 2.0176112315758885e-08, - "loss": 0.9509, - "num_input_tokens_seen": 169427095, - "step": 7951 - }, - { - "epoch": 0.9561714663620513, - "flos": 12358669917600.0, - "grad_norm": 7.767912314834471, - "learning_rate": 2.0065894702661957e-08, - "loss": 0.9191, - "num_input_tokens_seen": 169443490, - "step": 7952 - }, - { - "epoch": 0.9562917092526905, - "flos": 18574255162440.0, - "grad_norm": 4.446313406787352, - "learning_rate": 1.9955977443994577e-08, - "loss": 1.0086, - "num_input_tokens_seen": 169463200, - "step": 7953 - }, - { - "epoch": 0.9564119521433295, - "flos": 17136290792760.0, - "grad_norm": 4.821322194697023, - "learning_rate": 1.9846360556430965e-08, - "loss": 0.8512, - "num_input_tokens_seen": 169481220, - "step": 7954 - }, - { - "epoch": 0.9565321950339686, - "flos": 22821902657640.0, - "grad_norm": 4.771863912133586, - "learning_rate": 1.973704405660004e-08, - "loss": 0.8281, - "num_input_tokens_seen": 169502055, - "step": 7955 - }, - { - "epoch": 0.9566524379246077, - "flos": 16771493863440.0, - "grad_norm": 3.264757981261168, - "learning_rate": 1.9628027961085203e-08, - "loss": 0.9959, - "num_input_tokens_seen": 169525005, - "step": 7956 - }, - { - "epoch": 0.9567726808152468, - "flos": 27176550528000.0, - "grad_norm": 3.2775296873469246, - "learning_rate": 1.9519312286423894e-08, - "loss": 1.0588, - "num_input_tokens_seen": 169547920, - "step": 7957 - }, - { - "epoch": 0.9568929237058859, - "flos": 16163776784640.0, - "grad_norm": 3.154110324882172, - "learning_rate": 1.9410897049108255e-08, - "loss": 1.0009, - "num_input_tokens_seen": 169566920, - "step": 7958 - }, - { - "epoch": 0.957013166596525, - "flos": 16952987812080.0, - "grad_norm": 4.607117180736888, - "learning_rate": 1.9302782265584905e-08, - "loss": 1.1277, - "num_input_tokens_seen": 169587305, - "step": 7959 - }, - { - "epoch": 0.9571334094871641, - "flos": 12495509538720.0, - "grad_norm": 6.34648276412548, - "learning_rate": 1.9194967952254282e-08, - "loss": 1.0828, - "num_input_tokens_seen": 169605600, - "step": 7960 - }, - { - "epoch": 0.9572536523778031, - "flos": 10863326673000.0, - "grad_norm": 4.69477544115489, - "learning_rate": 1.9087454125472635e-08, - "loss": 1.0184, - "num_input_tokens_seen": 169619795, - "step": 7961 - }, - { - "epoch": 0.9573738952684423, - "flos": 17763422187120.0, - "grad_norm": 4.412366369023237, - "learning_rate": 1.8980240801548696e-08, - "loss": 1.0041, - "num_input_tokens_seen": 169638705, - "step": 7962 - }, - { - "epoch": 0.9574941381590814, - "flos": 18337681955880.0, - "grad_norm": 5.635803359814646, - "learning_rate": 1.8873327996747458e-08, - 
"loss": 0.9638, - "num_input_tokens_seen": 169656925, - "step": 7963 - }, - { - "epoch": 0.9576143810497204, - "flos": 23037129870360.0, - "grad_norm": 12.934054445698644, - "learning_rate": 1.8766715727287053e-08, - "loss": 0.887, - "num_input_tokens_seen": 169678350, - "step": 7964 - }, - { - "epoch": 0.9577346239403596, - "flos": 19758807581040.0, - "grad_norm": 3.3914523895084736, - "learning_rate": 1.8660404009340546e-08, - "loss": 1.0221, - "num_input_tokens_seen": 169698520, - "step": 7965 - }, - { - "epoch": 0.9578548668309986, - "flos": 41122994276280.0, - "grad_norm": 0.9155154621624846, - "learning_rate": 1.8554392859035485e-08, - "loss": 0.8959, - "num_input_tokens_seen": 169755990, - "step": 7966 - }, - { - "epoch": 0.9579751097216377, - "flos": 14016859334280.0, - "grad_norm": 4.685412743299489, - "learning_rate": 1.8448682292453444e-08, - "loss": 1.0145, - "num_input_tokens_seen": 169774785, - "step": 7967 - }, - { - "epoch": 0.9580953526122769, - "flos": 12784494447480.0, - "grad_norm": 3.0232340426334985, - "learning_rate": 1.8343272325631154e-08, - "loss": 0.8891, - "num_input_tokens_seen": 169793450, - "step": 7968 - }, - { - "epoch": 0.9582155955029159, - "flos": 17259700650600.0, - "grad_norm": 7.42055523833399, - "learning_rate": 1.8238162974558492e-08, - "loss": 1.0127, - "num_input_tokens_seen": 169807100, - "step": 7969 - }, - { - "epoch": 0.958335838393555, - "flos": 16192358906640.0, - "grad_norm": 3.713328151619365, - "learning_rate": 1.8133354255181144e-08, - "loss": 0.9721, - "num_input_tokens_seen": 169827135, - "step": 7970 - }, - { - "epoch": 0.958456081284194, - "flos": 11972710963800.0, - "grad_norm": 3.7337523384201856, - "learning_rate": 1.802884618339795e-08, - "loss": 0.9727, - "num_input_tokens_seen": 169845660, - "step": 7971 - }, - { - "epoch": 0.9585763241748332, - "flos": 14174186425560.0, - "grad_norm": 4.89558174082046, - "learning_rate": 1.7924638775062894e-08, - "loss": 1.0299, - "num_input_tokens_seen": 169864500, - "step": 7972 - }, - { - "epoch": 0.9586965670654722, - "flos": 15194911502160.0, - "grad_norm": 3.6498513596460014, - "learning_rate": 1.7820732045984444e-08, - "loss": 1.0413, - "num_input_tokens_seen": 169884365, - "step": 7973 - }, - { - "epoch": 0.9588168099561113, - "flos": 15223677593520.0, - "grad_norm": 4.415526884788279, - "learning_rate": 1.7717126011924655e-08, - "loss": 0.9648, - "num_input_tokens_seen": 169905670, - "step": 7974 - }, - { - "epoch": 0.9589370528467505, - "flos": 8272703455200.0, - "grad_norm": 7.459207863567303, - "learning_rate": 1.7613820688600957e-08, - "loss": 0.9836, - "num_input_tokens_seen": 169921295, - "step": 7975 - }, - { - "epoch": 0.9590572957373895, - "flos": 16665597304440.0, - "grad_norm": 3.8848584194233955, - "learning_rate": 1.7510816091684588e-08, - "loss": 1.0217, - "num_input_tokens_seen": 169940940, - "step": 7976 - }, - { - "epoch": 0.9591775386280286, - "flos": 16009117249080.0, - "grad_norm": 5.486169592093639, - "learning_rate": 1.740811223680083e-08, - "loss": 1.006, - "num_input_tokens_seen": 169957515, - "step": 7977 - }, - { - "epoch": 0.9592977815186677, - "flos": 12889164544080.0, - "grad_norm": 5.839080086983483, - "learning_rate": 1.7305709139530334e-08, - "loss": 0.9592, - "num_input_tokens_seen": 169976015, - "step": 7978 - }, - { - "epoch": 0.9594180244093068, - "flos": 11702956401240.0, - "grad_norm": 10.91195852852377, - "learning_rate": 1.7203606815407334e-08, - "loss": 0.9669, - "num_input_tokens_seen": 169990330, - "step": 7979 - }, - { - "epoch": 
0.9595382672999458, - "flos": 14589984625320.0, - "grad_norm": 25.73719009724182, - "learning_rate": 1.7101805279920557e-08, - "loss": 1.021, - "num_input_tokens_seen": 170008210, - "step": 7980 - }, - { - "epoch": 0.959658510190585, - "flos": 16088240718120.0, - "grad_norm": 3.698282286429362, - "learning_rate": 1.7000304548513643e-08, - "loss": 1.0399, - "num_input_tokens_seen": 170028035, - "step": 7981 - }, - { - "epoch": 0.9597787530812241, - "flos": 13570639318920.0, - "grad_norm": 8.385572450189839, - "learning_rate": 1.6899104636583394e-08, - "loss": 1.0475, - "num_input_tokens_seen": 170045805, - "step": 7982 - }, - { - "epoch": 0.9598989959718631, - "flos": 43013372296440.0, - "grad_norm": 1.3175964408481335, - "learning_rate": 1.6798205559482638e-08, - "loss": 0.8795, - "num_input_tokens_seen": 170107905, - "step": 7983 - }, - { - "epoch": 0.9600192388625023, - "flos": 14829470680080.0, - "grad_norm": 6.686991164199196, - "learning_rate": 1.669760733251713e-08, - "loss": 0.9859, - "num_input_tokens_seen": 170126500, - "step": 7984 - }, - { - "epoch": 0.9601394817531413, - "flos": 14512056957120.0, - "grad_norm": 3.885972638617454, - "learning_rate": 1.659730997094755e-08, - "loss": 1.0635, - "num_input_tokens_seen": 170144710, - "step": 7985 - }, - { - "epoch": 0.9602597246437804, - "flos": 15275322756720.0, - "grad_norm": 3.3097701430247697, - "learning_rate": 1.6497313489989283e-08, - "loss": 0.8507, - "num_input_tokens_seen": 170164255, - "step": 7986 - }, - { - "epoch": 0.9603799675344196, - "flos": 21332293124760.0, - "grad_norm": 10.708678916569662, - "learning_rate": 1.639761790481131e-08, - "loss": 0.9052, - "num_input_tokens_seen": 170184855, - "step": 7987 - }, - { - "epoch": 0.9605002104250586, - "flos": 19942693131360.0, - "grad_norm": 7.439129049944524, - "learning_rate": 1.6298223230537754e-08, - "loss": 1.0191, - "num_input_tokens_seen": 170202375, - "step": 7988 - }, - { - "epoch": 0.9606204533156977, - "flos": 25396729623960.0, - "grad_norm": 4.317699670111874, - "learning_rate": 1.619912948224611e-08, - "loss": 0.9155, - "num_input_tokens_seen": 170223300, - "step": 7989 - }, - { - "epoch": 0.9607406962063368, - "flos": 18916663604880.0, - "grad_norm": 24.66691190580511, - "learning_rate": 1.6100336674969682e-08, - "loss": 0.8205, - "num_input_tokens_seen": 170241860, - "step": 7990 - }, - { - "epoch": 0.9608609390969759, - "flos": 18023150419560.0, - "grad_norm": 3.884235574148844, - "learning_rate": 1.600184482369449e-08, - "loss": 0.9927, - "num_input_tokens_seen": 170261495, - "step": 7991 - }, - { - "epoch": 0.960981181987615, - "flos": 14960484604800.0, - "grad_norm": 4.8813146139989625, - "learning_rate": 1.5903653943362126e-08, - "loss": 1.1166, - "num_input_tokens_seen": 170280210, - "step": 7992 - }, - { - "epoch": 0.9611014248782541, - "flos": 12627933895200.0, - "grad_norm": 5.284514913393002, - "learning_rate": 1.580576404886802e-08, - "loss": 0.985, - "num_input_tokens_seen": 170298460, - "step": 7993 - }, - { - "epoch": 0.9612216677688932, - "flos": 13728365010480.0, - "grad_norm": 5.002480273191189, - "learning_rate": 1.570817515506162e-08, - "loss": 1.0297, - "num_input_tokens_seen": 170316870, - "step": 7994 - }, - { - "epoch": 0.9613419106595322, - "flos": 11179084671720.0, - "grad_norm": 5.781019381485529, - "learning_rate": 1.561088727674753e-08, - "loss": 1.0389, - "num_input_tokens_seen": 170330800, - "step": 7995 - }, - { - "epoch": 0.9614621535501714, - "flos": 18290360072640.0, - "grad_norm": 9.32627924512801, - "learning_rate": 
1.551390042868417e-08, - "loss": 0.9268, - "num_input_tokens_seen": 170352290, - "step": 7996 - }, - { - "epoch": 0.9615823964408104, - "flos": 12624346492680.0, - "grad_norm": 3.189672440360363, - "learning_rate": 1.5417214625584207e-08, - "loss": 0.9469, - "num_input_tokens_seen": 170369665, - "step": 7997 - }, - { - "epoch": 0.9617026393314495, - "flos": 14328232729920.0, - "grad_norm": 5.994696180266451, - "learning_rate": 1.5320829882114806e-08, - "loss": 1.0776, - "num_input_tokens_seen": 170387460, - "step": 7998 - }, - { - "epoch": 0.9618228822220887, - "flos": 14384109188400.0, - "grad_norm": 3.2156367384211615, - "learning_rate": 1.5224746212897378e-08, - "loss": 1.0062, - "num_input_tokens_seen": 170406475, - "step": 7999 - }, - { - "epoch": 0.9619431251127277, - "flos": 14933128945200.0, - "grad_norm": 2.804487215929415, - "learning_rate": 1.512896363250804e-08, - "loss": 0.9998, - "num_input_tokens_seen": 170426305, - "step": 8000 - }, - { - "epoch": 0.9620633680033668, - "flos": 15904508475600.0, - "grad_norm": 3.324653928716121, - "learning_rate": 1.503348215547673e-08, - "loss": 0.9883, - "num_input_tokens_seen": 170447115, - "step": 8001 - }, - { - "epoch": 0.962183610894006, - "flos": 13092863010240.0, - "grad_norm": 4.33524505309794, - "learning_rate": 1.4938301796288078e-08, - "loss": 1.031, - "num_input_tokens_seen": 170463405, - "step": 8002 - }, - { - "epoch": 0.962303853784645, - "flos": 13066641828360.0, - "grad_norm": 5.078448945496706, - "learning_rate": 1.4843422569380537e-08, - "loss": 1.0425, - "num_input_tokens_seen": 170479880, - "step": 8003 - }, - { - "epoch": 0.9624240966752841, - "flos": 18784607187120.0, - "grad_norm": 3.4703272077651524, - "learning_rate": 1.4748844489147483e-08, - "loss": 1.0527, - "num_input_tokens_seen": 170496590, - "step": 8004 - }, - { - "epoch": 0.9625443395659231, - "flos": 10345525932360.0, - "grad_norm": 3.835022360725317, - "learning_rate": 1.4654567569936326e-08, - "loss": 0.9302, - "num_input_tokens_seen": 170513885, - "step": 8005 - }, - { - "epoch": 0.9626645824565623, - "flos": 13018768037040.0, - "grad_norm": 10.777718228135218, - "learning_rate": 1.456059182604874e-08, - "loss": 1.0487, - "num_input_tokens_seen": 170532410, - "step": 8006 - }, - { - "epoch": 0.9627848253472013, - "flos": 11734420709880.0, - "grad_norm": 17.235800385865645, - "learning_rate": 1.4466917271740653e-08, - "loss": 0.9968, - "num_input_tokens_seen": 170550330, - "step": 8007 - }, - { - "epoch": 0.9629050682378404, - "flos": 14829532003200.0, - "grad_norm": 4.398706566828432, - "learning_rate": 1.4373543921222697e-08, - "loss": 0.8957, - "num_input_tokens_seen": 170569635, - "step": 8008 - }, - { - "epoch": 0.9630253111284796, - "flos": 12049933416120.0, - "grad_norm": 5.448251377227193, - "learning_rate": 1.428047178865932e-08, - "loss": 0.9997, - "num_input_tokens_seen": 170586145, - "step": 8009 - }, - { - "epoch": 0.9631455540191186, - "flos": 14435662366920.0, - "grad_norm": 3.4754992284245705, - "learning_rate": 1.4187700888169451e-08, - "loss": 0.9694, - "num_input_tokens_seen": 170605040, - "step": 8010 - }, - { - "epoch": 0.9632657969097577, - "flos": 47226637086720.0, - "grad_norm": 0.82604752274338, - "learning_rate": 1.40952312338265e-08, - "loss": 0.8755, - "num_input_tokens_seen": 170669405, - "step": 8011 - }, - { - "epoch": 0.9633860398003968, - "flos": 31741918361760.0, - "grad_norm": 4.916863302835409, - "learning_rate": 1.4003062839657909e-08, - "loss": 0.9118, - "num_input_tokens_seen": 170691605, - "step": 8012 - }, - { - 
"epoch": 0.9635062826910359, - "flos": 17661787584960.0, - "grad_norm": 10.477487317029576, - "learning_rate": 1.391119571964583e-08, - "loss": 1.0256, - "num_input_tokens_seen": 170712265, - "step": 8013 - }, - { - "epoch": 0.9636265255816749, - "flos": 11289672449400.0, - "grad_norm": 5.0991057341184565, - "learning_rate": 1.3819629887726225e-08, - "loss": 0.9569, - "num_input_tokens_seen": 170730075, - "step": 8014 - }, - { - "epoch": 0.9637467684723141, - "flos": 16062019536240.0, - "grad_norm": 4.443339843707074, - "learning_rate": 1.3728365357789317e-08, - "loss": 0.9949, - "num_input_tokens_seen": 170749160, - "step": 8015 - }, - { - "epoch": 0.9638670113629532, - "flos": 12442637913120.0, - "grad_norm": 3.9641894472430064, - "learning_rate": 1.3637402143680254e-08, - "loss": 0.9834, - "num_input_tokens_seen": 170763780, - "step": 8016 - }, - { - "epoch": 0.9639872542535922, - "flos": 39402514587000.0, - "grad_norm": 0.7560624039693972, - "learning_rate": 1.3546740259197998e-08, - "loss": 0.8, - "num_input_tokens_seen": 170816310, - "step": 8017 - }, - { - "epoch": 0.9641074971442314, - "flos": 17163585129240.0, - "grad_norm": 9.257627899003408, - "learning_rate": 1.3456379718095989e-08, - "loss": 0.9102, - "num_input_tokens_seen": 170836445, - "step": 8018 - }, - { - "epoch": 0.9642277400348704, - "flos": 47793292788600.0, - "grad_norm": 0.9152411010859737, - "learning_rate": 1.3366320534081487e-08, - "loss": 0.878, - "num_input_tokens_seen": 170898845, - "step": 8019 - }, - { - "epoch": 0.9643479829255095, - "flos": 22040326358640.0, - "grad_norm": 5.113678499298666, - "learning_rate": 1.3276562720816675e-08, - "loss": 0.9672, - "num_input_tokens_seen": 170920075, - "step": 8020 - }, - { - "epoch": 0.9644682258161487, - "flos": 14226904743360.0, - "grad_norm": 12.3447598973746, - "learning_rate": 1.3187106291917549e-08, - "loss": 1.0536, - "num_input_tokens_seen": 170936785, - "step": 8021 - }, - { - "epoch": 0.9645884687067877, - "flos": 15039270796680.0, - "grad_norm": 3.680517277228639, - "learning_rate": 1.309795126095503e-08, - "loss": 0.9334, - "num_input_tokens_seen": 170954805, - "step": 8022 - }, - { - "epoch": 0.9647087115974268, - "flos": 13433677051560.0, - "grad_norm": 5.706271205164939, - "learning_rate": 1.3009097641453192e-08, - "loss": 1.0296, - "num_input_tokens_seen": 170972375, - "step": 8023 - }, - { - "epoch": 0.9648289544880659, - "flos": 11709180697920.0, - "grad_norm": 4.65850570623661, - "learning_rate": 1.2920545446891474e-08, - "loss": 0.9835, - "num_input_tokens_seen": 170988815, - "step": 8024 - }, - { - "epoch": 0.964949197378705, - "flos": 17107739332320.0, - "grad_norm": 3.3112039009707197, - "learning_rate": 1.2832294690703127e-08, - "loss": 0.9308, - "num_input_tokens_seen": 171007510, - "step": 8025 - }, - { - "epoch": 0.965069440269344, - "flos": 16610610031200.0, - "grad_norm": 8.741683931821557, - "learning_rate": 1.2744345386275668e-08, - "loss": 0.9964, - "num_input_tokens_seen": 171026770, - "step": 8026 - }, - { - "epoch": 0.9651896831599832, - "flos": 18055013328480.0, - "grad_norm": 3.0214698559363504, - "learning_rate": 1.265669754695109e-08, - "loss": 1.0, - "num_input_tokens_seen": 171046060, - "step": 8027 - }, - { - "epoch": 0.9653099260506223, - "flos": 15825568975920.0, - "grad_norm": 4.371961355822093, - "learning_rate": 1.2569351186025201e-08, - "loss": 1.0536, - "num_input_tokens_seen": 171064235, - "step": 8028 - }, - { - "epoch": 0.9654301689412613, - "flos": 19051019639640.0, - "grad_norm": 3.9277527067495503, - 
"learning_rate": 1.2482306316748737e-08, - "loss": 0.9891, - "num_input_tokens_seen": 171084400, - "step": 8029 - }, - { - "epoch": 0.9655504118319005, - "flos": 12331620873600.0, - "grad_norm": 11.470070412111212, - "learning_rate": 1.2395562952326021e-08, - "loss": 1.0111, - "num_input_tokens_seen": 171101280, - "step": 8030 - }, - { - "epoch": 0.9656706547225395, - "flos": 15720714909960.0, - "grad_norm": 5.741604522383536, - "learning_rate": 1.2309121105916309e-08, - "loss": 1.0291, - "num_input_tokens_seen": 171119290, - "step": 8031 - }, - { - "epoch": 0.9657908976131786, - "flos": 26445055652640.0, - "grad_norm": 3.6978483954919623, - "learning_rate": 1.222298079063222e-08, - "loss": 0.9166, - "num_input_tokens_seen": 171140150, - "step": 8032 - }, - { - "epoch": 0.9659111405038178, - "flos": 17347072079280.0, - "grad_norm": 9.633626663687592, - "learning_rate": 1.2137142019541524e-08, - "loss": 0.952, - "num_input_tokens_seen": 171158425, - "step": 8033 - }, - { - "epoch": 0.9660313833944568, - "flos": 17792280263160.0, - "grad_norm": 11.475161057216692, - "learning_rate": 1.2051604805666027e-08, - "loss": 0.9683, - "num_input_tokens_seen": 171175270, - "step": 8034 - }, - { - "epoch": 0.9661516262850959, - "flos": 8300059114800.0, - "grad_norm": 6.766766106136391, - "learning_rate": 1.196636916198135e-08, - "loss": 1.005, - "num_input_tokens_seen": 171192530, - "step": 8035 - }, - { - "epoch": 0.9662718691757349, - "flos": 14226506143080.0, - "grad_norm": 8.207681026455113, - "learning_rate": 1.1881435101418036e-08, - "loss": 1.0054, - "num_input_tokens_seen": 171211665, - "step": 8036 - }, - { - "epoch": 0.9663921120663741, - "flos": 48713855017920.0, - "grad_norm": 0.9443330340651462, - "learning_rate": 1.1796802636860003e-08, - "loss": 0.9327, - "num_input_tokens_seen": 171279915, - "step": 8037 - }, - { - "epoch": 0.9665123549570132, - "flos": 18736273472400.0, - "grad_norm": 5.39753064103151, - "learning_rate": 1.1712471781146316e-08, - "loss": 0.9433, - "num_input_tokens_seen": 171298970, - "step": 8038 - }, - { - "epoch": 0.9666325978476522, - "flos": 31397547579480.0, - "grad_norm": 6.673622349220082, - "learning_rate": 1.1628442547069628e-08, - "loss": 0.8963, - "num_input_tokens_seen": 171320890, - "step": 8039 - }, - { - "epoch": 0.9667528407382914, - "flos": 15301359969240.0, - "grad_norm": 9.147996326627608, - "learning_rate": 1.1544714947377521e-08, - "loss": 0.9905, - "num_input_tokens_seen": 171338295, - "step": 8040 - }, - { - "epoch": 0.9668730836289304, - "flos": 16979607594240.0, - "grad_norm": 4.968046318464846, - "learning_rate": 1.1461288994770945e-08, - "loss": 0.9288, - "num_input_tokens_seen": 171357090, - "step": 8041 - }, - { - "epoch": 0.9669933265195695, - "flos": 20152125309240.0, - "grad_norm": 7.595233285722749, - "learning_rate": 1.1378164701906002e-08, - "loss": 0.9941, - "num_input_tokens_seen": 171378575, - "step": 8042 - }, - { - "epoch": 0.9671135694102087, - "flos": 15956214961920.0, - "grad_norm": 10.780203682810038, - "learning_rate": 1.1295342081392156e-08, - "loss": 0.8907, - "num_input_tokens_seen": 171397655, - "step": 8043 - }, - { - "epoch": 0.9672338123008477, - "flos": 14304065872560.0, - "grad_norm": 3.6795756653072362, - "learning_rate": 1.1212821145793804e-08, - "loss": 0.9139, - "num_input_tokens_seen": 171416990, - "step": 8044 - }, - { - "epoch": 0.9673540551914868, - "flos": 12019204984920.0, - "grad_norm": 3.6162814824421377, - "learning_rate": 1.1130601907629156e-08, - "loss": 1.009, - "num_input_tokens_seen": 171434440, - 
"step": 8045 - }, - { - "epoch": 0.9674742980821259, - "flos": 44304189212760.0, - "grad_norm": 0.8297875827117672, - "learning_rate": 1.1048684379370899e-08, - "loss": 0.896, - "num_input_tokens_seen": 171494845, - "step": 8046 - }, - { - "epoch": 0.967594540972765, - "flos": 13256322413520.0, - "grad_norm": 3.7191908118298804, - "learning_rate": 1.0967068573445759e-08, - "loss": 0.9663, - "num_input_tokens_seen": 171512050, - "step": 8047 - }, - { - "epoch": 0.967714783863404, - "flos": 14750868457560.0, - "grad_norm": 6.299403437873757, - "learning_rate": 1.0885754502234945e-08, - "loss": 0.8709, - "num_input_tokens_seen": 171531430, - "step": 8048 - }, - { - "epoch": 0.9678350267540432, - "flos": 16481497123200.0, - "grad_norm": 3.941651471829849, - "learning_rate": 1.08047421780737e-08, - "loss": 1.0073, - "num_input_tokens_seen": 171550340, - "step": 8049 - }, - { - "epoch": 0.9679552696446823, - "flos": 15432649848000.0, - "grad_norm": 6.950153804696378, - "learning_rate": 1.0724031613251305e-08, - "loss": 0.9602, - "num_input_tokens_seen": 171567960, - "step": 8050 - }, - { - "epoch": 0.9680755125353213, - "flos": 19128610030680.0, - "grad_norm": 4.824419791167756, - "learning_rate": 1.0643622820011744e-08, - "loss": 0.8832, - "num_input_tokens_seen": 171588735, - "step": 8051 - }, - { - "epoch": 0.9681957554259605, - "flos": 20174513796120.0, - "grad_norm": 6.7665659754485254, - "learning_rate": 1.0563515810552814e-08, - "loss": 0.906, - "num_input_tokens_seen": 171605425, - "step": 8052 - }, - { - "epoch": 0.9683159983165995, - "flos": 14592836150400.0, - "grad_norm": 3.325809443333929, - "learning_rate": 1.0483710597026795e-08, - "loss": 0.9621, - "num_input_tokens_seen": 171625005, - "step": 8053 - }, - { - "epoch": 0.9684362412072386, - "flos": 17216456754840.0, - "grad_norm": 3.9770378023595003, - "learning_rate": 1.0404207191540227e-08, - "loss": 0.9576, - "num_input_tokens_seen": 171645180, - "step": 8054 - }, - { - "epoch": 0.9685564840978778, - "flos": 15878348616840.0, - "grad_norm": 5.87975524652457, - "learning_rate": 1.0325005606153236e-08, - "loss": 0.9727, - "num_input_tokens_seen": 171664360, - "step": 8055 - }, - { - "epoch": 0.9686767269885168, - "flos": 10156949163360.0, - "grad_norm": 5.339516373640043, - "learning_rate": 1.0246105852881104e-08, - "loss": 1.0098, - "num_input_tokens_seen": 171679180, - "step": 8056 - }, - { - "epoch": 0.9687969698791559, - "flos": 15117474418920.0, - "grad_norm": 5.421376318081136, - "learning_rate": 1.0167507943692476e-08, - "loss": 1.0085, - "num_input_tokens_seen": 171697985, - "step": 8057 - }, - { - "epoch": 0.968917212769795, - "flos": 14068719128400.0, - "grad_norm": 10.862382957960497, - "learning_rate": 1.008921189051093e-08, - "loss": 0.9426, - "num_input_tokens_seen": 171715050, - "step": 8058 - }, - { - "epoch": 0.9690374556604341, - "flos": 15399744446040.0, - "grad_norm": 5.2854911433069836, - "learning_rate": 1.0011217705213848e-08, - "loss": 0.9919, - "num_input_tokens_seen": 171732645, - "step": 8059 - }, - { - "epoch": 0.9691576985510731, - "flos": 23273028522600.0, - "grad_norm": 45.82927834808497, - "learning_rate": 9.933525399632658e-09, - "loss": 0.984, - "num_input_tokens_seen": 171750600, - "step": 8060 - }, - { - "epoch": 0.9692779414417123, - "flos": 25448926695240.0, - "grad_norm": 13.799196214349204, - "learning_rate": 9.856134985553488e-09, - "loss": 0.8808, - "num_input_tokens_seen": 171770045, - "step": 8061 - }, - { - "epoch": 0.9693981843323514, - "flos": 20205395535120.0, - "grad_norm": 
7.1752591926997225, - "learning_rate": 9.77904647471628e-09, - "loss": 0.9531, - "num_input_tokens_seen": 171792945, - "step": 8062 - }, - { - "epoch": 0.9695184272229904, - "flos": 16795783367040.0, - "grad_norm": 3.605042206071401, - "learning_rate": 9.702259878815454e-09, - "loss": 0.9743, - "num_input_tokens_seen": 171812990, - "step": 8063 - }, - { - "epoch": 0.9696386701136296, - "flos": 16559946037920.0, - "grad_norm": 5.895049601338914, - "learning_rate": 9.625775209499254e-09, - "loss": 0.9721, - "num_input_tokens_seen": 171832715, - "step": 8064 - }, - { - "epoch": 0.9697589130042686, - "flos": 10686309312120.0, - "grad_norm": 6.93237087587597, - "learning_rate": 9.549592478370172e-09, - "loss": 0.9634, - "num_input_tokens_seen": 171850615, - "step": 8065 - }, - { - "epoch": 0.9698791558949077, - "flos": 13386815091720.0, - "grad_norm": 17.168684658708024, - "learning_rate": 9.473711696985632e-09, - "loss": 1.0214, - "num_input_tokens_seen": 171869665, - "step": 8066 - }, - { - "epoch": 0.9699993987855468, - "flos": 12705524286240.0, - "grad_norm": 5.01623379348962, - "learning_rate": 9.398132876856201e-09, - "loss": 0.9729, - "num_input_tokens_seen": 171888350, - "step": 8067 - }, - { - "epoch": 0.9701196416761859, - "flos": 48105340738560.0, - "grad_norm": 0.7734397734021, - "learning_rate": 9.322856029447379e-09, - "loss": 0.8732, - "num_input_tokens_seen": 171949255, - "step": 8068 - }, - { - "epoch": 0.970239884566825, - "flos": 17266231562880.0, - "grad_norm": 4.488057076987094, - "learning_rate": 9.247881166178695e-09, - "loss": 1.0273, - "num_input_tokens_seen": 171967685, - "step": 8069 - }, - { - "epoch": 0.970360127457464, - "flos": 18002386995360.0, - "grad_norm": 6.081274790384979, - "learning_rate": 9.173208298423274e-09, - "loss": 0.9964, - "num_input_tokens_seen": 171988610, - "step": 8070 - }, - { - "epoch": 0.9704803703481032, - "flos": 21071982322680.0, - "grad_norm": 3.384155993362976, - "learning_rate": 9.09883743750961e-09, - "loss": 0.9909, - "num_input_tokens_seen": 172011220, - "step": 8071 - }, - { - "epoch": 0.9706006132387422, - "flos": 12308680478640.0, - "grad_norm": 4.472443661364237, - "learning_rate": 9.024768594719124e-09, - "loss": 1.0575, - "num_input_tokens_seen": 172029320, - "step": 8072 - }, - { - "epoch": 0.9707208561293813, - "flos": 12888704620680.0, - "grad_norm": 5.032208653452753, - "learning_rate": 8.95100178128816e-09, - "loss": 0.9471, - "num_input_tokens_seen": 172048180, - "step": 8073 - }, - { - "epoch": 0.9708410990200205, - "flos": 22276102364640.0, - "grad_norm": 4.945762384811527, - "learning_rate": 8.877537008407321e-09, - "loss": 0.9392, - "num_input_tokens_seen": 172067950, - "step": 8074 - }, - { - "epoch": 0.9709613419106595, - "flos": 21777286677720.0, - "grad_norm": 5.540311815045663, - "learning_rate": 8.804374287221028e-09, - "loss": 0.9049, - "num_input_tokens_seen": 172088905, - "step": 8075 - }, - { - "epoch": 0.9710815848012986, - "flos": 16872607219080.0, - "grad_norm": 10.16500449012098, - "learning_rate": 8.731513628827958e-09, - "loss": 1.0725, - "num_input_tokens_seen": 172107990, - "step": 8076 - }, - { - "epoch": 0.9712018276919377, - "flos": 16894750413480.0, - "grad_norm": 5.5486321903104745, - "learning_rate": 8.658955044280825e-09, - "loss": 1.0539, - "num_input_tokens_seen": 172126635, - "step": 8077 - }, - { - "epoch": 0.9713220705825768, - "flos": 16586013912000.0, - "grad_norm": 5.677915105052694, - "learning_rate": 8.586698544587268e-09, - "loss": 0.9921, - "num_input_tokens_seen": 172147965, - 
"step": 8078 - }, - { - "epoch": 0.9714423134732159, - "flos": 15774015797400.0, - "grad_norm": 6.141601174297372, - "learning_rate": 8.514744140707853e-09, - "loss": 0.9695, - "num_input_tokens_seen": 172166825, - "step": 8079 - }, - { - "epoch": 0.971562556363855, - "flos": 14357366760000.0, - "grad_norm": 3.6442542589257862, - "learning_rate": 8.443091843558515e-09, - "loss": 0.9881, - "num_input_tokens_seen": 172185630, - "step": 8080 - }, - { - "epoch": 0.9716827992544941, - "flos": 17763728802720.0, - "grad_norm": 6.681868989995456, - "learning_rate": 8.37174166400878e-09, - "loss": 0.873, - "num_input_tokens_seen": 172200925, - "step": 8081 - }, - { - "epoch": 0.9718030421451331, - "flos": 17556657564960.0, - "grad_norm": 24.73695600084012, - "learning_rate": 8.300693612881992e-09, - "loss": 1.0714, - "num_input_tokens_seen": 172220710, - "step": 8082 - }, - { - "epoch": 0.9719232850357723, - "flos": 15694677697440.0, - "grad_norm": 3.7833184503831125, - "learning_rate": 8.22994770095664e-09, - "loss": 1.0292, - "num_input_tokens_seen": 172239005, - "step": 8083 - }, - { - "epoch": 0.9720435279264114, - "flos": 16819919562840.0, - "grad_norm": 6.674203649443733, - "learning_rate": 8.159503938964585e-09, - "loss": 0.9771, - "num_input_tokens_seen": 172256045, - "step": 8084 - }, - { - "epoch": 0.9721637708170504, - "flos": 20204506349880.0, - "grad_norm": 5.309508255425234, - "learning_rate": 8.089362337592164e-09, - "loss": 0.9422, - "num_input_tokens_seen": 172279390, - "step": 8085 - }, - { - "epoch": 0.9722840137076896, - "flos": 20754047353200.0, - "grad_norm": 2.4955450419001344, - "learning_rate": 8.019522907479536e-09, - "loss": 0.9451, - "num_input_tokens_seen": 172299470, - "step": 8086 - }, - { - "epoch": 0.9724042565983286, - "flos": 13648658971800.0, - "grad_norm": 4.101318817165896, - "learning_rate": 7.949985659221558e-09, - "loss": 0.9892, - "num_input_tokens_seen": 172316455, - "step": 8087 - }, - { - "epoch": 0.9725244994889677, - "flos": 16376735041920.0, - "grad_norm": 8.798317609704169, - "learning_rate": 7.880750603366904e-09, - "loss": 1.0088, - "num_input_tokens_seen": 172335045, - "step": 8088 - }, - { - "epoch": 0.9726447423796069, - "flos": 16612235093880.0, - "grad_norm": 7.062407906531059, - "learning_rate": 7.811817750418282e-09, - "loss": 1.0155, - "num_input_tokens_seen": 172353525, - "step": 8089 - }, - { - "epoch": 0.9727649852702459, - "flos": 18893447255880.0, - "grad_norm": 3.6590265670107973, - "learning_rate": 7.743187110833105e-09, - "loss": 1.0236, - "num_input_tokens_seen": 172376005, - "step": 8090 - }, - { - "epoch": 0.972885228160885, - "flos": 14567596138440.0, - "grad_norm": 3.223494357163745, - "learning_rate": 7.674858695022602e-09, - "loss": 1.0365, - "num_input_tokens_seen": 172394080, - "step": 8091 - }, - { - "epoch": 0.9730054710515241, - "flos": 12442852544040.0, - "grad_norm": 3.8442728570880154, - "learning_rate": 7.606832513351591e-09, - "loss": 0.9803, - "num_input_tokens_seen": 172411750, - "step": 8092 - }, - { - "epoch": 0.9731257139421632, - "flos": 49391926359600.0, - "grad_norm": 0.9207707920383592, - "learning_rate": 7.539108576140264e-09, - "loss": 0.9044, - "num_input_tokens_seen": 172475580, - "step": 8093 - }, - { - "epoch": 0.9732459568328022, - "flos": 13098933999120.0, - "grad_norm": 5.0900796409626805, - "learning_rate": 7.471686893661732e-09, - "loss": 0.9158, - "num_input_tokens_seen": 172493595, - "step": 8094 - }, - { - "epoch": 0.9733661997234414, - "flos": 14826987093720.0, - "grad_norm": 
3.7390587455580815, - "learning_rate": 7.4045674761442636e-09, - "loss": 0.8659, - "num_input_tokens_seen": 172510645, - "step": 8095 - }, - { - "epoch": 0.9734864426140805, - "flos": 16898951047200.0, - "grad_norm": 3.9229195104349976, - "learning_rate": 7.337750333769488e-09, - "loss": 0.9597, - "num_input_tokens_seen": 172530170, - "step": 8096 - }, - { - "epoch": 0.9736066855047195, - "flos": 25002737341440.0, - "grad_norm": 9.720332075184235, - "learning_rate": 7.2712354766737425e-09, - "loss": 0.9553, - "num_input_tokens_seen": 172550220, - "step": 8097 - }, - { - "epoch": 0.9737269283953586, - "flos": 14487000914520.0, - "grad_norm": 17.05175800824874, - "learning_rate": 7.2050229149469565e-09, - "loss": 1.0337, - "num_input_tokens_seen": 172569950, - "step": 8098 - }, - { - "epoch": 0.9738471712859977, - "flos": 20596536292560.0, - "grad_norm": 4.5432200210632585, - "learning_rate": 7.139112658633984e-09, - "loss": 0.8569, - "num_input_tokens_seen": 172589820, - "step": 8099 - }, - { - "epoch": 0.9739674141766368, - "flos": 19785120747600.0, - "grad_norm": 3.5592521452026804, - "learning_rate": 7.073504717733048e-09, - "loss": 0.9196, - "num_input_tokens_seen": 172609105, - "step": 8100 - }, - { - "epoch": 0.9740876570672758, - "flos": 49313692075800.0, - "grad_norm": 0.7347845599316901, - "learning_rate": 7.008199102196855e-09, - "loss": 0.8199, - "num_input_tokens_seen": 172670250, - "step": 8101 - }, - { - "epoch": 0.974207899957915, - "flos": 41675448127800.0, - "grad_norm": 0.8405526296668749, - "learning_rate": 6.9431958219321464e-09, - "loss": 0.8481, - "num_input_tokens_seen": 172726135, - "step": 8102 - }, - { - "epoch": 0.9743281428485541, - "flos": 16060547781360.0, - "grad_norm": 3.2705770663483573, - "learning_rate": 6.878494886800146e-09, - "loss": 1.0124, - "num_input_tokens_seen": 172746630, - "step": 8103 - }, - { - "epoch": 0.9744483857391931, - "flos": 14198169313560.0, - "grad_norm": 3.453893929739746, - "learning_rate": 6.814096306615669e-09, - "loss": 0.9826, - "num_input_tokens_seen": 172764490, - "step": 8104 - }, - { - "epoch": 0.9745686286298323, - "flos": 12521301458760.0, - "grad_norm": 4.298335378606181, - "learning_rate": 6.750000091148011e-09, - "loss": 0.8694, - "num_input_tokens_seen": 172781505, - "step": 8105 - }, - { - "epoch": 0.9746888715204713, - "flos": 20990957836920.0, - "grad_norm": 5.018593358389879, - "learning_rate": 6.686206250120729e-09, - "loss": 0.955, - "num_input_tokens_seen": 172802720, - "step": 8106 - }, - { - "epoch": 0.9748091144111104, - "flos": 13099332599400.0, - "grad_norm": 4.706132807798636, - "learning_rate": 6.622714793210749e-09, - "loss": 0.9664, - "num_input_tokens_seen": 172821360, - "step": 8107 - }, - { - "epoch": 0.9749293573017496, - "flos": 14669813310240.0, - "grad_norm": 4.356089239013815, - "learning_rate": 6.559525730050364e-09, - "loss": 1.0091, - "num_input_tokens_seen": 172841180, - "step": 8108 - }, - { - "epoch": 0.9750496001923886, - "flos": 13098719368200.0, - "grad_norm": 6.281914367461338, - "learning_rate": 6.496639070224574e-09, - "loss": 0.9858, - "num_input_tokens_seen": 172859385, - "step": 8109 - }, - { - "epoch": 0.9751698430830277, - "flos": 13885201516800.0, - "grad_norm": 6.295913509293461, - "learning_rate": 6.4340548232739714e-09, - "loss": 1.0659, - "num_input_tokens_seen": 172875305, - "step": 8110 - }, - { - "epoch": 0.9752900859736668, - "flos": 16743708942000.0, - "grad_norm": 5.550626447909587, - "learning_rate": 6.371772998692071e-09, - "loss": 1.013, - "num_input_tokens_seen": 
172894280, - "step": 8111 - }, - { - "epoch": 0.9754103288643059, - "flos": 14410085077800.0, - "grad_norm": 7.858316824266305, - "learning_rate": 6.309793605927094e-09, - "loss": 0.8709, - "num_input_tokens_seen": 172912320, - "step": 8112 - }, - { - "epoch": 0.975530571754945, - "flos": 13724992238880.0, - "grad_norm": 7.657558780834372, - "learning_rate": 6.248116654381297e-09, - "loss": 1.0194, - "num_input_tokens_seen": 172930510, - "step": 8113 - }, - { - "epoch": 0.9756508146455841, - "flos": 16767017275680.0, - "grad_norm": 4.962051963278831, - "learning_rate": 6.186742153410751e-09, - "loss": 0.9456, - "num_input_tokens_seen": 172949725, - "step": 8114 - }, - { - "epoch": 0.9757710575362232, - "flos": 16324476647520.0, - "grad_norm": 4.516032164277516, - "learning_rate": 6.125670112326453e-09, - "loss": 1.0925, - "num_input_tokens_seen": 172968705, - "step": 8115 - }, - { - "epoch": 0.9758913004268622, - "flos": 19917299811600.0, - "grad_norm": 4.193468537528825, - "learning_rate": 6.064900540392548e-09, - "loss": 0.926, - "num_input_tokens_seen": 172990520, - "step": 8116 - }, - { - "epoch": 0.9760115433175014, - "flos": 15773341243080.0, - "grad_norm": 5.446102484560758, - "learning_rate": 6.0044334468278835e-09, - "loss": 1.0155, - "num_input_tokens_seen": 173009585, - "step": 8117 - }, - { - "epoch": 0.9761317862081405, - "flos": 18684505662960.0, - "grad_norm": 5.39781315639379, - "learning_rate": 5.944268840805345e-09, - "loss": 0.9493, - "num_input_tokens_seen": 173030050, - "step": 8118 - }, - { - "epoch": 0.9762520290987795, - "flos": 18917368820760.0, - "grad_norm": 4.0337954033186865, - "learning_rate": 5.88440673145163e-09, - "loss": 0.8682, - "num_input_tokens_seen": 173050820, - "step": 8119 - }, - { - "epoch": 0.9763722719894187, - "flos": 12758487896520.0, - "grad_norm": 3.861069033337813, - "learning_rate": 5.824847127848142e-09, - "loss": 1.0527, - "num_input_tokens_seen": 173069065, - "step": 8120 - }, - { - "epoch": 0.9764925148800577, - "flos": 15956981500920.0, - "grad_norm": 4.4539843349172745, - "learning_rate": 5.765590039029433e-09, - "loss": 1.0066, - "num_input_tokens_seen": 173088105, - "step": 8121 - }, - { - "epoch": 0.9766127577706968, - "flos": 26235010243560.0, - "grad_norm": 3.4009683239882555, - "learning_rate": 5.706635473985422e-09, - "loss": 0.943, - "num_input_tokens_seen": 173111695, - "step": 8122 - }, - { - "epoch": 0.976733000661336, - "flos": 15852311404320.0, - "grad_norm": 5.372494313133641, - "learning_rate": 5.6479834416591764e-09, - "loss": 1.0777, - "num_input_tokens_seen": 173130775, - "step": 8123 - }, - { - "epoch": 0.976853243551975, - "flos": 18368165094600.0, - "grad_norm": 5.455280741347685, - "learning_rate": 5.589633950947803e-09, - "loss": 0.9099, - "num_input_tokens_seen": 173147995, - "step": 8124 - }, - { - "epoch": 0.9769734864426141, - "flos": 15196444580160.0, - "grad_norm": 3.7300527929911365, - "learning_rate": 5.5315870107035535e-09, - "loss": 0.9186, - "num_input_tokens_seen": 173165765, - "step": 8125 - }, - { - "epoch": 0.9770937293332532, - "flos": 9873575320080.0, - "grad_norm": 7.576203596221295, - "learning_rate": 5.473842629731607e-09, - "loss": 1.0061, - "num_input_tokens_seen": 173183985, - "step": 8126 - }, - { - "epoch": 0.9772139722238923, - "flos": 12705984209640.0, - "grad_norm": 15.830750318277666, - "learning_rate": 5.416400816792066e-09, - "loss": 1.0044, - "num_input_tokens_seen": 173201220, - "step": 8127 - }, - { - "epoch": 0.9773342151145313, - "flos": 14512424895840.0, - "grad_norm": 
4.758541658899544, - "learning_rate": 5.359261580598407e-09, - "loss": 0.9909, - "num_input_tokens_seen": 173216780, - "step": 8128 - }, - { - "epoch": 0.9774544580051704, - "flos": 8325636403920.0, - "grad_norm": 11.182330594160138, - "learning_rate": 5.302424929819027e-09, - "loss": 1.0, - "num_input_tokens_seen": 173230510, - "step": 8129 - }, - { - "epoch": 0.9775747008958096, - "flos": 9506264142840.0, - "grad_norm": 5.547068417897594, - "learning_rate": 5.24589087307592e-09, - "loss": 0.9509, - "num_input_tokens_seen": 173247850, - "step": 8130 - }, - { - "epoch": 0.9776949437864486, - "flos": 42604233203280.0, - "grad_norm": 6.115300497686271, - "learning_rate": 5.189659418944891e-09, - "loss": 0.8792, - "num_input_tokens_seen": 173277745, - "step": 8131 - }, - { - "epoch": 0.9778151866770877, - "flos": 15039178812000.0, - "grad_norm": 5.535542347430845, - "learning_rate": 5.133730575956674e-09, - "loss": 1.0014, - "num_input_tokens_seen": 173297135, - "step": 8132 - }, - { - "epoch": 0.9779354295677268, - "flos": 14829470680080.0, - "grad_norm": 4.039429166520067, - "learning_rate": 5.0781043525953696e-09, - "loss": 0.9455, - "num_input_tokens_seen": 173314920, - "step": 8133 - }, - { - "epoch": 0.9780556724583659, - "flos": 16664830765440.0, - "grad_norm": 3.8722268302618845, - "learning_rate": 5.0227807572995605e-09, - "loss": 0.9563, - "num_input_tokens_seen": 173336615, - "step": 8134 - }, - { - "epoch": 0.9781759153490049, - "flos": 14855661200400.0, - "grad_norm": 7.583279144876004, - "learning_rate": 4.967759798461646e-09, - "loss": 0.902, - "num_input_tokens_seen": 173354680, - "step": 8135 - }, - { - "epoch": 0.9782961582396441, - "flos": 20152278617040.0, - "grad_norm": 8.534881824821705, - "learning_rate": 4.913041484428282e-09, - "loss": 0.9726, - "num_input_tokens_seen": 173374875, - "step": 8136 - }, - { - "epoch": 0.9784164011302832, - "flos": 18182409189120.0, - "grad_norm": 10.401187432798576, - "learning_rate": 4.858625823500384e-09, - "loss": 0.9838, - "num_input_tokens_seen": 173392295, - "step": 8137 - }, - { - "epoch": 0.9785366440209222, - "flos": 21353700441720.0, - "grad_norm": 5.4773723531358955, - "learning_rate": 4.80451282393246e-09, - "loss": 0.9674, - "num_input_tokens_seen": 173412000, - "step": 8138 - }, - { - "epoch": 0.9786568869115614, - "flos": 23063289729120.0, - "grad_norm": 6.1003088450808045, - "learning_rate": 4.750702493933722e-09, - "loss": 0.9033, - "num_input_tokens_seen": 173431605, - "step": 8139 - }, - { - "epoch": 0.9787771298022004, - "flos": 16586381850720.0, - "grad_norm": 5.20392755957345, - "learning_rate": 4.697194841666974e-09, - "loss": 1.0773, - "num_input_tokens_seen": 173450250, - "step": 8140 - }, - { - "epoch": 0.9788973726928395, - "flos": 15247292542800.0, - "grad_norm": 6.3573285683436955, - "learning_rate": 4.6439898752492764e-09, - "loss": 1.0431, - "num_input_tokens_seen": 173470110, - "step": 8141 - }, - { - "epoch": 0.9790176155834787, - "flos": 49338134887200.0, - "grad_norm": 0.7487155724254264, - "learning_rate": 4.591087602751731e-09, - "loss": 0.8669, - "num_input_tokens_seen": 173531690, - "step": 8142 - }, - { - "epoch": 0.9791378584741177, - "flos": 15220059529440.0, - "grad_norm": 3.871944822800995, - "learning_rate": 4.538488032199916e-09, - "loss": 0.9484, - "num_input_tokens_seen": 173549510, - "step": 8143 - }, - { - "epoch": 0.9792581013647568, - "flos": 14301582286200.0, - "grad_norm": 7.4573990504829855, - "learning_rate": 4.486191171572784e-09, - "loss": 0.9092, - "num_input_tokens_seen": 
173566500, - "step": 8144 - }, - { - "epoch": 0.9793783442553959, - "flos": 16870920833280.0, - "grad_norm": 4.403299195828441, - "learning_rate": 4.434197028803766e-09, - "loss": 1.0083, - "num_input_tokens_seen": 173585445, - "step": 8145 - }, - { - "epoch": 0.979498587146035, - "flos": 16375937841360.0, - "grad_norm": 7.192428889130244, - "learning_rate": 4.3825056117805514e-09, - "loss": 1.0411, - "num_input_tokens_seen": 173601050, - "step": 8146 - }, - { - "epoch": 0.979618830036674, - "flos": 10109136695160.0, - "grad_norm": 7.403131399800025, - "learning_rate": 4.331116928344425e-09, - "loss": 1.0197, - "num_input_tokens_seen": 173617085, - "step": 8147 - }, - { - "epoch": 0.9797390729273132, - "flos": 11840286607320.0, - "grad_norm": 7.504917058823132, - "learning_rate": 4.28003098629115e-09, - "loss": 0.8534, - "num_input_tokens_seen": 173632940, - "step": 8148 - }, - { - "epoch": 0.9798593158179523, - "flos": 17449319912640.0, - "grad_norm": 5.239732238392885, - "learning_rate": 4.229247793370305e-09, - "loss": 1.0139, - "num_input_tokens_seen": 173651785, - "step": 8149 - }, - { - "epoch": 0.9799795587085913, - "flos": 19444919937480.0, - "grad_norm": 3.244337430768424, - "learning_rate": 4.178767357285951e-09, - "loss": 0.9279, - "num_input_tokens_seen": 173673135, - "step": 8150 - }, - { - "epoch": 0.9800998015992305, - "flos": 18710174936760.0, - "grad_norm": 4.339348848075362, - "learning_rate": 4.128589685695516e-09, - "loss": 0.9225, - "num_input_tokens_seen": 173693280, - "step": 8151 - }, - { - "epoch": 0.9802200444898695, - "flos": 11836975158840.0, - "grad_norm": 8.4081747694789, - "learning_rate": 4.078714786211135e-09, - "loss": 1.0765, - "num_input_tokens_seen": 173708850, - "step": 8152 - }, - { - "epoch": 0.9803402873805086, - "flos": 17714045979360.0, - "grad_norm": 11.36181681544181, - "learning_rate": 4.029142666398977e-09, - "loss": 0.9947, - "num_input_tokens_seen": 173728735, - "step": 8153 - }, - { - "epoch": 0.9804605302711478, - "flos": 16035491738760.0, - "grad_norm": 6.848548913667799, - "learning_rate": 3.979873333778805e-09, - "loss": 1.03, - "num_input_tokens_seen": 173746630, - "step": 8154 - }, - { - "epoch": 0.9805807731617868, - "flos": 27778349925720.0, - "grad_norm": 3.993405150806305, - "learning_rate": 3.930906795824862e-09, - "loss": 0.9591, - "num_input_tokens_seen": 173767025, - "step": 8155 - }, - { - "epoch": 0.9807010160524259, - "flos": 12626860740600.0, - "grad_norm": 4.1114062956493695, - "learning_rate": 3.882243059965207e-09, - "loss": 0.9988, - "num_input_tokens_seen": 173784460, - "step": 8156 - }, - { - "epoch": 0.980821258943065, - "flos": 9558767829720.0, - "grad_norm": 4.720933421668449, - "learning_rate": 3.833882133582156e-09, - "loss": 0.8878, - "num_input_tokens_seen": 173799840, - "step": 8157 - }, - { - "epoch": 0.9809415018337041, - "flos": 15406612635480.0, - "grad_norm": 4.870450954041888, - "learning_rate": 3.785824024012285e-09, - "loss": 1.0014, - "num_input_tokens_seen": 173818560, - "step": 8158 - }, - { - "epoch": 0.9810617447243432, - "flos": 16560099345720.0, - "grad_norm": 4.33145090933517, - "learning_rate": 3.738068738545541e-09, - "loss": 1.0073, - "num_input_tokens_seen": 173837365, - "step": 8159 - }, - { - "epoch": 0.9811819876149822, - "flos": 12993313394160.0, - "grad_norm": 5.5265929881585, - "learning_rate": 3.6906162844265733e-09, - "loss": 1.004, - "num_input_tokens_seen": 173854170, - "step": 8160 - }, - { - "epoch": 0.9813022305056214, - "flos": 16062172844040.0, - "grad_norm": 
5.615655724680747, - "learning_rate": 3.643466668853845e-09, - "loss": 0.9395, - "num_input_tokens_seen": 173871915, - "step": 8161 - }, - { - "epoch": 0.9814224733962604, - "flos": 18082920896160.0, - "grad_norm": 7.908510390116218, - "learning_rate": 3.59661989898008e-09, - "loss": 0.9709, - "num_input_tokens_seen": 173892690, - "step": 8162 - }, - { - "epoch": 0.9815427162868995, - "flos": 17790379246440.0, - "grad_norm": 6.109560131965599, - "learning_rate": 3.5500759819115934e-09, - "loss": 0.9927, - "num_input_tokens_seen": 173912775, - "step": 8163 - }, - { - "epoch": 0.9816629591775387, - "flos": 14667636339480.0, - "grad_norm": 25.60013918838809, - "learning_rate": 3.5038349247094034e-09, - "loss": 1.0467, - "num_input_tokens_seen": 173929755, - "step": 8164 - }, - { - "epoch": 0.9817832020681777, - "flos": 12547921240920.0, - "grad_norm": 4.243997711008445, - "learning_rate": 3.4578967343878994e-09, - "loss": 0.997, - "num_input_tokens_seen": 173945680, - "step": 8165 - }, - { - "epoch": 0.9819034449588168, - "flos": 16010619665520.0, - "grad_norm": 7.446294889854277, - "learning_rate": 3.4122614179161733e-09, - "loss": 1.0343, - "num_input_tokens_seen": 173965360, - "step": 8166 - }, - { - "epoch": 0.9820236878494559, - "flos": 14200223638080.0, - "grad_norm": 4.407067838482113, - "learning_rate": 3.36692898221691e-09, - "loss": 0.9938, - "num_input_tokens_seen": 173983445, - "step": 8167 - }, - { - "epoch": 0.982143930740095, - "flos": 13335016620720.0, - "grad_norm": 3.4640509591880373, - "learning_rate": 3.3218994341668305e-09, - "loss": 0.9584, - "num_input_tokens_seen": 174002095, - "step": 8168 - }, - { - "epoch": 0.982264173630734, - "flos": 18919576453080.0, - "grad_norm": 5.392459457733168, - "learning_rate": 3.2771727805971373e-09, - "loss": 0.9803, - "num_input_tokens_seen": 174023200, - "step": 8169 - }, - { - "epoch": 0.9823844165213732, - "flos": 15639445131720.0, - "grad_norm": 6.621528147623147, - "learning_rate": 3.232749028292847e-09, - "loss": 0.9927, - "num_input_tokens_seen": 174039885, - "step": 8170 - }, - { - "epoch": 0.9825046594120123, - "flos": 15563479803360.0, - "grad_norm": 4.580234289048773, - "learning_rate": 3.188628183992792e-09, - "loss": 1.1071, - "num_input_tokens_seen": 174059870, - "step": 8171 - }, - { - "epoch": 0.9826249023026513, - "flos": 42581900491440.0, - "grad_norm": 0.8346755582982794, - "learning_rate": 3.1448102543902844e-09, - "loss": 0.8862, - "num_input_tokens_seen": 174123505, - "step": 8172 - }, - { - "epoch": 0.9827451451932905, - "flos": 11368519964400.0, - "grad_norm": 5.6302928183042695, - "learning_rate": 3.1012952461324515e-09, - "loss": 0.8902, - "num_input_tokens_seen": 174142200, - "step": 8173 - }, - { - "epoch": 0.9828653880839295, - "flos": 14380491124320.0, - "grad_norm": 10.207546987544543, - "learning_rate": 3.0580831658204575e-09, - "loss": 0.971, - "num_input_tokens_seen": 174159500, - "step": 8174 - }, - { - "epoch": 0.9829856309745686, - "flos": 15353587702080.0, - "grad_norm": 12.936029765305396, - "learning_rate": 3.015174020009281e-09, - "loss": 1.0123, - "num_input_tokens_seen": 174178545, - "step": 8175 - }, - { - "epoch": 0.9831058738652078, - "flos": 16896743414880.0, - "grad_norm": 3.559224948952181, - "learning_rate": 2.9725678152086043e-09, - "loss": 0.9772, - "num_input_tokens_seen": 174196835, - "step": 8176 - }, - { - "epoch": 0.9832261167558468, - "flos": 7953756654240.0, - "grad_norm": 5.728935468228028, - "learning_rate": 2.930264557881257e-09, - "loss": 1.045, - "num_input_tokens_seen": 
174211740, - "step": 8177 - }, - { - "epoch": 0.9833463596464859, - "flos": 42944643096240.0, - "grad_norm": 0.8068151400676054, - "learning_rate": 2.8882642544452163e-09, - "loss": 0.8574, - "num_input_tokens_seen": 174276185, - "step": 8178 - }, - { - "epoch": 0.983466602537125, - "flos": 9611854086240.0, - "grad_norm": 4.490770199312388, - "learning_rate": 2.8465669112716083e-09, - "loss": 0.9717, - "num_input_tokens_seen": 174293430, - "step": 8179 - }, - { - "epoch": 0.9835868454277641, - "flos": 16295189309640.0, - "grad_norm": 4.972704771034388, - "learning_rate": 2.8051725346858177e-09, - "loss": 0.9722, - "num_input_tokens_seen": 174313410, - "step": 8180 - }, - { - "epoch": 0.9837070883184031, - "flos": 19706917125360.0, - "grad_norm": 6.3618974861297035, - "learning_rate": 2.7640811309674883e-09, - "loss": 0.9271, - "num_input_tokens_seen": 174332630, - "step": 8181 - }, - { - "epoch": 0.9838273312090423, - "flos": 20834151992160.0, - "grad_norm": 3.230404983500037, - "learning_rate": 2.7232927063498557e-09, - "loss": 1.0353, - "num_input_tokens_seen": 174352725, - "step": 8182 - }, - { - "epoch": 0.9839475740996814, - "flos": 28644354143640.0, - "grad_norm": 4.910113131896003, - "learning_rate": 2.682807267020859e-09, - "loss": 0.9089, - "num_input_tokens_seen": 174375205, - "step": 8183 - }, - { - "epoch": 0.9840678169903204, - "flos": 17188457202480.0, - "grad_norm": 5.51090840655641, - "learning_rate": 2.642624819121808e-09, - "loss": 0.8517, - "num_input_tokens_seen": 174395075, - "step": 8184 - }, - { - "epoch": 0.9841880598809596, - "flos": 10372023068280.0, - "grad_norm": 5.456621071344001, - "learning_rate": 2.6027453687487154e-09, - "loss": 0.8456, - "num_input_tokens_seen": 174411885, - "step": 8185 - }, - { - "epoch": 0.9843083027715986, - "flos": 15877091492880.0, - "grad_norm": 27.85404748731827, - "learning_rate": 2.5631689219509643e-09, - "loss": 0.7335, - "num_input_tokens_seen": 174430285, - "step": 8186 - }, - { - "epoch": 0.9844285456622377, - "flos": 15486012058560.0, - "grad_norm": 5.247903314351108, - "learning_rate": 2.523895484732197e-09, - "loss": 1.0677, - "num_input_tokens_seen": 174449460, - "step": 8187 - }, - { - "epoch": 0.9845487885528769, - "flos": 12779465951640.0, - "grad_norm": 3.861101019378818, - "learning_rate": 2.4849250630505357e-09, - "loss": 0.9805, - "num_input_tokens_seen": 174467425, - "step": 8188 - }, - { - "epoch": 0.9846690314435159, - "flos": 17949852646920.0, - "grad_norm": 3.59517051154463, - "learning_rate": 2.4462576628172528e-09, - "loss": 0.9596, - "num_input_tokens_seen": 174485775, - "step": 8189 - }, - { - "epoch": 0.984789274334155, - "flos": 13151682978480.0, - "grad_norm": 10.338992220319925, - "learning_rate": 2.407893289898766e-09, - "loss": 0.9688, - "num_input_tokens_seen": 174504525, - "step": 8190 - }, - { - "epoch": 0.984909517224794, - "flos": 19471263765600.0, - "grad_norm": 7.283121416979323, - "learning_rate": 2.3698319501144202e-09, - "loss": 1.0667, - "num_input_tokens_seen": 174525230, - "step": 8191 - }, - { - "epoch": 0.9850297601154332, - "flos": 13282328964480.0, - "grad_norm": 5.323404049691281, - "learning_rate": 2.3320736492382644e-09, - "loss": 0.9556, - "num_input_tokens_seen": 174543785, - "step": 8192 - }, - { - "epoch": 0.9851500030060723, - "flos": 15850839649440.0, - "grad_norm": 4.14448016653839, - "learning_rate": 2.29461839299816e-09, - "loss": 0.9114, - "num_input_tokens_seen": 174563220, - "step": 8193 - }, - { - "epoch": 0.9852702458967113, - "flos": 18757619466240.0, - "grad_norm": 
4.637487513276021, - "learning_rate": 2.257466187076229e-09, - "loss": 1.0296, - "num_input_tokens_seen": 174582145, - "step": 8194 - }, - { - "epoch": 0.9853904887873505, - "flos": 14826741801240.0, - "grad_norm": 3.7475193719954865, - "learning_rate": 2.2206170371081854e-09, - "loss": 0.9247, - "num_input_tokens_seen": 174600450, - "step": 8195 - }, - { - "epoch": 0.9855107316779895, - "flos": 17974663397040.0, - "grad_norm": 3.66195260081093, - "learning_rate": 2.1840709486842247e-09, - "loss": 1.0776, - "num_input_tokens_seen": 174619790, - "step": 8196 - }, - { - "epoch": 0.9856309745686286, - "flos": 13518043647360.0, - "grad_norm": 21.849742673148636, - "learning_rate": 2.1478279273481335e-09, - "loss": 1.0093, - "num_input_tokens_seen": 174637995, - "step": 8197 - }, - { - "epoch": 0.9857512174592677, - "flos": 24347606394720.0, - "grad_norm": 4.232767285527592, - "learning_rate": 2.1118879785981815e-09, - "loss": 1.0142, - "num_input_tokens_seen": 174657855, - "step": 8198 - }, - { - "epoch": 0.9858714603499068, - "flos": 18500405481720.0, - "grad_norm": 3.866113502024309, - "learning_rate": 2.0762511078862288e-09, - "loss": 1.0227, - "num_input_tokens_seen": 174677920, - "step": 8199 - }, - { - "epoch": 0.9859917032405459, - "flos": 16848563007960.0, - "grad_norm": 7.96029585628756, - "learning_rate": 2.0409173206186183e-09, - "loss": 0.8803, - "num_input_tokens_seen": 174696880, - "step": 8200 - }, - { - "epoch": 0.986111946131185, - "flos": 14147811935880.0, - "grad_norm": 4.908016591411674, - "learning_rate": 2.0058866221550617e-09, - "loss": 1.0944, - "num_input_tokens_seen": 174714840, - "step": 8201 - }, - { - "epoch": 0.9862321890218241, - "flos": 14069148390240.0, - "grad_norm": 5.698008460421857, - "learning_rate": 1.971159017809976e-09, - "loss": 0.9825, - "num_input_tokens_seen": 174732850, - "step": 8202 - }, - { - "epoch": 0.9863524319124631, - "flos": 15379318299000.0, - "grad_norm": 4.709087457719196, - "learning_rate": 1.93673451285159e-09, - "loss": 0.9981, - "num_input_tokens_seen": 174751620, - "step": 8203 - }, - { - "epoch": 0.9864726748031023, - "flos": 37747513972560.0, - "grad_norm": 0.7433326319475956, - "learning_rate": 1.9026131125019495e-09, - "loss": 0.8272, - "num_input_tokens_seen": 174808710, - "step": 8204 - }, - { - "epoch": 0.9865929176937414, - "flos": 16609812830640.0, - "grad_norm": 2.3369118179967283, - "learning_rate": 1.8687948219371363e-09, - "loss": 1.09, - "num_input_tokens_seen": 174827655, - "step": 8205 - }, - { - "epoch": 0.9867131605843804, - "flos": 15354047625480.0, - "grad_norm": 7.654646226717547, - "learning_rate": 1.835279646287491e-09, - "loss": 1.1068, - "num_input_tokens_seen": 174845385, - "step": 8206 - }, - { - "epoch": 0.9868334034750196, - "flos": 15824311851960.0, - "grad_norm": 3.9765385175480157, - "learning_rate": 1.8020675906371685e-09, - "loss": 1.0015, - "num_input_tokens_seen": 174864500, - "step": 8207 - }, - { - "epoch": 0.9869536463656586, - "flos": 18366999955320.0, - "grad_norm": 5.049845007604697, - "learning_rate": 1.7691586600243612e-09, - "loss": 0.9727, - "num_input_tokens_seen": 174883120, - "step": 8208 - }, - { - "epoch": 0.9870738892562977, - "flos": 11603744062320.0, - "grad_norm": 6.4930691080297605, - "learning_rate": 1.7365528594415202e-09, - "loss": 1.0934, - "num_input_tokens_seen": 174896910, - "step": 8209 - }, - { - "epoch": 0.9871941321469369, - "flos": 25318342032360.0, - "grad_norm": 4.259393968770812, - "learning_rate": 1.7042501938346888e-09, - "loss": 0.9059, - 
"num_input_tokens_seen": 174919360, - "step": 8210 - }, - { - "epoch": 0.9873143750375759, - "flos": 15222911054520.0, - "grad_norm": 5.703831960677359, - "learning_rate": 1.6722506681043913e-09, - "loss": 1.0003, - "num_input_tokens_seen": 174938040, - "step": 8211 - }, - { - "epoch": 0.987434617928215, - "flos": 11552129560680.0, - "grad_norm": 10.251062849811294, - "learning_rate": 1.640554287104745e-09, - "loss": 0.9141, - "num_input_tokens_seen": 174956035, - "step": 8212 - }, - { - "epoch": 0.9875548608188541, - "flos": 12647532180120.0, - "grad_norm": 4.958822060655942, - "learning_rate": 1.609161055644348e-09, - "loss": 1.0031, - "num_input_tokens_seen": 174971680, - "step": 8213 - }, - { - "epoch": 0.9876751037094932, - "flos": 18598667312280.0, - "grad_norm": 6.020690212286646, - "learning_rate": 1.5780709784849467e-09, - "loss": 0.9009, - "num_input_tokens_seen": 174988420, - "step": 8214 - }, - { - "epoch": 0.9877953466001322, - "flos": 11310343888920.0, - "grad_norm": 4.394808150998406, - "learning_rate": 1.5472840603436565e-09, - "loss": 1.0438, - "num_input_tokens_seen": 175005370, - "step": 8215 - }, - { - "epoch": 0.9879155894907714, - "flos": 13334556697320.0, - "grad_norm": 4.396919273039696, - "learning_rate": 1.5168003058900757e-09, - "loss": 1.0221, - "num_input_tokens_seen": 175023090, - "step": 8216 - }, - { - "epoch": 0.9880358323814105, - "flos": 15903987229080.0, - "grad_norm": 5.781847453820279, - "learning_rate": 1.4866197197491715e-09, - "loss": 1.1446, - "num_input_tokens_seen": 175042170, - "step": 8217 - }, - { - "epoch": 0.9881560752720495, - "flos": 11079351086280.0, - "grad_norm": 5.149236729919963, - "learning_rate": 1.4567423064988371e-09, - "loss": 0.9799, - "num_input_tokens_seen": 175059240, - "step": 8218 - }, - { - "epoch": 0.9882763181626887, - "flos": 15269313090960.0, - "grad_norm": 10.303006327868086, - "learning_rate": 1.4271680706718913e-09, - "loss": 1.0058, - "num_input_tokens_seen": 175076635, - "step": 8219 - }, - { - "epoch": 0.9883965610533277, - "flos": 19966001465040.0, - "grad_norm": 3.679783910958313, - "learning_rate": 1.3978970167543013e-09, - "loss": 1.0492, - "num_input_tokens_seen": 175096535, - "step": 8220 - }, - { - "epoch": 0.9885168039439668, - "flos": 9950675126160.0, - "grad_norm": 16.379300783019556, - "learning_rate": 1.3689291491867372e-09, - "loss": 0.9883, - "num_input_tokens_seen": 175114570, - "step": 8221 - }, - { - "epoch": 0.988637046834606, - "flos": 18814630402440.0, - "grad_norm": 4.53950012513585, - "learning_rate": 1.3402644723636836e-09, - "loss": 0.9646, - "num_input_tokens_seen": 175136320, - "step": 8222 - }, - { - "epoch": 0.988757289725245, - "flos": 17950281908760.0, - "grad_norm": 3.2733431053663193, - "learning_rate": 1.311902990633218e-09, - "loss": 1.0497, - "num_input_tokens_seen": 175155005, - "step": 8223 - }, - { - "epoch": 0.9888775326158841, - "flos": 18762188038680.0, - "grad_norm": 4.164775164741977, - "learning_rate": 1.2838447082978987e-09, - "loss": 0.931, - "num_input_tokens_seen": 175175880, - "step": 8224 - }, - { - "epoch": 0.9889977755065231, - "flos": 17294016484320.0, - "grad_norm": 8.354745085392763, - "learning_rate": 1.2560896296143208e-09, - "loss": 1.0442, - "num_input_tokens_seen": 175194065, - "step": 8225 - }, - { - "epoch": 0.9891180183971623, - "flos": 13438030993080.0, - "grad_norm": 4.990234375, - "learning_rate": 1.2286377587926722e-09, - "loss": 1.0323, - "num_input_tokens_seen": 175210575, - "step": 8226 - }, - { - "epoch": 0.9892382612878013, - "flos": 
18631051467720.0, - "grad_norm": 6.308617332579761, - "learning_rate": 1.2014890999973992e-09, - "loss": 0.9814, - "num_input_tokens_seen": 175227215, - "step": 8227 - }, - { - "epoch": 0.9893585041784404, - "flos": 18108958108680.0, - "grad_norm": 5.240677139850849, - "learning_rate": 1.1746436573472073e-09, - "loss": 1.0009, - "num_input_tokens_seen": 175248670, - "step": 8228 - }, - { - "epoch": 0.9894787470690796, - "flos": 14327619498720.0, - "grad_norm": 6.015935716407324, - "learning_rate": 1.1481014349141726e-09, - "loss": 0.9134, - "num_input_tokens_seen": 175265610, - "step": 8229 - }, - { - "epoch": 0.9895989899597186, - "flos": 17529853813440.0, - "grad_norm": 3.9312835546374534, - "learning_rate": 1.121862436724852e-09, - "loss": 1.0611, - "num_input_tokens_seen": 175284170, - "step": 8230 - }, - { - "epoch": 0.9897192328503577, - "flos": 15485092211760.0, - "grad_norm": 6.061215608750585, - "learning_rate": 1.0959266667598388e-09, - "loss": 0.9392, - "num_input_tokens_seen": 175302705, - "step": 8231 - }, - { - "epoch": 0.9898394757409968, - "flos": 15144646109160.0, - "grad_norm": 7.950312568709833, - "learning_rate": 1.0702941289533196e-09, - "loss": 0.9654, - "num_input_tokens_seen": 175321100, - "step": 8232 - }, - { - "epoch": 0.9899597186316359, - "flos": 13145581328040.0, - "grad_norm": 4.955604486171617, - "learning_rate": 1.0449648271939615e-09, - "loss": 1.1116, - "num_input_tokens_seen": 175337165, - "step": 8233 - }, - { - "epoch": 0.990079961522275, - "flos": 16898889724080.0, - "grad_norm": 4.067840821489283, - "learning_rate": 1.0199387653240243e-09, - "loss": 0.9627, - "num_input_tokens_seen": 175356575, - "step": 8234 - }, - { - "epoch": 0.9902002044129141, - "flos": 11604326631960.0, - "grad_norm": 5.773394882602348, - "learning_rate": 9.952159471400267e-10, - "loss": 0.9324, - "num_input_tokens_seen": 175373335, - "step": 8235 - }, - { - "epoch": 0.9903204473035532, - "flos": 16030984489440.0, - "grad_norm": 4.345396758863724, - "learning_rate": 9.707963763923022e-10, - "loss": 1.0643, - "num_input_tokens_seen": 175392105, - "step": 8236 - }, - { - "epoch": 0.9904406901941922, - "flos": 11420962328160.0, - "grad_norm": 6.353010362152819, - "learning_rate": 9.466800567854427e-10, - "loss": 1.0143, - "num_input_tokens_seen": 175410425, - "step": 8237 - }, - { - "epoch": 0.9905609330848314, - "flos": 18972448078680.0, - "grad_norm": 5.1125780617273575, - "learning_rate": 9.228669919778553e-10, - "loss": 0.9107, - "num_input_tokens_seen": 175429070, - "step": 8238 - }, - { - "epoch": 0.9906811759754705, - "flos": 16428472189800.0, - "grad_norm": 5.2482356785330015, - "learning_rate": 8.993571855817617e-10, - "loss": 1.0201, - "num_input_tokens_seen": 175447620, - "step": 8239 - }, - { - "epoch": 0.9908014188661095, - "flos": 15694585712760.0, - "grad_norm": 3.8859628220221283, - "learning_rate": 8.761506411638642e-10, - "loss": 0.9654, - "num_input_tokens_seen": 175466805, - "step": 8240 - }, - { - "epoch": 0.9909216617567487, - "flos": 13647800448120.0, - "grad_norm": 3.66103460791203, - "learning_rate": 8.53247362244236e-10, - "loss": 0.96, - "num_input_tokens_seen": 175485335, - "step": 8241 - }, - { - "epoch": 0.9910419046473877, - "flos": 16794372935280.0, - "grad_norm": 4.020175833960738, - "learning_rate": 8.306473522976532e-10, - "loss": 0.9128, - "num_input_tokens_seen": 175504460, - "step": 8242 - }, - { - "epoch": 0.9911621475380268, - "flos": 16140805728120.0, - "grad_norm": 7.498320073493832, - "learning_rate": 8.083506147522623e-10, - "loss": 
0.9491, - "num_input_tokens_seen": 175523575, - "step": 8243 - }, - { - "epoch": 0.991282390428666, - "flos": 9532945248120.0, - "grad_norm": 5.41628696382377, - "learning_rate": 7.863571529906909e-10, - "loss": 1.0762, - "num_input_tokens_seen": 175538880, - "step": 8244 - }, - { - "epoch": 0.991402633319305, - "flos": 44260761347640.0, - "grad_norm": 0.8023366197902545, - "learning_rate": 7.646669703489372e-10, - "loss": 0.8794, - "num_input_tokens_seen": 175602910, - "step": 8245 - }, - { - "epoch": 0.9915228762099441, - "flos": 13308212869200.0, - "grad_norm": 5.432078497834933, - "learning_rate": 7.432800701177023e-10, - "loss": 0.8067, - "num_input_tokens_seen": 175620630, - "step": 8246 - }, - { - "epoch": 0.9916431191005832, - "flos": 47209527936240.0, - "grad_norm": 0.8447494239139088, - "learning_rate": 7.221964555415017e-10, - "loss": 0.8298, - "num_input_tokens_seen": 175680010, - "step": 8247 - }, - { - "epoch": 0.9917633619912223, - "flos": 11735401879800.0, - "grad_norm": 12.376646606637273, - "learning_rate": 7.01416129818222e-10, - "loss": 0.9726, - "num_input_tokens_seen": 175697350, - "step": 8248 - }, - { - "epoch": 0.9918836048818613, - "flos": 18081019879440.0, - "grad_norm": 15.900117482645028, - "learning_rate": 6.809390961006745e-10, - "loss": 0.8128, - "num_input_tokens_seen": 175717200, - "step": 8249 - }, - { - "epoch": 0.9920038477725005, - "flos": 17819329307160.0, - "grad_norm": 4.062012863296584, - "learning_rate": 6.607653574948191e-10, - "loss": 0.9157, - "num_input_tokens_seen": 175737700, - "step": 8250 - }, - { - "epoch": 0.9921240906631396, - "flos": 15506407544040.0, - "grad_norm": 5.729539708794957, - "learning_rate": 6.408949170613187e-10, - "loss": 1.0464, - "num_input_tokens_seen": 175756685, - "step": 8251 - }, - { - "epoch": 0.9922443335537786, - "flos": 17688039428400.0, - "grad_norm": 4.29148098935918, - "learning_rate": 6.213277778144288e-10, - "loss": 1.0457, - "num_input_tokens_seen": 175778050, - "step": 8252 - }, - { - "epoch": 0.9923645764444178, - "flos": 15350981469480.0, - "grad_norm": 4.9605935765996945, - "learning_rate": 6.020639427224416e-10, - "loss": 0.9021, - "num_input_tokens_seen": 175795415, - "step": 8253 - }, - { - "epoch": 0.9924848193350568, - "flos": 17785780012440.0, - "grad_norm": 8.449176310284075, - "learning_rate": 5.831034147076864e-10, - "loss": 0.9464, - "num_input_tokens_seen": 175812385, - "step": 8254 - }, - { - "epoch": 0.9926050622256959, - "flos": 49351012742400.0, - "grad_norm": 0.7220465072409997, - "learning_rate": 5.644461966463065e-10, - "loss": 0.8037, - "num_input_tokens_seen": 175879715, - "step": 8255 - }, - { - "epoch": 0.9927253051163349, - "flos": 14856029139120.0, - "grad_norm": 6.529611190273322, - "learning_rate": 5.460922913687049e-10, - "loss": 0.9786, - "num_input_tokens_seen": 175898525, - "step": 8256 - }, - { - "epoch": 0.9928455480069741, - "flos": 15850808987880.0, - "grad_norm": 3.5206614044165536, - "learning_rate": 5.280417016593208e-10, - "loss": 0.9722, - "num_input_tokens_seen": 175918035, - "step": 8257 - }, - { - "epoch": 0.9929657908976132, - "flos": 12311961265560.0, - "grad_norm": 4.031670836727345, - "learning_rate": 5.102944302559642e-10, - "loss": 0.9776, - "num_input_tokens_seen": 175935250, - "step": 8258 - }, - { - "epoch": 0.9930860337882522, - "flos": 16114400576880.0, - "grad_norm": 5.1848785027125395, - "learning_rate": 4.9285047985137e-10, - "loss": 1.0065, - "num_input_tokens_seen": 175954390, - "step": 8259 - }, - { - "epoch": 0.9932062766788914, - "flos": 
20047853812920.0, - "grad_norm": 5.932238044083356, - "learning_rate": 4.757098530916436e-10, - "loss": 0.9765, - "num_input_tokens_seen": 175974555, - "step": 8260 - }, - { - "epoch": 0.9933265195695304, - "flos": 14304679103760.0, - "grad_norm": 8.03743093410499, - "learning_rate": 4.5887255257670563e-10, - "loss": 1.0074, - "num_input_tokens_seen": 175991315, - "step": 8261 - }, - { - "epoch": 0.9934467624601695, - "flos": 15171143245080.0, - "grad_norm": 15.795200459125086, - "learning_rate": 4.4233858086117906e-10, - "loss": 0.9955, - "num_input_tokens_seen": 176009560, - "step": 8262 - }, - { - "epoch": 0.9935670053508087, - "flos": 14016859334280.0, - "grad_norm": 6.200285677327194, - "learning_rate": 4.261079404528356e-10, - "loss": 0.8945, - "num_input_tokens_seen": 176028760, - "step": 8263 - }, - { - "epoch": 0.9936872482414477, - "flos": 15613959827280.0, - "grad_norm": 6.493346403501752, - "learning_rate": 4.1018063381437205e-10, - "loss": 0.9122, - "num_input_tokens_seen": 176048865, - "step": 8264 - }, - { - "epoch": 0.9938074911320868, - "flos": 49994584281000.0, - "grad_norm": 0.9372391019825054, - "learning_rate": 3.9455666336141167e-10, - "loss": 0.8889, - "num_input_tokens_seen": 176112365, - "step": 8265 - }, - { - "epoch": 0.9939277340227259, - "flos": 10659045637200.0, - "grad_norm": 9.363893160282613, - "learning_rate": 3.7923603146450267e-10, - "loss": 1.0421, - "num_input_tokens_seen": 176128145, - "step": 8266 - }, - { - "epoch": 0.994047976913365, - "flos": 12548043887160.0, - "grad_norm": 11.459641334553258, - "learning_rate": 3.642187404473418e-10, - "loss": 1.016, - "num_input_tokens_seen": 176146025, - "step": 8267 - }, - { - "epoch": 0.994168219804004, - "flos": 13596707193000.0, - "grad_norm": 6.5867078156415, - "learning_rate": 3.495047925885508e-10, - "loss": 1.0777, - "num_input_tokens_seen": 176164080, - "step": 8268 - }, - { - "epoch": 0.9942884626946432, - "flos": 12647194902960.0, - "grad_norm": 7.261321600220992, - "learning_rate": 3.350941901199e-10, - "loss": 1.0614, - "num_input_tokens_seen": 176180720, - "step": 8269 - }, - { - "epoch": 0.9944087055852823, - "flos": 13325848814280.0, - "grad_norm": 4.655336840905814, - "learning_rate": 3.2098693522764066e-10, - "loss": 1.0622, - "num_input_tokens_seen": 176193640, - "step": 8270 - }, - { - "epoch": 0.9945289484759213, - "flos": 14908747456920.0, - "grad_norm": 8.616623943918984, - "learning_rate": 3.071830300516165e-10, - "loss": 1.0456, - "num_input_tokens_seen": 176211190, - "step": 8271 - }, - { - "epoch": 0.9946491913665605, - "flos": 10424036170200.0, - "grad_norm": 13.721370229556612, - "learning_rate": 2.9368247668615234e-10, - "loss": 0.9184, - "num_input_tokens_seen": 176229500, - "step": 8272 - }, - { - "epoch": 0.9947694342571995, - "flos": 8923143183240.0, - "grad_norm": 5.897070477454416, - "learning_rate": 2.804852771789434e-10, - "loss": 0.8292, - "num_input_tokens_seen": 176242520, - "step": 8273 - }, - { - "epoch": 0.9948896771478386, - "flos": 13360931187000.0, - "grad_norm": 12.000871626669651, - "learning_rate": 2.675914335321661e-10, - "loss": 0.7853, - "num_input_tokens_seen": 176260995, - "step": 8274 - }, - { - "epoch": 0.9950099200384778, - "flos": 17713831348440.0, - "grad_norm": 7.104528632316316, - "learning_rate": 2.550009477018111e-10, - "loss": 0.9999, - "num_input_tokens_seen": 176279485, - "step": 8275 - }, - { - "epoch": 0.9951301629291168, - "flos": 16875550728840.0, - "grad_norm": 3.2371048026998, - "learning_rate": 2.4271382159790634e-10, - "loss": 0.8575, - 
"num_input_tokens_seen": 176296635, - "step": 8276 - }, - { - "epoch": 0.9952504058197559, - "flos": 15799777055880.0, - "grad_norm": 9.568852328021803, - "learning_rate": 2.3073005708429406e-10, - "loss": 1.0869, - "num_input_tokens_seen": 176316000, - "step": 8277 - }, - { - "epoch": 0.995370648710395, - "flos": 15061935237600.0, - "grad_norm": 4.399881291955589, - "learning_rate": 2.190496559788535e-10, - "loss": 0.948, - "num_input_tokens_seen": 176334005, - "step": 8278 - }, - { - "epoch": 0.9954908916010341, - "flos": 10502975669880.0, - "grad_norm": 14.937725751759055, - "learning_rate": 2.0767262005372265e-10, - "loss": 0.9865, - "num_input_tokens_seen": 176351240, - "step": 8279 - }, - { - "epoch": 0.9956111344916732, - "flos": 13623112344240.0, - "grad_norm": 4.2260362787598655, - "learning_rate": 1.965989510346322e-10, - "loss": 0.98, - "num_input_tokens_seen": 176370080, - "step": 8280 - }, - { - "epoch": 0.9957313773823123, - "flos": 14225892911880.0, - "grad_norm": 4.96889759540269, - "learning_rate": 1.8582865060134955e-10, - "loss": 0.9342, - "num_input_tokens_seen": 176387990, - "step": 8281 - }, - { - "epoch": 0.9958516202729514, - "flos": 41135749485240.0, - "grad_norm": 0.7996023665113098, - "learning_rate": 1.7536172038790098e-10, - "loss": 0.8177, - "num_input_tokens_seen": 176448020, - "step": 8282 - }, - { - "epoch": 0.9959718631635904, - "flos": 19785611332560.0, - "grad_norm": 7.912601377491604, - "learning_rate": 1.651981619819054e-10, - "loss": 0.9119, - "num_input_tokens_seen": 176464890, - "step": 8283 - }, - { - "epoch": 0.9960921060542296, - "flos": 17084124383040.0, - "grad_norm": 5.630534522258758, - "learning_rate": 1.5533797692546257e-10, - "loss": 0.9166, - "num_input_tokens_seen": 176483345, - "step": 8284 - }, - { - "epoch": 0.9962123489448687, - "flos": 13255954474800.0, - "grad_norm": 6.303929922730989, - "learning_rate": 1.4578116671404296e-10, - "loss": 1.0572, - "num_input_tokens_seen": 176501345, - "step": 8285 - }, - { - "epoch": 0.9963325918355077, - "flos": 14199825037800.0, - "grad_norm": 4.84166825382559, - "learning_rate": 1.3652773279759777e-10, - "loss": 0.9432, - "num_input_tokens_seen": 176517715, - "step": 8286 - }, - { - "epoch": 0.9964528347261468, - "flos": 23612830732440.0, - "grad_norm": 8.839684943889491, - "learning_rate": 1.2757767657989305e-10, - "loss": 0.8529, - "num_input_tokens_seen": 176541225, - "step": 8287 - }, - { - "epoch": 0.9965730776167859, - "flos": 16426356542160.0, - "grad_norm": 15.685761252776404, - "learning_rate": 1.1893099941850948e-10, - "loss": 1.0917, - "num_input_tokens_seen": 176559840, - "step": 8288 - }, - { - "epoch": 0.996693320507425, - "flos": 16323556800720.0, - "grad_norm": 18.660531171003264, - "learning_rate": 1.105877026252866e-10, - "loss": 0.9913, - "num_input_tokens_seen": 176577890, - "step": 8289 - }, - { - "epoch": 0.996813563398064, - "flos": 9321182791680.0, - "grad_norm": 3.6287857544832978, - "learning_rate": 1.0254778746565663e-10, - "loss": 0.9502, - "num_input_tokens_seen": 176592885, - "step": 8290 - }, - { - "epoch": 0.9969338062887032, - "flos": 10345679240160.0, - "grad_norm": 5.761784957246729, - "learning_rate": 9.481125515953259e-11, - "loss": 0.9608, - "num_input_tokens_seen": 176610665, - "step": 8291 - }, - { - "epoch": 0.9970540491793423, - "flos": 18313821714120.0, - "grad_norm": 3.9974546678750853, - "learning_rate": 8.737810688064228e-11, - "loss": 1.028, - "num_input_tokens_seen": 176630220, - "step": 8292 - }, - { - "epoch": 0.9971742920699813, - "flos": 
15248733636120.0, - "grad_norm": 3.7986757380264637, - "learning_rate": 8.024834375608414e-11, - "loss": 1.0154, - "num_input_tokens_seen": 176648530, - "step": 8293 - }, - { - "epoch": 0.9972945349606205, - "flos": 51718804680600.0, - "grad_norm": 0.8251555845188953, - "learning_rate": 7.342196686788149e-11, - "loss": 0.8904, - "num_input_tokens_seen": 176701415, - "step": 8294 - }, - { - "epoch": 0.9974147778512595, - "flos": 13960338983040.0, - "grad_norm": 9.309843157994921, - "learning_rate": 6.689897725142834e-11, - "loss": 0.9044, - "num_input_tokens_seen": 176720610, - "step": 8295 - }, - { - "epoch": 0.9975350207418986, - "flos": 11289212526000.0, - "grad_norm": 8.291886318754287, - "learning_rate": 6.067937589615545e-11, - "loss": 1.1003, - "num_input_tokens_seen": 176738405, - "step": 8296 - }, - { - "epoch": 0.9976552636325378, - "flos": 42916766190120.0, - "grad_norm": 0.7568596030196181, - "learning_rate": 5.476316374575241e-11, - "loss": 0.8111, - "num_input_tokens_seen": 176801610, - "step": 8297 - }, - { - "epoch": 0.9977755065231768, - "flos": 15979553957160.0, - "grad_norm": 8.200503208258159, - "learning_rate": 4.9150341697723476e-11, - "loss": 0.957, - "num_input_tokens_seen": 176821220, - "step": 8298 - }, - { - "epoch": 0.9978957494138159, - "flos": 18525860124600.0, - "grad_norm": 3.642721333897319, - "learning_rate": 4.384091060338768e-11, - "loss": 0.8853, - "num_input_tokens_seen": 176841410, - "step": 8299 - }, - { - "epoch": 0.998015992304455, - "flos": 16087872779400.0, - "grad_norm": 7.6437380325086455, - "learning_rate": 3.883487126810081e-11, - "loss": 0.9566, - "num_input_tokens_seen": 176860390, - "step": 8300 - }, - { - "epoch": 0.9981362351950941, - "flos": 12915385725960.0, - "grad_norm": 4.6262244588357655, - "learning_rate": 3.41322244516995e-11, - "loss": 1.0289, - "num_input_tokens_seen": 176878055, - "step": 8301 - }, - { - "epoch": 0.9982564780857331, - "flos": 23875686444000.0, - "grad_norm": 6.019342715351363, - "learning_rate": 2.9732970866946925e-11, - "loss": 0.8535, - "num_input_tokens_seen": 176897655, - "step": 8302 - }, - { - "epoch": 0.9983767209763723, - "flos": 10974956943720.0, - "grad_norm": 8.79448574000506, - "learning_rate": 2.563711118175327e-11, - "loss": 1.0138, - "num_input_tokens_seen": 176914260, - "step": 8303 - }, - { - "epoch": 0.9984969638670114, - "flos": 14173695840600.0, - "grad_norm": 4.067391839538302, - "learning_rate": 2.184464601717728e-11, - "loss": 1.058, - "num_input_tokens_seen": 176932295, - "step": 8304 - }, - { - "epoch": 0.9986172067576504, - "flos": 14462159502840.0, - "grad_norm": 4.317283520509159, - "learning_rate": 1.8355575948758585e-11, - "loss": 0.9978, - "num_input_tokens_seen": 176950000, - "step": 8305 - }, - { - "epoch": 0.9987374496482896, - "flos": 16870675540800.0, - "grad_norm": 5.951056173493754, - "learning_rate": 1.5169901505407424e-11, - "loss": 0.9578, - "num_input_tokens_seen": 176966785, - "step": 8306 - }, - { - "epoch": 0.9988576925389286, - "flos": 17816907043920.0, - "grad_norm": 4.2783106159656725, - "learning_rate": 1.228762317073695e-11, - "loss": 0.968, - "num_input_tokens_seen": 176985335, - "step": 8307 - }, - { - "epoch": 0.9989779354295677, - "flos": 22302967439280.0, - "grad_norm": 5.836966917868644, - "learning_rate": 9.70874138195299e-12, - "loss": 1.0119, - "num_input_tokens_seen": 177006965, - "step": 8308 - }, - { - "epoch": 0.9990981783202069, - "flos": 13885569455520.0, - "grad_norm": 7.490361186996921, - "learning_rate": 7.433256530076093e-12, - "loss": 0.9707, 
- "num_input_tokens_seen": 177026640, - "step": 8309 - }, - { - "epoch": 0.9992184212108459, - "flos": 12179322278160.0, - "grad_norm": 8.624396261563174, - "learning_rate": 5.46116896038562e-12, - "loss": 0.9808, - "num_input_tokens_seen": 177040770, - "step": 8310 - }, - { - "epoch": 0.999338664101485, - "flos": 33235391251200.0, - "grad_norm": 3.853155522407368, - "learning_rate": 3.792478972197699e-12, - "loss": 0.8562, - "num_input_tokens_seen": 177061075, - "step": 8311 - }, - { - "epoch": 0.9994589069921241, - "flos": 10712438509320.0, - "grad_norm": 4.41433990417036, - "learning_rate": 2.4271868181990895e-12, - "loss": 0.9098, - "num_input_tokens_seen": 177077960, - "step": 8312 - }, - { - "epoch": 0.9995791498827632, - "flos": 8824636060200.0, - "grad_norm": 4.410106080870532, - "learning_rate": 1.3652927060014973e-12, - "loss": 1.032, - "num_input_tokens_seen": 177093275, - "step": 8313 - }, - { - "epoch": 0.9996993927734023, - "flos": 13649701464840.0, - "grad_norm": 4.071298783607068, - "learning_rate": 6.067967965872612e-13, - "loss": 0.8687, - "num_input_tokens_seen": 177112605, - "step": 8314 - }, - { - "epoch": 0.9998196356640414, - "flos": 45061236263760.0, - "grad_norm": 19.08065861554902, - "learning_rate": 1.5169920497548615e-13, - "loss": 0.9998, - "num_input_tokens_seen": 177136945, - "step": 8315 - }, - { - "epoch": 0.9999398785546805, - "flos": 36421455827640.0, - "grad_norm": 2.9171146230477514, - "learning_rate": 0.0, - "loss": 0.7958, - "num_input_tokens_seen": 177185545, - "step": 8316 - } - ], - "logging_steps": 1.0, - "max_steps": 8316, - "num_input_tokens_seen": 177185545, - "num_train_epochs": 1, - "save_steps": 832, - "stateful_callbacks": { - "TrainerControl": { - "args": { - "should_epoch_stop": false, - "should_evaluate": false, - "should_log": false, - "should_save": true, - "should_training_stop": true - }, - "attributes": {} - } - }, - "total_flos": 4.968106781143204e+17, - "train_batch_size": 5, - "trial_name": null, - "trial_params": null -} diff --git a/sft/smoe_cosinegating/training_args.bin b/sft/smoe_cosinegating/training_args.bin deleted file mode 100644 index 234a354574b3a19ef3c708d8d0b62d1018089038..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/training_args.bin +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:85c0e039896aa5a4e53d1871e1fc5ef84322f540588eb5c09d7fac45151b2ed8 -size 8120 diff --git a/sft/smoe_cosinegating/zero_to_fp32.py b/sft/smoe_cosinegating/zero_to_fp32.py deleted file mode 100644 index 24cc342e78d1a006c782b3a4cd68d9ce786d8fd8..0000000000000000000000000000000000000000 --- a/sft/smoe_cosinegating/zero_to_fp32.py +++ /dev/null @@ -1,604 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: Apache-2.0 - -# DeepSpeed Team - -# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets -# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in -# the future. Once extracted, the weights don't require DeepSpeed and can be used in any -# application. -# -# example: python zero_to_fp32.py . pytorch_model.bin - -import argparse -import torch -import glob -import math -import os -import re -from collections import OrderedDict -from dataclasses import dataclass - -# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with -# DeepSpeed data structures it has to be available in the current python environment. 
-from deepspeed.utils import logger -from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS, - FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES, - FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS) - - -@dataclass -class zero_model_state: - buffers: dict() - param_shapes: dict() - shared_params: list - ds_version: int - frozen_param_shapes: dict() - frozen_param_fragments: dict() - - -debug = 0 - -# load to cpu -device = torch.device('cpu') - - -def atoi(text): - return int(text) if text.isdigit() else text - - -def natural_keys(text): - ''' - alist.sort(key=natural_keys) sorts in human order - http://nedbatchelder.com/blog/200712/human_sorting.html - (See Toothy's implementation in the comments) - ''' - return [atoi(c) for c in re.split(r'(\d+)', text)] - - -def get_model_state_file(checkpoint_dir, zero_stage): - if not os.path.isdir(checkpoint_dir): - raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist") - - # there should be only one file - if zero_stage <= 2: - file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt") - elif zero_stage == 3: - file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt") - - if not os.path.exists(file): - raise FileNotFoundError(f"can't find model states file at '{file}'") - - return file - - -def get_checkpoint_files(checkpoint_dir, glob_pattern): - # XXX: need to test that this simple glob rule works for multi-node setup too - ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys) - - if len(ckpt_files) == 0: - raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'") - - return ckpt_files - - -def get_optim_files(checkpoint_dir): - return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt") - - -def get_model_state_files(checkpoint_dir): - return get_checkpoint_files(checkpoint_dir, "*_model_states.pt") - - -def parse_model_states(files): - zero_model_states = [] - for file in files: - state_dict = torch.load(file, map_location=device) - - if BUFFER_NAMES not in state_dict: - raise ValueError(f"{file} is not a model state checkpoint") - buffer_names = state_dict[BUFFER_NAMES] - if debug: - print("Found buffers:", buffer_names) - - # recover just the buffers while restoring them to fp32 if they were saved in fp16 - buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names} - param_shapes = state_dict[PARAM_SHAPES] - - # collect parameters that are included in param_shapes - param_names = [] - for s in param_shapes: - for name in s.keys(): - param_names.append(name) - - # update with frozen parameters - frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None) - if frozen_param_shapes is not None: - if debug: - print(f"Found frozen_param_shapes: {frozen_param_shapes}") - param_names += list(frozen_param_shapes.keys()) - - # handle shared params - shared_params = [[k, v] for k, v in state_dict["shared_params"].items()] - - ds_version = state_dict.get(DS_VERSION, None) - - frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None) - - z_model_state = zero_model_state(buffers=buffers, - param_shapes=param_shapes, - shared_params=shared_params, - ds_version=ds_version, - frozen_param_shapes=frozen_param_shapes, - frozen_param_fragments=frozen_param_fragments) - zero_model_states.append(z_model_state) - - return zero_model_states - - -def parse_optim_states(files, ds_checkpoint_dir): - - total_files = len(files) - 
state_dicts = [] - for f in files: - state_dict = torch.load(f, map_location=device) - # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights - # and also handle the case where it was already removed by another helper script - state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None) - state_dicts.append(state_dict) - - if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]: - raise ValueError(f"{files[0]} is not a zero checkpoint") - zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE] - world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT] - - # For ZeRO-2 each param group can have different partition_count as data parallelism for expert - # parameters can be different from data parallelism for non-expert parameters. So we can just - # use the max of the partition_count to get the dp world_size. - - if type(world_size) is list: - world_size = max(world_size) - - if world_size != total_files: - raise ValueError( - f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. " - "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes." - ) - - # the groups are named differently in each stage - if zero_stage <= 2: - fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS - elif zero_stage == 3: - fp32_groups_key = FP32_FLAT_GROUPS - else: - raise ValueError(f"unknown zero stage {zero_stage}") - - if zero_stage <= 2: - fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))] - elif zero_stage == 3: - # if there is more than one param group, there will be multiple flattened tensors - one - # flattened tensor per group - for simplicity merge them into a single tensor - # - # XXX: could make the script more memory efficient for when there are multiple groups - it - # will require matching the sub-lists of param_shapes for each param group flattened tensor - - fp32_flat_groups = [ - torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts)) - ] - - return zero_stage, world_size, fp32_flat_groups - - -def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters): - """ - Returns fp32 state_dict reconstructed from ds checkpoint - - Args: - - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are) - - """ - print(f"Processing zero checkpoint '{ds_checkpoint_dir}'") - - optim_files = get_optim_files(ds_checkpoint_dir) - zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir) - print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}") - - model_files = get_model_state_files(ds_checkpoint_dir) - - zero_model_states = parse_model_states(model_files) - print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}') - - if zero_stage <= 2: - return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters) - elif zero_stage == 3: - return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters) - - -def _zero2_merge_frozen_params(state_dict, zero_model_states): - if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: - return - - frozen_param_shapes = zero_model_states[0].frozen_param_shapes - 
frozen_param_fragments = zero_model_states[0].frozen_param_fragments - - if debug: - num_elem = sum(s.numel() for s in frozen_param_shapes.values()) - print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') - - wanted_params = len(frozen_param_shapes) - wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) - avail_numel = sum([p.numel() for p in frozen_param_fragments.values()]) - print(f'Frozen params: Have {avail_numel} numels to process.') - print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') - - total_params = 0 - total_numel = 0 - for name, shape in frozen_param_shapes.items(): - total_params += 1 - unpartitioned_numel = shape.numel() - total_numel += unpartitioned_numel - - state_dict[name] = frozen_param_fragments[name] - - if debug: - print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ") - - print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") - - -def _has_callable(obj, fn): - attr = getattr(obj, fn, None) - return callable(attr) - - -def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): - param_shapes = zero_model_states[0].param_shapes - - # Reconstruction protocol: - # - # XXX: document this - - if debug: - for i in range(world_size): - for j in range(len(fp32_flat_groups[0])): - print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}") - - # XXX: memory usage doubles here (zero2) - num_param_groups = len(fp32_flat_groups[0]) - merged_single_partition_of_fp32_groups = [] - for i in range(num_param_groups): - merged_partitions = [sd[i] for sd in fp32_flat_groups] - full_single_fp32_vector = torch.cat(merged_partitions, 0) - merged_single_partition_of_fp32_groups.append(full_single_fp32_vector) - avail_numel = sum( - [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups]) - - if debug: - wanted_params = sum([len(shapes) for shapes in param_shapes]) - wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes]) - # not asserting if there is a mismatch due to possible padding - print(f"Have {avail_numel} numels to process.") - print(f"Need {wanted_numel} numels in {wanted_params} params.") - - # params - # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support - # out-of-core computing solution - total_numel = 0 - total_params = 0 - for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups): - offset = 0 - avail_numel = full_single_fp32_vector.numel() - for name, shape in shapes.items(): - - unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape) - total_numel += unpartitioned_numel - total_params += 1 - - if debug: - print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ") - state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape) - offset += unpartitioned_numel - - # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and - # avail_numel can differ by anywhere between 0..2*world_size. 
Due to two unrelated complex - # paddings performed in the code it's almost impossible to predict the exact numbers w/o the - # live optimizer object, so we are checking that the numbers are within the right range - align_to = 2 * world_size - - def zero2_align(x): - return align_to * math.ceil(x / align_to) - - if debug: - print(f"original offset={offset}, avail_numel={avail_numel}") - - offset = zero2_align(offset) - avail_numel = zero2_align(avail_numel) - - if debug: - print(f"aligned offset={offset}, avail_numel={avail_numel}") - - # Sanity check - if offset != avail_numel: - raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong") - - print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements") - - -def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters): - state_dict = OrderedDict() - - # buffers - buffers = zero_model_states[0].buffers - state_dict.update(buffers) - if debug: - print(f"added {len(buffers)} buffers") - - if not exclude_frozen_parameters: - _zero2_merge_frozen_params(state_dict, zero_model_states) - - _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states) - - # recover shared parameters - for pair in zero_model_states[0].shared_params: - if pair[1] in state_dict: - state_dict[pair[0]] = state_dict[pair[1]] - - return state_dict - - -def zero3_partitioned_param_info(unpartitioned_numel, world_size): - remainder = unpartitioned_numel % world_size - padding_numel = (world_size - remainder) if remainder else 0 - partitioned_numel = math.ceil(unpartitioned_numel / world_size) - return partitioned_numel, padding_numel - - -def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states): - if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: - return - - if debug: - for i in range(world_size): - num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values()) - print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') - - frozen_param_shapes = zero_model_states[0].frozen_param_shapes - wanted_params = len(frozen_param_shapes) - wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) - avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size - print(f'Frozen params: Have {avail_numel} numels to process.') - print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') - - total_params = 0 - total_numel = 0 - for name, shape in zero_model_states[0].frozen_param_shapes.items(): - total_params += 1 - unpartitioned_numel = shape.numel() - total_numel += unpartitioned_numel - - param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states) - state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape) - - partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size) - - if debug: - print( - f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}" - ) - - print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") - - -def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): - param_shapes = zero_model_states[0].param_shapes - avail_numel = fp32_flat_groups[0].numel() * 
world_size - # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each - # param, re-consolidating each param, while dealing with padding if any - - # merge list of dicts, preserving order - param_shapes = {k: v for d in param_shapes for k, v in d.items()} - - if debug: - for i in range(world_size): - print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}") - - wanted_params = len(param_shapes) - wanted_numel = sum(shape.numel() for shape in param_shapes.values()) - # not asserting if there is a mismatch due to possible padding - avail_numel = fp32_flat_groups[0].numel() * world_size - print(f"Trainable params: Have {avail_numel} numels to process.") - print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.") - - # params - # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support - # out-of-core computing solution - offset = 0 - total_numel = 0 - total_params = 0 - for name, shape in param_shapes.items(): - - unpartitioned_numel = shape.numel() - total_numel += unpartitioned_numel - total_params += 1 - - partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size) - - if debug: - print( - f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}" - ) - - # XXX: memory usage doubles here - state_dict[name] = torch.cat( - tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)), - 0).narrow(0, 0, unpartitioned_numel).view(shape) - offset += partitioned_numel - - offset *= world_size - - # Sanity check - if offset != avail_numel: - raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong") - - print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements") - - -def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters): - state_dict = OrderedDict() - - # buffers - buffers = zero_model_states[0].buffers - state_dict.update(buffers) - if debug: - print(f"added {len(buffers)} buffers") - - if not exclude_frozen_parameters: - _zero3_merge_frozen_params(state_dict, world_size, zero_model_states) - - _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states) - - # recover shared parameters - for pair in zero_model_states[0].shared_params: - if pair[1] in state_dict: - state_dict[pair[0]] = state_dict[pair[1]] - - return state_dict - - -def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False): - """ - Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with - ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example - via a model hub. - - Args: - - ``checkpoint_dir``: path to the desired checkpoint folder - - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14`` - - ``exclude_frozen_parameters``: exclude frozen parameters - - Returns: - - pytorch ``state_dict`` - - Note: this approach may not work if your application doesn't have sufficient free CPU memory and - you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with - the checkpoint. 
-
-    A typical usage might be ::
-
-        from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
-        # do the training and checkpoint saving
-        state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
-        model = model.cpu() # move to cpu
-        model.load_state_dict(state_dict)
-        # submit to model hub or save the model to share with others
-
-    In this example the ``model`` will no longer be usable in the deepspeed context of the same
-    application, i.e., you will need to re-initialize the deepspeed engine, since
-    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
-
-    If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
-
-    """
-    if tag is None:
-        latest_path = os.path.join(checkpoint_dir, 'latest')
-        if os.path.isfile(latest_path):
-            with open(latest_path, 'r') as fd:
-                tag = fd.read().strip()
-        else:
-            raise ValueError(f"Unable to find 'latest' file at {latest_path}")
-
-    ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
-
-    if not os.path.isdir(ds_checkpoint_dir):
-        raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
-
-    return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
-
-
-def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
-    """
-    Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
-    loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
-
-    Args:
-        - ``checkpoint_dir``: path to the desired checkpoint folder (one that contains the tag-folder, like ``global_step14``)
-        - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
-        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided, will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
-        - ``exclude_frozen_parameters``: exclude frozen parameters
-    """
-
-    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
-    print(f"Saving fp32 state dict to {output_file}")
-    torch.save(state_dict, output_file)
-
-
-def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
-    """
-    1. Put the provided model on the CPU
-    2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
-    3. Load it into the provided model
-
-    Args:
-        - ``model``: the model object to update
-        - ``checkpoint_dir``: path to the desired checkpoint folder (one that contains the tag-folder, like ``global_step14``)
-        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided, will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
-
-    Returns:
-        - ``model``: the modified model
-
-    Make sure you have plenty of CPU memory available before you call this function. If you don't
-    have enough, use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
-    conveniently placed for you in the checkpoint folder.
-
-    A typical usage might be ::
-
-        from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
-        model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
-        # submit to model hub or save the model to share with others
-
-    Note that once this has been run, the ``model`` will no longer be usable in the deepspeed context
-    of the same application;
-    i.e., you will need to re-initialize the deepspeed engine, since
-    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
-
-    """
-    logger.info(f"Extracting fp32 weights")
-    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
-
-    logger.info(f"Overwriting model with fp32 weights")
-    model = model.cpu()
-    model.load_state_dict(state_dict, strict=False)
-
-    return model
-
-
-if __name__ == "__main__":
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument("checkpoint_dir",
-                        type=str,
-                        help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
-    parser.add_argument(
-        "output_file",
-        type=str,
-        help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
-    parser.add_argument("-t",
-                        "--tag",
-                        type=str,
-                        default=None,
-                        help="checkpoint tag used as a unique identifier for checkpoint, e.g., global_step1")
-    parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
-    parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
-    args = parser.parse_args()
-
-    debug = args.debug
-
-    convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
-                                               args.output_file,
-                                               tag=args.tag,
-                                               exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/sft/smoe_perturbed/added_tokens.json b/sft/smoe_perturbed/added_tokens.json deleted file mode 100644 index c9d3d3a1b74d87e381e471f7b33784015d2dc0ea..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/added_tokens.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "<|assistant|>": 32001, - "<|endoftext|>": 32000, - "<|end|>": 32007, - "<|placeholder1|>": 32002, - "<|placeholder2|>": 32003, - "<|placeholder3|>": 32004, - "<|placeholder4|>": 32005, - "<|placeholder5|>": 32008, - "<|placeholder6|>": 32009, - "<|system|>": 32006, - "<|user|>": 32010 -} diff --git a/sft/smoe_perturbed/config.json b/sft/smoe_perturbed/config.json deleted file mode 100644 index 614560797a82b0ee874563694021c969d38937a5..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/config.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "_name_or_path": "/cm/archive/namnv78/checkpoints/phi3mini-clip/pft", - "architectures": [ - "LlavaPhiForCausalLM" - ], - "attention_bias": false, - "attention_dropout": 0.0, - "auto_map": { - "AutoConfig": "microsoft/Phi-3-mini-4k-instruct--configuration_phi3.Phi3Config", - "AutoModelForCausalLM": "microsoft/Phi-3-mini-4k-instruct--modeling_phi3.Phi3ForCausalLM" - }, - "balance_loss_coef": 0.1, - "bos_token_id": 1, - "clip_smoe": true, - "dropout": false, - "embd_pdrop": 0.0, - "eos_token_id": 32000, - "freeze_mm_mlp_adapter": false, - "hidden_act": "silu", - "hidden_size": 3072, - "image_aspect_ratio": "pad", - "initializer_range": 0.02, - "intermediate_size": 8192, - "local_rank": 0, - "max_position_embeddings": 4096, - "mlp_smoe": true, - "mm_hidden_size": 1024, - "mm_patch_merge_type": "flat", - "mm_projector_lr": null, - "mm_projector_type": "moe", - "mm_use_im_patch_token": false, - "mm_use_im_start_end": false, - "mm_vision_select_feature": "patch", - "mm_vision_select_layer": -2, - "mm_vision_tower": "openai/clip-vit-large-patch14-336", - "model_type": "llava_phi", - "moe_name": "smoe_perturbed", - "num_attention_heads": 32, - "num_experts": 4, - "num_hidden_layers": 32, - "num_key_value_heads": 32, - "num_layers": 3, - "num_selected": 2, - "original_max_position_embeddings": 4096, - "pad_token_id": 32000, - "resid_pdrop": 0.0, - "rms_norm_eps": 1e-05, -
"rope_scaling": null, - "rope_theta": 10000.0, - "router_z_loss_coef": 0.01, - "scales": [ - 1, - 3 - ], - "sliding_window": 2047, - "tie_word_embeddings": false, - "tokenizer_model_max_length": 2048, - "tokenizer_padding_side": "right", - "torch_dtype": "bfloat16", - "training": true, - "transformers_version": "4.43.0", - "tune_mm_mlp_adapter": false, - "use_cache": false, - "use_mm_proj": true, - "vocab_size": 32064 -} diff --git a/sft/smoe_perturbed/generation_config.json b/sft/smoe_perturbed/generation_config.json deleted file mode 100644 index f79d092444f37c54d37a669a57923ca3276d762c..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/generation_config.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "_from_model_config": true, - "bos_token_id": 1, - "do_sample": true, - "eos_token_id": [ - 32000, - 32001, - 32007 - ], - "pad_token_id": 32000, - "transformers_version": "4.43.0" -} diff --git a/sft/smoe_perturbed/latest b/sft/smoe_perturbed/latest deleted file mode 100644 index 15b842fabe685a86c9c52effdd8958a64045bed5..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/latest +++ /dev/null @@ -1 +0,0 @@ -global_step8316 \ No newline at end of file diff --git a/sft/smoe_perturbed/model-00001-of-00002.safetensors b/sft/smoe_perturbed/model-00001-of-00002.safetensors deleted file mode 100644 index f3400c4b50ce93886b6926114b83703c6a671f2d..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/model-00001-of-00002.safetensors +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:35235c9cd0dc125c4bd69c45172d0926b849e4a0681db424679dbc664b28ba47 -size 4972489328 diff --git a/sft/smoe_perturbed/model-00002-of-00002.safetensors b/sft/smoe_perturbed/model-00002-of-00002.safetensors deleted file mode 100644 index 957d3c2b6018947bc2f16a71e754f7e50bad0f08..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/model-00002-of-00002.safetensors +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6b6f5c330830811ff52a1aae60b195425f54bd2439eb2e451cd1c97fcea1d941 -size 4611602520 diff --git a/sft/smoe_perturbed/model.safetensors.index.json b/sft/smoe_perturbed/model.safetensors.index.json deleted file mode 100644 index 6620b4c4a8f524334bc853c81e8131e24c2a88e2..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/model.safetensors.index.json +++ /dev/null @@ -1,945 +0,0 @@ -{ - "metadata": { - "total_size": 9583958216 - }, - "weight_map": { - "lm_head.weight": "model-00002-of-00002.safetensors", - "model.embed_tokens.weight": "model-00001-of-00002.safetensors", - "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.0.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.0.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.1.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.1.self_attn.qkv_proj.weight": 
"model-00001-of-00002.safetensors", - "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.10.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.10.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.11.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.11.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.12.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.12.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.13.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.13.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.14.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.14.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.15.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.15.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.16.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.16.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.17.input_layernorm.weight": 
"model-00001-of-00002.safetensors", - "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.17.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.17.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.18.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.18.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.19.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.19.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.2.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.2.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.20.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.20.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.21.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.21.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.22.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.22.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - 
"model.layers.23.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.23.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.24.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.24.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.25.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.25.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.26.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.26.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.27.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.27.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.28.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.28.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.29.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.29.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.3.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - 
"model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.3.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.30.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.30.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.31.mlp.gate_up_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", - "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.31.self_attn.qkv_proj.weight": "model-00002-of-00002.safetensors", - "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.4.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.4.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.5.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.5.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.6.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.6.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.7.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.7.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.8.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.8.self_attn.o_proj.weight": 
"model-00001-of-00002.safetensors", - "model.layers.8.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.9.mlp.gate_up_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", - "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", - "model.layers.9.self_attn.qkv_proj.weight": "model-00001-of-00002.safetensors", - "model.mm_projector.moelayer.experts.0.0.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.0.0.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.0.2.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.0.2.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.1.0.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.1.0.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.1.2.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.1.2.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.2.0.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.2.0.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.2.2.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.2.2.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.3.0.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.3.0.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.3.2.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.experts.3.2.weight": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.mm_projector.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.norm.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.embeddings.class_embedding": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc1.bias": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc1.weight": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc2.bias": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.1.fc2.weight": 
"model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.0.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.1.fc2.weight": "model-00002-of-00002.safetensors", - 
"model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.2.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc1.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc1.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc2.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.experts.3.fc2.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.gate.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.moelayer.gate.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.pre_layrnorm.bias": "model-00002-of-00002.safetensors", - "model.vision_tower.vision_model.pre_layrnorm.weight": "model-00002-of-00002.safetensors" - } -} diff --git a/sft/smoe_perturbed/rng_state_0.pth b/sft/smoe_perturbed/rng_state_0.pth deleted file mode 100644 index 71ea030e2b6ccf2942e534710e59240994fbf63d..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/rng_state_0.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:98d7182be6bef60e0a2c9cafee3f25cd25e5efbb81e449bb83476786049e3afd -size 15024 diff --git a/sft/smoe_perturbed/rng_state_1.pth b/sft/smoe_perturbed/rng_state_1.pth deleted file mode 100644 index 9d2963e51043b85c2837399b5ae8212b62ea2cf9..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/rng_state_1.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ca439560b72fdfdeca80538c1d7fd13a79cb40a4113abdd40bed2ee18c276f6e -size 15024 diff --git a/sft/smoe_perturbed/rng_state_2.pth b/sft/smoe_perturbed/rng_state_2.pth deleted file mode 100644 index 51d113c2fd99f3ab9ae0a827bc55e4424d99d271..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/rng_state_2.pth +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:37d8a08a60f1e45bbe669ccc291b732178afde092185a2275107087813030b6c -size 15024 diff --git a/sft/smoe_perturbed/rng_state_3.pth 
diff --git a/sft/smoe_perturbed/rng_state_0.pth b/sft/smoe_perturbed/rng_state_0.pth
deleted file mode 100644
index 71ea030e2b6ccf2942e534710e59240994fbf63d..0000000000000000000000000000000000000000
--- a/sft/smoe_perturbed/rng_state_0.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:98d7182be6bef60e0a2c9cafee3f25cd25e5efbb81e449bb83476786049e3afd
-size 15024
diff --git a/sft/smoe_perturbed/rng_state_1.pth b/sft/smoe_perturbed/rng_state_1.pth
deleted file mode 100644
index 9d2963e51043b85c2837399b5ae8212b62ea2cf9..0000000000000000000000000000000000000000
--- a/sft/smoe_perturbed/rng_state_1.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ca439560b72fdfdeca80538c1d7fd13a79cb40a4113abdd40bed2ee18c276f6e
-size 15024
diff --git a/sft/smoe_perturbed/rng_state_2.pth b/sft/smoe_perturbed/rng_state_2.pth
deleted file mode 100644
index 51d113c2fd99f3ab9ae0a827bc55e4424d99d271..0000000000000000000000000000000000000000
--- a/sft/smoe_perturbed/rng_state_2.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:37d8a08a60f1e45bbe669ccc291b732178afde092185a2275107087813030b6c
-size 15024
diff --git a/sft/smoe_perturbed/rng_state_3.pth b/sft/smoe_perturbed/rng_state_3.pth
deleted file mode 100644
index 2e37d90e8d2dbd6c0377326df7ded780972f9ced..0000000000000000000000000000000000000000
--- a/sft/smoe_perturbed/rng_state_3.pth
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5990ef8a1c2a5a5daffd1a6f0a3bfedabc1eacf2b1a98ac77694877c0faf73e4
-size 15024
diff --git a/sft/smoe_perturbed/special_tokens_map.json b/sft/smoe_perturbed/special_tokens_map.json
deleted file mode 100644
index 3e4d5a5bc1cb51753cc9ae0305ece0da60052b10..0000000000000000000000000000000000000000
--- a/sft/smoe_perturbed/special_tokens_map.json
+++ /dev/null
@@ -1,24 +0,0 @@
-{
- "bos_token": {
- "content": "<s>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "eos_token": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": "",
- "unk_token": {
- "content": "<unk>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- }
-}
diff --git a/sft/smoe_perturbed/tokenizer.model b/sft/smoe_perturbed/tokenizer.model
deleted file mode 100644
index 6c00c742ce03c627d6cd5b795984876fa49fa899..0000000000000000000000000000000000000000
--- a/sft/smoe_perturbed/tokenizer.model
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
-size 499723
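The `rng_state_*.pth` and `tokenizer.model` entries above are Git LFS pointer files rather than the binaries themselves: three `key value` lines giving the LFS spec version, the SHA-256 of the real content, and its size in bytes. A minimal parser for this format, shown purely as an illustrative sketch:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split a Git LFS pointer file into its `key value` fields."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),  # Python 3.9+
        "size_bytes": int(fields["size"]),
    }


# The rng_state_0.pth pointer shown above, verbatim.
pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:98d7182be6bef60e0a2c9cafee3f25cd25e5efbb81e449bb83476786049e3afd
size 15024"""

print(parse_lfs_pointer(pointer))
# -> 15024 bytes: a per-rank torch RNG snapshot, tiny next to the weight shards.
```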
- "rstrip": true, - "single_word": false, - "special": true - }, - "32009": { - "content": "<|placeholder6|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - }, - "32010": { - "content": "<|user|>", - "lstrip": false, - "normalized": false, - "rstrip": true, - "single_word": false, - "special": true - } - }, - "bos_token": "", - "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') %}{{'<|user|>' + '\n' + message['content'] + '<|end|>' + '\n' + '<|assistant|>' + '\n'}}{% elif (message['role'] == 'assistant') %}{{message['content'] + '<|end|>' + '\n'}}{% endif %}{% endfor %}", - "clean_up_tokenization_spaces": false, - "eos_token": "<|endoftext|>", - "legacy": false, - "model_max_length": 2048, - "pad_token": "", - "padding_side": "right", - "sp_model_kwargs": {}, - "spaces_between_special_tokens": false, - "tokenizer_class": "LlamaTokenizer", - "unk_token": "", - "use_default_system_prompt": false -} diff --git a/sft/smoe_perturbed/trainer_state.json b/sft/smoe_perturbed/trainer_state.json deleted file mode 100644 index bda0420e270428d641059d8a23fdf4ac019b33e7..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/trainer_state.json +++ /dev/null @@ -1,74877 +0,0 @@ -{ - "best_metric": null, - "best_model_checkpoint": null, - "epoch": 0.9999398785546805, - "eval_steps": 500, - "global_step": 8316, - "is_hyper_param_search": false, - "is_local_process_zero": true, - "is_world_process_zero": true, - "log_history": [ - { - "epoch": 0.00012024289063909097, - "flos": 17735413654320.0, - "grad_norm": 17.594072626926447, - "learning_rate": 0.0, - "loss": 1.6653, - "num_input_tokens_seen": 20095, - "step": 1 - }, - { - "epoch": 0.00024048578127818193, - "flos": 21856563454560.0, - "grad_norm": 16.613870437618345, - "learning_rate": 5.021476677069823e-07, - "loss": 1.6223, - "num_input_tokens_seen": 38475, - "step": 2 - }, - { - "epoch": 0.0003607286719172729, - "flos": 13492343712000.0, - "grad_norm": 12.9488169243839, - "learning_rate": 7.958852231401551e-07, - "loss": 1.4783, - "num_input_tokens_seen": 56760, - "step": 3 - }, - { - "epoch": 0.00048097156255636386, - "flos": 13700580089040.0, - "grad_norm": 13.311951531945539, - "learning_rate": 1.0042953354139647e-06, - "loss": 1.5603, - "num_input_tokens_seen": 75345, - "step": 4 - }, - { - "epoch": 0.0006012144531954548, - "flos": 9873820612560.0, - "grad_norm": 15.036561784783688, - "learning_rate": 1.1659507774310057e-06, - "loss": 1.6254, - "num_input_tokens_seen": 92950, - "step": 5 - }, - { - "epoch": 0.0007214573438345458, - "flos": 16455275941320.0, - "grad_norm": 15.630850467205072, - "learning_rate": 1.2980328908471373e-06, - "loss": 1.4991, - "num_input_tokens_seen": 112915, - "step": 6 - }, - { - "epoch": 0.0008417002344736367, - "flos": 48451980476280.0, - "grad_norm": 3.0689450390401136, - "learning_rate": 1.4097067265369432e-06, - "loss": 1.0099, - "num_input_tokens_seen": 177630, - "step": 7 - }, - { - "epoch": 0.0009619431251127277, - "flos": 15112139307480.0, - "grad_norm": 23.90821580969866, - "learning_rate": 1.506443003120947e-06, - "loss": 1.4501, - "num_input_tokens_seen": 192850, - "step": 8 - }, - { - "epoch": 0.0010821860157518186, - "flos": 16586320527600.0, - "grad_norm": 10.27814360188059, - "learning_rate": 1.5917704462803102e-06, - "loss": 1.5513, - "num_input_tokens_seen": 209115, - "step": 9 - }, - { - "epoch": 0.0012024289063909096, - "flos": 12042176041440.0, - "grad_norm": 25.403692550785678, - 
"learning_rate": 1.6680984451379884e-06, - "loss": 1.5949, - "num_input_tokens_seen": 221905, - "step": 10 - }, - { - "epoch": 0.0013226717970300007, - "flos": 15117903680760.0, - "grad_norm": 8.05366967002053, - "learning_rate": 1.7371455188905097e-06, - "loss": 1.3778, - "num_input_tokens_seen": 241555, - "step": 11 - }, - { - "epoch": 0.0014429146876690916, - "flos": 19394225282640.0, - "grad_norm": 18.2673625135958, - "learning_rate": 1.8001805585541196e-06, - "loss": 1.3579, - "num_input_tokens_seen": 262405, - "step": 12 - }, - { - "epoch": 0.0015631575783081825, - "flos": 13518104970480.0, - "grad_norm": 8.493334232172577, - "learning_rate": 1.8581671739548328e-06, - "loss": 1.4366, - "num_input_tokens_seen": 279860, - "step": 13 - }, - { - "epoch": 0.0016834004689472734, - "flos": 34415160466440.0, - "grad_norm": 5.580133498587695, - "learning_rate": 1.9118543942439254e-06, - "loss": 1.2713, - "num_input_tokens_seen": 301765, - "step": 14 - }, - { - "epoch": 0.0018036433595863645, - "flos": 24950785562640.0, - "grad_norm": 6.210466398210426, - "learning_rate": 1.961836000571161e-06, - "loss": 1.2814, - "num_input_tokens_seen": 323140, - "step": 15 - }, - { - "epoch": 0.0019238862502254555, - "flos": 42776124535560.0, - "grad_norm": 2.570830597301575, - "learning_rate": 2.0085906708279293e-06, - "loss": 0.8639, - "num_input_tokens_seen": 378230, - "step": 16 - }, - { - "epoch": 0.0020441291408645466, - "flos": 14777641547520.0, - "grad_norm": 9.721860136525322, - "learning_rate": 2.0525099325728135e-06, - "loss": 1.3316, - "num_input_tokens_seen": 396130, - "step": 17 - }, - { - "epoch": 0.0021643720315036373, - "flos": 45714920569080.0, - "grad_norm": 2.255055152912181, - "learning_rate": 2.0939181139872922e-06, - "loss": 0.9481, - "num_input_tokens_seen": 457565, - "step": 18 - }, - { - "epoch": 0.0022846149221427284, - "flos": 22302415531200.0, - "grad_norm": 22.671355417030373, - "learning_rate": 2.1330868934640175e-06, - "loss": 1.1776, - "num_input_tokens_seen": 477960, - "step": 19 - }, - { - "epoch": 0.002404857812781819, - "flos": 36534502078200.0, - "grad_norm": 2.1200345311861386, - "learning_rate": 2.170246112844971e-06, - "loss": 0.9962, - "num_input_tokens_seen": 532020, - "step": 20 - }, - { - "epoch": 0.0025251007034209102, - "flos": 11184726398760.0, - "grad_norm": 6.650942696890736, - "learning_rate": 2.2055919496770983e-06, - "loss": 1.2086, - "num_input_tokens_seen": 549880, - "step": 21 - }, - { - "epoch": 0.0026453435940600014, - "flos": 27021829669320.0, - "grad_norm": 5.064903724711911, - "learning_rate": 2.2392931865974923e-06, - "loss": 1.1056, - "num_input_tokens_seen": 572290, - "step": 22 - }, - { - "epoch": 0.002765586484699092, - "flos": 15012834983880.0, - "grad_norm": 7.218938915868794, - "learning_rate": 2.271496085962064e-06, - "loss": 1.2269, - "num_input_tokens_seen": 589705, - "step": 23 - }, - { - "epoch": 0.002885829375338183, - "flos": 14671714326960.0, - "grad_norm": 6.172978787769954, - "learning_rate": 2.3023282262611022e-06, - "loss": 1.2382, - "num_input_tokens_seen": 608200, - "step": 24 - }, - { - "epoch": 0.003006072265977274, - "flos": 24845716865760.0, - "grad_norm": 17.01010459836346, - "learning_rate": 2.3319015548620114e-06, - "loss": 1.1186, - "num_input_tokens_seen": 629060, - "step": 25 - }, - { - "epoch": 0.003126315156616365, - "flos": 17369644535520.0, - "grad_norm": 2.8415203366307846, - "learning_rate": 2.3603148416618152e-06, - "loss": 1.1374, - "num_input_tokens_seen": 648295, - "step": 26 - }, - { - "epoch": 
0.003246558047255456, - "flos": 16795323443640.0, - "grad_norm": 7.187956422328733, - "learning_rate": 2.3876556694204647e-06, - "loss": 1.2177, - "num_input_tokens_seen": 668170, - "step": 27 - }, - { - "epoch": 0.003366800937894547, - "flos": 12625388985720.0, - "grad_norm": 5.4563723625803675, - "learning_rate": 2.414002061950908e-06, - "loss": 1.1071, - "num_input_tokens_seen": 686765, - "step": 28 - }, - { - "epoch": 0.003487043828533638, - "flos": 17373139953360.0, - "grad_norm": 5.59358547544143, - "learning_rate": 2.4394238264681557e-06, - "loss": 1.2202, - "num_input_tokens_seen": 706220, - "step": 29 - }, - { - "epoch": 0.003607286719172729, - "flos": 18605014255200.0, - "grad_norm": 4.087291485895498, - "learning_rate": 2.4639836682781433e-06, - "loss": 1.2078, - "num_input_tokens_seen": 726070, - "step": 30 - }, - { - "epoch": 0.00372752960981182, - "flos": 14618137485480.0, - "grad_norm": 3.790599267531037, - "learning_rate": 2.487738122623307e-06, - "loss": 1.1952, - "num_input_tokens_seen": 744700, - "step": 31 - }, - { - "epoch": 0.003847772500450911, - "flos": 16114339253760.0, - "grad_norm": 6.148292627045593, - "learning_rate": 2.510738338534912e-06, - "loss": 1.1861, - "num_input_tokens_seen": 763145, - "step": 32 - }, - { - "epoch": 0.003968015391090002, - "flos": 12731622821880.0, - "grad_norm": 4.316297545294082, - "learning_rate": 2.5330307420306648e-06, - "loss": 1.2424, - "num_input_tokens_seen": 779715, - "step": 33 - }, - { - "epoch": 0.004088258281729093, - "flos": 19440903273120.0, - "grad_norm": 3.622735203912372, - "learning_rate": 2.554657600279796e-06, - "loss": 1.0838, - "num_input_tokens_seen": 800710, - "step": 34 - }, - { - "epoch": 0.004208501172368184, - "flos": 16534062133200.0, - "grad_norm": 5.2202283398610145, - "learning_rate": 2.5756575039679493e-06, - "loss": 1.2605, - "num_input_tokens_seen": 819980, - "step": 35 - }, - { - "epoch": 0.0043287440630072746, - "flos": 12260530733280.0, - "grad_norm": 4.196785950443836, - "learning_rate": 2.5960657816942747e-06, - "loss": 1.1687, - "num_input_tokens_seen": 838615, - "step": 36 - }, - { - "epoch": 0.004448986953646365, - "flos": 37978506775200.0, - "grad_norm": 1.04257674833502, - "learning_rate": 2.6159148575788668e-06, - "loss": 0.8699, - "num_input_tokens_seen": 896370, - "step": 37 - }, - { - "epoch": 0.004569229844285457, - "flos": 9480656192160.0, - "grad_norm": 4.671274203961766, - "learning_rate": 2.635234561171e-06, - "loss": 1.2026, - "num_input_tokens_seen": 914485, - "step": 38 - }, - { - "epoch": 0.0046894727349245475, - "flos": 11467119072120.0, - "grad_norm": 6.466528764545575, - "learning_rate": 2.6540523970949877e-06, - "loss": 1.1637, - "num_input_tokens_seen": 929050, - "step": 39 - }, - { - "epoch": 0.004809715625563638, - "flos": 17005123560240.0, - "grad_norm": 7.526741926269175, - "learning_rate": 2.6723937805519533e-06, - "loss": 1.1522, - "num_input_tokens_seen": 946270, - "step": 40 - }, - { - "epoch": 0.00492995851620273, - "flos": 14747127747240.0, - "grad_norm": 4.070742417839461, - "learning_rate": 2.690282243737839e-06, - "loss": 1.1533, - "num_input_tokens_seen": 964925, - "step": 41 - }, - { - "epoch": 0.0050502014068418205, - "flos": 14435417074440.0, - "grad_norm": 4.302759642316868, - "learning_rate": 2.7077396173840807e-06, - "loss": 1.2086, - "num_input_tokens_seen": 982930, - "step": 42 - }, - { - "epoch": 0.005170444297480911, - "flos": 18500558789520.0, - "grad_norm": 5.377773013169741, - "learning_rate": 2.7247861909342594e-06, - "loss": 1.1377, - 
"num_input_tokens_seen": 1003575, - "step": 43 - }, - { - "epoch": 0.005290687188120003, - "flos": 14877037855800.0, - "grad_norm": 5.170858876884787, - "learning_rate": 2.7414408543044743e-06, - "loss": 1.0465, - "num_input_tokens_seen": 1018935, - "step": 44 - }, - { - "epoch": 0.005410930078759093, - "flos": 11210579641920.0, - "grad_norm": 7.438228283014334, - "learning_rate": 2.7577212237113157e-06, - "loss": 1.0174, - "num_input_tokens_seen": 1035695, - "step": 45 - }, - { - "epoch": 0.005531172969398184, - "flos": 14986184540160.0, - "grad_norm": 5.365471489231661, - "learning_rate": 2.7736437536690466e-06, - "loss": 1.2621, - "num_input_tokens_seen": 1055045, - "step": 46 - }, - { - "epoch": 0.005651415860037276, - "flos": 14798619602640.0, - "grad_norm": 4.88546373335148, - "learning_rate": 2.789223836941131e-06, - "loss": 1.2917, - "num_input_tokens_seen": 1074900, - "step": 47 - }, - { - "epoch": 0.005771658750676366, - "flos": 9348231835680.0, - "grad_norm": 4.068049235433266, - "learning_rate": 2.8044758939680847e-06, - "loss": 1.2783, - "num_input_tokens_seen": 1090690, - "step": 48 - }, - { - "epoch": 0.005891901641315457, - "flos": 17372526722160.0, - "grad_norm": 5.730649148276705, - "learning_rate": 2.8194134530738863e-06, - "loss": 1.2264, - "num_input_tokens_seen": 1109180, - "step": 49 - }, - { - "epoch": 0.006012144531954548, - "flos": 16402986885360.0, - "grad_norm": 4.883516355519751, - "learning_rate": 2.834049222568994e-06, - "loss": 1.1202, - "num_input_tokens_seen": 1126250, - "step": 50 - }, - { - "epoch": 0.006132387422593639, - "flos": 16009546510920.0, - "grad_norm": 20.07012243243984, - "learning_rate": 2.848395155712969e-06, - "loss": 1.1481, - "num_input_tokens_seen": 1146190, - "step": 51 - }, - { - "epoch": 0.00625263031323273, - "flos": 19673367830640.0, - "grad_norm": 5.529079205248389, - "learning_rate": 2.8624625093687977e-06, - "loss": 1.2094, - "num_input_tokens_seen": 1163045, - "step": 52 - }, - { - "epoch": 0.006372873203871821, - "flos": 16427491019880.0, - "grad_norm": 11.670223093654382, - "learning_rate": 2.876261897070029e-06, - "loss": 1.1042, - "num_input_tokens_seen": 1182895, - "step": 53 - }, - { - "epoch": 0.006493116094510912, - "flos": 16244586639480.0, - "grad_norm": 7.2451900443036115, - "learning_rate": 2.889803337127447e-06, - "loss": 1.1474, - "num_input_tokens_seen": 1201215, - "step": 54 - }, - { - "epoch": 0.006613358985150003, - "flos": 16399154190360.0, - "grad_norm": 4.965746085513335, - "learning_rate": 2.903096296321516e-06, - "loss": 1.0732, - "num_input_tokens_seen": 1219080, - "step": 55 - }, - { - "epoch": 0.006733601875789094, - "flos": 18890043822720.0, - "grad_norm": 4.7878053891460475, - "learning_rate": 2.9161497296578907e-06, - "loss": 1.15, - "num_input_tokens_seen": 1238190, - "step": 56 - }, - { - "epoch": 0.006853844766428185, - "flos": 10974711651240.0, - "grad_norm": 5.22073399641312, - "learning_rate": 2.928972116604173e-06, - "loss": 1.0879, - "num_input_tokens_seen": 1254185, - "step": 57 - }, - { - "epoch": 0.006974087657067276, - "flos": 17242095367080.0, - "grad_norm": 4.44023971872754, - "learning_rate": 2.9415714941751377e-06, - "loss": 1.2298, - "num_input_tokens_seen": 1275125, - "step": 58 - }, - { - "epoch": 0.007094330547706367, - "flos": 18340288188480.0, - "grad_norm": 3.2168938182682836, - "learning_rate": 2.9539554871897396e-06, - "loss": 1.1593, - "num_input_tokens_seen": 1295015, - "step": 59 - }, - { - "epoch": 0.007214573438345458, - "flos": 15139341659280.0, - "grad_norm": 
4.2472224415513695, - "learning_rate": 2.9661313359851253e-06, - "loss": 1.1941, - "num_input_tokens_seen": 1312620, - "step": 60 - }, - { - "epoch": 0.007334816328984549, - "flos": 17740389807480.0, - "grad_norm": 4.823709091879005, - "learning_rate": 2.978105921839922e-06, - "loss": 1.1684, - "num_input_tokens_seen": 1332885, - "step": 61 - }, - { - "epoch": 0.00745505921962364, - "flos": 13121107855080.0, - "grad_norm": 7.622458612887038, - "learning_rate": 2.9898857903302893e-06, - "loss": 0.9619, - "num_input_tokens_seen": 1351555, - "step": 62 - }, - { - "epoch": 0.007575302110262731, - "flos": 13097922167640.0, - "grad_norm": 5.755195260932081, - "learning_rate": 3.001477172817253e-06, - "loss": 1.1066, - "num_input_tokens_seen": 1369165, - "step": 63 - }, - { - "epoch": 0.007695545000901822, - "flos": 17766273712200.0, - "grad_norm": 6.447675348140563, - "learning_rate": 3.012886006241894e-06, - "loss": 1.1949, - "num_input_tokens_seen": 1388270, - "step": 64 - }, - { - "epoch": 0.007815787891540913, - "flos": 15143787585480.0, - "grad_norm": 5.0932779854035735, - "learning_rate": 3.0241179513858383e-06, - "loss": 1.1218, - "num_input_tokens_seen": 1407300, - "step": 65 - }, - { - "epoch": 0.007936030782180003, - "flos": 15324055071720.0, - "grad_norm": 3.2780202319746827, - "learning_rate": 3.035178409737647e-06, - "loss": 1.1138, - "num_input_tokens_seen": 1424470, - "step": 66 - }, - { - "epoch": 0.008056273672819095, - "flos": 14278979168400.0, - "grad_norm": 3.0147262578026655, - "learning_rate": 3.046072539090907e-06, - "loss": 1.108, - "num_input_tokens_seen": 1442155, - "step": 67 - }, - { - "epoch": 0.008176516563458186, - "flos": 12994233240960.0, - "grad_norm": 5.433504763436048, - "learning_rate": 3.056805267986779e-06, - "loss": 1.2735, - "num_input_tokens_seen": 1459345, - "step": 68 - }, - { - "epoch": 0.008296759454097276, - "flos": 15537595898640.0, - "grad_norm": 4.301091902736575, - "learning_rate": 3.0673813091022194e-06, - "loss": 1.1804, - "num_input_tokens_seen": 1478285, - "step": 69 - }, - { - "epoch": 0.008417002344736368, - "flos": 45395845573800.0, - "grad_norm": 1.0790653826832208, - "learning_rate": 3.0778051716749317e-06, - "loss": 0.8926, - "num_input_tokens_seen": 1541550, - "step": 70 - }, - { - "epoch": 0.008537245235375458, - "flos": 16324078047240.0, - "grad_norm": 4.749770108733204, - "learning_rate": 3.0880811730470094e-06, - "loss": 1.1356, - "num_input_tokens_seen": 1560725, - "step": 71 - }, - { - "epoch": 0.008657488126014549, - "flos": 44372146325880.0, - "grad_norm": 0.952834163130494, - "learning_rate": 3.098213449401257e-06, - "loss": 0.8462, - "num_input_tokens_seen": 1627375, - "step": 72 - }, - { - "epoch": 0.00877773101665364, - "flos": 21591806726280.0, - "grad_norm": 5.256935261839161, - "learning_rate": 3.1082059657570015e-06, - "loss": 1.2066, - "num_input_tokens_seen": 1646330, - "step": 73 - }, - { - "epoch": 0.00889797390729273, - "flos": 16717671729480.0, - "grad_norm": 4.574727226422029, - "learning_rate": 3.1180625252858496e-06, - "loss": 1.179, - "num_input_tokens_seen": 1664480, - "step": 74 - }, - { - "epoch": 0.009018216797931822, - "flos": 16402680269760.0, - "grad_norm": 4.409250306570405, - "learning_rate": 3.1277867780021663e-06, - "loss": 1.0235, - "num_input_tokens_seen": 1680835, - "step": 75 - }, - { - "epoch": 0.009138459688570914, - "flos": 11258330787000.0, - "grad_norm": 4.169094573597322, - "learning_rate": 3.1373822288779824e-06, - "loss": 1.183, - "num_input_tokens_seen": 1697135, - "step": 76 - }, - { 
- "epoch": 0.009258702579210003, - "flos": 19234813205280.0, - "grad_norm": 9.12642459983988, - "learning_rate": 3.1468522454274533e-06, - "loss": 1.0119, - "num_input_tokens_seen": 1717210, - "step": 77 - }, - { - "epoch": 0.009378945469849095, - "flos": 19152838211160.0, - "grad_norm": 5.296991127564135, - "learning_rate": 3.15620006480197e-06, - "loss": 1.1383, - "num_input_tokens_seen": 1736200, - "step": 78 - }, - { - "epoch": 0.009499188360488187, - "flos": 25470517981560.0, - "grad_norm": 7.982197503804707, - "learning_rate": 3.1654288004333087e-06, - "loss": 0.9765, - "num_input_tokens_seen": 1754585, - "step": 79 - }, - { - "epoch": 0.009619431251127276, - "flos": 15271949985120.0, - "grad_norm": 18.48135921483413, - "learning_rate": 3.1745414482589353e-06, - "loss": 0.9777, - "num_input_tokens_seen": 1773515, - "step": 80 - }, - { - "epoch": 0.009739674141766368, - "flos": 12338765017080.0, - "grad_norm": 3.1201207025117133, - "learning_rate": 3.1835408925606204e-06, - "loss": 1.0815, - "num_input_tokens_seen": 1791375, - "step": 81 - }, - { - "epoch": 0.00985991703240546, - "flos": 19601725782240.0, - "grad_norm": 6.410281815307354, - "learning_rate": 3.1924299114448214e-06, - "loss": 1.1132, - "num_input_tokens_seen": 1811575, - "step": 82 - }, - { - "epoch": 0.00998015992304455, - "flos": 9742070810400.0, - "grad_norm": 5.775865841646872, - "learning_rate": 3.2012111819909055e-06, - "loss": 1.0661, - "num_input_tokens_seen": 1828625, - "step": 83 - }, - { - "epoch": 0.010100402813683641, - "flos": 14329397869200.0, - "grad_norm": 2.9556705866762893, - "learning_rate": 3.2098872850910627e-06, - "loss": 1.1753, - "num_input_tokens_seen": 1845020, - "step": 84 - }, - { - "epoch": 0.010220645704322733, - "flos": 12181008663960.0, - "grad_norm": 3.8807707854681732, - "learning_rate": 3.2184607100038194e-06, - "loss": 1.1178, - "num_input_tokens_seen": 1863380, - "step": 85 - }, - { - "epoch": 0.010340888594961822, - "flos": 15248886943920.0, - "grad_norm": 4.31286553202045, - "learning_rate": 3.2269338586412414e-06, - "loss": 1.1496, - "num_input_tokens_seen": 1880685, - "step": 86 - }, - { - "epoch": 0.010461131485600914, - "flos": 16350207244440.0, - "grad_norm": 3.884058915379901, - "learning_rate": 3.2353090496083106e-06, - "loss": 1.1953, - "num_input_tokens_seen": 1898240, - "step": 87 - }, - { - "epoch": 0.010581374376240005, - "flos": 23927944838400.0, - "grad_norm": 3.911646540855078, - "learning_rate": 3.2435885220114572e-06, - "loss": 1.0489, - "num_input_tokens_seen": 1919310, - "step": 88 - }, - { - "epoch": 0.010701617266879095, - "flos": 15459330953280.0, - "grad_norm": 4.245393388221162, - "learning_rate": 3.2517744390519113e-06, - "loss": 1.163, - "num_input_tokens_seen": 1937895, - "step": 89 - }, - { - "epoch": 0.010821860157518187, - "flos": 13516418584680.0, - "grad_norm": 3.5192753130357493, - "learning_rate": 3.259868891418298e-06, - "loss": 0.9776, - "num_input_tokens_seen": 1955380, - "step": 90 - }, - { - "epoch": 0.010942103048157278, - "flos": 18107057091960.0, - "grad_norm": 2.7311684253653055, - "learning_rate": 3.2678739004917757e-06, - "loss": 1.0622, - "num_input_tokens_seen": 1974835, - "step": 91 - }, - { - "epoch": 0.011062345938796368, - "flos": 19578325463880.0, - "grad_norm": 3.1544152394103904, - "learning_rate": 3.275791421376029e-06, - "loss": 1.1496, - "num_input_tokens_seen": 1998000, - "step": 92 - }, - { - "epoch": 0.01118258882943546, - "flos": 11368673272200.0, - "grad_norm": 4.696349069232197, - "learning_rate": 
3.2836233457634622e-06, - "loss": 1.1774, - "num_input_tokens_seen": 2015855, - "step": 93 - }, - { - "epoch": 0.011302831720074551, - "flos": 14672756820000.0, - "grad_norm": 3.65697204763585, - "learning_rate": 3.2913715046481135e-06, - "loss": 1.0678, - "num_input_tokens_seen": 2035320, - "step": 94 - }, - { - "epoch": 0.011423074610713641, - "flos": 9212404046040.0, - "grad_norm": 5.352095305935644, - "learning_rate": 3.299037670895023e-06, - "loss": 1.1059, - "num_input_tokens_seen": 2051655, - "step": 95 - }, - { - "epoch": 0.011543317501352733, - "flos": 21620051571120.0, - "grad_norm": 4.999777025973082, - "learning_rate": 3.3066235616750667e-06, - "loss": 1.0396, - "num_input_tokens_seen": 2072610, - "step": 96 - }, - { - "epoch": 0.011663560391991824, - "flos": 10974527681880.0, - "grad_norm": 3.1657133340514445, - "learning_rate": 3.3141308407736276e-06, - "loss": 1.1386, - "num_input_tokens_seen": 2088965, - "step": 97 - }, - { - "epoch": 0.011783803282630914, - "flos": 14121866708040.0, - "grad_norm": 4.385481349924114, - "learning_rate": 3.321561120780869e-06, - "loss": 1.0872, - "num_input_tokens_seen": 2107395, - "step": 98 - }, - { - "epoch": 0.011904046173270006, - "flos": 15873596075040.0, - "grad_norm": 5.956644134450405, - "learning_rate": 3.3289159651708192e-06, - "loss": 1.2388, - "num_input_tokens_seen": 2124690, - "step": 99 - }, - { - "epoch": 0.012024289063909096, - "flos": 13545521953200.0, - "grad_norm": 4.859703126482014, - "learning_rate": 3.3361968902759768e-06, - "loss": 1.2017, - "num_input_tokens_seen": 2144090, - "step": 100 - }, - { - "epoch": 0.012144531954548187, - "flos": 10607461797120.0, - "grad_norm": 4.309181595211301, - "learning_rate": 3.343405367163663e-06, - "loss": 1.1724, - "num_input_tokens_seen": 2160740, - "step": 101 - }, - { - "epoch": 0.012264774845187279, - "flos": 10686431958360.0, - "grad_norm": 4.716683244619902, - "learning_rate": 3.350542823419951e-06, - "loss": 1.0408, - "num_input_tokens_seen": 2177060, - "step": 102 - }, - { - "epoch": 0.012385017735826368, - "flos": 9842877550440.0, - "grad_norm": 20.0034359841472, - "learning_rate": 3.3576106448465615e-06, - "loss": 1.0979, - "num_input_tokens_seen": 2190160, - "step": 103 - }, - { - "epoch": 0.01250526062646546, - "flos": 16796120644200.0, - "grad_norm": 4.189131006337333, - "learning_rate": 3.3646101770757797e-06, - "loss": 1.1046, - "num_input_tokens_seen": 2208670, - "step": 104 - }, - { - "epoch": 0.012625503517104552, - "flos": 24713445817080.0, - "grad_norm": 2.5398198802792047, - "learning_rate": 3.371542727108104e-06, - "loss": 1.0732, - "num_input_tokens_seen": 2230565, - "step": 105 - }, - { - "epoch": 0.012745746407743641, - "flos": 12626768755920.0, - "grad_norm": 6.055195291206423, - "learning_rate": 3.3784095647770114e-06, - "loss": 1.1218, - "num_input_tokens_seen": 2248930, - "step": 106 - }, - { - "epoch": 0.012865989298382733, - "flos": 14619823871280.0, - "grad_norm": 4.753026500034918, - "learning_rate": 3.3852119241449547e-06, - "loss": 1.1072, - "num_input_tokens_seen": 2267770, - "step": 107 - }, - { - "epoch": 0.012986232189021825, - "flos": 17029842325680.0, - "grad_norm": 3.7379104286838882, - "learning_rate": 3.3919510048344295e-06, - "loss": 1.1839, - "num_input_tokens_seen": 2285500, - "step": 108 - }, - { - "epoch": 0.013106475079660914, - "flos": 16868559893160.0, - "grad_norm": 9.38512839927088, - "learning_rate": 3.3986279732976907e-06, - "loss": 1.0766, - "num_input_tokens_seen": 2303695, - "step": 109 - }, - { - "epoch": 
0.013226717970300006, - "flos": 14984007569400.0, - "grad_norm": 2.820083228431956, - "learning_rate": 3.4052439640284983e-06, - "loss": 1.174, - "num_input_tokens_seen": 2322330, - "step": 110 - }, - { - "epoch": 0.013346960860939098, - "flos": 17687763474360.0, - "grad_norm": 4.6237321095199135, - "learning_rate": 3.4118000807190217e-06, - "loss": 1.0408, - "num_input_tokens_seen": 2342930, - "step": 111 - }, - { - "epoch": 0.013467203751578187, - "flos": 20072081993400.0, - "grad_norm": 3.5464615769950267, - "learning_rate": 3.4182973973648723e-06, - "loss": 0.9924, - "num_input_tokens_seen": 2363860, - "step": 112 - }, - { - "epoch": 0.013587446642217279, - "flos": 13413220242960.0, - "grad_norm": 7.605047918666577, - "learning_rate": 3.424736959321014e-06, - "loss": 1.1564, - "num_input_tokens_seen": 2381385, - "step": 113 - }, - { - "epoch": 0.01370768953285637, - "flos": 17058761724840.0, - "grad_norm": 4.267427503777891, - "learning_rate": 3.431119784311155e-06, - "loss": 1.1063, - "num_input_tokens_seen": 2400780, - "step": 114 - }, - { - "epoch": 0.01382793242349546, - "flos": 27992657291640.0, - "grad_norm": 8.556645751895863, - "learning_rate": 3.43744686339307e-06, - "loss": 1.0027, - "num_input_tokens_seen": 2422820, - "step": 115 - }, - { - "epoch": 0.013948175314134552, - "flos": 29538265929240.0, - "grad_norm": 6.443178209600743, - "learning_rate": 3.44371916188212e-06, - "loss": 1.1304, - "num_input_tokens_seen": 2443295, - "step": 116 - }, - { - "epoch": 0.014068418204773643, - "flos": 15955172468880.0, - "grad_norm": 6.416348866441016, - "learning_rate": 3.449937620235143e-06, - "loss": 1.0937, - "num_input_tokens_seen": 2463610, - "step": 117 - }, - { - "epoch": 0.014188661095412733, - "flos": 16926766630200.0, - "grad_norm": 2.3304922882129264, - "learning_rate": 3.456103154896722e-06, - "loss": 1.105, - "num_input_tokens_seen": 2484605, - "step": 118 - }, - { - "epoch": 0.014308903986051825, - "flos": 16822771087920.0, - "grad_norm": 6.505406991833373, - "learning_rate": 3.462216659109757e-06, - "loss": 1.1481, - "num_input_tokens_seen": 2504505, - "step": 119 - }, - { - "epoch": 0.014429146876690916, - "flos": 14672235573480.0, - "grad_norm": 4.840907684208743, - "learning_rate": 3.4682790036921077e-06, - "loss": 1.0719, - "num_input_tokens_seen": 2522885, - "step": 120 - }, - { - "epoch": 0.014549389767330006, - "flos": 14356753528800.0, - "grad_norm": 3.0979736473386104, - "learning_rate": 3.4742910377810193e-06, - "loss": 1.06, - "num_input_tokens_seen": 2540065, - "step": 121 - }, - { - "epoch": 0.014669632657969098, - "flos": 12757752019080.0, - "grad_norm": 10.339120618366573, - "learning_rate": 3.4802535895469042e-06, - "loss": 1.1049, - "num_input_tokens_seen": 2558535, - "step": 122 - }, - { - "epoch": 0.01478987554860819, - "flos": 16161845106360.0, - "grad_norm": 7.932249239839208, - "learning_rate": 3.4861674668779934e-06, - "loss": 1.1251, - "num_input_tokens_seen": 2576485, - "step": 123 - }, - { - "epoch": 0.01491011843924728, - "flos": 12178402431360.0, - "grad_norm": 4.875422532770557, - "learning_rate": 3.492033458037272e-06, - "loss": 1.0741, - "num_input_tokens_seen": 2594775, - "step": 124 - }, - { - "epoch": 0.01503036132988637, - "flos": 12520105657920.0, - "grad_norm": 5.305701000268301, - "learning_rate": 3.497852332293018e-06, - "loss": 1.0911, - "num_input_tokens_seen": 2610070, - "step": 125 - }, - { - "epoch": 0.015150604220525462, - "flos": 13385711275560.0, - "grad_norm": 3.334817238165068, - "learning_rate": 3.5036248405242356e-06, - 
"loss": 1.203, - "num_input_tokens_seen": 2628545, - "step": 126 - }, - { - "epoch": 0.015270847111164552, - "flos": 28148880566760.0, - "grad_norm": 4.159583339732558, - "learning_rate": 3.509351715802146e-06, - "loss": 1.0541, - "num_input_tokens_seen": 2649150, - "step": 127 - }, - { - "epoch": 0.015391090001803644, - "flos": 31269661133880.0, - "grad_norm": 4.2974964039306505, - "learning_rate": 3.5150336739488763e-06, - "loss": 1.0113, - "num_input_tokens_seen": 2671155, - "step": 128 - }, - { - "epoch": 0.015511332892442733, - "flos": 13413312227640.0, - "grad_norm": 4.168649405162202, - "learning_rate": 3.5206714140744143e-06, - "loss": 1.0526, - "num_input_tokens_seen": 2690930, - "step": 129 - }, - { - "epoch": 0.015631575783081827, - "flos": 17451558206520.0, - "grad_norm": 25.6032932547413, - "learning_rate": 3.5262656190928208e-06, - "loss": 1.0862, - "num_input_tokens_seen": 2708950, - "step": 130 - }, - { - "epoch": 0.015751818673720917, - "flos": 44616973040160.0, - "grad_norm": 0.9415205256959094, - "learning_rate": 3.5318169562186737e-06, - "loss": 0.9463, - "num_input_tokens_seen": 2777515, - "step": 131 - }, - { - "epoch": 0.015872061564360006, - "flos": 16979576932680.0, - "grad_norm": 7.330046899913508, - "learning_rate": 3.5373260774446292e-06, - "loss": 1.0538, - "num_input_tokens_seen": 2797685, - "step": 132 - }, - { - "epoch": 0.0159923044549991, - "flos": 16612603032600.0, - "grad_norm": 3.7645031536087568, - "learning_rate": 3.542793620000961e-06, - "loss": 1.1329, - "num_input_tokens_seen": 2816880, - "step": 133 - }, - { - "epoch": 0.01611254734563819, - "flos": 12652928614680.0, - "grad_norm": 3.574359228156389, - "learning_rate": 3.5482202067978894e-06, - "loss": 1.0872, - "num_input_tokens_seen": 2833810, - "step": 134 - }, - { - "epoch": 0.01623279023627728, - "flos": 14877651087000.0, - "grad_norm": 4.12289374937705, - "learning_rate": 3.553606446851471e-06, - "loss": 0.9783, - "num_input_tokens_seen": 2850270, - "step": 135 - }, - { - "epoch": 0.016353033126916373, - "flos": 11132314696560.0, - "grad_norm": 3.9486061775700594, - "learning_rate": 3.5589529356937613e-06, - "loss": 1.063, - "num_input_tokens_seen": 2868385, - "step": 136 - }, - { - "epoch": 0.016473276017555463, - "flos": 13334985959160.0, - "grad_norm": 5.558280801977605, - "learning_rate": 3.5642602557679627e-06, - "loss": 0.9931, - "num_input_tokens_seen": 2886555, - "step": 137 - }, - { - "epoch": 0.016593518908194552, - "flos": 17320268327760.0, - "grad_norm": 4.087186254190091, - "learning_rate": 3.569528976809202e-06, - "loss": 1.069, - "num_input_tokens_seen": 2903490, - "step": 138 - }, - { - "epoch": 0.016713761798833646, - "flos": 15878256632160.0, - "grad_norm": 7.663623980032177, - "learning_rate": 3.5747596562115522e-06, - "loss": 1.1105, - "num_input_tokens_seen": 2923825, - "step": 139 - }, - { - "epoch": 0.016834004689472735, - "flos": 12626707432800.0, - "grad_norm": 6.145526977489781, - "learning_rate": 3.5799528393819138e-06, - "loss": 1.1329, - "num_input_tokens_seen": 2942625, - "step": 140 - }, - { - "epoch": 0.016954247580111825, - "flos": 14565173875200.0, - "grad_norm": 3.421406334959197, - "learning_rate": 3.585109060081286e-06, - "loss": 1.1022, - "num_input_tokens_seen": 2962145, - "step": 141 - }, - { - "epoch": 0.017074490470750915, - "flos": 15693113957880.0, - "grad_norm": 9.241513457431457, - "learning_rate": 3.590228840753992e-06, - "loss": 1.0089, - "num_input_tokens_seen": 2982295, - "step": 142 - }, - { - "epoch": 0.01719473336139001, - "flos": 
11315403046320.0, - "grad_norm": 5.4434022987296675, - "learning_rate": 3.5953126928453423e-06, - "loss": 1.0979, - "num_input_tokens_seen": 2999565, - "step": 143 - }, - { - "epoch": 0.017314976252029098, - "flos": 15983110698120.0, - "grad_norm": 3.2249694290486444, - "learning_rate": 3.600361117108239e-06, - "loss": 1.0297, - "num_input_tokens_seen": 3019085, - "step": 144 - }, - { - "epoch": 0.017435219142668188, - "flos": 15642511287720.0, - "grad_norm": 7.645429415172362, - "learning_rate": 3.6053746038991616e-06, - "loss": 1.187, - "num_input_tokens_seen": 3037890, - "step": 145 - }, - { - "epoch": 0.01755546203330728, - "flos": 51738893550480.0, - "grad_norm": 0.973022085736994, - "learning_rate": 3.6103536334639843e-06, - "loss": 0.8513, - "num_input_tokens_seen": 3090875, - "step": 146 - }, - { - "epoch": 0.01767570492394637, - "flos": 18027657668880.0, - "grad_norm": 4.733844387421937, - "learning_rate": 3.615298676214041e-06, - "loss": 1.0767, - "num_input_tokens_seen": 3110875, - "step": 147 - }, - { - "epoch": 0.01779594781458546, - "flos": 14515000466880.0, - "grad_norm": 12.09936140913185, - "learning_rate": 3.6202101929928317e-06, - "loss": 1.1169, - "num_input_tokens_seen": 3129185, - "step": 148 - }, - { - "epoch": 0.017916190705224554, - "flos": 11499748520040.0, - "grad_norm": 3.209061940347569, - "learning_rate": 3.6250886353337413e-06, - "loss": 1.113, - "num_input_tokens_seen": 3146435, - "step": 149 - }, - { - "epoch": 0.018036433595863644, - "flos": 16585676634840.0, - "grad_norm": 4.754597296283787, - "learning_rate": 3.6299344457091488e-06, - "loss": 1.0983, - "num_input_tokens_seen": 3167015, - "step": 150 - }, - { - "epoch": 0.018156676486502734, - "flos": 13177352252280.0, - "grad_norm": 4.842362137359817, - "learning_rate": 3.634748057771256e-06, - "loss": 1.146, - "num_input_tokens_seen": 3182675, - "step": 151 - }, - { - "epoch": 0.018276919377141827, - "flos": 18108191569680.0, - "grad_norm": 3.9875519895602367, - "learning_rate": 3.639529896584965e-06, - "loss": 1.0856, - "num_input_tokens_seen": 3203770, - "step": 152 - }, - { - "epoch": 0.018397162267780917, - "flos": 14226107542800.0, - "grad_norm": 29.525293918215805, - "learning_rate": 3.6442803788531233e-06, - "loss": 1.1, - "num_input_tokens_seen": 3221450, - "step": 153 - }, - { - "epoch": 0.018517405158420007, - "flos": 19629388057440.0, - "grad_norm": 3.5627840163135645, - "learning_rate": 3.6489999131344357e-06, - "loss": 1.1763, - "num_input_tokens_seen": 3243945, - "step": 154 - }, - { - "epoch": 0.0186376480490591, - "flos": 13727874425520.0, - "grad_norm": 2.760961188585506, - "learning_rate": 3.653688900054313e-06, - "loss": 1.1323, - "num_input_tokens_seen": 3262195, - "step": 155 - }, - { - "epoch": 0.01875789093969819, - "flos": 18998546614320.0, - "grad_norm": 6.836712009695569, - "learning_rate": 3.6583477325089526e-06, - "loss": 0.9867, - "num_input_tokens_seen": 3282455, - "step": 156 - }, - { - "epoch": 0.01887813383033728, - "flos": 17320574943360.0, - "grad_norm": 4.098521240402717, - "learning_rate": 3.6629767958628916e-06, - "loss": 1.2611, - "num_input_tokens_seen": 3299550, - "step": 157 - }, - { - "epoch": 0.018998376720976373, - "flos": 10345403286120.0, - "grad_norm": 6.3784983890582545, - "learning_rate": 3.667576468140291e-06, - "loss": 1.0777, - "num_input_tokens_seen": 3317085, - "step": 158 - }, - { - "epoch": 0.019118619611615463, - "flos": 20878346396280.0, - "grad_norm": 15.09427567666382, - "learning_rate": 3.672147120210184e-06, - "loss": 1.1129, - 
"num_input_tokens_seen": 3333405, - "step": 159 - }, - { - "epoch": 0.019238862502254553, - "flos": 14829409356960.0, - "grad_norm": 9.026743678671156, - "learning_rate": 3.6766891159659177e-06, - "loss": 1.0982, - "num_input_tokens_seen": 3351535, - "step": 160 - }, - { - "epoch": 0.019359105392893646, - "flos": 15169978105800.0, - "grad_norm": 9.324817678187886, - "learning_rate": 3.6812028124990075e-06, - "loss": 1.0962, - "num_input_tokens_seen": 3368525, - "step": 161 - }, - { - "epoch": 0.019479348283532736, - "flos": 11520542605800.0, - "grad_norm": 4.878268393364617, - "learning_rate": 3.6856885602676016e-06, - "loss": 1.047, - "num_input_tokens_seen": 3384280, - "step": 162 - }, - { - "epoch": 0.019599591174171826, - "flos": 15695137620840.0, - "grad_norm": 10.820063949649823, - "learning_rate": 3.6901467032597733e-06, - "loss": 1.1726, - "num_input_tokens_seen": 3402485, - "step": 163 - }, - { - "epoch": 0.01971983406481092, - "flos": 13911606668040.0, - "grad_norm": 4.979759447040652, - "learning_rate": 3.694577579151804e-06, - "loss": 1.094, - "num_input_tokens_seen": 3420615, - "step": 164 - }, - { - "epoch": 0.01984007695545001, - "flos": 13544908722000.0, - "grad_norm": 5.879677391803861, - "learning_rate": 3.6989815194616703e-06, - "loss": 0.9722, - "num_input_tokens_seen": 3437530, - "step": 165 - }, - { - "epoch": 0.0199603198460891, - "flos": 14801624435520.0, - "grad_norm": 4.831026557206609, - "learning_rate": 3.703358849697888e-06, - "loss": 1.0252, - "num_input_tokens_seen": 3457160, - "step": 166 - }, - { - "epoch": 0.020080562736728192, - "flos": 15536461420920.0, - "grad_norm": 2.804133538498728, - "learning_rate": 3.7077098895038803e-06, - "loss": 1.0558, - "num_input_tokens_seen": 3476250, - "step": 167 - }, - { - "epoch": 0.020200805627367282, - "flos": 15405754111800.0, - "grad_norm": 7.1845025488045655, - "learning_rate": 3.712034952798045e-06, - "loss": 1.1955, - "num_input_tokens_seen": 3494085, - "step": 168 - }, - { - "epoch": 0.02032104851800637, - "flos": 23924694713040.0, - "grad_norm": 5.402956449969642, - "learning_rate": 3.7163343479096656e-06, - "loss": 1.0563, - "num_input_tokens_seen": 3515380, - "step": 169 - }, - { - "epoch": 0.020441291408645465, - "flos": 22591400439960.0, - "grad_norm": 4.207577980665231, - "learning_rate": 3.720608377710802e-06, - "loss": 1.0519, - "num_input_tokens_seen": 3535190, - "step": 170 - }, - { - "epoch": 0.020561534299284555, - "flos": 14829378695400.0, - "grad_norm": 5.665336939058139, - "learning_rate": 3.7248573397443277e-06, - "loss": 1.087, - "num_input_tokens_seen": 3553835, - "step": 171 - }, - { - "epoch": 0.020681777189923645, - "flos": 14908440841320.0, - "grad_norm": 5.130044640835767, - "learning_rate": 3.729081526348224e-06, - "loss": 1.2069, - "num_input_tokens_seen": 3572085, - "step": 172 - }, - { - "epoch": 0.020802020080562738, - "flos": 20126762651040.0, - "grad_norm": 3.6540311944107104, - "learning_rate": 3.7332812247762777e-06, - "loss": 1.0637, - "num_input_tokens_seen": 3593105, - "step": 173 - }, - { - "epoch": 0.020922262971201828, - "flos": 13962883892520.0, - "grad_norm": 3.8546934575412517, - "learning_rate": 3.737456717315293e-06, - "loss": 1.176, - "num_input_tokens_seen": 3611790, - "step": 174 - }, - { - "epoch": 0.021042505861840918, - "flos": 11077235438640.0, - "grad_norm": 18.06901712553321, - "learning_rate": 3.7416082813989552e-06, - "loss": 1.1371, - "num_input_tokens_seen": 3628505, - "step": 175 - }, - { - "epoch": 0.02116274875248001, - "flos": 15012865645440.0, - 
"grad_norm": 8.988880069551778, - "learning_rate": 3.745736189718439e-06, - "loss": 1.1294, - "num_input_tokens_seen": 3647480, - "step": 176 - }, - { - "epoch": 0.0212829916431191, - "flos": 17581008391680.0, - "grad_norm": 5.2927237028146426, - "learning_rate": 3.749840710329894e-06, - "loss": 0.9364, - "num_input_tokens_seen": 3667905, - "step": 177 - }, - { - "epoch": 0.02140323453375819, - "flos": 11780117530440.0, - "grad_norm": 13.328592665630122, - "learning_rate": 3.7539221067588938e-06, - "loss": 1.202, - "num_input_tokens_seen": 3681600, - "step": 178 - }, - { - "epoch": 0.021523477424397284, - "flos": 14406773629320.0, - "grad_norm": 5.0544646743172414, - "learning_rate": 3.757980638101964e-06, - "loss": 1.1459, - "num_input_tokens_seen": 3694815, - "step": 179 - }, - { - "epoch": 0.021643720315036374, - "flos": 18578946381120.0, - "grad_norm": 11.784833853727465, - "learning_rate": 3.7620165591252806e-06, - "loss": 1.1228, - "num_input_tokens_seen": 3712635, - "step": 180 - }, - { - "epoch": 0.021763963205675464, - "flos": 17632530908640.0, - "grad_norm": 14.496076381861059, - "learning_rate": 3.766030120360636e-06, - "loss": 1.1632, - "num_input_tokens_seen": 3731985, - "step": 181 - }, - { - "epoch": 0.021884206096314557, - "flos": 18370403388480.0, - "grad_norm": 5.171495498609534, - "learning_rate": 3.7700215681987578e-06, - "loss": 1.1085, - "num_input_tokens_seen": 3751475, - "step": 182 - }, - { - "epoch": 0.022004448986953647, - "flos": 14251408877880.0, - "grad_norm": 12.730475342319558, - "learning_rate": 3.7739911449800767e-06, - "loss": 1.0487, - "num_input_tokens_seen": 3771250, - "step": 183 - }, - { - "epoch": 0.022124691877592736, - "flos": 14536898368800.0, - "grad_norm": 3.344793281725067, - "learning_rate": 3.7779390890830114e-06, - "loss": 1.0295, - "num_input_tokens_seen": 3789620, - "step": 184 - }, - { - "epoch": 0.02224493476823183, - "flos": 16769960785440.0, - "grad_norm": 3.7944013913046892, - "learning_rate": 3.7818656350098723e-06, - "loss": 1.0817, - "num_input_tokens_seen": 3810290, - "step": 185 - }, - { - "epoch": 0.02236517765887092, - "flos": 11971392516720.0, - "grad_norm": 10.022454419468918, - "learning_rate": 3.7857710134704447e-06, - "loss": 0.9939, - "num_input_tokens_seen": 3828945, - "step": 186 - }, - { - "epoch": 0.02248542054951001, - "flos": 31087063369080.0, - "grad_norm": 4.260576887157061, - "learning_rate": 3.7896554514633234e-06, - "loss": 1.0087, - "num_input_tokens_seen": 3852950, - "step": 187 - }, - { - "epoch": 0.022605663440149103, - "flos": 16612388401680.0, - "grad_norm": 6.279279257362533, - "learning_rate": 3.7935191723550955e-06, - "loss": 1.0762, - "num_input_tokens_seen": 3871625, - "step": 188 - }, - { - "epoch": 0.022725906330788193, - "flos": 20674157345160.0, - "grad_norm": 4.7478998460369874, - "learning_rate": 3.797362395957408e-06, - "loss": 1.1152, - "num_input_tokens_seen": 3891910, - "step": 189 - }, - { - "epoch": 0.022846149221427282, - "flos": 17423098730760.0, - "grad_norm": 5.075921434460296, - "learning_rate": 3.8011853386020055e-06, - "loss": 1.0083, - "num_input_tokens_seen": 3912535, - "step": 190 - }, - { - "epoch": 0.022966392112066376, - "flos": 10975018266840.0, - "grad_norm": 5.4045506270613055, - "learning_rate": 3.804988213213804e-06, - "loss": 1.1124, - "num_input_tokens_seen": 3930495, - "step": 191 - }, - { - "epoch": 0.023086635002705466, - "flos": 39817362278400.0, - "grad_norm": 1.0149308400011066, - "learning_rate": 3.808771229382049e-06, - "loss": 0.8831, - 
"num_input_tokens_seen": 3989680, - "step": 192 - }, - { - "epoch": 0.023206877893344555, - "flos": 13698127164240.0, - "grad_norm": 4.475403640700176, - "learning_rate": 3.8125345934296324e-06, - "loss": 1.0835, - "num_input_tokens_seen": 4007710, - "step": 193 - }, - { - "epoch": 0.02332712078398365, - "flos": 16400227344960.0, - "grad_norm": 5.7168879578283915, - "learning_rate": 3.81627850848061e-06, - "loss": 1.0929, - "num_input_tokens_seen": 4028025, - "step": 194 - }, - { - "epoch": 0.02344736367462274, - "flos": 17373017307120.0, - "grad_norm": 10.56847289459222, - "learning_rate": 3.820003174525994e-06, - "loss": 1.0679, - "num_input_tokens_seen": 4047170, - "step": 195 - }, - { - "epoch": 0.02356760656526183, - "flos": 15327489166440.0, - "grad_norm": 4.780367900783967, - "learning_rate": 3.823708788487851e-06, - "loss": 1.0632, - "num_input_tokens_seen": 4063890, - "step": 196 - }, - { - "epoch": 0.02368784945590092, - "flos": 17924888589000.0, - "grad_norm": 8.59484678725495, - "learning_rate": 3.827395544281781e-06, - "loss": 1.0651, - "num_input_tokens_seen": 4085950, - "step": 197 - }, - { - "epoch": 0.02380809234654001, - "flos": 19626996455760.0, - "grad_norm": 10.03909403873937, - "learning_rate": 3.831063632877802e-06, - "loss": 1.0218, - "num_input_tokens_seen": 4105990, - "step": 198 - }, - { - "epoch": 0.0239283352371791, - "flos": 12941484261600.0, - "grad_norm": 6.651823909426758, - "learning_rate": 3.834713242359712e-06, - "loss": 0.9819, - "num_input_tokens_seen": 4123540, - "step": 199 - }, - { - "epoch": 0.02404857812781819, - "flos": 15194451578760.0, - "grad_norm": 4.083040629165836, - "learning_rate": 3.838344557982959e-06, - "loss": 1.098, - "num_input_tokens_seen": 4144640, - "step": 200 - }, - { - "epoch": 0.024168821018457284, - "flos": 11787598951080.0, - "grad_norm": 8.463579126519665, - "learning_rate": 3.841957762231063e-06, - "loss": 1.0775, - "num_input_tokens_seen": 4161890, - "step": 201 - }, - { - "epoch": 0.024289063909096374, - "flos": 16219499935320.0, - "grad_norm": 3.6241036984173514, - "learning_rate": 3.8455530348706454e-06, - "loss": 1.1066, - "num_input_tokens_seen": 4181210, - "step": 202 - }, - { - "epoch": 0.024409306799735464, - "flos": 12574050438120.0, - "grad_norm": 8.324784229678457, - "learning_rate": 3.849130553005099e-06, - "loss": 1.0047, - "num_input_tokens_seen": 4199145, - "step": 203 - }, - { - "epoch": 0.024529549690374557, - "flos": 15353649025200.0, - "grad_norm": 8.581095085773946, - "learning_rate": 3.852690491126933e-06, - "loss": 1.0632, - "num_input_tokens_seen": 4218915, - "step": 204 - }, - { - "epoch": 0.024649792581013647, - "flos": 18181305372960.0, - "grad_norm": 7.799080760292613, - "learning_rate": 3.856233021168845e-06, - "loss": 1.1302, - "num_input_tokens_seen": 4238330, - "step": 205 - }, - { - "epoch": 0.024770035471652737, - "flos": 24609787551960.0, - "grad_norm": 4.2451357212544005, - "learning_rate": 3.859758312553544e-06, - "loss": 1.1483, - "num_input_tokens_seen": 4260270, - "step": 206 - }, - { - "epoch": 0.02489027836229183, - "flos": 15273851001840.0, - "grad_norm": 12.493098070120496, - "learning_rate": 3.8632665322423735e-06, - "loss": 1.1488, - "num_input_tokens_seen": 4279645, - "step": 207 - }, - { - "epoch": 0.02501052125293092, - "flos": 16505204057160.0, - "grad_norm": 6.975601728181938, - "learning_rate": 3.866757844782762e-06, - "loss": 1.0797, - "num_input_tokens_seen": 4299870, - "step": 208 - }, - { - "epoch": 0.02513076414357001, - "flos": 18782767493520.0, - "grad_norm": 
7.558185954761751, - "learning_rate": 3.870232412354527e-06, - "loss": 1.1443, - "num_input_tokens_seen": 4316435, - "step": 209 - }, - { - "epoch": 0.025251007034209103, - "flos": 9585816873720.0, - "grad_norm": 4.919533599518555, - "learning_rate": 3.873690394815086e-06, - "loss": 1.1552, - "num_input_tokens_seen": 4332735, - "step": 210 - }, - { - "epoch": 0.025371249924848193, - "flos": 10634142902400.0, - "grad_norm": 11.417543103809319, - "learning_rate": 3.877131949743587e-06, - "loss": 1.1393, - "num_input_tokens_seen": 4349320, - "step": 211 - }, - { - "epoch": 0.025491492815487283, - "flos": 18183175728120.0, - "grad_norm": 7.966264164970538, - "learning_rate": 3.880557232483993e-06, - "loss": 1.0118, - "num_input_tokens_seen": 4368480, - "step": 212 - }, - { - "epoch": 0.025611735706126376, - "flos": 14645370498840.0, - "grad_norm": 11.119151431851854, - "learning_rate": 3.883966396187164e-06, - "loss": 1.1085, - "num_input_tokens_seen": 4387470, - "step": 213 - }, - { - "epoch": 0.025731978596765466, - "flos": 13518196955160.0, - "grad_norm": 5.426041379203454, - "learning_rate": 3.887359591851937e-06, - "loss": 1.1322, - "num_input_tokens_seen": 4404795, - "step": 214 - }, - { - "epoch": 0.025852221487404556, - "flos": 15747242707440.0, - "grad_norm": 7.184091041531808, - "learning_rate": 3.890736968365265e-06, - "loss": 1.1624, - "num_input_tokens_seen": 4424830, - "step": 215 - }, - { - "epoch": 0.02597246437804365, - "flos": 18893723209920.0, - "grad_norm": 5.389429109025911, - "learning_rate": 3.894098672541412e-06, - "loss": 1.0829, - "num_input_tokens_seen": 4445455, - "step": 216 - }, - { - "epoch": 0.02609270726868274, - "flos": 23299035073560.0, - "grad_norm": 6.200525926002826, - "learning_rate": 3.89744484916025e-06, - "loss": 0.9801, - "num_input_tokens_seen": 4466285, - "step": 217 - }, - { - "epoch": 0.02621295015932183, - "flos": 18678373350960.0, - "grad_norm": 12.151566493144195, - "learning_rate": 3.900775641004673e-06, - "loss": 1.1, - "num_input_tokens_seen": 4485320, - "step": 218 - }, - { - "epoch": 0.026333193049960922, - "flos": 30664948887960.0, - "grad_norm": 5.451820720352983, - "learning_rate": 3.904091188897156e-06, - "loss": 0.9555, - "num_input_tokens_seen": 4504175, - "step": 219 - }, - { - "epoch": 0.026453435940600012, - "flos": 12728250050280.0, - "grad_norm": 3.5762205427890636, - "learning_rate": 3.90739163173548e-06, - "loss": 1.0416, - "num_input_tokens_seen": 4521730, - "step": 220 - }, - { - "epoch": 0.026573678831239102, - "flos": 13461492634560.0, - "grad_norm": 14.491939014042211, - "learning_rate": 3.910677106527646e-06, - "loss": 1.1081, - "num_input_tokens_seen": 4538640, - "step": 221 - }, - { - "epoch": 0.026693921721878195, - "flos": 20833722730320.0, - "grad_norm": 6.416817634424933, - "learning_rate": 3.913947748426004e-06, - "loss": 1.0661, - "num_input_tokens_seen": 4555180, - "step": 222 - }, - { - "epoch": 0.026814164612517285, - "flos": 9970917303840.0, - "grad_norm": 12.509759373778689, - "learning_rate": 3.9172036907606136e-06, - "loss": 0.9864, - "num_input_tokens_seen": 4568380, - "step": 223 - }, - { - "epoch": 0.026934407503156375, - "flos": 16715893359000.0, - "grad_norm": 4.598730384762016, - "learning_rate": 3.920445065071855e-06, - "loss": 1.1677, - "num_input_tokens_seen": 4589265, - "step": 224 - }, - { - "epoch": 0.027054650393795468, - "flos": 20624627829600.0, - "grad_norm": 6.254827237384839, - "learning_rate": 3.923672001142322e-06, - "loss": 1.0008, - "num_input_tokens_seen": 4609295, - "step": 225 - }, 
- { - "epoch": 0.027174893284434558, - "flos": 22407852166800.0, - "grad_norm": 4.386894188597241, - "learning_rate": 3.926884627027996e-06, - "loss": 1.0764, - "num_input_tokens_seen": 4632785, - "step": 226 - }, - { - "epoch": 0.027295136175073648, - "flos": 15668671146480.0, - "grad_norm": 7.677638978833704, - "learning_rate": 3.930083069088744e-06, - "loss": 1.0088, - "num_input_tokens_seen": 4652505, - "step": 227 - }, - { - "epoch": 0.02741537906571274, - "flos": 43519608080880.0, - "grad_norm": 1.0118828128656803, - "learning_rate": 3.933267452018137e-06, - "loss": 0.8643, - "num_input_tokens_seen": 4712020, - "step": 228 - }, - { - "epoch": 0.02753562195635183, - "flos": 17502314184480.0, - "grad_norm": 7.018754630626534, - "learning_rate": 3.936437898872622e-06, - "loss": 1.067, - "num_input_tokens_seen": 4731715, - "step": 229 - }, - { - "epoch": 0.02765586484699092, - "flos": 24321354551280.0, - "grad_norm": 8.846145359405224, - "learning_rate": 3.9395945311000525e-06, - "loss": 1.0226, - "num_input_tokens_seen": 4753060, - "step": 230 - }, - { - "epoch": 0.027776107737630014, - "flos": 10476478533960.0, - "grad_norm": 10.121916124684903, - "learning_rate": 3.942737468567608e-06, - "loss": 1.1346, - "num_input_tokens_seen": 4770795, - "step": 231 - }, - { - "epoch": 0.027896350628269104, - "flos": 34257649405800.0, - "grad_norm": 12.365180541315842, - "learning_rate": 3.9458668295891026e-06, - "loss": 1.091, - "num_input_tokens_seen": 4792800, - "step": 232 - }, - { - "epoch": 0.028016593518908194, - "flos": 15402136047720.0, - "grad_norm": 11.116573614353474, - "learning_rate": 3.948982730951712e-06, - "loss": 1.0909, - "num_input_tokens_seen": 4810615, - "step": 233 - }, - { - "epoch": 0.028136836409547287, - "flos": 13070259892440.0, - "grad_norm": 17.946950091258543, - "learning_rate": 3.9520852879421254e-06, - "loss": 1.0503, - "num_input_tokens_seen": 4827680, - "step": 234 - }, - { - "epoch": 0.028257079300186377, - "flos": 22512246309360.0, - "grad_norm": 7.188024883178369, - "learning_rate": 3.955174614372137e-06, - "loss": 1.0393, - "num_input_tokens_seen": 4847165, - "step": 235 - }, - { - "epoch": 0.028377322190825467, - "flos": 16717058498280.0, - "grad_norm": 5.69518423263054, - "learning_rate": 3.9582508226037045e-06, - "loss": 1.0726, - "num_input_tokens_seen": 4867025, - "step": 236 - }, - { - "epoch": 0.02849756508146456, - "flos": 14536039845120.0, - "grad_norm": 14.541388012567138, - "learning_rate": 3.9613140235734636e-06, - "loss": 1.1487, - "num_input_tokens_seen": 4883725, - "step": 237 - }, - { - "epoch": 0.02861780797210365, - "flos": 10083774036960.0, - "grad_norm": 5.826748028208232, - "learning_rate": 3.96436432681674e-06, - "loss": 1.0379, - "num_input_tokens_seen": 4901435, - "step": 238 - }, - { - "epoch": 0.02873805086274274, - "flos": 18366110770080.0, - "grad_norm": 5.783562934707858, - "learning_rate": 3.967401840491044e-06, - "loss": 1.1208, - "num_input_tokens_seen": 4920435, - "step": 239 - }, - { - "epoch": 0.028858293753381833, - "flos": 12253815851640.0, - "grad_norm": 4.583811694526609, - "learning_rate": 3.97042667139909e-06, - "loss": 1.0992, - "num_input_tokens_seen": 4937480, - "step": 240 - }, - { - "epoch": 0.028978536644020923, - "flos": 16375600564200.0, - "grad_norm": 5.835653152585849, - "learning_rate": 3.973438925011327e-06, - "loss": 1.103, - "num_input_tokens_seen": 4955485, - "step": 241 - }, - { - "epoch": 0.029098779534660012, - "flos": 20178530460480.0, - "grad_norm": 10.810120778357463, - "learning_rate": 
3.976438705488002e-06, - "loss": 1.1488, - "num_input_tokens_seen": 4974865, - "step": 242 - }, - { - "epoch": 0.029219022425299106, - "flos": 9795034420680.0, - "grad_norm": 32.18541786348089, - "learning_rate": 3.9794261157007744e-06, - "loss": 1.1556, - "num_input_tokens_seen": 4992340, - "step": 243 - }, - { - "epoch": 0.029339265315938196, - "flos": 13776453432720.0, - "grad_norm": 14.909381017801657, - "learning_rate": 3.982401257253887e-06, - "loss": 1.0607, - "num_input_tokens_seen": 5010400, - "step": 244 - }, - { - "epoch": 0.029459508206577285, - "flos": 11079964317480.0, - "grad_norm": 18.076034174414733, - "learning_rate": 3.985364230504893e-06, - "loss": 1.1183, - "num_input_tokens_seen": 5028005, - "step": 245 - }, - { - "epoch": 0.02957975109721638, - "flos": 20100480146040.0, - "grad_norm": 5.751968046988284, - "learning_rate": 3.988315134584976e-06, - "loss": 1.0657, - "num_input_tokens_seen": 5047405, - "step": 246 - }, - { - "epoch": 0.02969999398785547, - "flos": 17293801853400.0, - "grad_norm": 7.7654761563011725, - "learning_rate": 3.991254067418851e-06, - "loss": 1.0354, - "num_input_tokens_seen": 5067665, - "step": 247 - }, - { - "epoch": 0.02982023687849456, - "flos": 25028682569280.0, - "grad_norm": 5.033686267192606, - "learning_rate": 3.994181125744254e-06, - "loss": 1.0576, - "num_input_tokens_seen": 5089190, - "step": 248 - }, - { - "epoch": 0.02994047976913365, - "flos": 18631542052680.0, - "grad_norm": 13.162389776885954, - "learning_rate": 3.99709640513106e-06, - "loss": 0.979, - "num_input_tokens_seen": 5109790, - "step": 249 - }, - { - "epoch": 0.03006072265977274, - "flos": 18234085013880.0, - "grad_norm": 8.816120242785594, - "learning_rate": 4e-06, - "loss": 1.0751, - "num_input_tokens_seen": 5129345, - "step": 250 - }, - { - "epoch": 0.03018096555041183, - "flos": 15720806894640.0, - "grad_norm": 9.551887956712937, - "learning_rate": 3.999999848300794e-06, - "loss": 1.122, - "num_input_tokens_seen": 5148050, - "step": 251 - }, - { - "epoch": 0.030301208441050925, - "flos": 21513879058080.0, - "grad_norm": 6.354211759146694, - "learning_rate": 3.999999393203203e-06, - "loss": 1.115, - "num_input_tokens_seen": 5170180, - "step": 252 - }, - { - "epoch": 0.030421451331690014, - "flos": 16794526243080.0, - "grad_norm": 16.074064492728844, - "learning_rate": 3.999998634707293e-06, - "loss": 1.0802, - "num_input_tokens_seen": 5189450, - "step": 253 - }, - { - "epoch": 0.030541694222329104, - "flos": 19890158782920.0, - "grad_norm": 6.692159865253473, - "learning_rate": 3.999997572813182e-06, - "loss": 1.2003, - "num_input_tokens_seen": 5206980, - "step": 254 - }, - { - "epoch": 0.030661937112968194, - "flos": 13177720191000.0, - "grad_norm": 10.81428080981781, - "learning_rate": 3.999996207521028e-06, - "loss": 1.0994, - "num_input_tokens_seen": 5225410, - "step": 255 - }, - { - "epoch": 0.030782180003607287, - "flos": 9139167596520.0, - "grad_norm": 7.371128941357435, - "learning_rate": 3.999994538831039e-06, - "loss": 1.0356, - "num_input_tokens_seen": 5241715, - "step": 256 - }, - { - "epoch": 0.030902422894246377, - "flos": 16589754622320.0, - "grad_norm": 6.548992970636276, - "learning_rate": 3.99999256674347e-06, - "loss": 1.0668, - "num_input_tokens_seen": 5261585, - "step": 257 - }, - { - "epoch": 0.031022665784885467, - "flos": 39416379160200.0, - "grad_norm": 0.9114375121145346, - "learning_rate": 3.999990291258618e-06, - "loss": 0.7904, - "num_input_tokens_seen": 5319995, - "step": 258 - }, - { - "epoch": 0.03114290867552456, - "flos": 
13701837213000.0,
-      "grad_norm": 4.997889645582807,
-      "learning_rate": 3.999987712376829e-06,
-      "loss": 1.0912,
-      "num_input_tokens_seen": 5338035,
-      "step": 259
-    },
-    {
-      "epoch": 0.031263151566163654,
-      "flos": 14881637089800.0,
-      "grad_norm": 9.317797082372083,
-      "learning_rate": 3.999984830098494e-06,
-      "loss": 1.0355,
-      "num_input_tokens_seen": 5357335,
-      "step": 260
-    },
[... 345 further deleted log_history entries (steps 261-605) omitted; each record carries the same fields as above ("epoch", "flos", "grad_norm", "learning_rate", "loss", "num_input_tokens_seen", "step"), with the learning rate decaying from 3.999981644424051e-06 at step 261 to 3.980912545874528e-06 at step 605 ...]
-    {
-      "epoch": 0.07286719172728913,
-      "flos": 21280034730360.0,
-      "grad_norm": 6.539684926880736,
-      "learning_rate": 3.980805032047746e-06,
-      "loss": 1.0815,
-      "num_input_tokens_seen": 12744410,
-      "step": 606
-    },
-    {
-      "epoch": 0.07298743461792821,
-      "flos": 12310397526000.0,
-      "grad_norm": 4.20869842053568,
-      "learning_rate": 3.980697217734415e-06,
-      "loss": 1.0262,
"num_input_tokens_seen": 12761870, - "step": 607 - }, - { - "epoch": 0.07310767750856731, - "flos": 13831226075040.0, - "grad_norm": 39.85871502050634, - "learning_rate": 3.980589102950891e-06, - "loss": 1.1378, - "num_input_tokens_seen": 12779755, - "step": 608 - }, - { - "epoch": 0.07322792039920639, - "flos": 20781464335920.0, - "grad_norm": 15.479768408489475, - "learning_rate": 3.9804806877135755e-06, - "loss": 1.0118, - "num_input_tokens_seen": 12797520, - "step": 609 - }, - { - "epoch": 0.07334816328984549, - "flos": 16691665178520.0, - "grad_norm": 6.017146723115533, - "learning_rate": 3.980371972038915e-06, - "loss": 1.0907, - "num_input_tokens_seen": 12817730, - "step": 610 - }, - { - "epoch": 0.07346840618048459, - "flos": 16320674614080.0, - "grad_norm": 12.763507942950701, - "learning_rate": 3.980262955943399e-06, - "loss": 1.0701, - "num_input_tokens_seen": 12837115, - "step": 611 - }, - { - "epoch": 0.07358864907112367, - "flos": 12520136319480.0, - "grad_norm": 5.691114011028372, - "learning_rate": 3.980153639443569e-06, - "loss": 1.1161, - "num_input_tokens_seen": 12852820, - "step": 612 - }, - { - "epoch": 0.07370889196176277, - "flos": 17136536085240.0, - "grad_norm": 10.472609625502331, - "learning_rate": 3.980044022556005e-06, - "loss": 1.0371, - "num_input_tokens_seen": 12872225, - "step": 613 - }, - { - "epoch": 0.07382913485240185, - "flos": 18422324505720.0, - "grad_norm": 3.6627940766159472, - "learning_rate": 3.9799341052973375e-06, - "loss": 0.9551, - "num_input_tokens_seen": 12891780, - "step": 614 - }, - { - "epoch": 0.07394937774304094, - "flos": 11945508612000.0, - "grad_norm": 3.3577135812594876, - "learning_rate": 3.979823887684241e-06, - "loss": 0.9615, - "num_input_tokens_seen": 12910440, - "step": 615 - }, - { - "epoch": 0.07406962063368003, - "flos": 14697598231680.0, - "grad_norm": 4.196885934504441, - "learning_rate": 3.979713369733434e-06, - "loss": 1.0904, - "num_input_tokens_seen": 12928025, - "step": 616 - }, - { - "epoch": 0.07418986352431912, - "flos": 15220151514120.0, - "grad_norm": 4.2106879165657585, - "learning_rate": 3.979602551461683e-06, - "loss": 1.0639, - "num_input_tokens_seen": 12948525, - "step": 617 - }, - { - "epoch": 0.07431010641495822, - "flos": 8457938114160.0, - "grad_norm": 4.219826342006203, - "learning_rate": 3.979491432885799e-06, - "loss": 1.1483, - "num_input_tokens_seen": 12964510, - "step": 618 - }, - { - "epoch": 0.0744303493055973, - "flos": 14880042688680.0, - "grad_norm": 3.312125130800477, - "learning_rate": 3.97938001402264e-06, - "loss": 1.0526, - "num_input_tokens_seen": 12983355, - "step": 619 - }, - { - "epoch": 0.0745505921962364, - "flos": 11499349919760.0, - "grad_norm": 8.969099190855799, - "learning_rate": 3.979268294889105e-06, - "loss": 1.0334, - "num_input_tokens_seen": 12998625, - "step": 620 - }, - { - "epoch": 0.07467083508687548, - "flos": 36430604068680.0, - "grad_norm": 4.061233557858327, - "learning_rate": 3.979156275502143e-06, - "loss": 0.9808, - "num_input_tokens_seen": 13022005, - "step": 621 - }, - { - "epoch": 0.07479107797751458, - "flos": 12416723346840.0, - "grad_norm": 4.324041650689712, - "learning_rate": 3.979043955878749e-06, - "loss": 1.147, - "num_input_tokens_seen": 13039570, - "step": 622 - }, - { - "epoch": 0.07491132086815366, - "flos": 16688353730040.0, - "grad_norm": 3.8782191287412666, - "learning_rate": 3.978931336035959e-06, - "loss": 1.0527, - "num_input_tokens_seen": 13058100, - "step": 623 - }, - { - "epoch": 0.07503156375879276, - "flos": 14305445642760.0, - 
"grad_norm": 5.817387976459224, - "learning_rate": 3.9788184159908595e-06, - "loss": 1.0441, - "num_input_tokens_seen": 13074950, - "step": 624 - }, - { - "epoch": 0.07515180664943186, - "flos": 10680882216000.0, - "grad_norm": 3.6978146104654277, - "learning_rate": 3.97870519576058e-06, - "loss": 1.0377, - "num_input_tokens_seen": 13091095, - "step": 625 - }, - { - "epoch": 0.07527204954007094, - "flos": 15117535742040.0, - "grad_norm": 5.409631454409272, - "learning_rate": 3.978591675362295e-06, - "loss": 1.0271, - "num_input_tokens_seen": 13109530, - "step": 626 - }, - { - "epoch": 0.07539229243071004, - "flos": 15143756923920.0, - "grad_norm": 3.5072884284966506, - "learning_rate": 3.978477854813226e-06, - "loss": 1.1026, - "num_input_tokens_seen": 13128590, - "step": 627 - }, - { - "epoch": 0.07551253532134912, - "flos": 9191977899000.0, - "grad_norm": 3.99043333927137, - "learning_rate": 3.97836373413064e-06, - "loss": 1.0496, - "num_input_tokens_seen": 13146365, - "step": 628 - }, - { - "epoch": 0.07563277821198822, - "flos": 13623541606080.0, - "grad_norm": 4.131449656776601, - "learning_rate": 3.978249313331848e-06, - "loss": 0.9797, - "num_input_tokens_seen": 13164315, - "step": 629 - }, - { - "epoch": 0.07575302110262731, - "flos": 13859593566120.0, - "grad_norm": 4.3032341527011555, - "learning_rate": 3.978134592434208e-06, - "loss": 0.8444, - "num_input_tokens_seen": 13181785, - "step": 630 - }, - { - "epoch": 0.0758732639932664, - "flos": 48667452981480.0, - "grad_norm": 1.0098256548925233, - "learning_rate": 3.978019571455123e-06, - "loss": 0.876, - "num_input_tokens_seen": 13233450, - "step": 631 - }, - { - "epoch": 0.07599350688390549, - "flos": 13465662606720.0, - "grad_norm": 3.2907527829942165, - "learning_rate": 3.977904250412042e-06, - "loss": 1.0667, - "num_input_tokens_seen": 13252125, - "step": 632 - }, - { - "epoch": 0.07611374977454458, - "flos": 14960392620120.0, - "grad_norm": 3.1002106687273243, - "learning_rate": 3.97778862932246e-06, - "loss": 1.0877, - "num_input_tokens_seen": 13269010, - "step": 633 - }, - { - "epoch": 0.07623399266518367, - "flos": 13124480626680.0, - "grad_norm": 4.424762335822753, - "learning_rate": 3.9776727082039144e-06, - "loss": 1.1698, - "num_input_tokens_seen": 13285700, - "step": 634 - }, - { - "epoch": 0.07635423555582276, - "flos": 31920248621640.0, - "grad_norm": 0.8207586619175401, - "learning_rate": 3.977556487073991e-06, - "loss": 0.8097, - "num_input_tokens_seen": 13339975, - "step": 635 - }, - { - "epoch": 0.07647447844646185, - "flos": 15457675229040.0, - "grad_norm": 2.2115528342248836, - "learning_rate": 3.97743996595032e-06, - "loss": 1.0299, - "num_input_tokens_seen": 13359735, - "step": 636 - }, - { - "epoch": 0.07659472133710095, - "flos": 16928177061960.0, - "grad_norm": 2.6492292182915778, - "learning_rate": 3.9773231448505804e-06, - "loss": 1.044, - "num_input_tokens_seen": 13381245, - "step": 637 - }, - { - "epoch": 0.07671496422774003, - "flos": 15248519005200.0, - "grad_norm": 3.072393047311885, - "learning_rate": 3.977206023792491e-06, - "loss": 0.9952, - "num_input_tokens_seen": 13400855, - "step": 638 - }, - { - "epoch": 0.07683520711837913, - "flos": 12022179156240.0, - "grad_norm": 3.97786584865552, - "learning_rate": 3.97708860279382e-06, - "loss": 1.0342, - "num_input_tokens_seen": 13418685, - "step": 639 - }, - { - "epoch": 0.07695545000901821, - "flos": 16690745331720.0, - "grad_norm": 5.223217359828394, - "learning_rate": 3.97697088187238e-06, - "loss": 1.0183, - "num_input_tokens_seen": 13438920, - 
"step": 640 - }, - { - "epoch": 0.07707569289965731, - "flos": 12491216920320.0, - "grad_norm": 3.6599235664266216, - "learning_rate": 3.976852861046029e-06, - "loss": 1.1354, - "num_input_tokens_seen": 13455255, - "step": 641 - }, - { - "epoch": 0.0771959357902964, - "flos": 18342986405760.0, - "grad_norm": 2.709119291459434, - "learning_rate": 3.97673454033267e-06, - "loss": 1.028, - "num_input_tokens_seen": 13477075, - "step": 642 - }, - { - "epoch": 0.07731617868093549, - "flos": 14068995082440.0, - "grad_norm": 3.221217718706482, - "learning_rate": 3.976615919750254e-06, - "loss": 1.0488, - "num_input_tokens_seen": 13494495, - "step": 643 - }, - { - "epoch": 0.07743642157157458, - "flos": 15144155524200.0, - "grad_norm": 3.5526376032638782, - "learning_rate": 3.976496999316775e-06, - "loss": 1.0857, - "num_input_tokens_seen": 13512970, - "step": 644 - }, - { - "epoch": 0.07755666446221367, - "flos": 14169219252840.0, - "grad_norm": 5.194650341085307, - "learning_rate": 3.976377779050271e-06, - "loss": 1.072, - "num_input_tokens_seen": 13530820, - "step": 645 - }, - { - "epoch": 0.07767690735285276, - "flos": 16796089982640.0, - "grad_norm": 5.019926989203001, - "learning_rate": 3.976258258968831e-06, - "loss": 1.0612, - "num_input_tokens_seen": 13549085, - "step": 646 - }, - { - "epoch": 0.07779715024349185, - "flos": 15799225147800.0, - "grad_norm": 3.737884532322733, - "learning_rate": 3.976138439090583e-06, - "loss": 0.9631, - "num_input_tokens_seen": 13566885, - "step": 647 - }, - { - "epoch": 0.07791739313413094, - "flos": 14877927041040.0, - "grad_norm": 4.596766512982569, - "learning_rate": 3.976018319433706e-06, - "loss": 1.0651, - "num_input_tokens_seen": 13584150, - "step": 648 - }, - { - "epoch": 0.07803763602477004, - "flos": 13697176655880.0, - "grad_norm": 4.22378081839665, - "learning_rate": 3.9758979000164205e-06, - "loss": 1.155, - "num_input_tokens_seen": 13600690, - "step": 649 - }, - { - "epoch": 0.07815787891540912, - "flos": 16140315143160.0, - "grad_norm": 5.4344498421339695, - "learning_rate": 3.975777180856995e-06, - "loss": 0.9373, - "num_input_tokens_seen": 13619530, - "step": 650 - }, - { - "epoch": 0.07827812180604822, - "flos": 16140591097200.0, - "grad_norm": 5.0608870209039925, - "learning_rate": 3.975656161973742e-06, - "loss": 1.0758, - "num_input_tokens_seen": 13638335, - "step": 651 - }, - { - "epoch": 0.0783983646966873, - "flos": 15432251247720.0, - "grad_norm": 5.956038275335028, - "learning_rate": 3.9755348433850194e-06, - "loss": 1.1083, - "num_input_tokens_seen": 13653395, - "step": 652 - }, - { - "epoch": 0.0785186075873264, - "flos": 43402826668080.0, - "grad_norm": 1.1179538046039807, - "learning_rate": 3.975413225109232e-06, - "loss": 0.9476, - "num_input_tokens_seen": 13713665, - "step": 653 - }, - { - "epoch": 0.0786388504779655, - "flos": 16979025024600.0, - "grad_norm": 6.434437736364717, - "learning_rate": 3.975291307164829e-06, - "loss": 1.1559, - "num_input_tokens_seen": 13732030, - "step": 654 - }, - { - "epoch": 0.07875909336860458, - "flos": 10712806448040.0, - "grad_norm": 4.174984805142283, - "learning_rate": 3.975169089570306e-06, - "loss": 1.0857, - "num_input_tokens_seen": 13750125, - "step": 655 - }, - { - "epoch": 0.07887933625924368, - "flos": 15799746394320.0, - "grad_norm": 4.024794503409799, - "learning_rate": 3.975046572344202e-06, - "loss": 1.1493, - "num_input_tokens_seen": 13766305, - "step": 656 - }, - { - "epoch": 0.07899957914988276, - "flos": 14751144411600.0, - "grad_norm": 5.566620233003276, - 
"learning_rate": 3.974923755505103e-06, - "loss": 0.9417, - "num_input_tokens_seen": 13785255, - "step": 657 - }, - { - "epoch": 0.07911982204052186, - "flos": 16350391213800.0, - "grad_norm": 2.819985917644645, - "learning_rate": 3.974800639071641e-06, - "loss": 1.139, - "num_input_tokens_seen": 13805695, - "step": 658 - }, - { - "epoch": 0.07924006493116094, - "flos": 16428104251080.0, - "grad_norm": 3.6018779889527415, - "learning_rate": 3.974677223062492e-06, - "loss": 1.2322, - "num_input_tokens_seen": 13822630, - "step": 659 - }, - { - "epoch": 0.07936030782180004, - "flos": 11656768995720.0, - "grad_norm": 4.735232637485039, - "learning_rate": 3.974553507496378e-06, - "loss": 0.9542, - "num_input_tokens_seen": 13840925, - "step": 660 - }, - { - "epoch": 0.07948055071243913, - "flos": 16875274774800.0, - "grad_norm": 4.597194288667685, - "learning_rate": 3.974429492392068e-06, - "loss": 1.1161, - "num_input_tokens_seen": 13860670, - "step": 661 - }, - { - "epoch": 0.07960079360307822, - "flos": 13885508132400.0, - "grad_norm": 7.024577997639565, - "learning_rate": 3.974305177768373e-06, - "loss": 1.1333, - "num_input_tokens_seen": 13878600, - "step": 662 - }, - { - "epoch": 0.07972103649371731, - "flos": 16717119821400.0, - "grad_norm": 4.621377093027782, - "learning_rate": 3.974180563644152e-06, - "loss": 1.0832, - "num_input_tokens_seen": 13896885, - "step": 663 - }, - { - "epoch": 0.0798412793843564, - "flos": 11839734699240.0, - "grad_norm": 3.4304482000921723, - "learning_rate": 3.97405565003831e-06, - "loss": 1.1163, - "num_input_tokens_seen": 13912690, - "step": 664 - }, - { - "epoch": 0.07996152227499549, - "flos": 12915293741280.0, - "grad_norm": 5.46506554874014, - "learning_rate": 3.973930436969794e-06, - "loss": 1.0092, - "num_input_tokens_seen": 13930865, - "step": 665 - }, - { - "epoch": 0.08008176516563459, - "flos": 14619670563480.0, - "grad_norm": 4.307980588028012, - "learning_rate": 3.973804924457602e-06, - "loss": 1.0873, - "num_input_tokens_seen": 13948665, - "step": 666 - }, - { - "epoch": 0.08020200805627367, - "flos": 22697480968320.0, - "grad_norm": 3.789804916796252, - "learning_rate": 3.973679112520771e-06, - "loss": 1.0887, - "num_input_tokens_seen": 13970100, - "step": 667 - }, - { - "epoch": 0.08032225094691277, - "flos": 12598922511360.0, - "grad_norm": 6.916976676642393, - "learning_rate": 3.973553001178389e-06, - "loss": 1.2198, - "num_input_tokens_seen": 13987325, - "step": 668 - }, - { - "epoch": 0.08044249383755185, - "flos": 17111296073280.0, - "grad_norm": 13.114544291459625, - "learning_rate": 3.973426590449585e-06, - "loss": 0.9857, - "num_input_tokens_seen": 14005000, - "step": 669 - }, - { - "epoch": 0.08056273672819095, - "flos": 12915539033760.0, - "grad_norm": 4.389287887549647, - "learning_rate": 3.9732998803535364e-06, - "loss": 0.9884, - "num_input_tokens_seen": 14022780, - "step": 670 - }, - { - "epoch": 0.08068297961883003, - "flos": 13959112520640.0, - "grad_norm": 3.6937900495859157, - "learning_rate": 3.973172870909465e-06, - "loss": 1.0897, - "num_input_tokens_seen": 14037265, - "step": 671 - }, - { - "epoch": 0.08080322250946913, - "flos": 16455061310400.0, - "grad_norm": 5.73387073097836, - "learning_rate": 3.973045562136638e-06, - "loss": 1.0323, - "num_input_tokens_seen": 14053800, - "step": 672 - }, - { - "epoch": 0.08092346540010822, - "flos": 15459790876680.0, - "grad_norm": 9.331096744492479, - "learning_rate": 3.972917954054368e-06, - "loss": 1.1384, - "num_input_tokens_seen": 14072075, - "step": 673 - }, - { - "epoch": 
0.08104370829074731, - "flos": 14934140776680.0, - "grad_norm": 5.707891776648848, - "learning_rate": 3.972790046682013e-06, - "loss": 1.0484, - "num_input_tokens_seen": 14090470, - "step": 674 - }, - { - "epoch": 0.0811639511813864, - "flos": 14251807478160.0, - "grad_norm": 3.368434878527977, - "learning_rate": 3.972661840038977e-06, - "loss": 1.0184, - "num_input_tokens_seen": 14110480, - "step": 675 - }, - { - "epoch": 0.08128419407202549, - "flos": 11918735522040.0, - "grad_norm": 3.6383553826477555, - "learning_rate": 3.972533334144707e-06, - "loss": 1.0556, - "num_input_tokens_seen": 14127125, - "step": 676 - }, - { - "epoch": 0.08140443696266458, - "flos": 16901066694840.0, - "grad_norm": 6.799853424287679, - "learning_rate": 3.972404529018699e-06, - "loss": 1.0123, - "num_input_tokens_seen": 14146705, - "step": 677 - }, - { - "epoch": 0.08152467985330367, - "flos": 17347348033320.0, - "grad_norm": 7.087215152782239, - "learning_rate": 3.972275424680493e-06, - "loss": 1.0819, - "num_input_tokens_seen": 14166535, - "step": 678 - }, - { - "epoch": 0.08164492274394276, - "flos": 13701806551440.0, - "grad_norm": 7.212691237428274, - "learning_rate": 3.972146021149673e-06, - "loss": 1.1308, - "num_input_tokens_seen": 14184530, - "step": 679 - }, - { - "epoch": 0.08176516563458186, - "flos": 10476938457360.0, - "grad_norm": 4.417483032401226, - "learning_rate": 3.972016318445868e-06, - "loss": 1.023, - "num_input_tokens_seen": 14202250, - "step": 680 - }, - { - "epoch": 0.08188540852522094, - "flos": 16062479459640.0, - "grad_norm": 3.8005798851061834, - "learning_rate": 3.971886316588757e-06, - "loss": 1.1401, - "num_input_tokens_seen": 14222475, - "step": 681 - }, - { - "epoch": 0.08200565141586004, - "flos": 13806721940520.0, - "grad_norm": 5.975764601558715, - "learning_rate": 3.9717560155980595e-06, - "loss": 0.9648, - "num_input_tokens_seen": 14237845, - "step": 682 - }, - { - "epoch": 0.08212589430649912, - "flos": 14619486594120.0, - "grad_norm": 3.681112155373625, - "learning_rate": 3.971625415493542e-06, - "loss": 1.141, - "num_input_tokens_seen": 14255885, - "step": 683 - }, - { - "epoch": 0.08224613719713822, - "flos": 18470259620160.0, - "grad_norm": 7.989480493340402, - "learning_rate": 3.971494516295017e-06, - "loss": 1.1088, - "num_input_tokens_seen": 14275055, - "step": 684 - }, - { - "epoch": 0.08236638008777732, - "flos": 16900606771440.0, - "grad_norm": 3.9034206061592784, - "learning_rate": 3.971363318022341e-06, - "loss": 1.0674, - "num_input_tokens_seen": 14296115, - "step": 685 - }, - { - "epoch": 0.0824866229784164, - "flos": 27702445920480.0, - "grad_norm": 5.403900341490101, - "learning_rate": 3.971231820695417e-06, - "loss": 0.9163, - "num_input_tokens_seen": 14319450, - "step": 686 - }, - { - "epoch": 0.0826068658690555, - "flos": 16425007433520.0, - "grad_norm": 2.89608529364661, - "learning_rate": 3.971100024334193e-06, - "loss": 1.0469, - "num_input_tokens_seen": 14336690, - "step": 687 - }, - { - "epoch": 0.08272710875969458, - "flos": 15008051780520.0, - "grad_norm": 5.3717146303073475, - "learning_rate": 3.970967928958663e-06, - "loss": 1.0949, - "num_input_tokens_seen": 14353525, - "step": 688 - }, - { - "epoch": 0.08284735165033368, - "flos": 13518810186360.0, - "grad_norm": 3.0965352477351695, - "learning_rate": 3.970835534588865e-06, - "loss": 1.0628, - "num_input_tokens_seen": 14370740, - "step": 689 - }, - { - "epoch": 0.08296759454097276, - "flos": 11840225284200.0, - "grad_norm": 5.652306293608617, - "learning_rate": 3.970702841244883e-06, - 
"loss": 1.0904, - "num_input_tokens_seen": 14388780, - "step": 690 - }, - { - "epoch": 0.08308783743161186, - "flos": 12758395911840.0, - "grad_norm": 4.1550031813920345, - "learning_rate": 3.970569848946847e-06, - "loss": 1.0443, - "num_input_tokens_seen": 14408315, - "step": 691 - }, - { - "epoch": 0.08320808032225095, - "flos": 10655304926880.0, - "grad_norm": 4.187122157116766, - "learning_rate": 3.970436557714932e-06, - "loss": 1.0283, - "num_input_tokens_seen": 14424555, - "step": 692 - }, - { - "epoch": 0.08332832321289003, - "flos": 15904876414320.0, - "grad_norm": 5.422498507747911, - "learning_rate": 3.970302967569358e-06, - "loss": 1.083, - "num_input_tokens_seen": 14442865, - "step": 693 - }, - { - "epoch": 0.08344856610352913, - "flos": 17582388161880.0, - "grad_norm": 5.596661369468147, - "learning_rate": 3.9701690785303896e-06, - "loss": 0.9097, - "num_input_tokens_seen": 14461780, - "step": 694 - }, - { - "epoch": 0.08356880899416821, - "flos": 18051027325680.0, - "grad_norm": 5.580704805715855, - "learning_rate": 3.970034890618339e-06, - "loss": 1.1002, - "num_input_tokens_seen": 14481190, - "step": 695 - }, - { - "epoch": 0.08368905188480731, - "flos": 17320636266480.0, - "grad_norm": 4.737031600463423, - "learning_rate": 3.969900403853562e-06, - "loss": 1.1024, - "num_input_tokens_seen": 14499950, - "step": 696 - }, - { - "epoch": 0.08380929477544641, - "flos": 12781121675880.0, - "grad_norm": 3.669223298666697, - "learning_rate": 3.96976561825646e-06, - "loss": 1.0136, - "num_input_tokens_seen": 14516760, - "step": 697 - }, - { - "epoch": 0.08392953766608549, - "flos": 18709929644280.0, - "grad_norm": 5.0640350651785, - "learning_rate": 3.969630533847479e-06, - "loss": 1.1022, - "num_input_tokens_seen": 14535440, - "step": 698 - }, - { - "epoch": 0.08404978055672459, - "flos": 15983202682800.0, - "grad_norm": 9.889347132472546, - "learning_rate": 3.969495150647113e-06, - "loss": 1.0688, - "num_input_tokens_seen": 14553330, - "step": 699 - }, - { - "epoch": 0.08417002344736367, - "flos": 17661082369080.0, - "grad_norm": 5.261521367574252, - "learning_rate": 3.969359468675899e-06, - "loss": 0.9946, - "num_input_tokens_seen": 14573180, - "step": 700 - }, - { - "epoch": 0.08429026633800277, - "flos": 11997184436760.0, - "grad_norm": 3.4309531550732557, - "learning_rate": 3.969223487954418e-06, - "loss": 1.1196, - "num_input_tokens_seen": 14590360, - "step": 701 - }, - { - "epoch": 0.08441050922864185, - "flos": 16953233104560.0, - "grad_norm": 4.226537454076091, - "learning_rate": 3.969087208503301e-06, - "loss": 1.0554, - "num_input_tokens_seen": 14610160, - "step": 702 - }, - { - "epoch": 0.08453075211928095, - "flos": 18159376809480.0, - "grad_norm": 4.180519320703049, - "learning_rate": 3.968950630343219e-06, - "loss": 1.061, - "num_input_tokens_seen": 14626865, - "step": 703 - }, - { - "epoch": 0.08465099500992004, - "flos": 13855576901760.0, - "grad_norm": 3.046596573065773, - "learning_rate": 3.968813753494892e-06, - "loss": 1.1564, - "num_input_tokens_seen": 14644745, - "step": 704 - }, - { - "epoch": 0.08477123790055913, - "flos": 20912723553120.0, - "grad_norm": 5.832067225064864, - "learning_rate": 3.968676577979084e-06, - "loss": 0.982, - "num_input_tokens_seen": 14664015, - "step": 705 - }, - { - "epoch": 0.08489148079119822, - "flos": 13203573434160.0, - "grad_norm": 47.197334447045314, - "learning_rate": 3.968539103816605e-06, - "loss": 1.0109, - "num_input_tokens_seen": 14681535, - "step": 706 - }, - { - "epoch": 0.0850117236818373, - "flos": 
16686176759280.0, - "grad_norm": 3.2500793740776817, - "learning_rate": 3.9684013310283085e-06, - "loss": 1.1152, - "num_input_tokens_seen": 14699940, - "step": 707 - }, - { - "epoch": 0.0851319665724764, - "flos": 29014915446240.0, - "grad_norm": 3.1491218372178125, - "learning_rate": 3.9682632596350956e-06, - "loss": 0.8592, - "num_input_tokens_seen": 14720825, - "step": 708 - }, - { - "epoch": 0.0852522094631155, - "flos": 11229687341880.0, - "grad_norm": 4.024184311378921, - "learning_rate": 3.968124889657911e-06, - "loss": 1.011, - "num_input_tokens_seen": 14735645, - "step": 709 - }, - { - "epoch": 0.08537245235375458, - "flos": 10286583317880.0, - "grad_norm": 4.024256828363454, - "learning_rate": 3.967986221117746e-06, - "loss": 1.14, - "num_input_tokens_seen": 14751305, - "step": 710 - }, - { - "epoch": 0.08549269524439368, - "flos": 18841219523040.0, - "grad_norm": 3.475316062111097, - "learning_rate": 3.967847254035635e-06, - "loss": 1.0987, - "num_input_tokens_seen": 14770410, - "step": 711 - }, - { - "epoch": 0.08561293813503276, - "flos": 9585632904360.0, - "grad_norm": 3.596788133886387, - "learning_rate": 3.967707988432661e-06, - "loss": 1.0892, - "num_input_tokens_seen": 14787835, - "step": 712 - }, - { - "epoch": 0.08573318102567186, - "flos": 19182340179960.0, - "grad_norm": 3.8487230090452087, - "learning_rate": 3.967568424329949e-06, - "loss": 1.1101, - "num_input_tokens_seen": 14807980, - "step": 713 - }, - { - "epoch": 0.08585342391631094, - "flos": 48193478706240.0, - "grad_norm": 0.9727746669276084, - "learning_rate": 3.967428561748671e-06, - "loss": 0.8253, - "num_input_tokens_seen": 14875670, - "step": 714 - }, - { - "epoch": 0.08597366680695004, - "flos": 15957778701480.0, - "grad_norm": 2.5440758611236616, - "learning_rate": 3.967288400710045e-06, - "loss": 1.0915, - "num_input_tokens_seen": 14894855, - "step": 715 - }, - { - "epoch": 0.08609390969758914, - "flos": 16743862249800.0, - "grad_norm": 28.479982004910045, - "learning_rate": 3.9671479412353335e-06, - "loss": 1.1125, - "num_input_tokens_seen": 14913040, - "step": 716 - }, - { - "epoch": 0.08621415258822822, - "flos": 18422263182600.0, - "grad_norm": 7.485963019811438, - "learning_rate": 3.967007183345843e-06, - "loss": 0.9661, - "num_input_tokens_seen": 14932615, - "step": 717 - }, - { - "epoch": 0.08633439547886732, - "flos": 9585080996280.0, - "grad_norm": 5.174868175652011, - "learning_rate": 3.966866127062927e-06, - "loss": 1.1231, - "num_input_tokens_seen": 14949460, - "step": 718 - }, - { - "epoch": 0.0864546383695064, - "flos": 41338865381760.0, - "grad_norm": 0.9160231730266044, - "learning_rate": 3.966724772407982e-06, - "loss": 0.9145, - "num_input_tokens_seen": 15006695, - "step": 719 - }, - { - "epoch": 0.0865748812601455, - "flos": 14225800927200.0, - "grad_norm": 3.4528363827878827, - "learning_rate": 3.966583119402454e-06, - "loss": 1.1203, - "num_input_tokens_seen": 15023180, - "step": 720 - }, - { - "epoch": 0.08669512415078459, - "flos": 25161014941080.0, - "grad_norm": 2.6554147641338592, - "learning_rate": 3.9664411680678305e-06, - "loss": 1.0515, - "num_input_tokens_seen": 15044655, - "step": 721 - }, - { - "epoch": 0.08681536704142367, - "flos": 47008374379560.0, - "grad_norm": 0.8854419405452845, - "learning_rate": 3.966298918425644e-06, - "loss": 0.8723, - "num_input_tokens_seen": 15101865, - "step": 722 - }, - { - "epoch": 0.08693560993206277, - "flos": 24634199701800.0, - "grad_norm": 2.8679106837135464, - "learning_rate": 3.966156370497476e-06, - "loss": 1.0573, - 
"num_input_tokens_seen": 15125195, - "step": 723 - }, - { - "epoch": 0.08705585282270185, - "flos": 16455643880040.0, - "grad_norm": 3.663979470424525, - "learning_rate": 3.96601352430495e-06, - "loss": 1.1109, - "num_input_tokens_seen": 15144685, - "step": 724 - }, - { - "epoch": 0.08717609571334095, - "flos": 21017914896240.0, - "grad_norm": 26.191875520124913, - "learning_rate": 3.965870379869735e-06, - "loss": 1.0535, - "num_input_tokens_seen": 15166450, - "step": 725 - }, - { - "epoch": 0.08729633860398003, - "flos": 14671806311640.0, - "grad_norm": 4.329112907616817, - "learning_rate": 3.965726937213547e-06, - "loss": 1.0885, - "num_input_tokens_seen": 15184805, - "step": 726 - }, - { - "epoch": 0.08741658149461913, - "flos": 13020362438160.0, - "grad_norm": 3.1412963149712807, - "learning_rate": 3.965583196358144e-06, - "loss": 1.0351, - "num_input_tokens_seen": 15203560, - "step": 727 - }, - { - "epoch": 0.08753682438525823, - "flos": 13278588254160.0, - "grad_norm": 6.370353351655712, - "learning_rate": 3.965439157325335e-06, - "loss": 0.9654, - "num_input_tokens_seen": 15220645, - "step": 728 - }, - { - "epoch": 0.08765706727589731, - "flos": 19779816297720.0, - "grad_norm": 6.421729761416476, - "learning_rate": 3.965294820136968e-06, - "loss": 0.9757, - "num_input_tokens_seen": 15242165, - "step": 729 - }, - { - "epoch": 0.08777731016653641, - "flos": 17346489509640.0, - "grad_norm": 5.302193910935834, - "learning_rate": 3.965150184814938e-06, - "loss": 1.0877, - "num_input_tokens_seen": 15261370, - "step": 730 - }, - { - "epoch": 0.08789755305717549, - "flos": 15616535398320.0, - "grad_norm": 3.183052850186833, - "learning_rate": 3.965005251381189e-06, - "loss": 0.9766, - "num_input_tokens_seen": 15279025, - "step": 731 - }, - { - "epoch": 0.08801779594781459, - "flos": 41766100343400.0, - "grad_norm": 0.8664539430664058, - "learning_rate": 3.964860019857705e-06, - "loss": 0.8961, - "num_input_tokens_seen": 15343660, - "step": 732 - }, - { - "epoch": 0.08813803883845367, - "flos": 16560038022600.0, - "grad_norm": 4.144246371129393, - "learning_rate": 3.964714490266518e-06, - "loss": 1.0679, - "num_input_tokens_seen": 15364025, - "step": 733 - }, - { - "epoch": 0.08825828172909277, - "flos": 45405172236120.0, - "grad_norm": 1.1685537448874792, - "learning_rate": 3.964568662629706e-06, - "loss": 0.8905, - "num_input_tokens_seen": 15425050, - "step": 734 - }, - { - "epoch": 0.08837852461973186, - "flos": 19021916271120.0, - "grad_norm": 4.515978314262337, - "learning_rate": 3.9644225369693895e-06, - "loss": 1.0616, - "num_input_tokens_seen": 15445070, - "step": 735 - }, - { - "epoch": 0.08849876751037095, - "flos": 19412811736080.0, - "grad_norm": 20.71135670666843, - "learning_rate": 3.964276113307735e-06, - "loss": 1.0979, - "num_input_tokens_seen": 15464755, - "step": 736 - }, - { - "epoch": 0.08861901040101004, - "flos": 14069087067120.0, - "grad_norm": 4.272885358383568, - "learning_rate": 3.9641293916669574e-06, - "loss": 1.0205, - "num_input_tokens_seen": 15483435, - "step": 737 - }, - { - "epoch": 0.08873925329164913, - "flos": 16821452640840.0, - "grad_norm": 2.833923128860938, - "learning_rate": 3.9639823720693115e-06, - "loss": 1.0543, - "num_input_tokens_seen": 15505010, - "step": 738 - }, - { - "epoch": 0.08885949618228822, - "flos": 51448860600600.0, - "grad_norm": 0.8532658960162136, - "learning_rate": 3.963835054537102e-06, - "loss": 0.8834, - "num_input_tokens_seen": 15573695, - "step": 739 - }, - { - "epoch": 0.08897973907292732, - "flos": 15878287293720.0, - 
"grad_norm": 3.4050205356690046, - "learning_rate": 3.963687439092676e-06, - "loss": 0.8411, - "num_input_tokens_seen": 15594100, - "step": 740 - }, - { - "epoch": 0.0890999819635664, - "flos": 15091835806680.0, - "grad_norm": 3.6750298116084634, - "learning_rate": 3.963539525758427e-06, - "loss": 1.0348, - "num_input_tokens_seen": 15613380, - "step": 741 - }, - { - "epoch": 0.0892202248542055, - "flos": 18051609895320.0, - "grad_norm": 5.992898234923599, - "learning_rate": 3.9633913145567925e-06, - "loss": 0.9028, - "num_input_tokens_seen": 15633590, - "step": 742 - }, - { - "epoch": 0.08934046774484458, - "flos": 17395344470880.0, - "grad_norm": 3.9935015104115923, - "learning_rate": 3.9632428055102575e-06, - "loss": 1.0499, - "num_input_tokens_seen": 15653320, - "step": 743 - }, - { - "epoch": 0.08946071063548368, - "flos": 25527712887120.0, - "grad_norm": 5.408749746662484, - "learning_rate": 3.9630939986413495e-06, - "loss": 0.8945, - "num_input_tokens_seen": 15674840, - "step": 744 - }, - { - "epoch": 0.08958095352612276, - "flos": 10136431031640.0, - "grad_norm": 4.623925780218761, - "learning_rate": 3.962944893972643e-06, - "loss": 1.0136, - "num_input_tokens_seen": 15693010, - "step": 745 - }, - { - "epoch": 0.08970119641676186, - "flos": 12648635996280.0, - "grad_norm": 7.101928835779514, - "learning_rate": 3.962795491526756e-06, - "loss": 1.1382, - "num_input_tokens_seen": 15709890, - "step": 746 - }, - { - "epoch": 0.08982143930740095, - "flos": 14775188622720.0, - "grad_norm": 4.1420571207761565, - "learning_rate": 3.962645791326354e-06, - "loss": 1.1177, - "num_input_tokens_seen": 15728865, - "step": 747 - }, - { - "epoch": 0.08994168219804004, - "flos": 17137118654880.0, - "grad_norm": 3.0431773122826247, - "learning_rate": 3.962495793394146e-06, - "loss": 1.062, - "num_input_tokens_seen": 15747775, - "step": 748 - }, - { - "epoch": 0.09006192508867913, - "flos": 40923864382560.0, - "grad_norm": 0.702667362904822, - "learning_rate": 3.9623454977528864e-06, - "loss": 0.8418, - "num_input_tokens_seen": 15806150, - "step": 749 - }, - { - "epoch": 0.09018216797931822, - "flos": 14542785388320.0, - "grad_norm": 3.969071743155991, - "learning_rate": 3.962194904425375e-06, - "loss": 1.0781, - "num_input_tokens_seen": 15826500, - "step": 750 - }, - { - "epoch": 0.09030241086995731, - "flos": 16088118071880.0, - "grad_norm": 4.586411615717571, - "learning_rate": 3.9620440134344566e-06, - "loss": 0.9034, - "num_input_tokens_seen": 15844375, - "step": 751 - }, - { - "epoch": 0.09042265376059641, - "flos": 15537105313680.0, - "grad_norm": 12.915105187980528, - "learning_rate": 3.9618928248030215e-06, - "loss": 1.0545, - "num_input_tokens_seen": 15863605, - "step": 752 - }, - { - "epoch": 0.0905428966512355, - "flos": 17294077807440.0, - "grad_norm": 3.5107882309654546, - "learning_rate": 3.961741338554005e-06, - "loss": 1.0616, - "num_input_tokens_seen": 15881665, - "step": 753 - }, - { - "epoch": 0.09066313954187459, - "flos": 25579572681240.0, - "grad_norm": 6.739321704092334, - "learning_rate": 3.9615895547103865e-06, - "loss": 0.978, - "num_input_tokens_seen": 15905030, - "step": 754 - }, - { - "epoch": 0.09078338243251367, - "flos": 20781494997480.0, - "grad_norm": 5.91891171839687, - "learning_rate": 3.961437473295193e-06, - "loss": 1.0051, - "num_input_tokens_seen": 15924895, - "step": 755 - }, - { - "epoch": 0.09090362532315277, - "flos": 15563081203080.0, - "grad_norm": 17.819552982773146, - "learning_rate": 3.961285094331495e-06, - "loss": 0.9442, - "num_input_tokens_seen": 
15942530, - "step": 756 - }, - { - "epoch": 0.09102386821379185, - "flos": 19470466565040.0, - "grad_norm": 5.776552013236018, - "learning_rate": 3.961132417842406e-06, - "loss": 1.0787, - "num_input_tokens_seen": 15962035, - "step": 757 - }, - { - "epoch": 0.09114411110443095, - "flos": 14776752362280.0, - "grad_norm": 13.91273659284733, - "learning_rate": 3.960979443851089e-06, - "loss": 0.9813, - "num_input_tokens_seen": 15978780, - "step": 758 - }, - { - "epoch": 0.09126435399507005, - "flos": 18609828120120.0, - "grad_norm": 4.737439061077396, - "learning_rate": 3.96082617238075e-06, - "loss": 1.0259, - "num_input_tokens_seen": 16001125, - "step": 759 - }, - { - "epoch": 0.09138459688570913, - "flos": 17346305540280.0, - "grad_norm": 4.589496537731414, - "learning_rate": 3.960672603454639e-06, - "loss": 1.0115, - "num_input_tokens_seen": 16020825, - "step": 760 - }, - { - "epoch": 0.09150483977634823, - "flos": 14934294084480.0, - "grad_norm": 7.617316705634952, - "learning_rate": 3.960518737096054e-06, - "loss": 0.9761, - "num_input_tokens_seen": 16040175, - "step": 761 - }, - { - "epoch": 0.09162508266698731, - "flos": 16245414501600.0, - "grad_norm": 15.790208626758625, - "learning_rate": 3.960364573328334e-06, - "loss": 0.9497, - "num_input_tokens_seen": 16059220, - "step": 762 - }, - { - "epoch": 0.0917453255576264, - "flos": 15431239416240.0, - "grad_norm": 7.93787220960784, - "learning_rate": 3.9602101121748675e-06, - "loss": 1.1089, - "num_input_tokens_seen": 16079435, - "step": 763 - }, - { - "epoch": 0.0918655684482655, - "flos": 10317618364680.0, - "grad_norm": 10.228082810503425, - "learning_rate": 3.960055353659085e-06, - "loss": 0.9439, - "num_input_tokens_seen": 16096265, - "step": 764 - }, - { - "epoch": 0.09198581133890459, - "flos": 16660047562080.0, - "grad_norm": 7.306781325914069, - "learning_rate": 3.959900297804465e-06, - "loss": 1.0725, - "num_input_tokens_seen": 16116155, - "step": 765 - }, - { - "epoch": 0.09210605422954368, - "flos": 11888804291400.0, - "grad_norm": 4.213151848501921, - "learning_rate": 3.9597449446345276e-06, - "loss": 0.9948, - "num_input_tokens_seen": 16133120, - "step": 766 - }, - { - "epoch": 0.09222629712018277, - "flos": 16114032638160.0, - "grad_norm": 4.15547620410828, - "learning_rate": 3.95958929417284e-06, - "loss": 1.0607, - "num_input_tokens_seen": 16150995, - "step": 767 - }, - { - "epoch": 0.09234654001082186, - "flos": 50116670143680.0, - "grad_norm": 0.7380806382920999, - "learning_rate": 3.9594333464430145e-06, - "loss": 0.8408, - "num_input_tokens_seen": 16220205, - "step": 768 - }, - { - "epoch": 0.09246678290146094, - "flos": 14200407607440.0, - "grad_norm": 5.334020669674668, - "learning_rate": 3.959277101468709e-06, - "loss": 1.1069, - "num_input_tokens_seen": 16239475, - "step": 769 - }, - { - "epoch": 0.09258702579210004, - "flos": 12573069268200.0, - "grad_norm": 6.589116353137615, - "learning_rate": 3.959120559273624e-06, - "loss": 1.0097, - "num_input_tokens_seen": 16256980, - "step": 770 - }, - { - "epoch": 0.09270726868273914, - "flos": 14829256049160.0, - "grad_norm": 6.423036493484282, - "learning_rate": 3.958963719881509e-06, - "loss": 1.0739, - "num_input_tokens_seen": 16274790, - "step": 771 - }, - { - "epoch": 0.09282751157337822, - "flos": 12046897921680.0, - "grad_norm": 3.6016661655955837, - "learning_rate": 3.958806583316154e-06, - "loss": 1.153, - "num_input_tokens_seen": 16292480, - "step": 772 - }, - { - "epoch": 0.09294775446401732, - "flos": 23192371975560.0, - "grad_norm": 3.510429646423673, - 
"learning_rate": 3.9586491496013985e-06, - "loss": 1.014, - "num_input_tokens_seen": 16314595, - "step": 773 - }, - { - "epoch": 0.0930679973546564, - "flos": 12941453600040.0, - "grad_norm": 4.256606408879871, - "learning_rate": 3.958491418761124e-06, - "loss": 1.0446, - "num_input_tokens_seen": 16331885, - "step": 774 - }, - { - "epoch": 0.0931882402452955, - "flos": 14981707952400.0, - "grad_norm": 5.185335190182588, - "learning_rate": 3.958333390819258e-06, - "loss": 0.956, - "num_input_tokens_seen": 16348535, - "step": 775 - }, - { - "epoch": 0.0933084831359346, - "flos": 17215874185200.0, - "grad_norm": 4.325656228087118, - "learning_rate": 3.9581750657997754e-06, - "loss": 1.0232, - "num_input_tokens_seen": 16367620, - "step": 776 - }, - { - "epoch": 0.09342872602657368, - "flos": 18130212117840.0, - "grad_norm": 3.6234311622343776, - "learning_rate": 3.95801644372669e-06, - "loss": 1.1269, - "num_input_tokens_seen": 16387245, - "step": 777 - }, - { - "epoch": 0.09354896891721277, - "flos": 16455337264440.0, - "grad_norm": 3.2096754132216776, - "learning_rate": 3.957857524624068e-06, - "loss": 1.0644, - "num_input_tokens_seen": 16405845, - "step": 778 - }, - { - "epoch": 0.09366921180785186, - "flos": 17267733979320.0, - "grad_norm": 3.709009701771414, - "learning_rate": 3.957698308516016e-06, - "loss": 1.1289, - "num_input_tokens_seen": 16426865, - "step": 779 - }, - { - "epoch": 0.09378945469849095, - "flos": 13278894869760.0, - "grad_norm": 6.440141746903933, - "learning_rate": 3.957538795426688e-06, - "loss": 1.0589, - "num_input_tokens_seen": 16444010, - "step": 780 - }, - { - "epoch": 0.09390969758913004, - "flos": 16505050749360.0, - "grad_norm": 3.248217020263575, - "learning_rate": 3.9573789853802804e-06, - "loss": 0.9978, - "num_input_tokens_seen": 16462205, - "step": 781 - }, - { - "epoch": 0.09402994047976913, - "flos": 13938318434880.0, - "grad_norm": 5.516260558947448, - "learning_rate": 3.957218878401037e-06, - "loss": 0.9773, - "num_input_tokens_seen": 16480415, - "step": 782 - }, - { - "epoch": 0.09415018337040823, - "flos": 20961915791520.0, - "grad_norm": 3.502247633626676, - "learning_rate": 3.957058474513246e-06, - "loss": 1.1151, - "num_input_tokens_seen": 16499990, - "step": 783 - }, - { - "epoch": 0.09427042626104731, - "flos": 17478055342440.0, - "grad_norm": 5.065805180408589, - "learning_rate": 3.956897773741241e-06, - "loss": 1.0046, - "num_input_tokens_seen": 16518700, - "step": 784 - }, - { - "epoch": 0.09439066915168641, - "flos": 18762647962080.0, - "grad_norm": 3.5417608959153988, - "learning_rate": 3.956736776109398e-06, - "loss": 0.9479, - "num_input_tokens_seen": 16539595, - "step": 785 - }, - { - "epoch": 0.09451091204232549, - "flos": 13780531420200.0, - "grad_norm": 6.846817513611508, - "learning_rate": 3.956575481642143e-06, - "loss": 1.0611, - "num_input_tokens_seen": 16558205, - "step": 786 - }, - { - "epoch": 0.09463115493296459, - "flos": 18049187632080.0, - "grad_norm": 4.962421153328577, - "learning_rate": 3.956413890363943e-06, - "loss": 0.9604, - "num_input_tokens_seen": 16574905, - "step": 787 - }, - { - "epoch": 0.09475139782360369, - "flos": 7093884748320.0, - "grad_norm": 6.93782661287752, - "learning_rate": 3.956252002299312e-06, - "loss": 1.0482, - "num_input_tokens_seen": 16590525, - "step": 788 - }, - { - "epoch": 0.09487164071424277, - "flos": 12201220180080.0, - "grad_norm": 5.40598715435369, - "learning_rate": 3.956089817472807e-06, - "loss": 1.1359, - "num_input_tokens_seen": 16607550, - "step": 789 - }, - { - "epoch": 
0.09499188360488187, - "flos": 21989815673160.0, - "grad_norm": 5.053741791119585, - "learning_rate": 3.955927335909032e-06, - "loss": 1.084, - "num_input_tokens_seen": 16630480, - "step": 790 - }, - { - "epoch": 0.09511212649552095, - "flos": 20913060830280.0, - "grad_norm": 9.03099191686281, - "learning_rate": 3.955764557632634e-06, - "loss": 0.9979, - "num_input_tokens_seen": 16650010, - "step": 791 - }, - { - "epoch": 0.09523236938616005, - "flos": 7277463683040.0, - "grad_norm": 5.254976003430545, - "learning_rate": 3.955601482668309e-06, - "loss": 1.1687, - "num_input_tokens_seen": 16667590, - "step": 792 - }, - { - "epoch": 0.09535261227679913, - "flos": 13517798354880.0, - "grad_norm": 5.304661611682042, - "learning_rate": 3.955438111040794e-06, - "loss": 1.1105, - "num_input_tokens_seen": 16685585, - "step": 793 - }, - { - "epoch": 0.09547285516743823, - "flos": 14855446569480.0, - "grad_norm": 4.135855061365551, - "learning_rate": 3.955274442774873e-06, - "loss": 1.0303, - "num_input_tokens_seen": 16703885, - "step": 794 - }, - { - "epoch": 0.09559309805807732, - "flos": 21489957493200.0, - "grad_norm": 5.955105990116014, - "learning_rate": 3.9551104778953725e-06, - "loss": 0.9419, - "num_input_tokens_seen": 16723900, - "step": 795 - }, - { - "epoch": 0.0957133409487164, - "flos": 14958460941840.0, - "grad_norm": 3.579406508794467, - "learning_rate": 3.954946216427167e-06, - "loss": 1.0793, - "num_input_tokens_seen": 16744080, - "step": 796 - }, - { - "epoch": 0.0958335838393555, - "flos": 51062324625240.0, - "grad_norm": 1.4775554406467923, - "learning_rate": 3.954781658395176e-06, - "loss": 0.8787, - "num_input_tokens_seen": 16800055, - "step": 797 - }, - { - "epoch": 0.09595382672999458, - "flos": 15538362437640.0, - "grad_norm": 3.5568021751218692, - "learning_rate": 3.95461680382436e-06, - "loss": 1.1413, - "num_input_tokens_seen": 16818700, - "step": 798 - }, - { - "epoch": 0.09607406962063368, - "flos": 13254390735240.0, - "grad_norm": 4.782904843445569, - "learning_rate": 3.9544516527397295e-06, - "loss": 1.082, - "num_input_tokens_seen": 16834770, - "step": 799 - }, - { - "epoch": 0.09619431251127276, - "flos": 16038496571640.0, - "grad_norm": 5.456421650811575, - "learning_rate": 3.954286205166338e-06, - "loss": 1.0333, - "num_input_tokens_seen": 16855655, - "step": 800 - }, - { - "epoch": 0.09631455540191186, - "flos": 10057000947000.0, - "grad_norm": 4.3260451184167215, - "learning_rate": 3.954120461129282e-06, - "loss": 1.0578, - "num_input_tokens_seen": 16872785, - "step": 801 - }, - { - "epoch": 0.09643479829255096, - "flos": 14724187352280.0, - "grad_norm": 31.006364722751883, - "learning_rate": 3.953954420653706e-06, - "loss": 1.0745, - "num_input_tokens_seen": 16889530, - "step": 802 - }, - { - "epoch": 0.09655504118319004, - "flos": 17368326088440.0, - "grad_norm": 4.177044697850014, - "learning_rate": 3.953788083764798e-06, - "loss": 1.1171, - "num_input_tokens_seen": 16908485, - "step": 803 - }, - { - "epoch": 0.09667528407382914, - "flos": 13072375540080.0, - "grad_norm": 8.822747947643991, - "learning_rate": 3.953621450487792e-06, - "loss": 1.1487, - "num_input_tokens_seen": 16926825, - "step": 804 - }, - { - "epoch": 0.09679552696446822, - "flos": 50720217250320.0, - "grad_norm": 0.8913280656714081, - "learning_rate": 3.953454520847964e-06, - "loss": 0.8803, - "num_input_tokens_seen": 16991390, - "step": 805 - }, - { - "epoch": 0.09691576985510732, - "flos": 15590436862680.0, - "grad_norm": 4.731170677820239, - "learning_rate": 3.9532872948706395e-06, - 
"loss": 0.9535, - "num_input_tokens_seen": 17010605, - "step": 806 - }, - { - "epoch": 0.09703601274574641, - "flos": 12729415189560.0, - "grad_norm": 26.845984973170335, - "learning_rate": 3.9531197725811845e-06, - "loss": 1.0626, - "num_input_tokens_seen": 17025710, - "step": 807 - }, - { - "epoch": 0.0971562556363855, - "flos": 15745832275680.0, - "grad_norm": 5.74488237942839, - "learning_rate": 3.952951954005013e-06, - "loss": 1.102, - "num_input_tokens_seen": 17045115, - "step": 808 - }, - { - "epoch": 0.0972764985270246, - "flos": 18395490092640.0, - "grad_norm": 4.109869053371927, - "learning_rate": 3.952783839167584e-06, - "loss": 1.0757, - "num_input_tokens_seen": 17064880, - "step": 809 - }, - { - "epoch": 0.09739674141766368, - "flos": 14436030305640.0, - "grad_norm": 6.63229745543379, - "learning_rate": 3.952615428094398e-06, - "loss": 0.9579, - "num_input_tokens_seen": 17084120, - "step": 810 - }, - { - "epoch": 0.09751698430830277, - "flos": 11132866604640.0, - "grad_norm": 4.541819065348517, - "learning_rate": 3.952446720811004e-06, - "loss": 0.9726, - "num_input_tokens_seen": 17102165, - "step": 811 - }, - { - "epoch": 0.09763722719894186, - "flos": 45614941691160.0, - "grad_norm": 0.8223337611875231, - "learning_rate": 3.952277717342995e-06, - "loss": 0.9031, - "num_input_tokens_seen": 17168320, - "step": 812 - }, - { - "epoch": 0.09775747008958095, - "flos": 15694861666800.0, - "grad_norm": 5.774729087387117, - "learning_rate": 3.952108417716009e-06, - "loss": 1.0861, - "num_input_tokens_seen": 17187495, - "step": 813 - }, - { - "epoch": 0.09787771298022005, - "flos": 15116125310280.0, - "grad_norm": 3.7013053447240494, - "learning_rate": 3.951938821955727e-06, - "loss": 1.0745, - "num_input_tokens_seen": 17206615, - "step": 814 - }, - { - "epoch": 0.09799795587085913, - "flos": 15668763131160.0, - "grad_norm": 3.370963897502701, - "learning_rate": 3.9517689300878786e-06, - "loss": 0.995, - "num_input_tokens_seen": 17226070, - "step": 815 - }, - { - "epoch": 0.09811819876149823, - "flos": 15747181384320.0, - "grad_norm": 3.382476296577941, - "learning_rate": 3.951598742138236e-06, - "loss": 1.0123, - "num_input_tokens_seen": 17244515, - "step": 816 - }, - { - "epoch": 0.09823844165213731, - "flos": 15794840544720.0, - "grad_norm": 5.529221502212787, - "learning_rate": 3.951428258132615e-06, - "loss": 1.0143, - "num_input_tokens_seen": 17262355, - "step": 817 - }, - { - "epoch": 0.09835868454277641, - "flos": 15979431310920.0, - "grad_norm": 4.450638568794814, - "learning_rate": 3.951257478096879e-06, - "loss": 1.0652, - "num_input_tokens_seen": 17280440, - "step": 818 - }, - { - "epoch": 0.0984789274334155, - "flos": 11577982803840.0, - "grad_norm": 7.545839226767986, - "learning_rate": 3.951086402056936e-06, - "loss": 0.923, - "num_input_tokens_seen": 17294760, - "step": 819 - }, - { - "epoch": 0.09859917032405459, - "flos": 17241911397720.0, - "grad_norm": 3.7962688700056564, - "learning_rate": 3.950915030038735e-06, - "loss": 1.065, - "num_input_tokens_seen": 17314275, - "step": 820 - }, - { - "epoch": 0.09871941321469369, - "flos": 12338029139640.0, - "grad_norm": 6.3574212749615695, - "learning_rate": 3.9507433620682765e-06, - "loss": 1.057, - "num_input_tokens_seen": 17330930, - "step": 821 - }, - { - "epoch": 0.09883965610533277, - "flos": 20284856281320.0, - "grad_norm": 3.300308825752056, - "learning_rate": 3.9505713981716e-06, - "loss": 1.1102, - "num_input_tokens_seen": 17353480, - "step": 822 - }, - { - "epoch": 0.09895989899597187, - "flos": 
[Deleted file content elided: Hugging Face Trainer–style `log_history` entries, one JSON object per optimizer step, flattened here by extraction. Each deleted entry recorded `epoch`, `flos`, `grad_norm`, `learning_rate`, `loss`, `num_input_tokens_seen`, and `step`. This span covered steps 823–1170 (epoch ≈ 0.099 → 0.141), with `learning_rate` decaying from ≈ 3.9504e-06 to ≈ 3.8732e-06, `loss` fluctuating between ≈ 0.78 and ≈ 1.30, and `num_input_tokens_seen` growing from ≈ 17.4M to ≈ 24.6M tokens.]
3.872969766509304e-06, - "loss": 1.0133, - "num_input_tokens_seen": 24570830, - "step": 1170 - }, - { - "epoch": 0.14080442493837553, - "flos": 46724877890040.0, - "grad_norm": 0.8302366217592422, - "learning_rate": 3.872696432905726e-06, - "loss": 0.8171, - "num_input_tokens_seen": 24631370, - "step": 1171 - }, - { - "epoch": 0.1409246678290146, - "flos": 18343875591000.0, - "grad_norm": 3.374527226995999, - "learning_rate": 3.872422815215589e-06, - "loss": 0.9278, - "num_input_tokens_seen": 24650170, - "step": 1172 - }, - { - "epoch": 0.1410449107196537, - "flos": 15534560404200.0, - "grad_norm": 2.633567814910028, - "learning_rate": 3.8721489134803994e-06, - "loss": 0.9717, - "num_input_tokens_seen": 24668680, - "step": 1173 - }, - { - "epoch": 0.1411651536102928, - "flos": 11808178405920.0, - "grad_norm": 3.81496587335564, - "learning_rate": 3.871874727741707e-06, - "loss": 0.9494, - "num_input_tokens_seen": 24685630, - "step": 1174 - }, - { - "epoch": 0.1412853965009319, - "flos": 14904730792560.0, - "grad_norm": 2.2823644554780014, - "learning_rate": 3.871600258041108e-06, - "loss": 1.1964, - "num_input_tokens_seen": 24704875, - "step": 1175 - }, - { - "epoch": 0.14140563939157097, - "flos": 14432534887800.0, - "grad_norm": 4.539755369477222, - "learning_rate": 3.871325504420238e-06, - "loss": 1.0781, - "num_input_tokens_seen": 24723585, - "step": 1176 - }, - { - "epoch": 0.14152588228221005, - "flos": 14960239312320.0, - "grad_norm": 3.5997231641927248, - "learning_rate": 3.871050466920776e-06, - "loss": 1.0515, - "num_input_tokens_seen": 24743210, - "step": 1177 - }, - { - "epoch": 0.14164612517284916, - "flos": 12915140433480.0, - "grad_norm": 2.977650360999975, - "learning_rate": 3.870775145584447e-06, - "loss": 1.027, - "num_input_tokens_seen": 24760710, - "step": 1178 - }, - { - "epoch": 0.14176636806348825, - "flos": 16164114061800.0, - "grad_norm": 7.010559564261096, - "learning_rate": 3.8704995404530145e-06, - "loss": 0.8571, - "num_input_tokens_seen": 24776055, - "step": 1179 - }, - { - "epoch": 0.14188661095412733, - "flos": 15695014974600.0, - "grad_norm": 6.776283519825927, - "learning_rate": 3.87022365156829e-06, - "loss": 1.0744, - "num_input_tokens_seen": 24796490, - "step": 1180 - }, - { - "epoch": 0.14200685384476644, - "flos": 17320053696840.0, - "grad_norm": 4.103997370654002, - "learning_rate": 3.869947478972123e-06, - "loss": 1.04, - "num_input_tokens_seen": 24817235, - "step": 1181 - }, - { - "epoch": 0.14212709673540552, - "flos": 17082376674120.0, - "grad_norm": 3.9138095796838903, - "learning_rate": 3.869671022706412e-06, - "loss": 1.0468, - "num_input_tokens_seen": 24835685, - "step": 1182 - }, - { - "epoch": 0.1422473396260446, - "flos": 18814446433080.0, - "grad_norm": 3.0057345259667194, - "learning_rate": 3.869394282813092e-06, - "loss": 0.8899, - "num_input_tokens_seen": 24854605, - "step": 1183 - }, - { - "epoch": 0.1423675825166837, - "flos": 12075786659280.0, - "grad_norm": 4.744677523075055, - "learning_rate": 3.869117259334147e-06, - "loss": 1.1059, - "num_input_tokens_seen": 24872250, - "step": 1184 - }, - { - "epoch": 0.1424878254073228, - "flos": 12703837900440.0, - "grad_norm": 2.773285681639175, - "learning_rate": 3.868839952311599e-06, - "loss": 1.0509, - "num_input_tokens_seen": 24889925, - "step": 1185 - }, - { - "epoch": 0.14260806829796188, - "flos": 14410422354960.0, - "grad_norm": 4.312191938371307, - "learning_rate": 3.868562361787516e-06, - "loss": 1.0257, - "num_input_tokens_seen": 24908775, - "step": 1186 - }, - { - "epoch": 
0.14272831118860096, - "flos": 16481711754120.0, - "grad_norm": 4.007870322362492, - "learning_rate": 3.868284487804009e-06, - "loss": 0.9255, - "num_input_tokens_seen": 24927725, - "step": 1187 - }, - { - "epoch": 0.14284855407924008, - "flos": 19389380756160.0, - "grad_norm": 2.558218006761188, - "learning_rate": 3.86800633040323e-06, - "loss": 1.0189, - "num_input_tokens_seen": 24948035, - "step": 1188 - }, - { - "epoch": 0.14296879696987916, - "flos": 20074013671680.0, - "grad_norm": 11.34048423311816, - "learning_rate": 3.867727889627376e-06, - "loss": 1.0058, - "num_input_tokens_seen": 24967370, - "step": 1189 - }, - { - "epoch": 0.14308903986051824, - "flos": 13753880976480.0, - "grad_norm": 3.4880435260279268, - "learning_rate": 3.867449165518687e-06, - "loss": 1.0111, - "num_input_tokens_seen": 24983560, - "step": 1190 - }, - { - "epoch": 0.14320928275115732, - "flos": 12364955537400.0, - "grad_norm": 3.6496948428161375, - "learning_rate": 3.867170158119444e-06, - "loss": 0.9263, - "num_input_tokens_seen": 25002280, - "step": 1191 - }, - { - "epoch": 0.14332952564179643, - "flos": 15245176895160.0, - "grad_norm": 8.223980185874492, - "learning_rate": 3.866890867471972e-06, - "loss": 0.9912, - "num_input_tokens_seen": 25020470, - "step": 1192 - }, - { - "epoch": 0.14344976853243552, - "flos": 11602456276800.0, - "grad_norm": 5.5829219761472615, - "learning_rate": 3.86661129361864e-06, - "loss": 1.1236, - "num_input_tokens_seen": 25034680, - "step": 1193 - }, - { - "epoch": 0.1435700114230746, - "flos": 13413649504800.0, - "grad_norm": 15.35390408350491, - "learning_rate": 3.866331436601859e-06, - "loss": 1.074, - "num_input_tokens_seen": 25052395, - "step": 1194 - }, - { - "epoch": 0.1436902543137137, - "flos": 14016307426200.0, - "grad_norm": 5.559802826603037, - "learning_rate": 3.866051296464083e-06, - "loss": 0.9698, - "num_input_tokens_seen": 25070950, - "step": 1195 - }, - { - "epoch": 0.1438104972043528, - "flos": 10372299022320.0, - "grad_norm": 3.9430500455230115, - "learning_rate": 3.86577087324781e-06, - "loss": 1.0715, - "num_input_tokens_seen": 25087160, - "step": 1196 - }, - { - "epoch": 0.14393074009499188, - "flos": 12102467764560.0, - "grad_norm": 2.6310793098666467, - "learning_rate": 3.865490166995578e-06, - "loss": 0.994, - "num_input_tokens_seen": 25105110, - "step": 1197 - }, - { - "epoch": 0.144050982985631, - "flos": 21720337064640.0, - "grad_norm": 3.56014883523202, - "learning_rate": 3.86520917774997e-06, - "loss": 1.0611, - "num_input_tokens_seen": 25124265, - "step": 1198 - }, - { - "epoch": 0.14417122587627007, - "flos": 12653081922480.0, - "grad_norm": 3.1077108098995163, - "learning_rate": 3.864927905553614e-06, - "loss": 0.9767, - "num_input_tokens_seen": 25141895, - "step": 1199 - }, - { - "epoch": 0.14429146876690915, - "flos": 15351686685360.0, - "grad_norm": 2.5374933176939223, - "learning_rate": 3.8646463504491765e-06, - "loss": 1.1107, - "num_input_tokens_seen": 25161750, - "step": 1200 - }, - { - "epoch": 0.14441171165754824, - "flos": 16533663532920.0, - "grad_norm": 6.413950687971835, - "learning_rate": 3.8643645124793705e-06, - "loss": 1.0591, - "num_input_tokens_seen": 25180370, - "step": 1201 - }, - { - "epoch": 0.14453195454818735, - "flos": 30617443035360.0, - "grad_norm": 2.348962912093364, - "learning_rate": 3.8640823916869515e-06, - "loss": 0.973, - "num_input_tokens_seen": 25204400, - "step": 1202 - }, - { - "epoch": 0.14465219743882643, - "flos": 19392508235280.0, - "grad_norm": 2.6008751680072777, - "learning_rate": 
3.863799988114714e-06, - "loss": 1.0113, - "num_input_tokens_seen": 25226150, - "step": 1203 - }, - { - "epoch": 0.1447724403294655, - "flos": 11813789471400.0, - "grad_norm": 4.209396504982303, - "learning_rate": 3.863517301805502e-06, - "loss": 0.9262, - "num_input_tokens_seen": 25244260, - "step": 1204 - }, - { - "epoch": 0.14489268322010462, - "flos": 14243743487880.0, - "grad_norm": 3.780395285106799, - "learning_rate": 3.863234332802196e-06, - "loss": 1.1924, - "num_input_tokens_seen": 25256185, - "step": 1205 - }, - { - "epoch": 0.1450129261107437, - "flos": 19313170135320.0, - "grad_norm": 3.713990828577241, - "learning_rate": 3.862951081147723e-06, - "loss": 0.9482, - "num_input_tokens_seen": 25276070, - "step": 1206 - }, - { - "epoch": 0.1451331690013828, - "flos": 18290237426400.0, - "grad_norm": 6.658233776788495, - "learning_rate": 3.862667546885053e-06, - "loss": 1.0089, - "num_input_tokens_seen": 25294340, - "step": 1207 - }, - { - "epoch": 0.14525341189202187, - "flos": 18315232145880.0, - "grad_norm": 3.066299572686597, - "learning_rate": 3.8623837300571965e-06, - "loss": 0.9529, - "num_input_tokens_seen": 25313045, - "step": 1208 - }, - { - "epoch": 0.14537365478266098, - "flos": 16401024545520.0, - "grad_norm": 2.2661205933577624, - "learning_rate": 3.8620996307072085e-06, - "loss": 1.0677, - "num_input_tokens_seen": 25333470, - "step": 1209 - }, - { - "epoch": 0.14549389767330007, - "flos": 14619793209720.0, - "grad_norm": 3.1605597051054315, - "learning_rate": 3.861815248878188e-06, - "loss": 0.8725, - "num_input_tokens_seen": 25350675, - "step": 1210 - }, - { - "epoch": 0.14561414056393915, - "flos": 10686247989000.0, - "grad_norm": 3.3405857979568108, - "learning_rate": 3.861530584613274e-06, - "loss": 1.0302, - "num_input_tokens_seen": 25368395, - "step": 1211 - }, - { - "epoch": 0.14573438345457826, - "flos": 13780715389560.0, - "grad_norm": 5.439681919840358, - "learning_rate": 3.86124563795565e-06, - "loss": 1.0442, - "num_input_tokens_seen": 25386930, - "step": 1212 - }, - { - "epoch": 0.14585462634521734, - "flos": 17662400816160.0, - "grad_norm": 3.6996453914313014, - "learning_rate": 3.860960408948543e-06, - "loss": 0.9409, - "num_input_tokens_seen": 25408400, - "step": 1213 - }, - { - "epoch": 0.14597486923585642, - "flos": 10920920178840.0, - "grad_norm": 3.330290756820398, - "learning_rate": 3.860674897635222e-06, - "loss": 1.1306, - "num_input_tokens_seen": 25424605, - "step": 1214 - }, - { - "epoch": 0.1460951121264955, - "flos": 11788365490080.0, - "grad_norm": 2.475744358990443, - "learning_rate": 3.860389104058998e-06, - "loss": 1.0635, - "num_input_tokens_seen": 25442555, - "step": 1215 - }, - { - "epoch": 0.14621535501713462, - "flos": 17687242227840.0, - "grad_norm": 2.2131215014641468, - "learning_rate": 3.860103028263227e-06, - "loss": 0.9466, - "num_input_tokens_seen": 25465380, - "step": 1216 - }, - { - "epoch": 0.1463355979077737, - "flos": 17949331400400.0, - "grad_norm": 3.2767001623832774, - "learning_rate": 3.859816670291304e-06, - "loss": 0.9199, - "num_input_tokens_seen": 25484195, - "step": 1217 - }, - { - "epoch": 0.14645584079841278, - "flos": 15668395192440.0, - "grad_norm": 5.172948259166779, - "learning_rate": 3.859530030186672e-06, - "loss": 1.1289, - "num_input_tokens_seen": 25500925, - "step": 1218 - }, - { - "epoch": 0.1465760836890519, - "flos": 16796641890720.0, - "grad_norm": 4.203536570085352, - "learning_rate": 3.859243107992813e-06, - "loss": 1.0484, - "num_input_tokens_seen": 25519450, - "step": 1219 - }, - { - "epoch": 
0.14669632657969098, - "flos": 26701932360000.0, - "grad_norm": 6.0734801600797885, - "learning_rate": 3.858955903753252e-06, - "loss": 1.0119, - "num_input_tokens_seen": 25537810, - "step": 1220 - }, - { - "epoch": 0.14681656947033006, - "flos": 20204506349880.0, - "grad_norm": 2.2445972001411185, - "learning_rate": 3.858668417511559e-06, - "loss": 1.0636, - "num_input_tokens_seen": 25560280, - "step": 1221 - }, - { - "epoch": 0.14693681236096917, - "flos": 13099485907200.0, - "grad_norm": 6.531491215828236, - "learning_rate": 3.8583806493113445e-06, - "loss": 0.9958, - "num_input_tokens_seen": 25578345, - "step": 1222 - }, - { - "epoch": 0.14705705525160825, - "flos": 14751205734720.0, - "grad_norm": 3.9728221277285165, - "learning_rate": 3.858092599196263e-06, - "loss": 1.0625, - "num_input_tokens_seen": 25596020, - "step": 1223 - }, - { - "epoch": 0.14717729814224734, - "flos": 21330453431160.0, - "grad_norm": 3.421723941621535, - "learning_rate": 3.857804267210012e-06, - "loss": 1.0511, - "num_input_tokens_seen": 25615040, - "step": 1224 - }, - { - "epoch": 0.14729754103288642, - "flos": 14226260850600.0, - "grad_norm": 9.38105354687916, - "learning_rate": 3.857515653396331e-06, - "loss": 1.1098, - "num_input_tokens_seen": 25631970, - "step": 1225 - }, - { - "epoch": 0.14741778392352553, - "flos": 13675800000480.0, - "grad_norm": 12.024709692482249, - "learning_rate": 3.857226757799002e-06, - "loss": 1.0954, - "num_input_tokens_seen": 25649245, - "step": 1226 - }, - { - "epoch": 0.1475380268141646, - "flos": 18081019879440.0, - "grad_norm": 4.171496656313858, - "learning_rate": 3.85693758046185e-06, - "loss": 0.9568, - "num_input_tokens_seen": 25667255, - "step": 1227 - }, - { - "epoch": 0.1476582697048037, - "flos": 14800980542760.0, - "grad_norm": 2.9374628267573946, - "learning_rate": 3.8566481214287435e-06, - "loss": 1.0593, - "num_input_tokens_seen": 25685095, - "step": 1228 - }, - { - "epoch": 0.1477785125954428, - "flos": 9900348410040.0, - "grad_norm": 2.8215306575717785, - "learning_rate": 3.8563583807435935e-06, - "loss": 1.1355, - "num_input_tokens_seen": 25700960, - "step": 1229 - }, - { - "epoch": 0.1478987554860819, - "flos": 14562782273520.0, - "grad_norm": 2.9857684172900973, - "learning_rate": 3.856068358450353e-06, - "loss": 1.0086, - "num_input_tokens_seen": 25720630, - "step": 1230 - }, - { - "epoch": 0.14801899837672097, - "flos": 12651824798520.0, - "grad_norm": 2.812268904622518, - "learning_rate": 3.8557780545930186e-06, - "loss": 1.088, - "num_input_tokens_seen": 25738765, - "step": 1231 - }, - { - "epoch": 0.14813924126736006, - "flos": 14824871446080.0, - "grad_norm": 2.565797219670144, - "learning_rate": 3.855487469215628e-06, - "loss": 1.0113, - "num_input_tokens_seen": 25757415, - "step": 1232 - }, - { - "epoch": 0.14825948415799917, - "flos": 26707206148320.0, - "grad_norm": 5.201423252174985, - "learning_rate": 3.855196602362264e-06, - "loss": 0.947, - "num_input_tokens_seen": 25780055, - "step": 1233 - }, - { - "epoch": 0.14837972704863825, - "flos": 15639598439520.0, - "grad_norm": 2.813400548234451, - "learning_rate": 3.854905454077051e-06, - "loss": 1.1716, - "num_input_tokens_seen": 25797385, - "step": 1234 - }, - { - "epoch": 0.14849996993927733, - "flos": 14908195548840.0, - "grad_norm": 5.192645359199491, - "learning_rate": 3.854614024404155e-06, - "loss": 1.0979, - "num_input_tokens_seen": 25815415, - "step": 1235 - }, - { - "epoch": 0.14862021282991644, - "flos": 14226690112440.0, - "grad_norm": 3.4953060008548347, - "learning_rate": 
3.8543223133877865e-06, - "loss": 1.1243, - "num_input_tokens_seen": 25833730, - "step": 1236 - }, - { - "epoch": 0.14874045572055553, - "flos": 16141357636200.0, - "grad_norm": 3.3234952863324367, - "learning_rate": 3.854030321072198e-06, - "loss": 1.1031, - "num_input_tokens_seen": 25853355, - "step": 1237 - }, - { - "epoch": 0.1488606986111946, - "flos": 18081173187240.0, - "grad_norm": 4.858809950379942, - "learning_rate": 3.853738047501682e-06, - "loss": 0.9633, - "num_input_tokens_seen": 25873635, - "step": 1238 - }, - { - "epoch": 0.1489809415018337, - "flos": 12049504154280.0, - "grad_norm": 5.5576838528058845, - "learning_rate": 3.85344549272058e-06, - "loss": 1.0044, - "num_input_tokens_seen": 25891335, - "step": 1239 - }, - { - "epoch": 0.1491011843924728, - "flos": 24009245278200.0, - "grad_norm": 3.980292528798245, - "learning_rate": 3.853152656773269e-06, - "loss": 1.0578, - "num_input_tokens_seen": 25912490, - "step": 1240 - }, - { - "epoch": 0.14922142728311188, - "flos": 15039516089160.0, - "grad_norm": 5.0461986549152185, - "learning_rate": 3.852859539704174e-06, - "loss": 1.0617, - "num_input_tokens_seen": 25931510, - "step": 1241 - }, - { - "epoch": 0.14934167017375097, - "flos": 21278501652360.0, - "grad_norm": 3.0516690612094663, - "learning_rate": 3.85256614155776e-06, - "loss": 0.9893, - "num_input_tokens_seen": 25951360, - "step": 1242 - }, - { - "epoch": 0.14946191306439008, - "flos": 12049902754560.0, - "grad_norm": 3.7442698886707597, - "learning_rate": 3.852272462378535e-06, - "loss": 0.9794, - "num_input_tokens_seen": 25968955, - "step": 1243 - }, - { - "epoch": 0.14958215595502916, - "flos": 11079535055640.0, - "grad_norm": 3.1475303232733585, - "learning_rate": 3.85197850221105e-06, - "loss": 1.0164, - "num_input_tokens_seen": 25984975, - "step": 1244 - }, - { - "epoch": 0.14970239884566824, - "flos": 23612892055560.0, - "grad_norm": 8.344209922658852, - "learning_rate": 3.851684261099899e-06, - "loss": 0.9915, - "num_input_tokens_seen": 26006435, - "step": 1245 - }, - { - "epoch": 0.14982264173630733, - "flos": 12626216847840.0, - "grad_norm": 5.306686092290036, - "learning_rate": 3.851389739089718e-06, - "loss": 1.0977, - "num_input_tokens_seen": 26022775, - "step": 1246 - }, - { - "epoch": 0.14994288462694644, - "flos": 23109753088680.0, - "grad_norm": 3.8850788586638894, - "learning_rate": 3.851094936225186e-06, - "loss": 1.0386, - "num_input_tokens_seen": 26043380, - "step": 1247 - }, - { - "epoch": 0.15006312751758552, - "flos": 22668561569160.0, - "grad_norm": 3.8524552678228687, - "learning_rate": 3.850799852551024e-06, - "loss": 1.0036, - "num_input_tokens_seen": 26065520, - "step": 1248 - }, - { - "epoch": 0.1501833704082246, - "flos": 11761592400120.0, - "grad_norm": 3.5601211100776347, - "learning_rate": 3.850504488111995e-06, - "loss": 1.0818, - "num_input_tokens_seen": 26081915, - "step": 1249 - }, - { - "epoch": 0.15030361329886371, - "flos": 16685992789920.0, - "grad_norm": 3.12369708556397, - "learning_rate": 3.850208842952907e-06, - "loss": 1.0583, - "num_input_tokens_seen": 26100440, - "step": 1250 - }, - { - "epoch": 0.1504238561895028, - "flos": 18237825724200.0, - "grad_norm": 5.38625735934187, - "learning_rate": 3.849912917118608e-06, - "loss": 1.0226, - "num_input_tokens_seen": 26121200, - "step": 1251 - }, - { - "epoch": 0.15054409908014188, - "flos": 37265010235560.0, - "grad_norm": 0.9616684033550622, - "learning_rate": 3.849616710653992e-06, - "loss": 0.8588, - "num_input_tokens_seen": 26182390, - "step": 1252 - }, - { - "epoch": 
0.150664341970781, - "flos": 13387428322920.0, - "grad_norm": 3.735978041827349, - "learning_rate": 3.84932022360399e-06, - "loss": 0.9874, - "num_input_tokens_seen": 26200775, - "step": 1253 - }, - { - "epoch": 0.15078458486142007, - "flos": 15746752122480.0, - "grad_norm": 4.7187510105157235, - "learning_rate": 3.849023456013581e-06, - "loss": 1.0542, - "num_input_tokens_seen": 26218055, - "step": 1254 - }, - { - "epoch": 0.15090482775205916, - "flos": 18972785355840.0, - "grad_norm": 4.9316990250255035, - "learning_rate": 3.848726407927784e-06, - "loss": 0.8507, - "num_input_tokens_seen": 26238160, - "step": 1255 - }, - { - "epoch": 0.15102507064269824, - "flos": 15485061550200.0, - "grad_norm": 18.257480708099916, - "learning_rate": 3.84842907939166e-06, - "loss": 1.0927, - "num_input_tokens_seen": 26257105, - "step": 1256 - }, - { - "epoch": 0.15114531353333735, - "flos": 16219530596880.0, - "grad_norm": 8.13633617979641, - "learning_rate": 3.8481314704503146e-06, - "loss": 0.9377, - "num_input_tokens_seen": 26276655, - "step": 1257 - }, - { - "epoch": 0.15126555642397643, - "flos": 13964079693360.0, - "grad_norm": 4.655717448047581, - "learning_rate": 3.847833581148895e-06, - "loss": 1.1049, - "num_input_tokens_seen": 26295285, - "step": 1258 - }, - { - "epoch": 0.15138579931461552, - "flos": 20462670842760.0, - "grad_norm": 5.622394551755104, - "learning_rate": 3.84753541153259e-06, - "loss": 1.0254, - "num_input_tokens_seen": 26314575, - "step": 1259 - }, - { - "epoch": 0.15150604220525463, - "flos": 15720745571520.0, - "grad_norm": 2.6215731641420863, - "learning_rate": 3.847236961646633e-06, - "loss": 1.063, - "num_input_tokens_seen": 26333275, - "step": 1260 - }, - { - "epoch": 0.1516262850958937, - "flos": 9138799657800.0, - "grad_norm": 4.434176207610239, - "learning_rate": 3.846938231536296e-06, - "loss": 1.0127, - "num_input_tokens_seen": 26348615, - "step": 1261 - }, - { - "epoch": 0.1517465279865328, - "flos": 15483467149080.0, - "grad_norm": 3.5652242584101703, - "learning_rate": 3.8466392212468995e-06, - "loss": 1.0408, - "num_input_tokens_seen": 26368525, - "step": 1262 - }, - { - "epoch": 0.15186677087717187, - "flos": 41632326878280.0, - "grad_norm": 0.8283235563805189, - "learning_rate": 3.8463399308238e-06, - "loss": 0.8789, - "num_input_tokens_seen": 26427350, - "step": 1263 - }, - { - "epoch": 0.15198701376781099, - "flos": 23297348687760.0, - "grad_norm": 5.923389251279032, - "learning_rate": 3.846040360312402e-06, - "loss": 0.8744, - "num_input_tokens_seen": 26450330, - "step": 1264 - }, - { - "epoch": 0.15210725665845007, - "flos": 20230880839560.0, - "grad_norm": 3.714356078539864, - "learning_rate": 3.8457405097581485e-06, - "loss": 1.0431, - "num_input_tokens_seen": 26469040, - "step": 1265 - }, - { - "epoch": 0.15222749954908915, - "flos": 14147719951200.0, - "grad_norm": 5.3674673482242, - "learning_rate": 3.8454403792065275e-06, - "loss": 1.0144, - "num_input_tokens_seen": 26487580, - "step": 1266 - }, - { - "epoch": 0.15234774243972826, - "flos": 15143787585480.0, - "grad_norm": 3.3092354576654137, - "learning_rate": 3.845139968703068e-06, - "loss": 1.0803, - "num_input_tokens_seen": 26504820, - "step": 1267 - }, - { - "epoch": 0.15246798533036734, - "flos": 18473847022680.0, - "grad_norm": 3.5503778175998506, - "learning_rate": 3.844839278293342e-06, - "loss": 1.0606, - "num_input_tokens_seen": 26525390, - "step": 1268 - }, - { - "epoch": 0.15258822822100643, - "flos": 18368992956720.0, - "grad_norm": 2.7852981766630234, - "learning_rate": 
3.8445383080229654e-06, - "loss": 0.9908, - "num_input_tokens_seen": 26541125, - "step": 1269 - }, - { - "epoch": 0.1527084711116455, - "flos": 17976380444400.0, - "grad_norm": 3.871470289654089, - "learning_rate": 3.844237057937593e-06, - "loss": 0.9634, - "num_input_tokens_seen": 26559850, - "step": 1270 - }, - { - "epoch": 0.15282871400228462, - "flos": 20832680237280.0, - "grad_norm": 5.25517181696333, - "learning_rate": 3.843935528082926e-06, - "loss": 1.0049, - "num_input_tokens_seen": 26580595, - "step": 1271 - }, - { - "epoch": 0.1529489568929237, - "flos": 14826189893160.0, - "grad_norm": 3.7536854753464826, - "learning_rate": 3.843633718504704e-06, - "loss": 1.0824, - "num_input_tokens_seen": 26598760, - "step": 1272 - }, - { - "epoch": 0.1530691997835628, - "flos": 14252359386240.0, - "grad_norm": 4.095849816310827, - "learning_rate": 3.843331629248715e-06, - "loss": 1.1194, - "num_input_tokens_seen": 26616080, - "step": 1273 - }, - { - "epoch": 0.1531894426742019, - "flos": 20486592407640.0, - "grad_norm": 3.8681456948961164, - "learning_rate": 3.843029260360782e-06, - "loss": 1.0005, - "num_input_tokens_seen": 26634170, - "step": 1274 - }, - { - "epoch": 0.15330968556484098, - "flos": 15799255809360.0, - "grad_norm": 6.839416804297015, - "learning_rate": 3.8427266118867755e-06, - "loss": 1.0165, - "num_input_tokens_seen": 26653640, - "step": 1275 - }, - { - "epoch": 0.15342992845548006, - "flos": 19602063059400.0, - "grad_norm": 2.9678285473761945, - "learning_rate": 3.842423683872608e-06, - "loss": 1.0493, - "num_input_tokens_seen": 26673935, - "step": 1276 - }, - { - "epoch": 0.15355017134611917, - "flos": 13911545344920.0, - "grad_norm": 4.483717813751174, - "learning_rate": 3.842120476364232e-06, - "loss": 1.0089, - "num_input_tokens_seen": 26692105, - "step": 1277 - }, - { - "epoch": 0.15367041423675826, - "flos": 13098719368200.0, - "grad_norm": 2.4103560650811806, - "learning_rate": 3.841816989407644e-06, - "loss": 1.0465, - "num_input_tokens_seen": 26707315, - "step": 1278 - }, - { - "epoch": 0.15379065712739734, - "flos": 29591720124480.0, - "grad_norm": 3.853652851118797, - "learning_rate": 3.841513223048884e-06, - "loss": 0.992, - "num_input_tokens_seen": 26727720, - "step": 1279 - }, - { - "epoch": 0.15391090001803642, - "flos": 15668456515560.0, - "grad_norm": 3.767897936910722, - "learning_rate": 3.841209177334031e-06, - "loss": 0.9933, - "num_input_tokens_seen": 26745800, - "step": 1280 - }, - { - "epoch": 0.15403114290867553, - "flos": 10922391933720.0, - "grad_norm": 4.4815975719463035, - "learning_rate": 3.84090485230921e-06, - "loss": 0.9853, - "num_input_tokens_seen": 26763760, - "step": 1281 - }, - { - "epoch": 0.15415138579931462, - "flos": 12703561946400.0, - "grad_norm": 5.0636759204667365, - "learning_rate": 3.840600248020588e-06, - "loss": 0.984, - "num_input_tokens_seen": 26780420, - "step": 1282 - }, - { - "epoch": 0.1542716286899537, - "flos": 8031531014640.0, - "grad_norm": 6.221083313539061, - "learning_rate": 3.840295364514371e-06, - "loss": 1.0148, - "num_input_tokens_seen": 26797520, - "step": 1283 - }, - { - "epoch": 0.1543918715805928, - "flos": 12338151785880.0, - "grad_norm": 4.270276785462845, - "learning_rate": 3.83999020183681e-06, - "loss": 1.0096, - "num_input_tokens_seen": 26815935, - "step": 1284 - }, - { - "epoch": 0.1545121144712319, - "flos": 12601375436160.0, - "grad_norm": 2.686987007328333, - "learning_rate": 3.839684760034199e-06, - "loss": 1.0056, - "num_input_tokens_seen": 26833860, - "step": 1285 - }, - { - "epoch": 
0.15463235736187098, - "flos": 20099805591720.0, - "grad_norm": 4.500428709165848, - "learning_rate": 3.8393790391528716e-06, - "loss": 0.871, - "num_input_tokens_seen": 26854275, - "step": 1286 - }, - { - "epoch": 0.15475260025251006, - "flos": 16245322516920.0, - "grad_norm": 4.574183306731813, - "learning_rate": 3.8390730392392075e-06, - "loss": 1.1179, - "num_input_tokens_seen": 26873975, - "step": 1287 - }, - { - "epoch": 0.15487284314314917, - "flos": 12469349679960.0, - "grad_norm": 3.2671451224703407, - "learning_rate": 3.838766760339626e-06, - "loss": 1.0304, - "num_input_tokens_seen": 26892220, - "step": 1288 - }, - { - "epoch": 0.15499308603378825, - "flos": 14250826308240.0, - "grad_norm": 6.054790510716462, - "learning_rate": 3.838460202500587e-06, - "loss": 1.0074, - "num_input_tokens_seen": 26907730, - "step": 1289 - }, - { - "epoch": 0.15511332892442733, - "flos": 11257625571120.0, - "grad_norm": 4.562101581892479, - "learning_rate": 3.838153365768599e-06, - "loss": 0.9672, - "num_input_tokens_seen": 26923960, - "step": 1290 - }, - { - "epoch": 0.15523357181506645, - "flos": 29487479289720.0, - "grad_norm": 3.5530048118535102, - "learning_rate": 3.837846250190206e-06, - "loss": 0.9715, - "num_input_tokens_seen": 26946545, - "step": 1291 - }, - { - "epoch": 0.15535381470570553, - "flos": 13098872676000.0, - "grad_norm": 3.644185563974839, - "learning_rate": 3.837538855811998e-06, - "loss": 0.9974, - "num_input_tokens_seen": 26964440, - "step": 1292 - }, - { - "epoch": 0.1554740575963446, - "flos": 9821255602560.0, - "grad_norm": 3.118464540554893, - "learning_rate": 3.837231182680606e-06, - "loss": 0.9353, - "num_input_tokens_seen": 26982125, - "step": 1293 - }, - { - "epoch": 0.1555943004869837, - "flos": 14800489957800.0, - "grad_norm": 3.271021422467336, - "learning_rate": 3.836923230842706e-06, - "loss": 0.9979, - "num_input_tokens_seen": 27000960, - "step": 1294 - }, - { - "epoch": 0.1557145433776228, - "flos": 15690569048400.0, - "grad_norm": 4.591791890416265, - "learning_rate": 3.836615000345011e-06, - "loss": 1.0337, - "num_input_tokens_seen": 27018860, - "step": 1295 - }, - { - "epoch": 0.1558347862682619, - "flos": 14042620592760.0, - "grad_norm": 3.946428499918409, - "learning_rate": 3.836306491234282e-06, - "loss": 1.0069, - "num_input_tokens_seen": 27036430, - "step": 1296 - }, - { - "epoch": 0.15595502915890097, - "flos": 12206340660600.0, - "grad_norm": 3.5373702224272674, - "learning_rate": 3.835997703557317e-06, - "loss": 0.9663, - "num_input_tokens_seen": 27052890, - "step": 1297 - }, - { - "epoch": 0.15607527204954008, - "flos": 13990607490840.0, - "grad_norm": 3.926777484562761, - "learning_rate": 3.83568863736096e-06, - "loss": 1.0329, - "num_input_tokens_seen": 27071480, - "step": 1298 - }, - { - "epoch": 0.15619551494017916, - "flos": 13125431135040.0, - "grad_norm": 5.3815381406052545, - "learning_rate": 3.8353792926920975e-06, - "loss": 1.1139, - "num_input_tokens_seen": 27089850, - "step": 1299 - }, - { - "epoch": 0.15631575783081825, - "flos": 14121866708040.0, - "grad_norm": 3.6239284378548193, - "learning_rate": 3.835069669597655e-06, - "loss": 1.0322, - "num_input_tokens_seen": 27107960, - "step": 1300 - }, - { - "epoch": 0.15643600072145733, - "flos": 14751083088480.0, - "grad_norm": 3.3397440934510554, - "learning_rate": 3.834759768124603e-06, - "loss": 1.0308, - "num_input_tokens_seen": 27126555, - "step": 1301 - }, - { - "epoch": 0.15655624361209644, - "flos": 13146746467320.0, - "grad_norm": 3.7231195247623337, - "learning_rate": 
3.834449588319953e-06, - "loss": 0.9828, - "num_input_tokens_seen": 27144310, - "step": 1302 - }, - { - "epoch": 0.15667648650273552, - "flos": 17950649847480.0, - "grad_norm": 3.0806030522789607, - "learning_rate": 3.834139130230758e-06, - "loss": 1.0707, - "num_input_tokens_seen": 27163335, - "step": 1303 - }, - { - "epoch": 0.1567967293933746, - "flos": 17661266338440.0, - "grad_norm": 2.913468123866994, - "learning_rate": 3.833828393904117e-06, - "loss": 1.0388, - "num_input_tokens_seen": 27183335, - "step": 1304 - }, - { - "epoch": 0.15691697228401372, - "flos": 13590728188800.0, - "grad_norm": 5.140031681779527, - "learning_rate": 3.833517379387165e-06, - "loss": 1.0023, - "num_input_tokens_seen": 27199510, - "step": 1305 - }, - { - "epoch": 0.1570372151746528, - "flos": 17635321110600.0, - "grad_norm": 3.522577489537995, - "learning_rate": 3.833206086727085e-06, - "loss": 1.1192, - "num_input_tokens_seen": 27218580, - "step": 1306 - }, - { - "epoch": 0.15715745806529188, - "flos": 17687548843440.0, - "grad_norm": 3.9267697128954215, - "learning_rate": 3.8328945159710994e-06, - "loss": 0.9257, - "num_input_tokens_seen": 27238480, - "step": 1307 - }, - { - "epoch": 0.157277700955931, - "flos": 15537657221760.0, - "grad_norm": 10.360875085819021, - "learning_rate": 3.832582667166473e-06, - "loss": 1.1056, - "num_input_tokens_seen": 27258010, - "step": 1308 - }, - { - "epoch": 0.15739794384657008, - "flos": 17449902482280.0, - "grad_norm": 3.8855389668679576, - "learning_rate": 3.8322705403605125e-06, - "loss": 1.045, - "num_input_tokens_seen": 27278075, - "step": 1309 - }, - { - "epoch": 0.15751818673720916, - "flos": 12571260236160.0, - "grad_norm": 4.135356963500995, - "learning_rate": 3.831958135600568e-06, - "loss": 1.0469, - "num_input_tokens_seen": 27295345, - "step": 1310 - }, - { - "epoch": 0.15763842962784824, - "flos": 12653112584040.0, - "grad_norm": 3.442832278945948, - "learning_rate": 3.831645452934032e-06, - "loss": 1.0314, - "num_input_tokens_seen": 27313495, - "step": 1311 - }, - { - "epoch": 0.15775867251848735, - "flos": 19209327900840.0, - "grad_norm": 2.6498785458928866, - "learning_rate": 3.831332492408336e-06, - "loss": 1.0376, - "num_input_tokens_seen": 27334625, - "step": 1312 - }, - { - "epoch": 0.15787891540912644, - "flos": 13645623477360.0, - "grad_norm": 2.6580046300051214, - "learning_rate": 3.831019254070957e-06, - "loss": 0.9119, - "num_input_tokens_seen": 27352130, - "step": 1313 - }, - { - "epoch": 0.15799915829976552, - "flos": 19417656262560.0, - "grad_norm": 3.364572877346047, - "learning_rate": 3.8307057379694135e-06, - "loss": 1.17, - "num_input_tokens_seen": 27371185, - "step": 1314 - }, - { - "epoch": 0.15811940119040463, - "flos": 14482646973000.0, - "grad_norm": 7.398002055807001, - "learning_rate": 3.830391944151264e-06, - "loss": 1.0438, - "num_input_tokens_seen": 27386785, - "step": 1315 - }, - { - "epoch": 0.1582396440810437, - "flos": 23247144617880.0, - "grad_norm": 4.418266198679362, - "learning_rate": 3.830077872664114e-06, - "loss": 0.9001, - "num_input_tokens_seen": 27407630, - "step": 1316 - }, - { - "epoch": 0.1583598869716828, - "flos": 24110052018240.0, - "grad_norm": 2.9696828882229167, - "learning_rate": 3.829763523555604e-06, - "loss": 0.9691, - "num_input_tokens_seen": 27427750, - "step": 1317 - }, - { - "epoch": 0.15848012986232188, - "flos": 17556534918720.0, - "grad_norm": 3.484933440614075, - "learning_rate": 3.829448896873423e-06, - "loss": 1.0101, - "num_input_tokens_seen": 27446570, - "step": 1318 - }, - { - "epoch": 
0.158600372752961, - "flos": 16062264828720.0, - "grad_norm": 8.095687810977674, - "learning_rate": 3.829133992665299e-06, - "loss": 1.0167, - "num_input_tokens_seen": 27465415, - "step": 1319 - }, - { - "epoch": 0.15872061564360007, - "flos": 19889729521080.0, - "grad_norm": 3.390371427415073, - "learning_rate": 3.828818810979002e-06, - "loss": 1.1096, - "num_input_tokens_seen": 27483465, - "step": 1320 - }, - { - "epoch": 0.15884085853423915, - "flos": 16849053592920.0, - "grad_norm": 3.3922285515872326, - "learning_rate": 3.8285033518623454e-06, - "loss": 1.0306, - "num_input_tokens_seen": 27503435, - "step": 1321 - }, - { - "epoch": 0.15896110142487826, - "flos": 16428932113200.0, - "grad_norm": 6.767056144191695, - "learning_rate": 3.8281876153631845e-06, - "loss": 1.0379, - "num_input_tokens_seen": 27519910, - "step": 1322 - }, - { - "epoch": 0.15908134431551735, - "flos": 10372666961040.0, - "grad_norm": 3.1221749315342, - "learning_rate": 3.827871601529416e-06, - "loss": 0.8679, - "num_input_tokens_seen": 27538150, - "step": 1323 - }, - { - "epoch": 0.15920158720615643, - "flos": 14331298885920.0, - "grad_norm": 6.095472512959926, - "learning_rate": 3.827555310408979e-06, - "loss": 1.0347, - "num_input_tokens_seen": 27557265, - "step": 1324 - }, - { - "epoch": 0.1593218300967955, - "flos": 17660867738160.0, - "grad_norm": 3.2305360577177966, - "learning_rate": 3.827238742049854e-06, - "loss": 1.0525, - "num_input_tokens_seen": 27577280, - "step": 1325 - }, - { - "epoch": 0.15944207298743462, - "flos": 20177671936800.0, - "grad_norm": 7.843866476584761, - "learning_rate": 3.826921896500066e-06, - "loss": 0.7579, - "num_input_tokens_seen": 27598285, - "step": 1326 - }, - { - "epoch": 0.1595623158780737, - "flos": 16322698277040.0, - "grad_norm": 2.9967506613609327, - "learning_rate": 3.826604773807678e-06, - "loss": 1.0025, - "num_input_tokens_seen": 27615980, - "step": 1327 - }, - { - "epoch": 0.1596825587687128, - "flos": 13982942100840.0, - "grad_norm": 4.562271948562646, - "learning_rate": 3.826287374020798e-06, - "loss": 0.9613, - "num_input_tokens_seen": 27630505, - "step": 1328 - }, - { - "epoch": 0.1598028016593519, - "flos": 16087780794720.0, - "grad_norm": 4.157759313519155, - "learning_rate": 3.825969697187575e-06, - "loss": 1.0492, - "num_input_tokens_seen": 27649555, - "step": 1329 - }, - { - "epoch": 0.15992304454999098, - "flos": 14538860708640.0, - "grad_norm": 2.951650584468041, - "learning_rate": 3.8256517433562015e-06, - "loss": 0.9359, - "num_input_tokens_seen": 27667215, - "step": 1330 - }, - { - "epoch": 0.16004328744063007, - "flos": 12522006674640.0, - "grad_norm": 3.491242624419139, - "learning_rate": 3.82533351257491e-06, - "loss": 1.1452, - "num_input_tokens_seen": 27684885, - "step": 1331 - }, - { - "epoch": 0.16016353033126918, - "flos": 17137977178560.0, - "grad_norm": 13.223138031289368, - "learning_rate": 3.825015004891975e-06, - "loss": 1.1195, - "num_input_tokens_seen": 27703345, - "step": 1332 - }, - { - "epoch": 0.16028377322190826, - "flos": 19678917573000.0, - "grad_norm": 8.069786386983424, - "learning_rate": 3.824696220355716e-06, - "loss": 0.9848, - "num_input_tokens_seen": 27724655, - "step": 1333 - }, - { - "epoch": 0.16040401611254734, - "flos": 14883139506240.0, - "grad_norm": 2.3969961799715276, - "learning_rate": 3.824377159014491e-06, - "loss": 1.0206, - "num_input_tokens_seen": 27745270, - "step": 1334 - }, - { - "epoch": 0.16052425900318643, - "flos": 15087757819200.0, - "grad_norm": 3.3655286628161134, - "learning_rate": 
3.824057820916702e-06, - "loss": 1.0759, - "num_input_tokens_seen": 27762195, - "step": 1335 - }, - { - "epoch": 0.16064450189382554, - "flos": 10975416867120.0, - "grad_norm": 3.5916135325266842, - "learning_rate": 3.8237382061107904e-06, - "loss": 0.9506, - "num_input_tokens_seen": 27778635, - "step": 1336 - }, - { - "epoch": 0.16076474478446462, - "flos": 15039086827320.0, - "grad_norm": 6.263930093874553, - "learning_rate": 3.823418314645243e-06, - "loss": 1.0069, - "num_input_tokens_seen": 27797230, - "step": 1337 - }, - { - "epoch": 0.1608849876751037, - "flos": 13017786867120.0, - "grad_norm": 3.3043469553407268, - "learning_rate": 3.823098146568588e-06, - "loss": 0.9789, - "num_input_tokens_seen": 27816655, - "step": 1338 - }, - { - "epoch": 0.1610052305657428, - "flos": 21017424311280.0, - "grad_norm": 3.350319656380083, - "learning_rate": 3.822777701929394e-06, - "loss": 0.9485, - "num_input_tokens_seen": 27838200, - "step": 1339 - }, - { - "epoch": 0.1611254734563819, - "flos": 19077700744920.0, - "grad_norm": 3.2293694873406174, - "learning_rate": 3.8224569807762714e-06, - "loss": 0.9669, - "num_input_tokens_seen": 27857240, - "step": 1340 - }, - { - "epoch": 0.16124571634702098, - "flos": 15931097596200.0, - "grad_norm": 2.5759255940075283, - "learning_rate": 3.822135983157873e-06, - "loss": 1.006, - "num_input_tokens_seen": 27876235, - "step": 1341 - }, - { - "epoch": 0.16136595923766006, - "flos": 7723377082800.0, - "grad_norm": 5.03996782080656, - "learning_rate": 3.821814709122896e-06, - "loss": 1.0705, - "num_input_tokens_seen": 27894005, - "step": 1342 - }, - { - "epoch": 0.16148620212829917, - "flos": 15065308009200.0, - "grad_norm": 3.302177323569094, - "learning_rate": 3.821493158720076e-06, - "loss": 1.0774, - "num_input_tokens_seen": 27912830, - "step": 1343 - }, - { - "epoch": 0.16160644501893826, - "flos": 11861785908960.0, - "grad_norm": 6.603509270552991, - "learning_rate": 3.821171331998191e-06, - "loss": 0.9489, - "num_input_tokens_seen": 27929080, - "step": 1344 - }, - { - "epoch": 0.16172668790957734, - "flos": 46138046881680.0, - "grad_norm": 0.8056345431381857, - "learning_rate": 3.820849229006064e-06, - "loss": 0.8246, - "num_input_tokens_seen": 27996550, - "step": 1345 - }, - { - "epoch": 0.16184693080021645, - "flos": 16533295594200.0, - "grad_norm": 6.080484045235645, - "learning_rate": 3.8205268497925564e-06, - "loss": 0.94, - "num_input_tokens_seen": 28016740, - "step": 1346 - }, - { - "epoch": 0.16196717369085553, - "flos": 12359497779720.0, - "grad_norm": 2.8856823703614274, - "learning_rate": 3.8202041944065725e-06, - "loss": 1.0164, - "num_input_tokens_seen": 28032280, - "step": 1347 - }, - { - "epoch": 0.16208741658149461, - "flos": 16975161668040.0, - "grad_norm": 2.761621454621228, - "learning_rate": 3.819881262897061e-06, - "loss": 0.9683, - "num_input_tokens_seen": 28050135, - "step": 1348 - }, - { - "epoch": 0.1622076594721337, - "flos": 18421373997360.0, - "grad_norm": 3.6998416815213533, - "learning_rate": 3.819558055313008e-06, - "loss": 0.963, - "num_input_tokens_seen": 28070540, - "step": 1349 - }, - { - "epoch": 0.1623279023627728, - "flos": 15298232490120.0, - "grad_norm": 3.2350383575853976, - "learning_rate": 3.819234571703444e-06, - "loss": 0.9971, - "num_input_tokens_seen": 28089085, - "step": 1350 - }, - { - "epoch": 0.1624481452534119, - "flos": 15690170448120.0, - "grad_norm": 3.438921825091666, - "learning_rate": 3.8189108121174435e-06, - "loss": 1.0807, - "num_input_tokens_seen": 28108570, - "step": 1351 - }, - { - "epoch": 
0.16256838814405097, - "flos": 19285538521680.0, - "grad_norm": 2.867106506700468, - "learning_rate": 3.818586776604118e-06, - "loss": 1.0649, - "num_input_tokens_seen": 28128930, - "step": 1352 - }, - { - "epoch": 0.16268863103469008, - "flos": 14278611229680.0, - "grad_norm": 5.2482960070076885, - "learning_rate": 3.818262465212625e-06, - "loss": 0.8535, - "num_input_tokens_seen": 28148775, - "step": 1353 - }, - { - "epoch": 0.16280887392532917, - "flos": 12993957286920.0, - "grad_norm": 4.05143380863636, - "learning_rate": 3.817937877992161e-06, - "loss": 1.0007, - "num_input_tokens_seen": 28165790, - "step": 1354 - }, - { - "epoch": 0.16292911681596825, - "flos": 8352317509200.0, - "grad_norm": 3.7671152545420505, - "learning_rate": 3.817613014991967e-06, - "loss": 1.0848, - "num_input_tokens_seen": 28181650, - "step": 1355 - }, - { - "epoch": 0.16304935970660733, - "flos": 18578486457720.0, - "grad_norm": 5.554095659233728, - "learning_rate": 3.817287876261323e-06, - "loss": 0.9838, - "num_input_tokens_seen": 28201705, - "step": 1356 - }, - { - "epoch": 0.16316960259724644, - "flos": 20913275461200.0, - "grad_norm": 6.547074271654071, - "learning_rate": 3.816962461849553e-06, - "loss": 1.0319, - "num_input_tokens_seen": 28223295, - "step": 1357 - }, - { - "epoch": 0.16328984548788553, - "flos": 14829440018520.0, - "grad_norm": 2.846267424317754, - "learning_rate": 3.8166367718060235e-06, - "loss": 1.0718, - "num_input_tokens_seen": 28242905, - "step": 1358 - }, - { - "epoch": 0.1634100883785246, - "flos": 12784341139680.0, - "grad_norm": 7.855467050363441, - "learning_rate": 3.816310806180139e-06, - "loss": 0.9762, - "num_input_tokens_seen": 28261035, - "step": 1359 - }, - { - "epoch": 0.16353033126916372, - "flos": 17478086004000.0, - "grad_norm": 2.7071393144183915, - "learning_rate": 3.81598456502135e-06, - "loss": 1.0343, - "num_input_tokens_seen": 28280775, - "step": 1360 - }, - { - "epoch": 0.1636505741598028, - "flos": 14113986687120.0, - "grad_norm": 2.3637070453519273, - "learning_rate": 3.8156580483791455e-06, - "loss": 1.0963, - "num_input_tokens_seen": 28295685, - "step": 1361 - }, - { - "epoch": 0.16377081705044189, - "flos": 20230911501120.0, - "grad_norm": 2.9831617195879345, - "learning_rate": 3.815331256303059e-06, - "loss": 1.0033, - "num_input_tokens_seen": 28315435, - "step": 1362 - }, - { - "epoch": 0.163891059941081, - "flos": 15563387818680.0, - "grad_norm": 4.238694806292224, - "learning_rate": 3.815004188842665e-06, - "loss": 0.9918, - "num_input_tokens_seen": 28333195, - "step": 1363 - }, - { - "epoch": 0.16401130283172008, - "flos": 19077486114000.0, - "grad_norm": 2.3977998264812546, - "learning_rate": 3.814676846047578e-06, - "loss": 1.0205, - "num_input_tokens_seen": 28353790, - "step": 1364 - }, - { - "epoch": 0.16413154572235916, - "flos": 23532787416600.0, - "grad_norm": 2.583502876706863, - "learning_rate": 3.8143492279674565e-06, - "loss": 0.9294, - "num_input_tokens_seen": 28376205, - "step": 1365 - }, - { - "epoch": 0.16425178861299825, - "flos": 28652197954920.0, - "grad_norm": 0.9443813182889583, - "learning_rate": 3.8140213346519997e-06, - "loss": 0.8882, - "num_input_tokens_seen": 28426520, - "step": 1366 - }, - { - "epoch": 0.16437203150363736, - "flos": 18107149076640.0, - "grad_norm": 2.738547928431624, - "learning_rate": 3.813693166150948e-06, - "loss": 0.9947, - "num_input_tokens_seen": 28446450, - "step": 1367 - }, - { - "epoch": 0.16449227439427644, - "flos": 16690867977960.0, - "grad_norm": 4.3241686865468045, - "learning_rate": 
3.813364722514086e-06, - "loss": 1.0745, - "num_input_tokens_seen": 28464505, - "step": 1368 - }, - { - "epoch": 0.16461251728491552, - "flos": 9552144932760.0, - "grad_norm": 4.756184216318849, - "learning_rate": 3.8130360037912368e-06, - "loss": 1.0331, - "num_input_tokens_seen": 28480670, - "step": 1369 - }, - { - "epoch": 0.16473276017555463, - "flos": 16350605844720.0, - "grad_norm": 3.748616917185468, - "learning_rate": 3.812707010032268e-06, - "loss": 1.0424, - "num_input_tokens_seen": 28499445, - "step": 1370 - }, - { - "epoch": 0.16485300306619372, - "flos": 17635106479680.0, - "grad_norm": 2.5654485532649964, - "learning_rate": 3.8123777412870863e-06, - "loss": 1.0217, - "num_input_tokens_seen": 28518665, - "step": 1371 - }, - { - "epoch": 0.1649732459568328, - "flos": 14987472325680.0, - "grad_norm": 3.804657922273378, - "learning_rate": 3.812048197605643e-06, - "loss": 1.0153, - "num_input_tokens_seen": 28537280, - "step": 1372 - }, - { - "epoch": 0.16509348884747188, - "flos": 14383863895920.0, - "grad_norm": 4.988035956246272, - "learning_rate": 3.8117183790379277e-06, - "loss": 1.0316, - "num_input_tokens_seen": 28555450, - "step": 1373 - }, - { - "epoch": 0.165213731738111, - "flos": 7749506280000.0, - "grad_norm": 4.506373871728993, - "learning_rate": 3.811388285633976e-06, - "loss": 1.1614, - "num_input_tokens_seen": 28571155, - "step": 1374 - }, - { - "epoch": 0.16533397462875007, - "flos": 21358912906920.0, - "grad_norm": 12.207508740661229, - "learning_rate": 3.811057917443861e-06, - "loss": 0.8501, - "num_input_tokens_seen": 28590140, - "step": 1375 - }, - { - "epoch": 0.16545421751938916, - "flos": 46939399886880.0, - "grad_norm": 0.8636570798520219, - "learning_rate": 3.8107272745177e-06, - "loss": 0.9379, - "num_input_tokens_seen": 28662190, - "step": 1376 - }, - { - "epoch": 0.16557446041002827, - "flos": 15983386652160.0, - "grad_norm": 3.0904822768842006, - "learning_rate": 3.8103963569056513e-06, - "loss": 1.0184, - "num_input_tokens_seen": 28681045, - "step": 1377 - }, - { - "epoch": 0.16569470330066735, - "flos": 17499278690040.0, - "grad_norm": 4.325462651727473, - "learning_rate": 3.8100651646579146e-06, - "loss": 1.1101, - "num_input_tokens_seen": 28699975, - "step": 1378 - }, - { - "epoch": 0.16581494619130643, - "flos": 10602341316600.0, - "grad_norm": 3.0474594729375446, - "learning_rate": 3.8097336978247317e-06, - "loss": 1.1524, - "num_input_tokens_seen": 28716400, - "step": 1379 - }, - { - "epoch": 0.16593518908194552, - "flos": 12338151785880.0, - "grad_norm": 6.366248013510882, - "learning_rate": 3.8094019564563854e-06, - "loss": 1.109, - "num_input_tokens_seen": 28733050, - "step": 1380 - }, - { - "epoch": 0.16605543197258463, - "flos": 14488717961880.0, - "grad_norm": 6.772405948429426, - "learning_rate": 3.809069940603201e-06, - "loss": 0.9947, - "num_input_tokens_seen": 28750725, - "step": 1381 - }, - { - "epoch": 0.1661756748632237, - "flos": 10030779765120.0, - "grad_norm": 3.715024348059814, - "learning_rate": 3.8087376503155452e-06, - "loss": 1.0028, - "num_input_tokens_seen": 28767930, - "step": 1382 - }, - { - "epoch": 0.1662959177538628, - "flos": 47312940908880.0, - "grad_norm": 0.95390391181578, - "learning_rate": 3.808405085643826e-06, - "loss": 0.8409, - "num_input_tokens_seen": 28832530, - "step": 1383 - }, - { - "epoch": 0.1664161606445019, - "flos": 14724340660080.0, - "grad_norm": 8.196717642736395, - "learning_rate": 3.8080722466384925e-06, - "loss": 1.1215, - "num_input_tokens_seen": 28850100, - "step": 1384 - }, - { - "epoch": 
0.166536403535141, - "flos": 17972517087840.0, - "grad_norm": 4.155425714197155, - "learning_rate": 3.8077391333500376e-06, - "loss": 0.9317, - "num_input_tokens_seen": 28868960, - "step": 1385 - }, - { - "epoch": 0.16665664642578007, - "flos": 18107118415080.0, - "grad_norm": 2.3401290388474374, - "learning_rate": 3.8074057458289934e-06, - "loss": 0.9872, - "num_input_tokens_seen": 28889370, - "step": 1386 - }, - { - "epoch": 0.16677688931641918, - "flos": 15773862489600.0, - "grad_norm": 3.0287959596529377, - "learning_rate": 3.807072084125934e-06, - "loss": 1.0452, - "num_input_tokens_seen": 28910940, - "step": 1387 - }, - { - "epoch": 0.16689713220705826, - "flos": 11997215098320.0, - "grad_norm": 3.650416750174036, - "learning_rate": 3.806738148291477e-06, - "loss": 1.0112, - "num_input_tokens_seen": 28927485, - "step": 1388 - }, - { - "epoch": 0.16701737509769735, - "flos": 25866319296120.0, - "grad_norm": 2.522766307463205, - "learning_rate": 3.8064039383762793e-06, - "loss": 0.9393, - "num_input_tokens_seen": 28949570, - "step": 1389 - }, - { - "epoch": 0.16713761798833643, - "flos": 16534123456320.0, - "grad_norm": 4.1333466673195, - "learning_rate": 3.8060694544310396e-06, - "loss": 1.0141, - "num_input_tokens_seen": 28967800, - "step": 1390 - }, - { - "epoch": 0.16725786087897554, - "flos": 18003092211240.0, - "grad_norm": 3.0294667140393154, - "learning_rate": 3.8057346965065006e-06, - "loss": 1.0131, - "num_input_tokens_seen": 28988750, - "step": 1391 - }, - { - "epoch": 0.16737810376961462, - "flos": 22695120028200.0, - "grad_norm": 6.027920925864619, - "learning_rate": 3.805399664653443e-06, - "loss": 1.0805, - "num_input_tokens_seen": 29010610, - "step": 1392 - }, - { - "epoch": 0.1674983466602537, - "flos": 19913743070640.0, - "grad_norm": 3.4794747831009425, - "learning_rate": 3.805064358922692e-06, - "loss": 0.9718, - "num_input_tokens_seen": 29028620, - "step": 1393 - }, - { - "epoch": 0.16761858955089282, - "flos": 15458963014560.0, - "grad_norm": 3.4181199743331927, - "learning_rate": 3.8047287793651136e-06, - "loss": 1.0215, - "num_input_tokens_seen": 29049785, - "step": 1394 - }, - { - "epoch": 0.1677388324415319, - "flos": 16927073245800.0, - "grad_norm": 3.9339377623125946, - "learning_rate": 3.8043929260316137e-06, - "loss": 1.1121, - "num_input_tokens_seen": 29067660, - "step": 1395 - }, - { - "epoch": 0.16785907533217098, - "flos": 14593847981880.0, - "grad_norm": 4.232916887079736, - "learning_rate": 3.8040567989731417e-06, - "loss": 1.0567, - "num_input_tokens_seen": 29085325, - "step": 1396 - }, - { - "epoch": 0.16797931822281006, - "flos": 11080056302160.0, - "grad_norm": 3.530552617980658, - "learning_rate": 3.8037203982406876e-06, - "loss": 1.0399, - "num_input_tokens_seen": 29103210, - "step": 1397 - }, - { - "epoch": 0.16809956111344918, - "flos": 11705777264760.0, - "grad_norm": 3.5684670785435886, - "learning_rate": 3.8033837238852835e-06, - "loss": 0.9627, - "num_input_tokens_seen": 29119630, - "step": 1398 - }, - { - "epoch": 0.16821980400408826, - "flos": 16533632871360.0, - "grad_norm": 16.98732329803702, - "learning_rate": 3.8030467759580017e-06, - "loss": 0.9214, - "num_input_tokens_seen": 29140270, - "step": 1399 - }, - { - "epoch": 0.16834004689472734, - "flos": 14748599502120.0, - "grad_norm": 6.695079180681563, - "learning_rate": 3.802709554509958e-06, - "loss": 1.1052, - "num_input_tokens_seen": 29157790, - "step": 1400 - }, - { - "epoch": 0.16846028978536645, - "flos": 18998086690920.0, - "grad_norm": 6.516328988590803, - "learning_rate": 
3.8023720595923083e-06, - "loss": 1.0126, - "num_input_tokens_seen": 29176765, - "step": 1401 - }, - { - "epoch": 0.16858053267600553, - "flos": 13360808540760.0, - "grad_norm": 2.6515939633104932, - "learning_rate": 3.80203429125625e-06, - "loss": 1.1086, - "num_input_tokens_seen": 29194660, - "step": 1402 - }, - { - "epoch": 0.16870077556664462, - "flos": 19757795749560.0, - "grad_norm": 4.464178988679637, - "learning_rate": 3.8016962495530225e-06, - "loss": 0.9306, - "num_input_tokens_seen": 29213570, - "step": 1403 - }, - { - "epoch": 0.1688210184572837, - "flos": 9685918397880.0, - "grad_norm": 14.123108863826413, - "learning_rate": 3.8013579345339063e-06, - "loss": 0.9872, - "num_input_tokens_seen": 29228155, - "step": 1404 - }, - { - "epoch": 0.1689412613479228, - "flos": 18841096876800.0, - "grad_norm": 3.343388903363002, - "learning_rate": 3.801019346250224e-06, - "loss": 0.9237, - "num_input_tokens_seen": 29248020, - "step": 1405 - }, - { - "epoch": 0.1690615042385619, - "flos": 15010167428160.0, - "grad_norm": 4.290170106160701, - "learning_rate": 3.8006804847533395e-06, - "loss": 1.0528, - "num_input_tokens_seen": 29267255, - "step": 1406 - }, - { - "epoch": 0.16918174712920098, - "flos": 14802421636080.0, - "grad_norm": 4.985691960862933, - "learning_rate": 3.8003413500946556e-06, - "loss": 1.0728, - "num_input_tokens_seen": 29287085, - "step": 1407 - }, - { - "epoch": 0.1693019900198401, - "flos": 12024141496080.0, - "grad_norm": 5.34887770530406, - "learning_rate": 3.8000019423256216e-06, - "loss": 1.0476, - "num_input_tokens_seen": 29304570, - "step": 1408 - }, - { - "epoch": 0.16942223291047917, - "flos": 19076872882800.0, - "grad_norm": 3.314978050515996, - "learning_rate": 3.7996622614977234e-06, - "loss": 1.1064, - "num_input_tokens_seen": 29325480, - "step": 1409 - }, - { - "epoch": 0.16954247580111825, - "flos": 13173672865080.0, - "grad_norm": 3.405357033808637, - "learning_rate": 3.799322307662492e-06, - "loss": 1.0277, - "num_input_tokens_seen": 29343020, - "step": 1410 - }, - { - "epoch": 0.16966271869175734, - "flos": 9867320361840.0, - "grad_norm": 3.281646123317766, - "learning_rate": 3.798982080871496e-06, - "loss": 1.0755, - "num_input_tokens_seen": 29357880, - "step": 1411 - }, - { - "epoch": 0.16978296158239645, - "flos": 26759893804560.0, - "grad_norm": 3.0352044807441225, - "learning_rate": 3.798641581176349e-06, - "loss": 0.9034, - "num_input_tokens_seen": 29379880, - "step": 1412 - }, - { - "epoch": 0.16990320447303553, - "flos": 20177886567720.0, - "grad_norm": 3.856224472975364, - "learning_rate": 3.7983008086287044e-06, - "loss": 0.9692, - "num_input_tokens_seen": 29400920, - "step": 1413 - }, - { - "epoch": 0.1700234473636746, - "flos": 14326668990360.0, - "grad_norm": 3.1160240966100208, - "learning_rate": 3.797959763280257e-06, - "loss": 1.0161, - "num_input_tokens_seen": 29419325, - "step": 1414 - }, - { - "epoch": 0.17014369025431372, - "flos": 17683286886600.0, - "grad_norm": 3.473497321076607, - "learning_rate": 3.797618445182743e-06, - "loss": 1.014, - "num_input_tokens_seen": 29440440, - "step": 1415 - }, - { - "epoch": 0.1702639331449528, - "flos": 11652568362000.0, - "grad_norm": 3.2114693973799, - "learning_rate": 3.79727685438794e-06, - "loss": 1.0658, - "num_input_tokens_seen": 29454350, - "step": 1416 - }, - { - "epoch": 0.1703841760355919, - "flos": 37559090511360.0, - "grad_norm": 0.9259893770906231, - "learning_rate": 3.796934990947667e-06, - "loss": 0.8669, - "num_input_tokens_seen": 29515755, - "step": 1417 - }, - { - "epoch": 
0.170504418926231, - "flos": 35303639607840.0, - "grad_norm": 0.9161250978298814, - "learning_rate": 3.7965928549137854e-06, - "loss": 0.8893, - "num_input_tokens_seen": 29572290, - "step": 1418 - }, - { - "epoch": 0.17062466181687008, - "flos": 18395919354480.0, - "grad_norm": 3.757868332160597, - "learning_rate": 3.7962504463381953e-06, - "loss": 0.9798, - "num_input_tokens_seen": 29593500, - "step": 1419 - }, - { - "epoch": 0.17074490470750917, - "flos": 14882464951920.0, - "grad_norm": 2.8159243611884484, - "learning_rate": 3.7959077652728412e-06, - "loss": 1.0135, - "num_input_tokens_seen": 29611675, - "step": 1420 - }, - { - "epoch": 0.17086514759814825, - "flos": 14881913043840.0, - "grad_norm": 3.6374639450331347, - "learning_rate": 3.795564811769707e-06, - "loss": 0.9971, - "num_input_tokens_seen": 29629750, - "step": 1421 - }, - { - "epoch": 0.17098539048878736, - "flos": 20282066079360.0, - "grad_norm": 3.4430343465872926, - "learning_rate": 3.795221585880818e-06, - "loss": 1.0066, - "num_input_tokens_seen": 29650150, - "step": 1422 - }, - { - "epoch": 0.17110563337942644, - "flos": 11525908378800.0, - "grad_norm": 5.086582500383054, - "learning_rate": 3.794878087658242e-06, - "loss": 1.1419, - "num_input_tokens_seen": 29667640, - "step": 1423 - }, - { - "epoch": 0.17122587627006552, - "flos": 21144053632920.0, - "grad_norm": 4.698714409176091, - "learning_rate": 3.7945343171540873e-06, - "loss": 1.0104, - "num_input_tokens_seen": 29688235, - "step": 1424 - }, - { - "epoch": 0.17134611916070464, - "flos": 18028792146600.0, - "grad_norm": 5.0620736778561355, - "learning_rate": 3.7941902744205033e-06, - "loss": 1.0118, - "num_input_tokens_seen": 29708990, - "step": 1425 - }, - { - "epoch": 0.17146636205134372, - "flos": 9846802230120.0, - "grad_norm": 3.2926453853593203, - "learning_rate": 3.7938459595096817e-06, - "loss": 1.0689, - "num_input_tokens_seen": 29727255, - "step": 1426 - }, - { - "epoch": 0.1715866049419828, - "flos": 17005951422360.0, - "grad_norm": 3.1464699780959426, - "learning_rate": 3.7935013724738545e-06, - "loss": 1.0774, - "num_input_tokens_seen": 29747475, - "step": 1427 - }, - { - "epoch": 0.17170684783262188, - "flos": 16139088680760.0, - "grad_norm": 3.967932549341721, - "learning_rate": 3.7931565133652945e-06, - "loss": 1.0006, - "num_input_tokens_seen": 29767270, - "step": 1428 - }, - { - "epoch": 0.171827090723261, - "flos": 18944755141920.0, - "grad_norm": 5.03314760804413, - "learning_rate": 3.792811382236317e-06, - "loss": 0.8968, - "num_input_tokens_seen": 29785500, - "step": 1429 - }, - { - "epoch": 0.17194733361390008, - "flos": 20048467044120.0, - "grad_norm": 4.827513622104701, - "learning_rate": 3.792465979139279e-06, - "loss": 1.0013, - "num_input_tokens_seen": 29807825, - "step": 1430 - }, - { - "epoch": 0.17206757650453916, - "flos": 46917139594320.0, - "grad_norm": 1.051575711352948, - "learning_rate": 3.792120304126576e-06, - "loss": 0.9724, - "num_input_tokens_seen": 29870920, - "step": 1431 - }, - { - "epoch": 0.17218781939517827, - "flos": 15826090222440.0, - "grad_norm": 3.77818539857507, - "learning_rate": 3.791774357250649e-06, - "loss": 1.0685, - "num_input_tokens_seen": 29889470, - "step": 1432 - }, - { - "epoch": 0.17230806228581735, - "flos": 9978797324760.0, - "grad_norm": 4.000764058572833, - "learning_rate": 3.7914281385639757e-06, - "loss": 1.014, - "num_input_tokens_seen": 29907065, - "step": 1433 - }, - { - "epoch": 0.17242830517645644, - "flos": 14698763370960.0, - "grad_norm": 7.166221368247737, - "learning_rate": 
3.7910816481190784e-06, - "loss": 1.0054, - "num_input_tokens_seen": 29926600, - "step": 1434 - }, - { - "epoch": 0.17254854806709552, - "flos": 21935656262040.0, - "grad_norm": 3.7448468723746475, - "learning_rate": 3.7907348859685193e-06, - "loss": 0.9783, - "num_input_tokens_seen": 29948025, - "step": 1435 - }, - { - "epoch": 0.17266879095773463, - "flos": 18945981604320.0, - "grad_norm": 6.169838407555079, - "learning_rate": 3.790387852164902e-06, - "loss": 1.028, - "num_input_tokens_seen": 29968475, - "step": 1436 - }, - { - "epoch": 0.1727890338483737, - "flos": 14383495957200.0, - "grad_norm": 3.79290865723831, - "learning_rate": 3.7900405467608707e-06, - "loss": 0.994, - "num_input_tokens_seen": 29987740, - "step": 1437 - }, - { - "epoch": 0.1729092767390128, - "flos": 12883062893640.0, - "grad_norm": 6.104522416963426, - "learning_rate": 3.7896929698091114e-06, - "loss": 0.9957, - "num_input_tokens_seen": 30000275, - "step": 1438 - }, - { - "epoch": 0.1730295196296519, - "flos": 19049885161920.0, - "grad_norm": 7.033280143114818, - "learning_rate": 3.7893451213623518e-06, - "loss": 0.9254, - "num_input_tokens_seen": 30017225, - "step": 1439 - }, - { - "epoch": 0.173149762520291, - "flos": 16953754351080.0, - "grad_norm": 7.208870604967877, - "learning_rate": 3.7889970014733606e-06, - "loss": 1.0476, - "num_input_tokens_seen": 30036050, - "step": 1440 - }, - { - "epoch": 0.17327000541093007, - "flos": 16612848325080.0, - "grad_norm": 2.446499764815677, - "learning_rate": 3.7886486101949463e-06, - "loss": 0.9963, - "num_input_tokens_seen": 30056950, - "step": 1441 - }, - { - "epoch": 0.17339024830156918, - "flos": 12913546032360.0, - "grad_norm": 5.8688816108611945, - "learning_rate": 3.7882999475799594e-06, - "loss": 1.1026, - "num_input_tokens_seen": 30074705, - "step": 1442 - }, - { - "epoch": 0.17351049119220827, - "flos": 16586903097240.0, - "grad_norm": 4.32540554725142, - "learning_rate": 3.787951013681293e-06, - "loss": 1.0478, - "num_input_tokens_seen": 30092470, - "step": 1443 - }, - { - "epoch": 0.17363073408284735, - "flos": 16925356198440.0, - "grad_norm": 4.296792879186855, - "learning_rate": 3.787601808551879e-06, - "loss": 1.0127, - "num_input_tokens_seen": 30112005, - "step": 1444 - }, - { - "epoch": 0.17375097697348643, - "flos": 13125124519440.0, - "grad_norm": 4.7654076698651675, - "learning_rate": 3.7872523322446926e-06, - "loss": 1.0499, - "num_input_tokens_seen": 30130610, - "step": 1445 - }, - { - "epoch": 0.17387121986412554, - "flos": 27759824795400.0, - "grad_norm": 4.163825681440077, - "learning_rate": 3.7869025848127478e-06, - "loss": 0.8228, - "num_input_tokens_seen": 30154525, - "step": 1446 - }, - { - "epoch": 0.17399146275476463, - "flos": 14462128841280.0, - "grad_norm": 7.290007862144385, - "learning_rate": 3.786552566309102e-06, - "loss": 1.0128, - "num_input_tokens_seen": 30172455, - "step": 1447 - }, - { - "epoch": 0.1741117056454037, - "flos": 13590544219440.0, - "grad_norm": 6.279159881378875, - "learning_rate": 3.7862022767868517e-06, - "loss": 1.0961, - "num_input_tokens_seen": 30189765, - "step": 1448 - }, - { - "epoch": 0.17423194853604282, - "flos": 18054522743520.0, - "grad_norm": 10.42680532800649, - "learning_rate": 3.7858517162991367e-06, - "loss": 1.0717, - "num_input_tokens_seen": 30209560, - "step": 1449 - }, - { - "epoch": 0.1743521914266819, - "flos": 17923416834120.0, - "grad_norm": 17.46641587715551, - "learning_rate": 3.7855008848991363e-06, - "loss": 0.84, - "num_input_tokens_seen": 30227485, - "step": 1450 - }, - { - 
"epoch": 0.17447243431732098, - "flos": 18264629475720.0, - "grad_norm": 2.920365703622666, - "learning_rate": 3.7851497826400714e-06, - "loss": 1.0084, - "num_input_tokens_seen": 30247345, - "step": 1451 - }, - { - "epoch": 0.17459267720796007, - "flos": 25893429663240.0, - "grad_norm": 16.740152225508677, - "learning_rate": 3.7847984095752034e-06, - "loss": 0.9855, - "num_input_tokens_seen": 30270520, - "step": 1452 - }, - { - "epoch": 0.17471292009859918, - "flos": 14200131653400.0, - "grad_norm": 3.9578181818316276, - "learning_rate": 3.784446765757836e-06, - "loss": 1.02, - "num_input_tokens_seen": 30288885, - "step": 1453 - }, - { - "epoch": 0.17483316298923826, - "flos": 19811709868200.0, - "grad_norm": 8.62907114557814, - "learning_rate": 3.7840948512413133e-06, - "loss": 1.0107, - "num_input_tokens_seen": 30306190, - "step": 1454 - }, - { - "epoch": 0.17495340587987734, - "flos": 31453270730160.0, - "grad_norm": 4.601479184703095, - "learning_rate": 3.7837426660790196e-06, - "loss": 0.9997, - "num_input_tokens_seen": 30327325, - "step": 1455 - }, - { - "epoch": 0.17507364877051645, - "flos": 14828642817960.0, - "grad_norm": 4.6557233883860665, - "learning_rate": 3.783390210324382e-06, - "loss": 1.0426, - "num_input_tokens_seen": 30346770, - "step": 1456 - }, - { - "epoch": 0.17519389166115554, - "flos": 17582572131240.0, - "grad_norm": 5.217145661777249, - "learning_rate": 3.7830374840308676e-06, - "loss": 0.9494, - "num_input_tokens_seen": 30366645, - "step": 1457 - }, - { - "epoch": 0.17531413455179462, - "flos": 16920787626000.0, - "grad_norm": 3.603084085567993, - "learning_rate": 3.7826844872519842e-06, - "loss": 1.0367, - "num_input_tokens_seen": 30384220, - "step": 1458 - }, - { - "epoch": 0.1754343774424337, - "flos": 17478300634920.0, - "grad_norm": 4.593211447358644, - "learning_rate": 3.782331220041282e-06, - "loss": 0.9536, - "num_input_tokens_seen": 30404005, - "step": 1459 - }, - { - "epoch": 0.17555462033307281, - "flos": 12837243426840.0, - "grad_norm": 4.648166885833551, - "learning_rate": 3.7819776824523504e-06, - "loss": 1.061, - "num_input_tokens_seen": 30421590, - "step": 1460 - }, - { - "epoch": 0.1756748632237119, - "flos": 20204598334560.0, - "grad_norm": 3.8948321900559835, - "learning_rate": 3.7816238745388213e-06, - "loss": 1.068, - "num_input_tokens_seen": 30440855, - "step": 1461 - }, - { - "epoch": 0.17579510611435098, - "flos": 18311430112440.0, - "grad_norm": 3.9021536138301545, - "learning_rate": 3.781269796354367e-06, - "loss": 1.1038, - "num_input_tokens_seen": 30460195, - "step": 1462 - }, - { - "epoch": 0.1759153490049901, - "flos": 13177413575400.0, - "grad_norm": 3.1213209239034616, - "learning_rate": 3.7809154479527006e-06, - "loss": 1.1002, - "num_input_tokens_seen": 30479120, - "step": 1463 - }, - { - "epoch": 0.17603559189562917, - "flos": 13199158169520.0, - "grad_norm": 3.8987058497476372, - "learning_rate": 3.780560829387577e-06, - "loss": 1.0638, - "num_input_tokens_seen": 30497340, - "step": 1464 - }, - { - "epoch": 0.17615583478626826, - "flos": 43323611214360.0, - "grad_norm": 0.8758932731097472, - "learning_rate": 3.7802059407127915e-06, - "loss": 0.8437, - "num_input_tokens_seen": 30555610, - "step": 1465 - }, - { - "epoch": 0.17627607767690734, - "flos": 16790110978440.0, - "grad_norm": 4.977582650295959, - "learning_rate": 3.7798507819821797e-06, - "loss": 1.0835, - "num_input_tokens_seen": 30572455, - "step": 1466 - }, - { - "epoch": 0.17639632056754645, - "flos": 12495325569360.0, - "grad_norm": 3.7489831181489754, - 
"learning_rate": 3.7794953532496197e-06, - "loss": 1.028, - "num_input_tokens_seen": 30588080, - "step": 1467 - }, - { - "epoch": 0.17651656345818553, - "flos": 41460246028560.0, - "grad_norm": 1.1364433767778013, - "learning_rate": 3.7791396545690295e-06, - "loss": 0.8689, - "num_input_tokens_seen": 30649035, - "step": 1468 - }, - { - "epoch": 0.17663680634882462, - "flos": 16297458265080.0, - "grad_norm": 3.649638793003627, - "learning_rate": 3.7787836859943685e-06, - "loss": 1.0228, - "num_input_tokens_seen": 30667480, - "step": 1469 - }, - { - "epoch": 0.17675704923946373, - "flos": 16087995425640.0, - "grad_norm": 7.3100393997153414, - "learning_rate": 3.7784274475796363e-06, - "loss": 1.011, - "num_input_tokens_seen": 30685830, - "step": 1470 - }, - { - "epoch": 0.1768772921301028, - "flos": 19313936674320.0, - "grad_norm": 3.979314245446528, - "learning_rate": 3.7780709393788745e-06, - "loss": 0.9918, - "num_input_tokens_seen": 30706025, - "step": 1471 - }, - { - "epoch": 0.1769975350207419, - "flos": 13597197777960.0, - "grad_norm": 4.524244483554426, - "learning_rate": 3.777714161446165e-06, - "loss": 0.9793, - "num_input_tokens_seen": 30725450, - "step": 1472 - }, - { - "epoch": 0.177117777911381, - "flos": 25787625088920.0, - "grad_norm": 3.6591925884230148, - "learning_rate": 3.7773571138356304e-06, - "loss": 0.8955, - "num_input_tokens_seen": 30745340, - "step": 1473 - }, - { - "epoch": 0.17723802080202009, - "flos": 15694585712760.0, - "grad_norm": 4.106614262465321, - "learning_rate": 3.776999796601435e-06, - "loss": 1.117, - "num_input_tokens_seen": 30763820, - "step": 1474 - }, - { - "epoch": 0.17735826369265917, - "flos": 21538383192600.0, - "grad_norm": 5.777223576758941, - "learning_rate": 3.776642209797783e-06, - "loss": 0.9369, - "num_input_tokens_seen": 30785370, - "step": 1475 - }, - { - "epoch": 0.17747850658329825, - "flos": 15196567226400.0, - "grad_norm": 9.43246386220052, - "learning_rate": 3.7762843534789205e-06, - "loss": 0.995, - "num_input_tokens_seen": 30803840, - "step": 1476 - }, - { - "epoch": 0.17759874947393736, - "flos": 12024202819200.0, - "grad_norm": 3.412946645271033, - "learning_rate": 3.7759262276991343e-06, - "loss": 1.0992, - "num_input_tokens_seen": 30821170, - "step": 1477 - }, - { - "epoch": 0.17771899236457644, - "flos": 8116449518520.0, - "grad_norm": 6.520527630384928, - "learning_rate": 3.7755678325127506e-06, - "loss": 1.0254, - "num_input_tokens_seen": 30838570, - "step": 1478 - }, - { - "epoch": 0.17783923525521553, - "flos": 13335384559440.0, - "grad_norm": 9.440838558000166, - "learning_rate": 3.7752091679741393e-06, - "loss": 0.9738, - "num_input_tokens_seen": 30856080, - "step": 1479 - }, - { - "epoch": 0.17795947814585464, - "flos": 21672279303960.0, - "grad_norm": 6.1109997864418695, - "learning_rate": 3.774850234137708e-06, - "loss": 0.9949, - "num_input_tokens_seen": 30873095, - "step": 1480 - }, - { - "epoch": 0.17807972103649372, - "flos": 17346428186520.0, - "grad_norm": 3.6965425472445013, - "learning_rate": 3.7744910310579076e-06, - "loss": 1.0497, - "num_input_tokens_seen": 30891740, - "step": 1481 - }, - { - "epoch": 0.1781999639271328, - "flos": 14408674646040.0, - "grad_norm": 7.20758170323369, - "learning_rate": 3.774131558789229e-06, - "loss": 1.081, - "num_input_tokens_seen": 30910790, - "step": 1482 - }, - { - "epoch": 0.1783202068177719, - "flos": 11263113990360.0, - "grad_norm": 6.106788497497996, - "learning_rate": 3.773771817386203e-06, - "loss": 0.9205, - "num_input_tokens_seen": 30927840, - "step": 1483 - 
}, - { - "epoch": 0.178440449708411, - "flos": 14646229022520.0, - "grad_norm": 3.537546131925073, - "learning_rate": 3.773411806903403e-06, - "loss": 1.0223, - "num_input_tokens_seen": 30946640, - "step": 1484 - }, - { - "epoch": 0.17856069259905008, - "flos": 15403423833240.0, - "grad_norm": 5.451981301592572, - "learning_rate": 3.7730515273954415e-06, - "loss": 1.1605, - "num_input_tokens_seen": 30964970, - "step": 1485 - }, - { - "epoch": 0.17868093548968916, - "flos": 18915253173120.0, - "grad_norm": 3.893233684762568, - "learning_rate": 3.772690978916973e-06, - "loss": 1.0605, - "num_input_tokens_seen": 30984445, - "step": 1486 - }, - { - "epoch": 0.17880117838032827, - "flos": 13171373248080.0, - "grad_norm": 4.0607143292341386, - "learning_rate": 3.772330161522693e-06, - "loss": 1.0928, - "num_input_tokens_seen": 31002075, - "step": 1487 - }, - { - "epoch": 0.17892142127096736, - "flos": 18893079317160.0, - "grad_norm": 3.673789057100554, - "learning_rate": 3.7719690752673365e-06, - "loss": 1.0282, - "num_input_tokens_seen": 31022590, - "step": 1488 - }, - { - "epoch": 0.17904166416160644, - "flos": 16974517775280.0, - "grad_norm": 5.228368062390519, - "learning_rate": 3.7716077202056796e-06, - "loss": 1.0062, - "num_input_tokens_seen": 31040785, - "step": 1489 - }, - { - "epoch": 0.17916190705224552, - "flos": 13569934103040.0, - "grad_norm": 3.842998151982717, - "learning_rate": 3.7712460963925404e-06, - "loss": 1.1583, - "num_input_tokens_seen": 31056445, - "step": 1490 - }, - { - "epoch": 0.17928214994288463, - "flos": 17894405450280.0, - "grad_norm": 5.51230700927054, - "learning_rate": 3.7708842038827775e-06, - "loss": 0.9819, - "num_input_tokens_seen": 31075125, - "step": 1491 - }, - { - "epoch": 0.17940239283352372, - "flos": 15906194861400.0, - "grad_norm": 2.563614579617925, - "learning_rate": 3.770522042731288e-06, - "loss": 1.0821, - "num_input_tokens_seen": 31096740, - "step": 1492 - }, - { - "epoch": 0.1795226357241628, - "flos": 16475702088360.0, - "grad_norm": 3.2857571119277798, - "learning_rate": 3.7701596129930122e-06, - "loss": 1.111, - "num_input_tokens_seen": 31115185, - "step": 1493 - }, - { - "epoch": 0.1796428786148019, - "flos": 15694432404960.0, - "grad_norm": 2.669395411615879, - "learning_rate": 3.7697969147229315e-06, - "loss": 0.9563, - "num_input_tokens_seen": 31133065, - "step": 1494 - }, - { - "epoch": 0.179763121505441, - "flos": 15144124862640.0, - "grad_norm": 3.7565575802209694, - "learning_rate": 3.7694339479760647e-06, - "loss": 1.0883, - "num_input_tokens_seen": 31151815, - "step": 1495 - }, - { - "epoch": 0.17988336439608008, - "flos": 48808682753760.0, - "grad_norm": 0.7807352277449635, - "learning_rate": 3.769070712807476e-06, - "loss": 0.8364, - "num_input_tokens_seen": 31213565, - "step": 1496 - }, - { - "epoch": 0.18000360728671919, - "flos": 15590038262400.0, - "grad_norm": 3.113213857881007, - "learning_rate": 3.768707209272266e-06, - "loss": 1.0084, - "num_input_tokens_seen": 31233415, - "step": 1497 - }, - { - "epoch": 0.18012385017735827, - "flos": 13463209681920.0, - "grad_norm": 4.657061614916839, - "learning_rate": 3.768343437425579e-06, - "loss": 0.9885, - "num_input_tokens_seen": 31251705, - "step": 1498 - }, - { - "epoch": 0.18024409306799735, - "flos": 14010083129520.0, - "grad_norm": 2.8376792414176855, - "learning_rate": 3.7679793973225987e-06, - "loss": 1.0774, - "num_input_tokens_seen": 31267235, - "step": 1499 - }, - { - "epoch": 0.18036433595863643, - "flos": 48138828710160.0, - "grad_norm": 0.8615719888999627, - 
"learning_rate": 3.767615089018549e-06, - "loss": 0.8688, - "num_input_tokens_seen": 31329300, - "step": 1500 - }, - { - "epoch": 0.18048457884927555, - "flos": 12884473325400.0, - "grad_norm": 5.311367856731586, - "learning_rate": 3.7672505125686966e-06, - "loss": 1.0793, - "num_input_tokens_seen": 31345385, - "step": 1501 - }, - { - "epoch": 0.18060482173991463, - "flos": 11183377290120.0, - "grad_norm": 31.162085320521115, - "learning_rate": 3.7668856680283455e-06, - "loss": 1.0733, - "num_input_tokens_seen": 31362130, - "step": 1502 - }, - { - "epoch": 0.1807250646305537, - "flos": 13176831005760.0, - "grad_norm": 3.5847185843237943, - "learning_rate": 3.7665205554528437e-06, - "loss": 1.0459, - "num_input_tokens_seen": 31381205, - "step": 1503 - }, - { - "epoch": 0.18084530752119282, - "flos": 16455398587560.0, - "grad_norm": 6.289586849744466, - "learning_rate": 3.7661551748975782e-06, - "loss": 0.9735, - "num_input_tokens_seen": 31399100, - "step": 1504 - }, - { - "epoch": 0.1809655504118319, - "flos": 42800965947240.0, - "grad_norm": 0.8034902758719084, - "learning_rate": 3.7657895264179772e-06, - "loss": 0.8474, - "num_input_tokens_seen": 31454795, - "step": 1505 - }, - { - "epoch": 0.181085793302471, - "flos": 31585725748200.0, - "grad_norm": 2.130055192760634, - "learning_rate": 3.765423610069509e-06, - "loss": 0.974, - "num_input_tokens_seen": 31479905, - "step": 1506 - }, - { - "epoch": 0.18120603619311007, - "flos": 24899477676600.0, - "grad_norm": 2.9016998110621413, - "learning_rate": 3.765057425907683e-06, - "loss": 0.9539, - "num_input_tokens_seen": 31501085, - "step": 1507 - }, - { - "epoch": 0.18132627908374918, - "flos": 15274985479560.0, - "grad_norm": 11.909169077037607, - "learning_rate": 3.764690973988048e-06, - "loss": 1.01, - "num_input_tokens_seen": 31521145, - "step": 1508 - }, - { - "epoch": 0.18144652197438826, - "flos": 20701482343200.0, - "grad_norm": 2.616253495448694, - "learning_rate": 3.7643242543661967e-06, - "loss": 0.973, - "num_input_tokens_seen": 31543525, - "step": 1509 - }, - { - "epoch": 0.18156676486502735, - "flos": 49177833624600.0, - "grad_norm": 0.856926916373557, - "learning_rate": 3.7639572670977573e-06, - "loss": 0.8749, - "num_input_tokens_seen": 31598740, - "step": 1510 - }, - { - "epoch": 0.18168700775566646, - "flos": 18843427155360.0, - "grad_norm": 3.8856132123528124, - "learning_rate": 3.7635900122384042e-06, - "loss": 0.9954, - "num_input_tokens_seen": 31621455, - "step": 1511 - }, - { - "epoch": 0.18180725064630554, - "flos": 10601881393200.0, - "grad_norm": 4.8518329735394845, - "learning_rate": 3.7632224898438477e-06, - "loss": 1.0916, - "num_input_tokens_seen": 31637650, - "step": 1512 - }, - { - "epoch": 0.18192749353694462, - "flos": 13963834400880.0, - "grad_norm": 2.9579106060187557, - "learning_rate": 3.762854699969842e-06, - "loss": 1.0254, - "num_input_tokens_seen": 31657880, - "step": 1513 - }, - { - "epoch": 0.1820477364275837, - "flos": 14696954338920.0, - "grad_norm": 2.6855992537505675, - "learning_rate": 3.762486642672179e-06, - "loss": 0.9549, - "num_input_tokens_seen": 31674540, - "step": 1514 - }, - { - "epoch": 0.18216797931822282, - "flos": 12128596961760.0, - "grad_norm": 4.525226667428004, - "learning_rate": 3.7621183180066946e-06, - "loss": 1.083, - "num_input_tokens_seen": 31692220, - "step": 1515 - }, - { - "epoch": 0.1822882222088619, - "flos": 20834673238680.0, - "grad_norm": 2.3775541725765947, - "learning_rate": 3.7617497260292625e-06, - "loss": 0.9607, - "num_input_tokens_seen": 31713995, - "step": 
1516 - }, - { - "epoch": 0.18240846509950098, - "flos": 12539826589080.0, - "grad_norm": 17.360355050811442, - "learning_rate": 3.7613808667957967e-06, - "loss": 1.0218, - "num_input_tokens_seen": 31726405, - "step": 1517 - }, - { - "epoch": 0.1825287079901401, - "flos": 10447344503880.0, - "grad_norm": 3.5088025163689327, - "learning_rate": 3.7610117403622547e-06, - "loss": 1.148, - "num_input_tokens_seen": 31742685, - "step": 1518 - }, - { - "epoch": 0.18264895088077918, - "flos": 15591050093880.0, - "grad_norm": 2.9132857936701964, - "learning_rate": 3.7606423467846313e-06, - "loss": 1.1268, - "num_input_tokens_seen": 31762010, - "step": 1519 - }, - { - "epoch": 0.18276919377141826, - "flos": 14829440018520.0, - "grad_norm": 2.5291171094720135, - "learning_rate": 3.760272686118964e-06, - "loss": 1.0172, - "num_input_tokens_seen": 31779950, - "step": 1520 - }, - { - "epoch": 0.18288943666205737, - "flos": 15248427020520.0, - "grad_norm": 3.908608418905489, - "learning_rate": 3.7599027584213297e-06, - "loss": 1.1461, - "num_input_tokens_seen": 31798550, - "step": 1521 - }, - { - "epoch": 0.18300967955269645, - "flos": 15297895212960.0, - "grad_norm": 4.4362348445441055, - "learning_rate": 3.7595325637478465e-06, - "loss": 1.0087, - "num_input_tokens_seen": 31816295, - "step": 1522 - }, - { - "epoch": 0.18312992244333554, - "flos": 20571204295920.0, - "grad_norm": 2.2314964906261903, - "learning_rate": 3.7591621021546723e-06, - "loss": 1.0461, - "num_input_tokens_seen": 31838010, - "step": 1523 - }, - { - "epoch": 0.18325016533397462, - "flos": 14278396598760.0, - "grad_norm": 3.8201456170014274, - "learning_rate": 3.7587913736980062e-06, - "loss": 1.0435, - "num_input_tokens_seen": 31857370, - "step": 1524 - }, - { - "epoch": 0.18337040822461373, - "flos": 16584480834000.0, - "grad_norm": 2.976329243649224, - "learning_rate": 3.7584203784340865e-06, - "loss": 1.0773, - "num_input_tokens_seen": 31876260, - "step": 1525 - }, - { - "epoch": 0.1834906511152528, - "flos": 17792402909400.0, - "grad_norm": 3.524283499980162, - "learning_rate": 3.7580491164191938e-06, - "loss": 1.0821, - "num_input_tokens_seen": 31894290, - "step": 1526 - }, - { - "epoch": 0.1836108940058919, - "flos": 48154655623200.0, - "grad_norm": 0.7569520135281376, - "learning_rate": 3.757677587709648e-06, - "loss": 0.8625, - "num_input_tokens_seen": 31957275, - "step": 1527 - }, - { - "epoch": 0.183731136896531, - "flos": 18316366623600.0, - "grad_norm": 3.7065089735156387, - "learning_rate": 3.7573057923618095e-06, - "loss": 0.9847, - "num_input_tokens_seen": 31977090, - "step": 1528 - }, - { - "epoch": 0.1838513797871701, - "flos": 14515184436240.0, - "grad_norm": 3.5322009131291567, - "learning_rate": 3.7569337304320793e-06, - "loss": 0.9645, - "num_input_tokens_seen": 31996395, - "step": 1529 - }, - { - "epoch": 0.18397162267780917, - "flos": 49098771478680.0, - "grad_norm": 0.8858131792414026, - "learning_rate": 3.756561401976899e-06, - "loss": 0.8903, - "num_input_tokens_seen": 32055820, - "step": 1530 - }, - { - "epoch": 0.18409186556844825, - "flos": 22773875558520.0, - "grad_norm": 2.3573093087186154, - "learning_rate": 3.7561888070527514e-06, - "loss": 1.0568, - "num_input_tokens_seen": 32077580, - "step": 1531 - }, - { - "epoch": 0.18421210845908736, - "flos": 14278764537480.0, - "grad_norm": 4.445103538174099, - "learning_rate": 3.7558159457161577e-06, - "loss": 1.0143, - "num_input_tokens_seen": 32095265, - "step": 1532 - }, - { - "epoch": 0.18433235134972645, - "flos": 16427337712080.0, - "grad_norm": 
3.4183544704230755, - "learning_rate": 3.755442818023681e-06, - "loss": 1.0031, - "num_input_tokens_seen": 32114610, - "step": 1533 - }, - { - "epoch": 0.18445259424036553, - "flos": 12963504809760.0, - "grad_norm": 3.727027848017106, - "learning_rate": 3.7550694240319246e-06, - "loss": 0.9857, - "num_input_tokens_seen": 32132205, - "step": 1534 - }, - { - "epoch": 0.18457283713100464, - "flos": 15143664939240.0, - "grad_norm": 3.086170351925032, - "learning_rate": 3.7546957637975326e-06, - "loss": 0.9873, - "num_input_tokens_seen": 32149335, - "step": 1535 - }, - { - "epoch": 0.18469308002164372, - "flos": 14462098179720.0, - "grad_norm": 2.62768889266328, - "learning_rate": 3.7543218373771873e-06, - "loss": 0.9752, - "num_input_tokens_seen": 32168380, - "step": 1536 - }, - { - "epoch": 0.1848133229122828, - "flos": 18817941850920.0, - "grad_norm": 1.5154485698142564, - "learning_rate": 3.753947644827615e-06, - "loss": 1.017, - "num_input_tokens_seen": 32191560, - "step": 1537 - }, - { - "epoch": 0.1849335658029219, - "flos": 50525048245920.0, - "grad_norm": 0.7904869667754657, - "learning_rate": 3.753573186205579e-06, - "loss": 0.8315, - "num_input_tokens_seen": 32259400, - "step": 1538 - }, - { - "epoch": 0.185053808693561, - "flos": 12312175896480.0, - "grad_norm": 2.7767129817578815, - "learning_rate": 3.753198461567885e-06, - "loss": 0.9988, - "num_input_tokens_seen": 32276365, - "step": 1539 - }, - { - "epoch": 0.18517405158420008, - "flos": 20650266441840.0, - "grad_norm": 2.1821177071100233, - "learning_rate": 3.7528234709713783e-06, - "loss": 1.1467, - "num_input_tokens_seen": 32298830, - "step": 1540 - }, - { - "epoch": 0.18529429447483917, - "flos": 19075217158560.0, - "grad_norm": 5.984570054344052, - "learning_rate": 3.7524482144729447e-06, - "loss": 1.0704, - "num_input_tokens_seen": 32318005, - "step": 1541 - }, - { - "epoch": 0.18541453736547828, - "flos": 9578274129960.0, - "grad_norm": 2.776293334622731, - "learning_rate": 3.7520726921295106e-06, - "loss": 1.066, - "num_input_tokens_seen": 32334445, - "step": 1542 - }, - { - "epoch": 0.18553478025611736, - "flos": 17085258860760.0, - "grad_norm": 3.0695888452716455, - "learning_rate": 3.751696903998042e-06, - "loss": 0.9625, - "num_input_tokens_seen": 32352800, - "step": 1543 - }, - { - "epoch": 0.18565502314675644, - "flos": 18421680612960.0, - "grad_norm": 2.271741831154723, - "learning_rate": 3.7513208501355456e-06, - "loss": 0.9213, - "num_input_tokens_seen": 32373625, - "step": 1544 - }, - { - "epoch": 0.18577526603739553, - "flos": 13911943945200.0, - "grad_norm": 3.403419788718363, - "learning_rate": 3.750944530599069e-06, - "loss": 1.0602, - "num_input_tokens_seen": 32392915, - "step": 1545 - }, - { - "epoch": 0.18589550892803464, - "flos": 13095561227520.0, - "grad_norm": 4.084155538393707, - "learning_rate": 3.7505679454456992e-06, - "loss": 1.0287, - "num_input_tokens_seen": 32409245, - "step": 1546 - }, - { - "epoch": 0.18601575181867372, - "flos": 16743402326400.0, - "grad_norm": 2.2392643072913336, - "learning_rate": 3.750191094732564e-06, - "loss": 0.9146, - "num_input_tokens_seen": 32429830, - "step": 1547 - }, - { - "epoch": 0.1861359947093128, - "flos": 18763169208600.0, - "grad_norm": 4.5140187927855475, - "learning_rate": 3.7498139785168313e-06, - "loss": 0.9825, - "num_input_tokens_seen": 32450155, - "step": 1548 - }, - { - "epoch": 0.1862562375999519, - "flos": 16586197881360.0, - "grad_norm": 2.3884578680706987, - "learning_rate": 3.749436596855709e-06, - "loss": 1.0026, - "num_input_tokens_seen": 
32469175, - "step": 1549 - }, - { - "epoch": 0.186376480490591, - "flos": 11782447809000.0, - "grad_norm": 2.572314471198849, - "learning_rate": 3.749058949806446e-06, - "loss": 1.1511, - "num_input_tokens_seen": 32485620, - "step": 1550 - }, - { - "epoch": 0.18649672338123008, - "flos": 15247384527480.0, - "grad_norm": 2.291374211001186, - "learning_rate": 3.748681037426331e-06, - "loss": 1.0642, - "num_input_tokens_seen": 32504550, - "step": 1551 - }, - { - "epoch": 0.1866169662718692, - "flos": 8667462276720.0, - "grad_norm": 3.298314218823942, - "learning_rate": 3.7483028597726936e-06, - "loss": 1.1389, - "num_input_tokens_seen": 32521040, - "step": 1552 - }, - { - "epoch": 0.18673720916250827, - "flos": 16765208243640.0, - "grad_norm": 3.5483064829710984, - "learning_rate": 3.7479244169029017e-06, - "loss": 0.852, - "num_input_tokens_seen": 32540550, - "step": 1553 - }, - { - "epoch": 0.18685745205314735, - "flos": 13989626320920.0, - "grad_norm": 3.240611380361779, - "learning_rate": 3.7475457088743658e-06, - "loss": 0.9403, - "num_input_tokens_seen": 32557520, - "step": 1554 - }, - { - "epoch": 0.18697769494378644, - "flos": 24400386035640.0, - "grad_norm": 2.6240779528901648, - "learning_rate": 3.7471667357445348e-06, - "loss": 0.9691, - "num_input_tokens_seen": 32577070, - "step": 1555 - }, - { - "epoch": 0.18709793783442555, - "flos": 24427220448720.0, - "grad_norm": 3.082692122586401, - "learning_rate": 3.7467874975709e-06, - "loss": 0.951, - "num_input_tokens_seen": 32597595, - "step": 1556 - }, - { - "epoch": 0.18721818072506463, - "flos": 29093609653440.0, - "grad_norm": 3.3306285056541176, - "learning_rate": 3.7464079944109904e-06, - "loss": 1.0204, - "num_input_tokens_seen": 32619175, - "step": 1557 - }, - { - "epoch": 0.18733842361570371, - "flos": 15741754288200.0, - "grad_norm": 2.877958351305763, - "learning_rate": 3.746028226322376e-06, - "loss": 1.0035, - "num_input_tokens_seen": 32634775, - "step": 1558 - }, - { - "epoch": 0.18745866650634282, - "flos": 13412116426800.0, - "grad_norm": 3.2716945478197377, - "learning_rate": 3.745648193362669e-06, - "loss": 0.9863, - "num_input_tokens_seen": 32653850, - "step": 1559 - }, - { - "epoch": 0.1875789093969819, - "flos": 13699138995720.0, - "grad_norm": 3.8830428793890275, - "learning_rate": 3.745267895589518e-06, - "loss": 0.9576, - "num_input_tokens_seen": 32672110, - "step": 1560 - }, - { - "epoch": 0.187699152287621, - "flos": 12049688123640.0, - "grad_norm": 2.4035381542373133, - "learning_rate": 3.7448873330606154e-06, - "loss": 1.0376, - "num_input_tokens_seen": 32689600, - "step": 1561 - }, - { - "epoch": 0.18781939517826007, - "flos": 15878471263080.0, - "grad_norm": 4.904221036226647, - "learning_rate": 3.7445065058336914e-06, - "loss": 1.0933, - "num_input_tokens_seen": 32708190, - "step": 1562 - }, - { - "epoch": 0.18793963806889918, - "flos": 10319550042960.0, - "grad_norm": 2.8292498801516945, - "learning_rate": 3.7441254139665176e-06, - "loss": 1.0929, - "num_input_tokens_seen": 32724095, - "step": 1563 - }, - { - "epoch": 0.18805988095953827, - "flos": 12364587598680.0, - "grad_norm": 2.839127692994612, - "learning_rate": 3.743744057516905e-06, - "loss": 1.0514, - "num_input_tokens_seen": 32741875, - "step": 1564 - }, - { - "epoch": 0.18818012385017735, - "flos": 10629574329960.0, - "grad_norm": 4.418576145663815, - "learning_rate": 3.743362436542706e-06, - "loss": 1.0991, - "num_input_tokens_seen": 32756285, - "step": 1565 - }, - { - "epoch": 0.18830036674081646, - "flos": 33992432754120.0, - "grad_norm": 
4.975230088017129, - "learning_rate": 3.7429805511018115e-06, - "loss": 0.9929, - "num_input_tokens_seen": 32777665, - "step": 1566 - }, - { - "epoch": 0.18842060963145554, - "flos": 21411017993520.0, - "grad_norm": 2.5643758653215794, - "learning_rate": 3.7425984012521524e-06, - "loss": 1.0, - "num_input_tokens_seen": 32797585, - "step": 1567 - }, - { - "epoch": 0.18854085252209463, - "flos": 50359043933160.0, - "grad_norm": 0.7421978999965217, - "learning_rate": 3.7422159870517025e-06, - "loss": 0.8564, - "num_input_tokens_seen": 32862560, - "step": 1568 - }, - { - "epoch": 0.1886610954127337, - "flos": 15118915512240.0, - "grad_norm": 3.4632802917217544, - "learning_rate": 3.7418333085584717e-06, - "loss": 1.0166, - "num_input_tokens_seen": 32883465, - "step": 1569 - }, - { - "epoch": 0.18878133830337282, - "flos": 12226797469200.0, - "grad_norm": 3.0857724688862476, - "learning_rate": 3.7414503658305128e-06, - "loss": 1.1441, - "num_input_tokens_seen": 32900420, - "step": 1570 - }, - { - "epoch": 0.1889015811940119, - "flos": 18342587805480.0, - "grad_norm": 3.5973775053609067, - "learning_rate": 3.7410671589259185e-06, - "loss": 1.0066, - "num_input_tokens_seen": 32918740, - "step": 1571 - }, - { - "epoch": 0.18902182408465099, - "flos": 14933895484200.0, - "grad_norm": 6.146653494555302, - "learning_rate": 3.7406836879028205e-06, - "loss": 1.0145, - "num_input_tokens_seen": 32938685, - "step": 1572 - }, - { - "epoch": 0.1891420669752901, - "flos": 15824925083160.0, - "grad_norm": 2.6508605135695738, - "learning_rate": 3.7402999528193907e-06, - "loss": 0.9987, - "num_input_tokens_seen": 32957905, - "step": 1573 - }, - { - "epoch": 0.18926230986592918, - "flos": 15642296656800.0, - "grad_norm": 8.911732481963208, - "learning_rate": 3.739915953733842e-06, - "loss": 1.065, - "num_input_tokens_seen": 32975670, - "step": 1574 - }, - { - "epoch": 0.18938255275656826, - "flos": 17399422458360.0, - "grad_norm": 2.5615687655337034, - "learning_rate": 3.7395316907044264e-06, - "loss": 1.0492, - "num_input_tokens_seen": 32996175, - "step": 1575 - }, - { - "epoch": 0.18950279564720737, - "flos": 17373937153920.0, - "grad_norm": 2.6138532209333207, - "learning_rate": 3.7391471637894364e-06, - "loss": 1.026, - "num_input_tokens_seen": 33018160, - "step": 1576 - }, - { - "epoch": 0.18962303853784646, - "flos": 14017073965200.0, - "grad_norm": 2.4518560541351064, - "learning_rate": 3.738762373047205e-06, - "loss": 1.0816, - "num_input_tokens_seen": 33037800, - "step": 1577 - }, - { - "epoch": 0.18974328142848554, - "flos": 14933772837960.0, - "grad_norm": 2.544482271486132, - "learning_rate": 3.738377318536103e-06, - "loss": 1.0606, - "num_input_tokens_seen": 33057405, - "step": 1578 - }, - { - "epoch": 0.18986352431912462, - "flos": 9136592025480.0, - "grad_norm": 4.655711302861863, - "learning_rate": 3.7379920003145447e-06, - "loss": 0.9414, - "num_input_tokens_seen": 33071400, - "step": 1579 - }, - { - "epoch": 0.18998376720976373, - "flos": 16899901555560.0, - "grad_norm": 2.371147745890799, - "learning_rate": 3.7376064184409817e-06, - "loss": 1.0743, - "num_input_tokens_seen": 33090700, - "step": 1580 - }, - { - "epoch": 0.19010401010040281, - "flos": 16323710108520.0, - "grad_norm": 2.468072339498591, - "learning_rate": 3.7372205729739063e-06, - "loss": 1.1041, - "num_input_tokens_seen": 33112235, - "step": 1581 - }, - { - "epoch": 0.1902242529910419, - "flos": 13570976596080.0, - "grad_norm": 4.387519362534621, - "learning_rate": 3.7368344639718514e-06, - "loss": 0.9525, - 
"num_input_tokens_seen": 33129890, - "step": 1582 - }, - { - "epoch": 0.190344495881681, - "flos": 18133339596960.0, - "grad_norm": 2.7006689832337343, - "learning_rate": 3.7364480914933895e-06, - "loss": 1.038, - "num_input_tokens_seen": 33149850, - "step": 1583 - }, - { - "epoch": 0.1904647387723201, - "flos": 19073408126520.0, - "grad_norm": 2.9076096830401243, - "learning_rate": 3.7360614555971325e-06, - "loss": 1.0435, - "num_input_tokens_seen": 33169225, - "step": 1584 - }, - { - "epoch": 0.19058498166295917, - "flos": 17058025847400.0, - "grad_norm": 5.44297304620049, - "learning_rate": 3.735674556341733e-06, - "loss": 1.072, - "num_input_tokens_seen": 33188560, - "step": 1585 - }, - { - "epoch": 0.19070522455359826, - "flos": 20152370601720.0, - "grad_norm": 3.510364716972724, - "learning_rate": 3.7352873937858835e-06, - "loss": 1.0617, - "num_input_tokens_seen": 33209815, - "step": 1586 - }, - { - "epoch": 0.19082546744423737, - "flos": 18259202379600.0, - "grad_norm": 11.10840732314205, - "learning_rate": 3.734899967988316e-06, - "loss": 0.945, - "num_input_tokens_seen": 33227715, - "step": 1587 - }, - { - "epoch": 0.19094571033487645, - "flos": 13990576829280.0, - "grad_norm": 2.494239464625835, - "learning_rate": 3.7345122790078026e-06, - "loss": 1.0701, - "num_input_tokens_seen": 33245000, - "step": 1588 - }, - { - "epoch": 0.19106595322551553, - "flos": 15353741009880.0, - "grad_norm": 6.693647179762216, - "learning_rate": 3.7341243269031556e-06, - "loss": 1.1577, - "num_input_tokens_seen": 33263710, - "step": 1589 - }, - { - "epoch": 0.19118619611615464, - "flos": 21304600188000.0, - "grad_norm": 2.8020753730367782, - "learning_rate": 3.7337361117332275e-06, - "loss": 1.001, - "num_input_tokens_seen": 33285170, - "step": 1590 - }, - { - "epoch": 0.19130643900679373, - "flos": 12232439196240.0, - "grad_norm": 2.9467904104023677, - "learning_rate": 3.7333476335569087e-06, - "loss": 0.9947, - "num_input_tokens_seen": 33302890, - "step": 1591 - }, - { - "epoch": 0.1914266818974328, - "flos": 17660622445680.0, - "grad_norm": 2.956591796471802, - "learning_rate": 3.7329588924331325e-06, - "loss": 0.892, - "num_input_tokens_seen": 33323815, - "step": 1592 - }, - { - "epoch": 0.1915469247880719, - "flos": 13439226793920.0, - "grad_norm": 2.6739211250889623, - "learning_rate": 3.732569888420871e-06, - "loss": 1.0459, - "num_input_tokens_seen": 33343070, - "step": 1593 - }, - { - "epoch": 0.191667167678711, - "flos": 14934539376960.0, - "grad_norm": 3.2876251777053342, - "learning_rate": 3.732180621579134e-06, - "loss": 1.0563, - "num_input_tokens_seen": 33362005, - "step": 1594 - }, - { - "epoch": 0.1917874105693501, - "flos": 24424092969600.0, - "grad_norm": 3.1664684802494993, - "learning_rate": 3.7317910919669745e-06, - "loss": 1.0365, - "num_input_tokens_seen": 33382920, - "step": 1595 - }, - { - "epoch": 0.19190765345998917, - "flos": 16743954234480.0, - "grad_norm": 4.363689297658465, - "learning_rate": 3.7314012996434826e-06, - "loss": 0.9888, - "num_input_tokens_seen": 33401370, - "step": 1596 - }, - { - "epoch": 0.19202789635062828, - "flos": 14091966138960.0, - "grad_norm": 2.8004168029915286, - "learning_rate": 3.7310112446677907e-06, - "loss": 1.0255, - "num_input_tokens_seen": 33419000, - "step": 1597 - }, - { - "epoch": 0.19214813924126736, - "flos": 14855415907920.0, - "grad_norm": 5.544529034604849, - "learning_rate": 3.7306209270990695e-06, - "loss": 0.9122, - "num_input_tokens_seen": 33436725, - "step": 1598 - }, - { - "epoch": 0.19226838213190645, - "flos": 
18762770608320.0, - "grad_norm": 2.2633094267034366, - "learning_rate": 3.7302303469965292e-06, - "loss": 1.092, - "num_input_tokens_seen": 33455985, - "step": 1599 - }, - { - "epoch": 0.19238862502254553, - "flos": 14803433467560.0, - "grad_norm": 2.8827855385447383, - "learning_rate": 3.7298395044194206e-06, - "loss": 0.9291, - "num_input_tokens_seen": 33474515, - "step": 1600 - }, - { - "epoch": 0.19250886791318464, - "flos": 15432649848000.0, - "grad_norm": 3.6321078365634714, - "learning_rate": 3.7294483994270356e-06, - "loss": 1.1501, - "num_input_tokens_seen": 33492560, - "step": 1601 - }, - { - "epoch": 0.19262911080382372, - "flos": 16612541709480.0, - "grad_norm": 7.854323597224772, - "learning_rate": 3.7290570320787033e-06, - "loss": 0.9962, - "num_input_tokens_seen": 33511860, - "step": 1602 - }, - { - "epoch": 0.1927493536944628, - "flos": 15588719815320.0, - "grad_norm": 2.7163192581496793, - "learning_rate": 3.728665402433793e-06, - "loss": 0.9419, - "num_input_tokens_seen": 33530150, - "step": 1603 - }, - { - "epoch": 0.19286959658510192, - "flos": 11709609959760.0, - "grad_norm": 3.5862816207092703, - "learning_rate": 3.7282735105517164e-06, - "loss": 1.0818, - "num_input_tokens_seen": 33547995, - "step": 1604 - }, - { - "epoch": 0.192989839475741, - "flos": 15354292917960.0, - "grad_norm": 3.7064835010070265, - "learning_rate": 3.727881356491922e-06, - "loss": 0.9074, - "num_input_tokens_seen": 33566125, - "step": 1605 - }, - { - "epoch": 0.19311008236638008, - "flos": 13675800000480.0, - "grad_norm": 3.23302604814414, - "learning_rate": 3.7274889403139002e-06, - "loss": 0.993, - "num_input_tokens_seen": 33583470, - "step": 1606 - }, - { - "epoch": 0.1932303252570192, - "flos": 20409645909360.0, - "grad_norm": 4.446382470912361, - "learning_rate": 3.727096262077179e-06, - "loss": 1.0188, - "num_input_tokens_seen": 33602185, - "step": 1607 - }, - { - "epoch": 0.19335056814765827, - "flos": 13019933176320.0, - "grad_norm": 4.3406650923105685, - "learning_rate": 3.7267033218413285e-06, - "loss": 1.0781, - "num_input_tokens_seen": 33619700, - "step": 1608 - }, - { - "epoch": 0.19347081103829736, - "flos": 9348814405320.0, - "grad_norm": 3.2463287745845015, - "learning_rate": 3.726310119665957e-06, - "loss": 1.0421, - "num_input_tokens_seen": 33635755, - "step": 1609 - }, - { - "epoch": 0.19359105392893644, - "flos": 14407448183640.0, - "grad_norm": 3.5546545006194927, - "learning_rate": 3.725916655610713e-06, - "loss": 1.086, - "num_input_tokens_seen": 33654805, - "step": 1610 - }, - { - "epoch": 0.19371129681957555, - "flos": 14540301801960.0, - "grad_norm": 5.240861659657593, - "learning_rate": 3.725522929735284e-06, - "loss": 0.9722, - "num_input_tokens_seen": 33671460, - "step": 1611 - }, - { - "epoch": 0.19383153971021463, - "flos": 21699052393920.0, - "grad_norm": 4.190118682564327, - "learning_rate": 3.725128942099399e-06, - "loss": 0.9621, - "num_input_tokens_seen": 33691580, - "step": 1612 - }, - { - "epoch": 0.19395178260085372, - "flos": 17476215648840.0, - "grad_norm": 6.30140844677489, - "learning_rate": 3.7247346927628245e-06, - "loss": 1.0313, - "num_input_tokens_seen": 33711235, - "step": 1613 - }, - { - "epoch": 0.19407202549149283, - "flos": 20624842460520.0, - "grad_norm": 6.238240279491322, - "learning_rate": 3.7243401817853694e-06, - "loss": 1.01, - "num_input_tokens_seen": 33731645, - "step": 1614 - }, - { - "epoch": 0.1941922683821319, - "flos": 12758027973120.0, - "grad_norm": 5.029036514802007, - "learning_rate": 3.723945409226879e-06, - "loss": 
0.9479, - "num_input_tokens_seen": 33749855, - "step": 1615 - }, - { - "epoch": 0.194312511272771, - "flos": 6804102639000.0, - "grad_norm": 4.972313904346194, - "learning_rate": 3.723550375147241e-06, - "loss": 1.0374, - "num_input_tokens_seen": 33764350, - "step": 1616 - }, - { - "epoch": 0.19443275416341008, - "flos": 19234537251240.0, - "grad_norm": 3.1222578606281695, - "learning_rate": 3.7231550796063816e-06, - "loss": 1.022, - "num_input_tokens_seen": 33784080, - "step": 1617 - }, - { - "epoch": 0.1945529970540492, - "flos": 11206501654440.0, - "grad_norm": 4.063445700097591, - "learning_rate": 3.722759522664266e-06, - "loss": 0.879, - "num_input_tokens_seen": 33801100, - "step": 1618 - }, - { - "epoch": 0.19467323994468827, - "flos": 13699445611320.0, - "grad_norm": 3.4934361079679275, - "learning_rate": 3.7223637043809016e-06, - "loss": 1.0375, - "num_input_tokens_seen": 33819800, - "step": 1619 - }, - { - "epoch": 0.19479348283532735, - "flos": 17162511974640.0, - "grad_norm": 2.581325324855288, - "learning_rate": 3.7219676248163322e-06, - "loss": 1.0892, - "num_input_tokens_seen": 33836685, - "step": 1620 - }, - { - "epoch": 0.19491372572596646, - "flos": 18342955744200.0, - "grad_norm": 2.6524801092813224, - "learning_rate": 3.721571284030643e-06, - "loss": 1.1465, - "num_input_tokens_seen": 33856215, - "step": 1621 - }, - { - "epoch": 0.19503396861660555, - "flos": 13937030649360.0, - "grad_norm": 7.210426452976901, - "learning_rate": 3.7211746820839587e-06, - "loss": 1.0028, - "num_input_tokens_seen": 33873030, - "step": 1622 - }, - { - "epoch": 0.19515421150724463, - "flos": 14934600700080.0, - "grad_norm": 3.9464073550462184, - "learning_rate": 3.7207778190364437e-06, - "loss": 1.045, - "num_input_tokens_seen": 33891175, - "step": 1623 - }, - { - "epoch": 0.1952744543978837, - "flos": 23506842188760.0, - "grad_norm": 4.577513774390671, - "learning_rate": 3.720380694948302e-06, - "loss": 0.9716, - "num_input_tokens_seen": 33913780, - "step": 1624 - }, - { - "epoch": 0.19539469728852282, - "flos": 45850196450640.0, - "grad_norm": 0.9662410486949126, - "learning_rate": 3.719983309879777e-06, - "loss": 0.9763, - "num_input_tokens_seen": 33973280, - "step": 1625 - }, - { - "epoch": 0.1955149401791619, - "flos": 9427968535920.0, - "grad_norm": 4.966379043819024, - "learning_rate": 3.719585663891151e-06, - "loss": 1.0077, - "num_input_tokens_seen": 33990535, - "step": 1626 - }, - { - "epoch": 0.195635183069801, - "flos": 13277913699840.0, - "grad_norm": 5.5914117625199875, - "learning_rate": 3.719187757042747e-06, - "loss": 1.0049, - "num_input_tokens_seen": 34008075, - "step": 1627 - }, - { - "epoch": 0.1957554259604401, - "flos": 49639507066200.0, - "grad_norm": 0.780213698677252, - "learning_rate": 3.7187895893949275e-06, - "loss": 0.8278, - "num_input_tokens_seen": 34074265, - "step": 1628 - }, - { - "epoch": 0.19587566885107918, - "flos": 14960607251040.0, - "grad_norm": 5.594924547711084, - "learning_rate": 3.7183911610080937e-06, - "loss": 0.959, - "num_input_tokens_seen": 34090850, - "step": 1629 - }, - { - "epoch": 0.19599591174171827, - "flos": 15768650024400.0, - "grad_norm": 5.424787898685767, - "learning_rate": 3.7179924719426872e-06, - "loss": 0.9697, - "num_input_tokens_seen": 34108465, - "step": 1630 - }, - { - "epoch": 0.19611615463235738, - "flos": 16900300155840.0, - "grad_norm": 6.709936136092463, - "learning_rate": 3.7175935222591885e-06, - "loss": 0.9753, - "num_input_tokens_seen": 34127485, - "step": 1631 - }, - { - "epoch": 0.19623639752299646, - "flos": 
20385417728880.0, - "grad_norm": 6.262517677023299, - "learning_rate": 3.717194312018118e-06, - "loss": 0.9873, - "num_input_tokens_seen": 34146190, - "step": 1632 - }, - { - "epoch": 0.19635664041363554, - "flos": 14934324746040.0, - "grad_norm": 4.828759012907595, - "learning_rate": 3.716794841280036e-06, - "loss": 0.9792, - "num_input_tokens_seen": 34164615, - "step": 1633 - }, - { - "epoch": 0.19647688330427462, - "flos": 13386999061080.0, - "grad_norm": 4.4642623028822515, - "learning_rate": 3.7163951101055407e-06, - "loss": 0.992, - "num_input_tokens_seen": 34182395, - "step": 1634 - }, - { - "epoch": 0.19659712619491373, - "flos": 17241022212480.0, - "grad_norm": 3.145081655536962, - "learning_rate": 3.715995118555273e-06, - "loss": 1.0123, - "num_input_tokens_seen": 34202090, - "step": 1635 - }, - { - "epoch": 0.19671736908555282, - "flos": 17582418823440.0, - "grad_norm": 4.01882415275921, - "learning_rate": 3.71559486668991e-06, - "loss": 1.0699, - "num_input_tokens_seen": 34220670, - "step": 1636 - }, - { - "epoch": 0.1968376119761919, - "flos": 16953539720160.0, - "grad_norm": 3.639946477307876, - "learning_rate": 3.715194354570169e-06, - "loss": 0.9946, - "num_input_tokens_seen": 34240395, - "step": 1637 - }, - { - "epoch": 0.196957854866831, - "flos": 12836660857200.0, - "grad_norm": 3.91413754450264, - "learning_rate": 3.714793582256809e-06, - "loss": 1.0533, - "num_input_tokens_seen": 34257180, - "step": 1638 - }, - { - "epoch": 0.1970780977574701, - "flos": 15380238145800.0, - "grad_norm": 3.3018018600563073, - "learning_rate": 3.7143925498106253e-06, - "loss": 1.0655, - "num_input_tokens_seen": 34275440, - "step": 1639 - }, - { - "epoch": 0.19719834064810918, - "flos": 14775311268960.0, - "grad_norm": 4.154236499399659, - "learning_rate": 3.7139912572924558e-06, - "loss": 1.0233, - "num_input_tokens_seen": 34294190, - "step": 1640 - }, - { - "epoch": 0.19731858353874826, - "flos": 16659863592720.0, - "grad_norm": 4.7615344893594305, - "learning_rate": 3.7135897047631744e-06, - "loss": 1.0417, - "num_input_tokens_seen": 34311795, - "step": 1641 - }, - { - "epoch": 0.19743882642938737, - "flos": 17058393786120.0, - "grad_norm": 3.893679725661867, - "learning_rate": 3.713187892283698e-06, - "loss": 0.9711, - "num_input_tokens_seen": 34331125, - "step": 1642 - }, - { - "epoch": 0.19755906932002645, - "flos": 10600746915480.0, - "grad_norm": 4.545298299704958, - "learning_rate": 3.71278581991498e-06, - "loss": 1.0875, - "num_input_tokens_seen": 34346705, - "step": 1643 - }, - { - "epoch": 0.19767931221066554, - "flos": 13828037272800.0, - "grad_norm": 4.599843868425492, - "learning_rate": 3.712383487718015e-06, - "loss": 1.0183, - "num_input_tokens_seen": 34364665, - "step": 1644 - }, - { - "epoch": 0.19779955510130465, - "flos": 18315661407720.0, - "grad_norm": 3.5156522622640876, - "learning_rate": 3.7119808957538365e-06, - "loss": 1.0962, - "num_input_tokens_seen": 34383380, - "step": 1645 - }, - { - "epoch": 0.19791979799194373, - "flos": 14751113750040.0, - "grad_norm": 4.637466126187145, - "learning_rate": 3.711578044083517e-06, - "loss": 1.0202, - "num_input_tokens_seen": 34399900, - "step": 1646 - }, - { - "epoch": 0.1980400408825828, - "flos": 18208262432280.0, - "grad_norm": 3.8800181229158293, - "learning_rate": 3.7111749327681698e-06, - "loss": 0.979, - "num_input_tokens_seen": 34419655, - "step": 1647 - }, - { - "epoch": 0.1981602837732219, - "flos": 16717487760120.0, - "grad_norm": 4.845503950965097, - "learning_rate": 3.7107715618689455e-06, - "loss": 1.0804, - 
"num_input_tokens_seen": 34438350, - "step": 1648 - }, - { - "epoch": 0.198280526663861, - "flos": 16481405138520.0, - "grad_norm": 2.3577401263212576, - "learning_rate": 3.710367931447035e-06, - "loss": 1.0568, - "num_input_tokens_seen": 34459850, - "step": 1649 - }, - { - "epoch": 0.1984007695545001, - "flos": 15406336681440.0, - "grad_norm": 4.777978313781653, - "learning_rate": 3.70996404156367e-06, - "loss": 1.0851, - "num_input_tokens_seen": 34479205, - "step": 1650 - }, - { - "epoch": 0.19852101244513917, - "flos": 25736930434080.0, - "grad_norm": 3.3395760412170286, - "learning_rate": 3.7095598922801187e-06, - "loss": 0.9507, - "num_input_tokens_seen": 34501000, - "step": 1651 - }, - { - "epoch": 0.19864125533577828, - "flos": 16423413032400.0, - "grad_norm": 3.611710886582601, - "learning_rate": 3.7091554836576914e-06, - "loss": 0.981, - "num_input_tokens_seen": 34517395, - "step": 1652 - }, - { - "epoch": 0.19876149822641737, - "flos": 17504215201200.0, - "grad_norm": 4.348683800474436, - "learning_rate": 3.708750815757736e-06, - "loss": 1.0474, - "num_input_tokens_seen": 34537885, - "step": 1653 - }, - { - "epoch": 0.19888174111705645, - "flos": 22957546477920.0, - "grad_norm": 6.848206066596303, - "learning_rate": 3.7083458886416407e-06, - "loss": 0.9419, - "num_input_tokens_seen": 34556800, - "step": 1654 - }, - { - "epoch": 0.19900198400769553, - "flos": 17504184539640.0, - "grad_norm": 5.012134894489725, - "learning_rate": 3.707940702370832e-06, - "loss": 1.1036, - "num_input_tokens_seen": 34577365, - "step": 1655 - }, - { - "epoch": 0.19912222689833464, - "flos": 48633106486200.0, - "grad_norm": 0.7719984056303134, - "learning_rate": 3.707535257006777e-06, - "loss": 0.8281, - "num_input_tokens_seen": 34642710, - "step": 1656 - }, - { - "epoch": 0.19924246978897373, - "flos": 11132253373440.0, - "grad_norm": 4.569972777170782, - "learning_rate": 3.707129552610981e-06, - "loss": 1.112, - "num_input_tokens_seen": 34661080, - "step": 1657 - }, - { - "epoch": 0.1993627126796128, - "flos": 12443281805880.0, - "grad_norm": 4.248195040519552, - "learning_rate": 3.70672358924499e-06, - "loss": 0.9642, - "num_input_tokens_seen": 34680040, - "step": 1658 - }, - { - "epoch": 0.19948295557025192, - "flos": 29172119891280.0, - "grad_norm": 4.385100122959441, - "learning_rate": 3.706317366970386e-06, - "loss": 1.0087, - "num_input_tokens_seen": 34700760, - "step": 1659 - }, - { - "epoch": 0.199603198460891, - "flos": 17844753288480.0, - "grad_norm": 5.079475293732961, - "learning_rate": 3.705910885848795e-06, - "loss": 1.0727, - "num_input_tokens_seen": 34718855, - "step": 1660 - }, - { - "epoch": 0.19972344135153008, - "flos": 14252604678720.0, - "grad_norm": 3.3723463292950018, - "learning_rate": 3.705504145941879e-06, - "loss": 1.0673, - "num_input_tokens_seen": 34736745, - "step": 1661 - }, - { - "epoch": 0.1998436842421692, - "flos": 16870307602080.0, - "grad_norm": 5.380777846478546, - "learning_rate": 3.7050971473113403e-06, - "loss": 1.0108, - "num_input_tokens_seen": 34756240, - "step": 1662 - }, - { - "epoch": 0.19996392713280828, - "flos": 25764746017080.0, - "grad_norm": 2.4197732973708397, - "learning_rate": 3.7046898900189196e-06, - "loss": 1.0304, - "num_input_tokens_seen": 34780295, - "step": 1663 - }, - { - "epoch": 0.20008417002344736, - "flos": 16820563455600.0, - "grad_norm": 4.480336354435563, - "learning_rate": 3.704282374126398e-06, - "loss": 1.0673, - "num_input_tokens_seen": 34799695, - "step": 1664 - }, - { - "epoch": 0.20020441291408644, - "flos": 
15537595898640.0, - "grad_norm": 2.982279096482079, - "learning_rate": 3.7038745996955954e-06, - "loss": 1.1085, - "num_input_tokens_seen": 34818760, - "step": 1665 - }, - { - "epoch": 0.20032465580472555, - "flos": 16476560612040.0, - "grad_norm": 4.916801623200218, - "learning_rate": 3.703466566788371e-06, - "loss": 0.9357, - "num_input_tokens_seen": 34837610, - "step": 1666 - }, - { - "epoch": 0.20044489869536464, - "flos": 16975284314280.0, - "grad_norm": 8.136304298134132, - "learning_rate": 3.703058275466622e-06, - "loss": 0.9771, - "num_input_tokens_seen": 34856565, - "step": 1667 - }, - { - "epoch": 0.20056514158600372, - "flos": 15590191570200.0, - "grad_norm": 2.8367047913884744, - "learning_rate": 3.7026497257922877e-06, - "loss": 0.9985, - "num_input_tokens_seen": 34876595, - "step": 1668 - }, - { - "epoch": 0.20068538447664283, - "flos": 16980098179200.0, - "grad_norm": 3.411295522439892, - "learning_rate": 3.7022409178273436e-06, - "loss": 1.0689, - "num_input_tokens_seen": 34897295, - "step": 1669 - }, - { - "epoch": 0.2008056273672819, - "flos": 13073111417520.0, - "grad_norm": 3.1847116578759582, - "learning_rate": 3.7018318516338054e-06, - "loss": 1.0072, - "num_input_tokens_seen": 34916175, - "step": 1670 - }, - { - "epoch": 0.200925870257921, - "flos": 16822157856720.0, - "grad_norm": 10.028561811244577, - "learning_rate": 3.7014225272737284e-06, - "loss": 1.0284, - "num_input_tokens_seen": 34935120, - "step": 1671 - }, - { - "epoch": 0.20104611314856008, - "flos": 11473527338160.0, - "grad_norm": 3.4219025911237813, - "learning_rate": 3.701012944809207e-06, - "loss": 0.9599, - "num_input_tokens_seen": 34951955, - "step": 1672 - }, - { - "epoch": 0.2011663560391992, - "flos": 15196383257040.0, - "grad_norm": 3.126837838482204, - "learning_rate": 3.700603104302374e-06, - "loss": 1.0171, - "num_input_tokens_seen": 34971485, - "step": 1673 - }, - { - "epoch": 0.20128659892983827, - "flos": 44547170685360.0, - "grad_norm": 0.8702965753766032, - "learning_rate": 3.7001930058154027e-06, - "loss": 0.8159, - "num_input_tokens_seen": 35036165, - "step": 1674 - }, - { - "epoch": 0.20140684182047736, - "flos": 20256826067400.0, - "grad_norm": 5.2651633221544, - "learning_rate": 3.6997826494105037e-06, - "loss": 1.0212, - "num_input_tokens_seen": 35056330, - "step": 1675 - }, - { - "epoch": 0.20152708471111647, - "flos": 19994767556400.0, - "grad_norm": 4.642626192825413, - "learning_rate": 3.6993720351499286e-06, - "loss": 0.9185, - "num_input_tokens_seen": 35077175, - "step": 1676 - }, - { - "epoch": 0.20164732760175555, - "flos": 16743218357040.0, - "grad_norm": 3.1659091411476803, - "learning_rate": 3.6989611630959666e-06, - "loss": 0.9956, - "num_input_tokens_seen": 35095450, - "step": 1677 - }, - { - "epoch": 0.20176757049239463, - "flos": 50923394469960.0, - "grad_norm": 0.7005693615031068, - "learning_rate": 3.6985500333109474e-06, - "loss": 0.843, - "num_input_tokens_seen": 35163500, - "step": 1678 - }, - { - "epoch": 0.20188781338303372, - "flos": 15219446298240.0, - "grad_norm": 4.405424216516024, - "learning_rate": 3.6981386458572385e-06, - "loss": 0.9886, - "num_input_tokens_seen": 35181195, - "step": 1679 - }, - { - "epoch": 0.20200805627367283, - "flos": 8116541503200.0, - "grad_norm": 4.997797099256523, - "learning_rate": 3.6977270007972468e-06, - "loss": 0.9782, - "num_input_tokens_seen": 35198450, - "step": 1680 - }, - { - "epoch": 0.2021282991643119, - "flos": 20333098011360.0, - "grad_norm": 3.8726598995484744, - "learning_rate": 3.6973150981934196e-06, - 
"loss": 0.9426, - "num_input_tokens_seen": 35219400, - "step": 1681 - }, - { - "epoch": 0.202248542054951, - "flos": 12698686758360.0, - "grad_norm": 6.1896219949746385, - "learning_rate": 3.6969029381082415e-06, - "loss": 1.0575, - "num_input_tokens_seen": 35235115, - "step": 1682 - }, - { - "epoch": 0.2023687849455901, - "flos": 14094787002480.0, - "grad_norm": 3.125163417358022, - "learning_rate": 3.696490520604237e-06, - "loss": 1.0307, - "num_input_tokens_seen": 35253525, - "step": 1683 - }, - { - "epoch": 0.20248902783622919, - "flos": 16035307769400.0, - "grad_norm": 3.1365760020153277, - "learning_rate": 3.696077845743968e-06, - "loss": 1.0352, - "num_input_tokens_seen": 35272835, - "step": 1684 - }, - { - "epoch": 0.20260927072686827, - "flos": 16139395296360.0, - "grad_norm": 3.5628325993058207, - "learning_rate": 3.69566491359004e-06, - "loss": 0.9526, - "num_input_tokens_seen": 35289200, - "step": 1685 - }, - { - "epoch": 0.20272951361750738, - "flos": 36488504190120.0, - "grad_norm": 4.393172967171043, - "learning_rate": 3.695251724205092e-06, - "loss": 0.9277, - "num_input_tokens_seen": 35313280, - "step": 1686 - }, - { - "epoch": 0.20284975650814646, - "flos": 18919668437760.0, - "grad_norm": 2.234018364034254, - "learning_rate": 3.6948382776518054e-06, - "loss": 1.0883, - "num_input_tokens_seen": 35333705, - "step": 1687 - }, - { - "epoch": 0.20296999939878554, - "flos": 11342022828480.0, - "grad_norm": 10.985535214442395, - "learning_rate": 3.6944245739929e-06, - "loss": 1.0172, - "num_input_tokens_seen": 35349585, - "step": 1688 - }, - { - "epoch": 0.20309024228942463, - "flos": 13618942372080.0, - "grad_norm": 7.853708943622916, - "learning_rate": 3.6940106132911332e-06, - "loss": 0.9466, - "num_input_tokens_seen": 35366490, - "step": 1689 - }, - { - "epoch": 0.20321048518006374, - "flos": 16219683904680.0, - "grad_norm": 12.306262300019247, - "learning_rate": 3.6935963956093037e-06, - "loss": 1.0996, - "num_input_tokens_seen": 35386295, - "step": 1690 - }, - { - "epoch": 0.20333072807070282, - "flos": 13542394474080.0, - "grad_norm": 2.46942959859548, - "learning_rate": 3.6931819210102474e-06, - "loss": 0.9242, - "num_input_tokens_seen": 35405410, - "step": 1691 - }, - { - "epoch": 0.2034509709613419, - "flos": 12884044063560.0, - "grad_norm": 4.539639618471998, - "learning_rate": 3.6927671895568402e-06, - "loss": 1.0738, - "num_input_tokens_seen": 35424190, - "step": 1692 - }, - { - "epoch": 0.20357121385198101, - "flos": 16292675061720.0, - "grad_norm": 3.57957942005243, - "learning_rate": 3.692352201311996e-06, - "loss": 1.0995, - "num_input_tokens_seen": 35442760, - "step": 1693 - }, - { - "epoch": 0.2036914567426201, - "flos": 14854710692040.0, - "grad_norm": 3.2669221045085033, - "learning_rate": 3.6919369563386687e-06, - "loss": 0.9902, - "num_input_tokens_seen": 35462280, - "step": 1694 - }, - { - "epoch": 0.20381169963325918, - "flos": 10971277556520.0, - "grad_norm": 7.29392537328661, - "learning_rate": 3.69152145469985e-06, - "loss": 1.0216, - "num_input_tokens_seen": 35479045, - "step": 1695 - }, - { - "epoch": 0.20393194252389826, - "flos": 20539341387000.0, - "grad_norm": 7.830515361404721, - "learning_rate": 3.691105696458572e-06, - "loss": 1.0488, - "num_input_tokens_seen": 35496060, - "step": 1696 - }, - { - "epoch": 0.20405218541453737, - "flos": 15980136526800.0, - "grad_norm": 8.525167120137267, - "learning_rate": 3.690689681677904e-06, - "loss": 0.9009, - "num_input_tokens_seen": 35514250, - "step": 1697 - }, - { - "epoch": 0.20417242830517646, - 
"flos": 18054614728200.0, - "grad_norm": 5.242532050781072, - "learning_rate": 3.690273410420956e-06, - "loss": 1.1052, - "num_input_tokens_seen": 35533735, - "step": 1698 - }, - { - "epoch": 0.20429267119581554, - "flos": 10449950736480.0, - "grad_norm": 3.671121852821581, - "learning_rate": 3.689856882750875e-06, - "loss": 0.9926, - "num_input_tokens_seen": 35548655, - "step": 1699 - }, - { - "epoch": 0.20441291408645465, - "flos": 12597512079600.0, - "grad_norm": 4.096049587456235, - "learning_rate": 3.6894400987308486e-06, - "loss": 1.0124, - "num_input_tokens_seen": 35565895, - "step": 1700 - }, - { - "epoch": 0.20453315697709373, - "flos": 11630547813840.0, - "grad_norm": 4.982759889676008, - "learning_rate": 3.6890230584241024e-06, - "loss": 1.0643, - "num_input_tokens_seen": 35582545, - "step": 1701 - }, - { - "epoch": 0.20465339986773282, - "flos": 47768911300320.0, - "grad_norm": 0.9326142795377602, - "learning_rate": 3.6886057618939016e-06, - "loss": 0.9316, - "num_input_tokens_seen": 35645085, - "step": 1702 - }, - { - "epoch": 0.2047736427583719, - "flos": 29984332636800.0, - "grad_norm": 3.360116140538192, - "learning_rate": 3.6881882092035492e-06, - "loss": 0.9146, - "num_input_tokens_seen": 35666190, - "step": 1703 - }, - { - "epoch": 0.204893885649011, - "flos": 50086039245240.0, - "grad_norm": 1.039211176510706, - "learning_rate": 3.6877704004163873e-06, - "loss": 0.9145, - "num_input_tokens_seen": 35726315, - "step": 1704 - }, - { - "epoch": 0.2050141285396501, - "flos": 15773218596840.0, - "grad_norm": 4.522737069988487, - "learning_rate": 3.6873523355957984e-06, - "loss": 1.0171, - "num_input_tokens_seen": 35745035, - "step": 1705 - }, - { - "epoch": 0.20513437143028918, - "flos": 33087073110480.0, - "grad_norm": 1.0652865725445322, - "learning_rate": 3.686934014805201e-06, - "loss": 0.9545, - "num_input_tokens_seen": 35795385, - "step": 1706 - }, - { - "epoch": 0.20525461432092829, - "flos": 15560199016440.0, - "grad_norm": 3.9099311268391896, - "learning_rate": 3.6865154381080552e-06, - "loss": 1.0417, - "num_input_tokens_seen": 35815790, - "step": 1707 - }, - { - "epoch": 0.20537485721156737, - "flos": 15064694778000.0, - "grad_norm": 5.4210419028541, - "learning_rate": 3.6860966055678585e-06, - "loss": 1.053, - "num_input_tokens_seen": 35831865, - "step": 1708 - }, - { - "epoch": 0.20549510010220645, - "flos": 14329244561400.0, - "grad_norm": 5.355125766848036, - "learning_rate": 3.685677517248147e-06, - "loss": 1.0872, - "num_input_tokens_seen": 35850475, - "step": 1709 - }, - { - "epoch": 0.20561534299284553, - "flos": 12047756445360.0, - "grad_norm": 8.1793913546259, - "learning_rate": 3.6852581732124967e-06, - "loss": 1.0314, - "num_input_tokens_seen": 35867540, - "step": 1710 - }, - { - "epoch": 0.20573558588348465, - "flos": 16269060112440.0, - "grad_norm": 27.113389021990823, - "learning_rate": 3.6848385735245213e-06, - "loss": 0.9898, - "num_input_tokens_seen": 35886350, - "step": 1711 - }, - { - "epoch": 0.20585582877412373, - "flos": 17528841981960.0, - "grad_norm": 5.22849975629129, - "learning_rate": 3.6844187182478734e-06, - "loss": 1.0935, - "num_input_tokens_seen": 35906925, - "step": 1712 - }, - { - "epoch": 0.2059760716647628, - "flos": 17215598231160.0, - "grad_norm": 6.317920434907848, - "learning_rate": 3.683998607446246e-06, - "loss": 0.9761, - "num_input_tokens_seen": 35925295, - "step": 1713 - }, - { - "epoch": 0.20609631455540192, - "flos": 14357029482840.0, - "grad_norm": 4.533520149607743, - "learning_rate": 3.6835782411833686e-06, - 
"loss": 0.9708, - "num_input_tokens_seen": 35944535, - "step": 1714 - }, - { - "epoch": 0.206216557446041, - "flos": 14095062956520.0, - "grad_norm": 4.258711021878697, - "learning_rate": 3.68315761952301e-06, - "loss": 0.9707, - "num_input_tokens_seen": 35961485, - "step": 1715 - }, - { - "epoch": 0.2063368003366801, - "flos": 17135922854040.0, - "grad_norm": 3.7268729090616777, - "learning_rate": 3.6827367425289797e-06, - "loss": 1.0561, - "num_input_tokens_seen": 35980980, - "step": 1716 - }, - { - "epoch": 0.2064570432273192, - "flos": 14436704859960.0, - "grad_norm": 4.985981745031964, - "learning_rate": 3.6823156102651225e-06, - "loss": 0.9507, - "num_input_tokens_seen": 35998855, - "step": 1717 - }, - { - "epoch": 0.20657728611795828, - "flos": 14565909752640.0, - "grad_norm": 8.203484460130198, - "learning_rate": 3.6818942227953257e-06, - "loss": 0.9366, - "num_input_tokens_seen": 36019120, - "step": 1718 - }, - { - "epoch": 0.20669752900859736, - "flos": 15485306842680.0, - "grad_norm": 6.622559871784557, - "learning_rate": 3.681472580183512e-06, - "loss": 0.9175, - "num_input_tokens_seen": 36037490, - "step": 1719 - }, - { - "epoch": 0.20681777189923645, - "flos": 10686431958360.0, - "grad_norm": 4.333720312201142, - "learning_rate": 3.6810506824936455e-06, - "loss": 1.0943, - "num_input_tokens_seen": 36055290, - "step": 1720 - }, - { - "epoch": 0.20693801478987556, - "flos": 40413152010360.0, - "grad_norm": 1.0983351918920172, - "learning_rate": 3.680628529789726e-06, - "loss": 0.8951, - "num_input_tokens_seen": 36107420, - "step": 1721 - }, - { - "epoch": 0.20705825768051464, - "flos": 15351931977840.0, - "grad_norm": 3.5045055272432535, - "learning_rate": 3.680206122135796e-06, - "loss": 1.0809, - "num_input_tokens_seen": 36127745, - "step": 1722 - }, - { - "epoch": 0.20717850057115372, - "flos": 18395643400440.0, - "grad_norm": 3.051813124498745, - "learning_rate": 3.6797834595959323e-06, - "loss": 1.0057, - "num_input_tokens_seen": 36147365, - "step": 1723 - }, - { - "epoch": 0.20729874346179283, - "flos": 20754047353200.0, - "grad_norm": 5.970467684779812, - "learning_rate": 3.679360542234254e-06, - "loss": 0.999, - "num_input_tokens_seen": 36166430, - "step": 1724 - }, - { - "epoch": 0.20741898635243192, - "flos": 20677100854920.0, - "grad_norm": 5.809877521845676, - "learning_rate": 3.678937370114916e-06, - "loss": 0.9524, - "num_input_tokens_seen": 36185955, - "step": 1725 - }, - { - "epoch": 0.207539229243071, - "flos": 11000748863760.0, - "grad_norm": 3.7514241375366213, - "learning_rate": 3.678513943302114e-06, - "loss": 1.0186, - "num_input_tokens_seen": 36202450, - "step": 1726 - }, - { - "epoch": 0.20765947213371008, - "flos": 14566522983840.0, - "grad_norm": 22.754266695106462, - "learning_rate": 3.678090261860082e-06, - "loss": 1.0774, - "num_input_tokens_seen": 36221900, - "step": 1727 - }, - { - "epoch": 0.2077797150243492, - "flos": 13728181041120.0, - "grad_norm": 3.4935403887219962, - "learning_rate": 3.6776663258530906e-06, - "loss": 1.003, - "num_input_tokens_seen": 36240270, - "step": 1728 - }, - { - "epoch": 0.20789995791498828, - "flos": 15506560851840.0, - "grad_norm": 2.7162295530052574, - "learning_rate": 3.6772421353454516e-06, - "loss": 0.9456, - "num_input_tokens_seen": 36258585, - "step": 1729 - }, - { - "epoch": 0.20802020080562736, - "flos": 16455153295080.0, - "grad_norm": 4.483325795636464, - "learning_rate": 3.6768176904015153e-06, - "loss": 1.1003, - "num_input_tokens_seen": 36278110, - "step": 1730 - }, - { - "epoch": 0.20814044369626647, 
- "flos": 16399982052480.0, - "grad_norm": 3.8318330206039155, - "learning_rate": 3.6763929910856674e-06, - "loss": 0.8308, - "num_input_tokens_seen": 36296280, - "step": 1731 - }, - { - "epoch": 0.20826068658690555, - "flos": 13910196236280.0, - "grad_norm": 5.231931930449164, - "learning_rate": 3.6759680374623365e-06, - "loss": 1.0006, - "num_input_tokens_seen": 36313915, - "step": 1732 - }, - { - "epoch": 0.20838092947754464, - "flos": 18054308112600.0, - "grad_norm": 4.356196731832788, - "learning_rate": 3.675542829595986e-06, - "loss": 0.9654, - "num_input_tokens_seen": 36333300, - "step": 1733 - }, - { - "epoch": 0.20850117236818372, - "flos": 17111326734840.0, - "grad_norm": 6.0862593559589655, - "learning_rate": 3.6751173675511213e-06, - "loss": 1.0156, - "num_input_tokens_seen": 36355065, - "step": 1734 - }, - { - "epoch": 0.20862141525882283, - "flos": 14247208244160.0, - "grad_norm": 5.9530465028248285, - "learning_rate": 3.674691651392283e-06, - "loss": 1.1166, - "num_input_tokens_seen": 36372455, - "step": 1735 - }, - { - "epoch": 0.2087416581494619, - "flos": 27857320086960.0, - "grad_norm": 4.173929115969354, - "learning_rate": 3.674265681184053e-06, - "loss": 0.9831, - "num_input_tokens_seen": 36395435, - "step": 1736 - }, - { - "epoch": 0.208861901040101, - "flos": 18577229333760.0, - "grad_norm": 3.640241086961633, - "learning_rate": 3.6738394569910504e-06, - "loss": 1.0852, - "num_input_tokens_seen": 36415695, - "step": 1737 - }, - { - "epoch": 0.2089821439307401, - "flos": 20227600052640.0, - "grad_norm": 4.4429062698234505, - "learning_rate": 3.6734129788779333e-06, - "loss": 1.0463, - "num_input_tokens_seen": 36434590, - "step": 1738 - }, - { - "epoch": 0.2091023868213792, - "flos": 14960913866640.0, - "grad_norm": 3.89544465001125, - "learning_rate": 3.6729862469093976e-06, - "loss": 1.1342, - "num_input_tokens_seen": 36453405, - "step": 1739 - }, - { - "epoch": 0.20922262971201827, - "flos": 15956828193120.0, - "grad_norm": 6.116680489644882, - "learning_rate": 3.6725592611501782e-06, - "loss": 1.035, - "num_input_tokens_seen": 36471800, - "step": 1740 - }, - { - "epoch": 0.20934287260265738, - "flos": 19834956878760.0, - "grad_norm": 3.5853406621371726, - "learning_rate": 3.6721320216650496e-06, - "loss": 0.9959, - "num_input_tokens_seen": 36492135, - "step": 1741 - }, - { - "epoch": 0.20946311549329646, - "flos": 11630241198240.0, - "grad_norm": 5.094694459567967, - "learning_rate": 3.6717045285188215e-06, - "loss": 1.0722, - "num_input_tokens_seen": 36509550, - "step": 1742 - }, - { - "epoch": 0.20958335838393555, - "flos": 15983693267760.0, - "grad_norm": 4.054337973404276, - "learning_rate": 3.671276781776346e-06, - "loss": 1.0884, - "num_input_tokens_seen": 36527925, - "step": 1743 - }, - { - "epoch": 0.20970360127457463, - "flos": 17946571860000.0, - "grad_norm": 2.85243291242712, - "learning_rate": 3.6708487815025128e-06, - "loss": 0.8922, - "num_input_tokens_seen": 36548225, - "step": 1744 - }, - { - "epoch": 0.20982384416521374, - "flos": 13099209953160.0, - "grad_norm": 3.983152673260951, - "learning_rate": 3.6704205277622463e-06, - "loss": 0.961, - "num_input_tokens_seen": 36566385, - "step": 1745 - }, - { - "epoch": 0.20994408705585282, - "flos": 18053878850760.0, - "grad_norm": 6.0988319529386565, - "learning_rate": 3.6699920206205146e-06, - "loss": 1.026, - "num_input_tokens_seen": 36586845, - "step": 1746 - }, - { - "epoch": 0.2100643299464919, - "flos": 15140997383520.0, - "grad_norm": 5.6995333296491255, - "learning_rate": 3.669563260142321e-06, - 
"loss": 1.0564, - "num_input_tokens_seen": 36605455, - "step": 1747 - }, - { - "epoch": 0.21018457283713102, - "flos": 13728150379560.0, - "grad_norm": 4.078886348352138, - "learning_rate": 3.6691342463927083e-06, - "loss": 1.0599, - "num_input_tokens_seen": 36624170, - "step": 1748 - }, - { - "epoch": 0.2103048157277701, - "flos": 20178806414520.0, - "grad_norm": 3.008728680473711, - "learning_rate": 3.668704979436758e-06, - "loss": 1.0461, - "num_input_tokens_seen": 36643985, - "step": 1749 - }, - { - "epoch": 0.21042505861840918, - "flos": 12364771568040.0, - "grad_norm": 4.351498286597024, - "learning_rate": 3.668275459339588e-06, - "loss": 1.0177, - "num_input_tokens_seen": 36662185, - "step": 1750 - }, - { - "epoch": 0.21054530150904827, - "flos": 10030841088240.0, - "grad_norm": 4.817524454542726, - "learning_rate": 3.667845686166358e-06, - "loss": 1.0276, - "num_input_tokens_seen": 36678830, - "step": 1751 - }, - { - "epoch": 0.21066554439968738, - "flos": 13198207661160.0, - "grad_norm": 3.4370555243536924, - "learning_rate": 3.6674156599822634e-06, - "loss": 1.0843, - "num_input_tokens_seen": 36694345, - "step": 1752 - }, - { - "epoch": 0.21078578729032646, - "flos": 16821605948640.0, - "grad_norm": 6.669732851000176, - "learning_rate": 3.666985380852539e-06, - "loss": 1.036, - "num_input_tokens_seen": 36713070, - "step": 1753 - }, - { - "epoch": 0.21090603018096554, - "flos": 20908522919400.0, - "grad_norm": 4.162338959382449, - "learning_rate": 3.6665548488424576e-06, - "loss": 0.977, - "num_input_tokens_seen": 36731550, - "step": 1754 - }, - { - "epoch": 0.21102627307160465, - "flos": 16536331088640.0, - "grad_norm": 3.448500354299462, - "learning_rate": 3.6661240640173307e-06, - "loss": 1.0967, - "num_input_tokens_seen": 36752740, - "step": 1755 - }, - { - "epoch": 0.21114651596224374, - "flos": 39087124527000.0, - "grad_norm": 0.9346326530176888, - "learning_rate": 3.6656930264425085e-06, - "loss": 0.8677, - "num_input_tokens_seen": 36816505, - "step": 1756 - }, - { - "epoch": 0.21126675885288282, - "flos": 15301267984560.0, - "grad_norm": 3.1561249056661262, - "learning_rate": 3.665261736183378e-06, - "loss": 0.9766, - "num_input_tokens_seen": 36836260, - "step": 1757 - }, - { - "epoch": 0.2113870017435219, - "flos": 7696512008160.0, - "grad_norm": 4.851294370358399, - "learning_rate": 3.664830193305366e-06, - "loss": 1.0956, - "num_input_tokens_seen": 36853755, - "step": 1758 - }, - { - "epoch": 0.211507244634161, - "flos": 11787231012360.0, - "grad_norm": 9.594145229287706, - "learning_rate": 3.6643983978739373e-06, - "loss": 0.9878, - "num_input_tokens_seen": 36870090, - "step": 1759 - }, - { - "epoch": 0.2116274875248001, - "flos": 14877068517360.0, - "grad_norm": 4.458572856612821, - "learning_rate": 3.663966349954596e-06, - "loss": 1.0379, - "num_input_tokens_seen": 36889990, - "step": 1760 - }, - { - "epoch": 0.21174773041543918, - "flos": 48833708134800.0, - "grad_norm": 0.7633074112687154, - "learning_rate": 3.6635340496128816e-06, - "loss": 0.8466, - "num_input_tokens_seen": 36946640, - "step": 1761 - }, - { - "epoch": 0.2118679733060783, - "flos": 14672511527520.0, - "grad_norm": 6.0485213056871645, - "learning_rate": 3.6631014969143747e-06, - "loss": 1.1461, - "num_input_tokens_seen": 36966050, - "step": 1762 - }, - { - "epoch": 0.21198821619671737, - "flos": 16508975429040.0, - "grad_norm": 5.247068404086058, - "learning_rate": 3.662668691924693e-06, - "loss": 1.1066, - "num_input_tokens_seen": 36986820, - "step": 1763 - }, - { - "epoch": 0.21210845908735645, - 
"flos": 17425122393720.0, - "grad_norm": 15.01027886748569, - "learning_rate": 3.6622356347094927e-06, - "loss": 0.9374, - "num_input_tokens_seen": 37008105, - "step": 1764 - }, - { - "epoch": 0.21222870197799554, - "flos": 19287500861520.0, - "grad_norm": 3.5965140949611065, - "learning_rate": 3.6618023253344684e-06, - "loss": 1.0181, - "num_input_tokens_seen": 37026685, - "step": 1765 - }, - { - "epoch": 0.21234894486863465, - "flos": 11945508612000.0, - "grad_norm": 3.1773845759091075, - "learning_rate": 3.6613687638653527e-06, - "loss": 1.0698, - "num_input_tokens_seen": 37044575, - "step": 1766 - }, - { - "epoch": 0.21246918775927373, - "flos": 16689212253720.0, - "grad_norm": 3.06906483078264, - "learning_rate": 3.660934950367916e-06, - "loss": 1.0049, - "num_input_tokens_seen": 37063540, - "step": 1767 - }, - { - "epoch": 0.21258943064991281, - "flos": 15904201860000.0, - "grad_norm": 3.01151529260686, - "learning_rate": 3.660500884907968e-06, - "loss": 1.0579, - "num_input_tokens_seen": 37084000, - "step": 1768 - }, - { - "epoch": 0.21270967354055192, - "flos": 42540348529560.0, - "grad_norm": 0.8320374824397957, - "learning_rate": 3.660066567551356e-06, - "loss": 0.8547, - "num_input_tokens_seen": 37143865, - "step": 1769 - }, - { - "epoch": 0.212829916431191, - "flos": 15301881215760.0, - "grad_norm": 4.116792547692597, - "learning_rate": 3.6596319983639657e-06, - "loss": 1.0547, - "num_input_tokens_seen": 37162165, - "step": 1770 - }, - { - "epoch": 0.2129501593218301, - "flos": 20651615550480.0, - "grad_norm": 4.007417953136788, - "learning_rate": 3.6591971774117214e-06, - "loss": 1.0856, - "num_input_tokens_seen": 37184860, - "step": 1771 - }, - { - "epoch": 0.2130704022124692, - "flos": 13334740666680.0, - "grad_norm": 8.56290585189502, - "learning_rate": 3.6587621047605833e-06, - "loss": 1.0286, - "num_input_tokens_seen": 37201750, - "step": 1772 - }, - { - "epoch": 0.21319064510310828, - "flos": 9821255602560.0, - "grad_norm": 4.348416244010773, - "learning_rate": 3.6583267804765542e-06, - "loss": 1.0991, - "num_input_tokens_seen": 37215805, - "step": 1773 - }, - { - "epoch": 0.21331088799374737, - "flos": 14881851720720.0, - "grad_norm": 4.923671818287073, - "learning_rate": 3.6578912046256702e-06, - "loss": 1.0782, - "num_input_tokens_seen": 37234045, - "step": 1774 - }, - { - "epoch": 0.21343113088438645, - "flos": 13203696080400.0, - "grad_norm": 3.7170118870087117, - "learning_rate": 3.6574553772740083e-06, - "loss": 0.9913, - "num_input_tokens_seen": 37251695, - "step": 1775 - }, - { - "epoch": 0.21355137377502556, - "flos": 48272939452440.0, - "grad_norm": 0.8952256884604122, - "learning_rate": 3.657019298487684e-06, - "loss": 0.8932, - "num_input_tokens_seen": 37316425, - "step": 1776 - }, - { - "epoch": 0.21367161666566464, - "flos": 24636039395400.0, - "grad_norm": 4.983811493114619, - "learning_rate": 3.6565829683328495e-06, - "loss": 1.0368, - "num_input_tokens_seen": 37338770, - "step": 1777 - }, - { - "epoch": 0.21379185955630373, - "flos": 13465785252960.0, - "grad_norm": 9.698196510896489, - "learning_rate": 3.6561463868756965e-06, - "loss": 1.0812, - "num_input_tokens_seen": 37357190, - "step": 1778 - }, - { - "epoch": 0.21391210244694284, - "flos": 20098364498400.0, - "grad_norm": 5.181163732825163, - "learning_rate": 3.655709554182452e-06, - "loss": 1.0122, - "num_input_tokens_seen": 37377250, - "step": 1779 - }, - { - "epoch": 0.21403234533758192, - "flos": 12362870551320.0, - "grad_norm": 2.674112999998593, - "learning_rate": 3.6552724703193855e-06, - 
"loss": 1.0834, - "num_input_tokens_seen": 37394160, - "step": 1780 - }, - { - "epoch": 0.214152588228221, - "flos": 36933522856560.0, - "grad_norm": 0.7945281982363267, - "learning_rate": 3.654835135352801e-06, - "loss": 0.8137, - "num_input_tokens_seen": 37448690, - "step": 1781 - }, - { - "epoch": 0.21427283111886009, - "flos": 13829478366120.0, - "grad_norm": 10.335797990129908, - "learning_rate": 3.654397549349043e-06, - "loss": 1.1081, - "num_input_tokens_seen": 37465785, - "step": 1782 - }, - { - "epoch": 0.2143930740094992, - "flos": 14253187248360.0, - "grad_norm": 4.161240414040898, - "learning_rate": 3.653959712374491e-06, - "loss": 0.9687, - "num_input_tokens_seen": 37483610, - "step": 1783 - }, - { - "epoch": 0.21451331690013828, - "flos": 15484724273040.0, - "grad_norm": 16.281345594129153, - "learning_rate": 3.6535216244955663e-06, - "loss": 1.0542, - "num_input_tokens_seen": 37503225, - "step": 1784 - }, - { - "epoch": 0.21463355979077736, - "flos": 23429957013600.0, - "grad_norm": 10.449748818331038, - "learning_rate": 3.653083285778726e-06, - "loss": 0.9263, - "num_input_tokens_seen": 37524315, - "step": 1785 - }, - { - "epoch": 0.21475380268141647, - "flos": 15301697246400.0, - "grad_norm": 6.844202035406673, - "learning_rate": 3.6526446962904653e-06, - "loss": 1.0297, - "num_input_tokens_seen": 37542750, - "step": 1786 - }, - { - "epoch": 0.21487404557205556, - "flos": 22930405449240.0, - "grad_norm": 2.8649551057876477, - "learning_rate": 3.652205856097318e-06, - "loss": 0.9656, - "num_input_tokens_seen": 37565655, - "step": 1787 - }, - { - "epoch": 0.21499428846269464, - "flos": 8925841400520.0, - "grad_norm": 4.130474041942066, - "learning_rate": 3.651766765265856e-06, - "loss": 1.0146, - "num_input_tokens_seen": 37582385, - "step": 1788 - }, - { - "epoch": 0.21511453135333372, - "flos": 16686330067080.0, - "grad_norm": 5.012110349203007, - "learning_rate": 3.65132742386269e-06, - "loss": 1.028, - "num_input_tokens_seen": 37597325, - "step": 1789 - }, - { - "epoch": 0.21523477424397283, - "flos": 19182370841520.0, - "grad_norm": 2.9026591770050376, - "learning_rate": 3.6508878319544656e-06, - "loss": 1.0637, - "num_input_tokens_seen": 37617260, - "step": 1790 - }, - { - "epoch": 0.21535501713461191, - "flos": 13413158919840.0, - "grad_norm": 3.405661015816295, - "learning_rate": 3.65044798960787e-06, - "loss": 1.033, - "num_input_tokens_seen": 37635320, - "step": 1791 - }, - { - "epoch": 0.215475260025251, - "flos": 12679425750600.0, - "grad_norm": 3.2539968123089182, - "learning_rate": 3.650007896889627e-06, - "loss": 1.0048, - "num_input_tokens_seen": 37653620, - "step": 1792 - }, - { - "epoch": 0.2155955029158901, - "flos": 11787629612640.0, - "grad_norm": 2.7850537809593003, - "learning_rate": 3.6495675538664974e-06, - "loss": 1.0225, - "num_input_tokens_seen": 37672355, - "step": 1793 - }, - { - "epoch": 0.2157157458065292, - "flos": 16794372935280.0, - "grad_norm": 2.805477105566174, - "learning_rate": 3.649126960605282e-06, - "loss": 1.0482, - "num_input_tokens_seen": 37693060, - "step": 1794 - }, - { - "epoch": 0.21583598869716827, - "flos": 15720806894640.0, - "grad_norm": 3.6890412035549125, - "learning_rate": 3.6486861171728174e-06, - "loss": 1.0448, - "num_input_tokens_seen": 37711175, - "step": 1795 - }, - { - "epoch": 0.21595623158780738, - "flos": 16665352011960.0, - "grad_norm": 2.869052041635834, - "learning_rate": 3.6482450236359803e-06, - "loss": 1.0103, - "num_input_tokens_seen": 37732750, - "step": 1796 - }, - { - "epoch": 0.21607647447844647, - 
"flos": 19155659074680.0, - "grad_norm": 3.697215713188262, - "learning_rate": 3.647803680061683e-06, - "loss": 0.9916, - "num_input_tokens_seen": 37752885, - "step": 1797 - }, - { - "epoch": 0.21619671736908555, - "flos": 10235214108720.0, - "grad_norm": 7.751292951465168, - "learning_rate": 3.6473620865168776e-06, - "loss": 0.9692, - "num_input_tokens_seen": 37769475, - "step": 1798 - }, - { - "epoch": 0.21631696025972463, - "flos": 12705217670640.0, - "grad_norm": 6.506802740446046, - "learning_rate": 3.646920243068554e-06, - "loss": 1.0349, - "num_input_tokens_seen": 37787090, - "step": 1799 - }, - { - "epoch": 0.21643720315036374, - "flos": 17399177165880.0, - "grad_norm": 2.1274878580215337, - "learning_rate": 3.6464781497837384e-06, - "loss": 0.9612, - "num_input_tokens_seen": 37808785, - "step": 1800 - }, - { - "epoch": 0.21655744604100283, - "flos": 20282372694960.0, - "grad_norm": 9.26195022248706, - "learning_rate": 3.6460358067294965e-06, - "loss": 0.9607, - "num_input_tokens_seen": 37829735, - "step": 1801 - }, - { - "epoch": 0.2166776889316419, - "flos": 14301520963080.0, - "grad_norm": 4.030114538869973, - "learning_rate": 3.645593213972932e-06, - "loss": 1.0009, - "num_input_tokens_seen": 37848360, - "step": 1802 - }, - { - "epoch": 0.21679793182228102, - "flos": 10737586536600.0, - "grad_norm": 2.9558338477241177, - "learning_rate": 3.6451503715811852e-06, - "loss": 1.0092, - "num_input_tokens_seen": 37866390, - "step": 1803 - }, - { - "epoch": 0.2169181747129201, - "flos": 12312451850520.0, - "grad_norm": 2.8062498368770834, - "learning_rate": 3.6447072796214345e-06, - "loss": 1.0318, - "num_input_tokens_seen": 37884675, - "step": 1804 - }, - { - "epoch": 0.21703841760355919, - "flos": 42054104082240.0, - "grad_norm": 0.9658918363896586, - "learning_rate": 3.644263938160898e-06, - "loss": 0.9013, - "num_input_tokens_seen": 37940360, - "step": 1805 - }, - { - "epoch": 0.21715866049419827, - "flos": 15930147087840.0, - "grad_norm": 3.383974787471628, - "learning_rate": 3.6438203472668293e-06, - "loss": 0.9467, - "num_input_tokens_seen": 37959725, - "step": 1806 - }, - { - "epoch": 0.21727890338483738, - "flos": 12206892568680.0, - "grad_norm": 3.7528538335181607, - "learning_rate": 3.6433765070065206e-06, - "loss": 1.05, - "num_input_tokens_seen": 37977235, - "step": 1807 - }, - { - "epoch": 0.21739914627547646, - "flos": 9472653525000.0, - "grad_norm": 4.8095196499552495, - "learning_rate": 3.6429324174473025e-06, - "loss": 1.1108, - "num_input_tokens_seen": 37990495, - "step": 1808 - }, - { - "epoch": 0.21751938916611555, - "flos": 14881698412920.0, - "grad_norm": 3.9397257009381774, - "learning_rate": 3.6424880786565425e-06, - "loss": 1.0757, - "num_input_tokens_seen": 38006360, - "step": 1809 - }, - { - "epoch": 0.21763963205675466, - "flos": 19654045499760.0, - "grad_norm": 3.0736564306139664, - "learning_rate": 3.6420434907016482e-06, - "loss": 1.0061, - "num_input_tokens_seen": 38025770, - "step": 1810 - }, - { - "epoch": 0.21775987494739374, - "flos": 15219660929160.0, - "grad_norm": 3.1064080119479374, - "learning_rate": 3.6415986536500606e-06, - "loss": 1.0419, - "num_input_tokens_seen": 38043820, - "step": 1811 - }, - { - "epoch": 0.21788011783803282, - "flos": 12994110594720.0, - "grad_norm": 3.109047263811028, - "learning_rate": 3.641153567569263e-06, - "loss": 1.0422, - "num_input_tokens_seen": 38061855, - "step": 1812 - }, - { - "epoch": 0.2180003607286719, - "flos": 21567670530480.0, - "grad_norm": 3.7151942844852903, - "learning_rate": 
3.640708232526774e-06, - "loss": 1.1759, - "num_input_tokens_seen": 38080230, - "step": 1813 - }, - { - "epoch": 0.21812060361931102, - "flos": 18130212117840.0, - "grad_norm": 6.792610742524769, - "learning_rate": 3.6402626485901504e-06, - "loss": 1.0078, - "num_input_tokens_seen": 38099045, - "step": 1814 - }, - { - "epoch": 0.2182408465099501, - "flos": 15563479803360.0, - "grad_norm": 3.312951399078509, - "learning_rate": 3.639816815826988e-06, - "loss": 0.9999, - "num_input_tokens_seen": 38118090, - "step": 1815 - }, - { - "epoch": 0.21836108940058918, - "flos": 16820287501560.0, - "grad_norm": 3.268717200732835, - "learning_rate": 3.6393707343049176e-06, - "loss": 1.0017, - "num_input_tokens_seen": 38138140, - "step": 1816 - }, - { - "epoch": 0.2184813322912283, - "flos": 17556258964680.0, - "grad_norm": 3.8169171119266982, - "learning_rate": 3.6389244040916104e-06, - "loss": 0.9712, - "num_input_tokens_seen": 38156935, - "step": 1817 - }, - { - "epoch": 0.21860157518186737, - "flos": 18916694266440.0, - "grad_norm": 4.0045221515623455, - "learning_rate": 3.6384778252547747e-06, - "loss": 1.0198, - "num_input_tokens_seen": 38172535, - "step": 1818 - }, - { - "epoch": 0.21872181807250646, - "flos": 14829041418240.0, - "grad_norm": 3.0213385499312686, - "learning_rate": 3.638030997862155e-06, - "loss": 1.003, - "num_input_tokens_seen": 38191190, - "step": 1819 - }, - { - "epoch": 0.21884206096314554, - "flos": 43814326701360.0, - "grad_norm": 0.8258897737128829, - "learning_rate": 3.6375839219815356e-06, - "loss": 0.8544, - "num_input_tokens_seen": 38248710, - "step": 1820 - }, - { - "epoch": 0.21896230385378465, - "flos": 16688261745360.0, - "grad_norm": 4.682683479229659, - "learning_rate": 3.6371365976807375e-06, - "loss": 1.0565, - "num_input_tokens_seen": 38268825, - "step": 1821 - }, - { - "epoch": 0.21908254674442373, - "flos": 17844324026640.0, - "grad_norm": 4.471509814862768, - "learning_rate": 3.6366890250276185e-06, - "loss": 1.0529, - "num_input_tokens_seen": 38289500, - "step": 1822 - }, - { - "epoch": 0.21920278963506282, - "flos": 16717426437000.0, - "grad_norm": 31.829217082086604, - "learning_rate": 3.6362412040900764e-06, - "loss": 1.119, - "num_input_tokens_seen": 38309010, - "step": 1823 - }, - { - "epoch": 0.21932303252570193, - "flos": 20834397284640.0, - "grad_norm": 3.8266167608411723, - "learning_rate": 3.635793134936044e-06, - "loss": 1.0308, - "num_input_tokens_seen": 38329740, - "step": 1824 - }, - { - "epoch": 0.219443275416341, - "flos": 14771079973680.0, - "grad_norm": 2.89360824468449, - "learning_rate": 3.635344817633494e-06, - "loss": 0.9611, - "num_input_tokens_seen": 38348775, - "step": 1825 - }, - { - "epoch": 0.2195635183069801, - "flos": 10240487897040.0, - "grad_norm": 4.365306469622777, - "learning_rate": 3.634896252250436e-06, - "loss": 0.9788, - "num_input_tokens_seen": 38365260, - "step": 1826 - }, - { - "epoch": 0.2196837611976192, - "flos": 17241850074600.0, - "grad_norm": 3.351138499367545, - "learning_rate": 3.6344474388549157e-06, - "loss": 1.0451, - "num_input_tokens_seen": 38384635, - "step": 1827 - }, - { - "epoch": 0.2198040040882583, - "flos": 12807956088960.0, - "grad_norm": 4.468021146650824, - "learning_rate": 3.6339983775150183e-06, - "loss": 1.0395, - "num_input_tokens_seen": 38400915, - "step": 1828 - }, - { - "epoch": 0.21992424697889737, - "flos": 12599505081000.0, - "grad_norm": 3.7315526536019834, - "learning_rate": 3.6335490682988664e-06, - "loss": 1.0654, - "num_input_tokens_seen": 38416245, - "step": 1829 - }, - { - 
"epoch": 0.22004448986953645, - "flos": 12494834984400.0, - "grad_norm": 2.8606332814065696, - "learning_rate": 3.63309951127462e-06, - "loss": 1.0517, - "num_input_tokens_seen": 38432875, - "step": 1830 - }, - { - "epoch": 0.22016473276017556, - "flos": 15827837931360.0, - "grad_norm": 3.2739975479759957, - "learning_rate": 3.6326497065104757e-06, - "loss": 0.9733, - "num_input_tokens_seen": 38453060, - "step": 1831 - }, - { - "epoch": 0.22028497565081465, - "flos": 18185230052640.0, - "grad_norm": 2.8740727131670294, - "learning_rate": 3.6321996540746697e-06, - "loss": 0.9989, - "num_input_tokens_seen": 38471855, - "step": 1832 - }, - { - "epoch": 0.22040521854145373, - "flos": 25868802882480.0, - "grad_norm": 2.4209780108695265, - "learning_rate": 3.6317493540354733e-06, - "loss": 1.0247, - "num_input_tokens_seen": 38494990, - "step": 1833 - }, - { - "epoch": 0.22052546143209284, - "flos": 8326586912280.0, - "grad_norm": 3.612821048607342, - "learning_rate": 3.6312988064611976e-06, - "loss": 0.9934, - "num_input_tokens_seen": 38513020, - "step": 1834 - }, - { - "epoch": 0.22064570432273192, - "flos": 17217560571000.0, - "grad_norm": 2.657140874129462, - "learning_rate": 3.6308480114201896e-06, - "loss": 1.0303, - "num_input_tokens_seen": 38534660, - "step": 1835 - }, - { - "epoch": 0.220765947213371, - "flos": 12705432301560.0, - "grad_norm": 2.901219105159515, - "learning_rate": 3.630396968980835e-06, - "loss": 0.9946, - "num_input_tokens_seen": 38552255, - "step": 1836 - }, - { - "epoch": 0.2208861901040101, - "flos": 19047769514280.0, - "grad_norm": 4.697018127900215, - "learning_rate": 3.6299456792115575e-06, - "loss": 1.0754, - "num_input_tokens_seen": 38573230, - "step": 1837 - }, - { - "epoch": 0.2210064329946492, - "flos": 12623426645880.0, - "grad_norm": 2.5140003146867094, - "learning_rate": 3.629494142180815e-06, - "loss": 1.0326, - "num_input_tokens_seen": 38591695, - "step": 1838 - }, - { - "epoch": 0.22112667588528828, - "flos": 12731285544720.0, - "grad_norm": 3.578847087645071, - "learning_rate": 3.6290423579571075e-06, - "loss": 1.0768, - "num_input_tokens_seen": 38607955, - "step": 1839 - }, - { - "epoch": 0.22124691877592736, - "flos": 13020117145680.0, - "grad_norm": 7.338917542346376, - "learning_rate": 3.6285903266089694e-06, - "loss": 1.0215, - "num_input_tokens_seen": 38626950, - "step": 1840 - }, - { - "epoch": 0.22136716166656648, - "flos": 14776568392920.0, - "grad_norm": 2.8171114098317185, - "learning_rate": 3.628138048204974e-06, - "loss": 0.9991, - "num_input_tokens_seen": 38647355, - "step": 1841 - }, - { - "epoch": 0.22148740455720556, - "flos": 12521669397480.0, - "grad_norm": 2.9029831111467934, - "learning_rate": 3.6276855228137304e-06, - "loss": 0.9959, - "num_input_tokens_seen": 38665280, - "step": 1842 - }, - { - "epoch": 0.22160764744784464, - "flos": 15433201756080.0, - "grad_norm": 3.8558387772028606, - "learning_rate": 3.6272327505038874e-06, - "loss": 1.0456, - "num_input_tokens_seen": 38681465, - "step": 1843 - }, - { - "epoch": 0.22172789033848372, - "flos": 16897172676720.0, - "grad_norm": 4.966258834036002, - "learning_rate": 3.626779731344131e-06, - "loss": 1.0107, - "num_input_tokens_seen": 38700975, - "step": 1844 - }, - { - "epoch": 0.22184813322912283, - "flos": 12023313633960.0, - "grad_norm": 3.7763280439851323, - "learning_rate": 3.6263264654031814e-06, - "loss": 1.0806, - "num_input_tokens_seen": 38717595, - "step": 1845 - }, - { - "epoch": 0.22196837611976192, - "flos": 44254996974360.0, - "grad_norm": 0.679973322962148, - 
"learning_rate": 3.6258729527498008e-06, - "loss": 0.8265, - "num_input_tokens_seen": 38778160, - "step": 1846 - }, - { - "epoch": 0.222088619010401, - "flos": 18186732469080.0, - "grad_norm": 4.756980785716788, - "learning_rate": 3.6254191934527854e-06, - "loss": 0.8478, - "num_input_tokens_seen": 38797235, - "step": 1847 - }, - { - "epoch": 0.2222088619010401, - "flos": 13702542428880.0, - "grad_norm": 5.151876548425325, - "learning_rate": 3.6249651875809715e-06, - "loss": 0.8647, - "num_input_tokens_seen": 38816835, - "step": 1848 - }, - { - "epoch": 0.2223291047916792, - "flos": 13544908722000.0, - "grad_norm": 3.230665650646109, - "learning_rate": 3.62451093520323e-06, - "loss": 1.1205, - "num_input_tokens_seen": 38834460, - "step": 1849 - }, - { - "epoch": 0.22244934768231828, - "flos": 14803341482880.0, - "grad_norm": 3.2143269490821407, - "learning_rate": 3.6240564363884714e-06, - "loss": 1.1184, - "num_input_tokens_seen": 38854125, - "step": 1850 - }, - { - "epoch": 0.2225695905729574, - "flos": 11053620489360.0, - "grad_norm": 2.2249972011248524, - "learning_rate": 3.623601691205643e-06, - "loss": 0.9239, - "num_input_tokens_seen": 38872920, - "step": 1851 - }, - { - "epoch": 0.22268983346359647, - "flos": 18053909512320.0, - "grad_norm": 2.673262921624494, - "learning_rate": 3.623146699723729e-06, - "loss": 1.0366, - "num_input_tokens_seen": 38892100, - "step": 1852 - }, - { - "epoch": 0.22281007635423555, - "flos": 9349304990280.0, - "grad_norm": 39.78858159806576, - "learning_rate": 3.6226914620117507e-06, - "loss": 1.017, - "num_input_tokens_seen": 38910440, - "step": 1853 - }, - { - "epoch": 0.22293031924487464, - "flos": 10843421772480.0, - "grad_norm": 3.237369938303567, - "learning_rate": 3.622235978138768e-06, - "loss": 1.0306, - "num_input_tokens_seen": 38927785, - "step": 1854 - }, - { - "epoch": 0.22305056213551375, - "flos": 16035246446280.0, - "grad_norm": 25.22491599032096, - "learning_rate": 3.621780248173877e-06, - "loss": 1.0475, - "num_input_tokens_seen": 38945705, - "step": 1855 - }, - { - "epoch": 0.22317080502615283, - "flos": 46450677401280.0, - "grad_norm": 0.8823693775301036, - "learning_rate": 3.6213242721862125e-06, - "loss": 0.8871, - "num_input_tokens_seen": 39003880, - "step": 1856 - }, - { - "epoch": 0.2232910479167919, - "flos": 18343262359800.0, - "grad_norm": 2.5697230442896033, - "learning_rate": 3.620868050244945e-06, - "loss": 0.977, - "num_input_tokens_seen": 39024080, - "step": 1857 - }, - { - "epoch": 0.22341129080743102, - "flos": 16528267098360.0, - "grad_norm": 4.89581958958205, - "learning_rate": 3.6204115824192817e-06, - "loss": 0.9973, - "num_input_tokens_seen": 39041275, - "step": 1858 - }, - { - "epoch": 0.2235315336980701, - "flos": 15064940070480.0, - "grad_norm": 2.838244128017287, - "learning_rate": 3.619954868778471e-06, - "loss": 0.9868, - "num_input_tokens_seen": 39057690, - "step": 1859 - }, - { - "epoch": 0.2236517765887092, - "flos": 14121713400240.0, - "grad_norm": 2.436989070456145, - "learning_rate": 3.6194979093917944e-06, - "loss": 1.0531, - "num_input_tokens_seen": 39076825, - "step": 1860 - }, - { - "epoch": 0.22377201947934827, - "flos": 16501647316200.0, - "grad_norm": 2.363480488599179, - "learning_rate": 3.6190407043285724e-06, - "loss": 1.1123, - "num_input_tokens_seen": 39094280, - "step": 1861 - }, - { - "epoch": 0.22389226236998738, - "flos": 19074603927360.0, - "grad_norm": 2.7781814620949663, - "learning_rate": 3.618583253658163e-06, - "loss": 0.9719, - "num_input_tokens_seen": 39114100, - "step": 1862 - 
}, - { - "epoch": 0.22401250526062647, - "flos": 17189285064600.0, - "grad_norm": 3.2493636168471123, - "learning_rate": 3.618125557449961e-06, - "loss": 1.091, - "num_input_tokens_seen": 39131875, - "step": 1863 - }, - { - "epoch": 0.22413274815126555, - "flos": 11862889725120.0, - "grad_norm": 3.021665857409288, - "learning_rate": 3.6176676157733983e-06, - "loss": 1.0669, - "num_input_tokens_seen": 39146605, - "step": 1864 - }, - { - "epoch": 0.22425299104190466, - "flos": 15169579505520.0, - "grad_norm": 3.9241588979079673, - "learning_rate": 3.6172094286979443e-06, - "loss": 0.9832, - "num_input_tokens_seen": 39163695, - "step": 1865 - }, - { - "epoch": 0.22437323393254374, - "flos": 22935924530040.0, - "grad_norm": 2.3702350802467516, - "learning_rate": 3.6167509962931064e-06, - "loss": 1.0396, - "num_input_tokens_seen": 39189115, - "step": 1866 - }, - { - "epoch": 0.22449347682318282, - "flos": 12756586879800.0, - "grad_norm": 4.094613952325555, - "learning_rate": 3.6162923186284276e-06, - "loss": 1.0026, - "num_input_tokens_seen": 39204795, - "step": 1867 - }, - { - "epoch": 0.2246137197138219, - "flos": 13255862490120.0, - "grad_norm": 4.885482277934564, - "learning_rate": 3.6158333957734888e-06, - "loss": 1.0844, - "num_input_tokens_seen": 39223105, - "step": 1868 - }, - { - "epoch": 0.22473396260446102, - "flos": 11022462796320.0, - "grad_norm": 3.802321808882786, - "learning_rate": 3.6153742277979088e-06, - "loss": 1.0658, - "num_input_tokens_seen": 39240255, - "step": 1869 - }, - { - "epoch": 0.2248542054951001, - "flos": 10214634653880.0, - "grad_norm": 2.7091694543785634, - "learning_rate": 3.6149148147713434e-06, - "loss": 1.0113, - "num_input_tokens_seen": 39258210, - "step": 1870 - }, - { - "epoch": 0.22497444838573918, - "flos": 13648597648680.0, - "grad_norm": 2.7894745850869382, - "learning_rate": 3.614455156763484e-06, - "loss": 1.0987, - "num_input_tokens_seen": 39276235, - "step": 1871 - }, - { - "epoch": 0.2250946912763783, - "flos": 11971699132320.0, - "grad_norm": 2.482517627757202, - "learning_rate": 3.613995253844061e-06, - "loss": 0.9359, - "num_input_tokens_seen": 39293635, - "step": 1872 - }, - { - "epoch": 0.22521493416701738, - "flos": 17556565580280.0, - "grad_norm": 3.3076711041516424, - "learning_rate": 3.6135351060828414e-06, - "loss": 1.0303, - "num_input_tokens_seen": 39313830, - "step": 1873 - }, - { - "epoch": 0.22533517705765646, - "flos": 12626830079040.0, - "grad_norm": 3.546114936760279, - "learning_rate": 3.6130747135496285e-06, - "loss": 0.9081, - "num_input_tokens_seen": 39332550, - "step": 1874 - }, - { - "epoch": 0.22545541994829554, - "flos": 24031633765080.0, - "grad_norm": 3.786525986323741, - "learning_rate": 3.6126140763142646e-06, - "loss": 0.8905, - "num_input_tokens_seen": 39357300, - "step": 1875 - }, - { - "epoch": 0.22557566283893465, - "flos": 13596737854560.0, - "grad_norm": 5.656108369687451, - "learning_rate": 3.6121531944466275e-06, - "loss": 1.0781, - "num_input_tokens_seen": 39374345, - "step": 1876 - }, - { - "epoch": 0.22569590572957374, - "flos": 14747342378160.0, - "grad_norm": 2.774003693719122, - "learning_rate": 3.611692068016633e-06, - "loss": 1.0126, - "num_input_tokens_seen": 39390395, - "step": 1877 - }, - { - "epoch": 0.22581614862021282, - "flos": 13073019432840.0, - "grad_norm": 2.898801055892851, - "learning_rate": 3.611230697094233e-06, - "loss": 0.9722, - "num_input_tokens_seen": 39406815, - "step": 1878 - }, - { - "epoch": 0.22593639151085193, - "flos": 14227027389600.0, - "grad_norm": 3.6604642137983054, 
- "learning_rate": 3.6107690817494173e-06, - "loss": 1.1047, - "num_input_tokens_seen": 39426755, - "step": 1879 - }, - { - "epoch": 0.226056634401491, - "flos": 9244082985600.0, - "grad_norm": 3.5933107854056674, - "learning_rate": 3.6103072220522117e-06, - "loss": 0.9359, - "num_input_tokens_seen": 39442005, - "step": 1880 - }, - { - "epoch": 0.2261768772921301, - "flos": 13464957390840.0, - "grad_norm": 4.207626258193294, - "learning_rate": 3.609845118072682e-06, - "loss": 1.1398, - "num_input_tokens_seen": 39460395, - "step": 1881 - }, - { - "epoch": 0.2262971201827692, - "flos": 14173419886560.0, - "grad_norm": 2.711965844082745, - "learning_rate": 3.6093827698809276e-06, - "loss": 1.0191, - "num_input_tokens_seen": 39479215, - "step": 1882 - }, - { - "epoch": 0.2264173630734083, - "flos": 11787844243560.0, - "grad_norm": 3.3222097922538785, - "learning_rate": 3.6089201775470864e-06, - "loss": 1.0803, - "num_input_tokens_seen": 39494390, - "step": 1883 - }, - { - "epoch": 0.22653760596404737, - "flos": 17347072079280.0, - "grad_norm": 2.48627557571628, - "learning_rate": 3.6084573411413334e-06, - "loss": 1.0127, - "num_input_tokens_seen": 39513505, - "step": 1884 - }, - { - "epoch": 0.22665784885468646, - "flos": 12993742656000.0, - "grad_norm": 3.7241252870637367, - "learning_rate": 3.607994260733881e-06, - "loss": 1.0267, - "num_input_tokens_seen": 39532465, - "step": 1885 - }, - { - "epoch": 0.22677809174532557, - "flos": 17108229917280.0, - "grad_norm": 2.4321083691382213, - "learning_rate": 3.6075309363949776e-06, - "loss": 0.9726, - "num_input_tokens_seen": 39551355, - "step": 1886 - }, - { - "epoch": 0.22689833463596465, - "flos": 14460871717320.0, - "grad_norm": 3.8277272543289595, - "learning_rate": 3.6070673681949094e-06, - "loss": 1.0332, - "num_input_tokens_seen": 39569440, - "step": 1887 - }, - { - "epoch": 0.22701857752660373, - "flos": 21465851958960.0, - "grad_norm": 2.198149127930825, - "learning_rate": 3.606603556203999e-06, - "loss": 1.029, - "num_input_tokens_seen": 39594105, - "step": 1888 - }, - { - "epoch": 0.22713882041724284, - "flos": 15983233344360.0, - "grad_norm": 2.2438095552409143, - "learning_rate": 3.6061395004926066e-06, - "loss": 1.0699, - "num_input_tokens_seen": 39612760, - "step": 1889 - }, - { - "epoch": 0.22725906330788193, - "flos": 14566921584120.0, - "grad_norm": 3.1353709744559635, - "learning_rate": 3.605675201131129e-06, - "loss": 1.0682, - "num_input_tokens_seen": 39630940, - "step": 1890 - }, - { - "epoch": 0.227379306198521, - "flos": 13466275837920.0, - "grad_norm": 3.359606215925674, - "learning_rate": 3.60521065819e-06, - "loss": 1.016, - "num_input_tokens_seen": 39647970, - "step": 1891 - }, - { - "epoch": 0.2274995490891601, - "flos": 15484509642120.0, - "grad_norm": 2.698949863936341, - "learning_rate": 3.60474587173969e-06, - "loss": 1.1017, - "num_input_tokens_seen": 39666175, - "step": 1892 - }, - { - "epoch": 0.2276197919797992, - "flos": 13515100137600.0, - "grad_norm": 2.8807810834170877, - "learning_rate": 3.6042808418507084e-06, - "loss": 1.06, - "num_input_tokens_seen": 39683580, - "step": 1893 - }, - { - "epoch": 0.22774003487043828, - "flos": 13334556697320.0, - "grad_norm": 3.100682823289427, - "learning_rate": 3.6038155685935976e-06, - "loss": 0.9995, - "num_input_tokens_seen": 39699870, - "step": 1894 - }, - { - "epoch": 0.22786027776107737, - "flos": 16349961951960.0, - "grad_norm": 2.686793300950952, - "learning_rate": 3.6033500520389404e-06, - "loss": 0.9343, - "num_input_tokens_seen": 39716260, - "step": 1895 - 
}, - { - "epoch": 0.22798052065171648, - "flos": 47763606850440.0, - "grad_norm": 0.8324837685340112, - "learning_rate": 3.6028842922573553e-06, - "loss": 0.9217, - "num_input_tokens_seen": 39780125, - "step": 1896 - }, - { - "epoch": 0.22810076354235556, - "flos": 44440047663960.0, - "grad_norm": 0.8168230954690783, - "learning_rate": 3.602418289319497e-06, - "loss": 0.8941, - "num_input_tokens_seen": 39838400, - "step": 1897 - }, - { - "epoch": 0.22822100643299464, - "flos": 16977706577520.0, - "grad_norm": 2.508230869654845, - "learning_rate": 3.601952043296059e-06, - "loss": 0.9621, - "num_input_tokens_seen": 39858115, - "step": 1898 - }, - { - "epoch": 0.22834124932363373, - "flos": 14903933592000.0, - "grad_norm": 3.024868569789761, - "learning_rate": 3.6014855542577696e-06, - "loss": 1.0379, - "num_input_tokens_seen": 39875045, - "step": 1899 - }, - { - "epoch": 0.22846149221427284, - "flos": 17714996487720.0, - "grad_norm": 4.572724056604724, - "learning_rate": 3.6010188222753943e-06, - "loss": 1.0681, - "num_input_tokens_seen": 39895535, - "step": 1900 - }, - { - "epoch": 0.22858173510491192, - "flos": 40164707232000.0, - "grad_norm": 0.9772509475180345, - "learning_rate": 3.6005518474197372e-06, - "loss": 0.9054, - "num_input_tokens_seen": 39947300, - "step": 1901 - }, - { - "epoch": 0.228701977995551, - "flos": 17189683664880.0, - "grad_norm": 4.3517097708472585, - "learning_rate": 3.6000846297616373e-06, - "loss": 1.0148, - "num_input_tokens_seen": 39965320, - "step": 1902 - }, - { - "epoch": 0.22882222088619011, - "flos": 15188288605200.0, - "grad_norm": 2.846489896385478, - "learning_rate": 3.5996171693719717e-06, - "loss": 0.9667, - "num_input_tokens_seen": 39981135, - "step": 1903 - }, - { - "epoch": 0.2289424637768292, - "flos": 46241827793040.0, - "grad_norm": 0.8740661269689927, - "learning_rate": 3.5991494663216528e-06, - "loss": 0.901, - "num_input_tokens_seen": 40043840, - "step": 1904 - }, - { - "epoch": 0.22906270666746828, - "flos": 15746905430280.0, - "grad_norm": 3.547436909131377, - "learning_rate": 3.5986815206816314e-06, - "loss": 1.1043, - "num_input_tokens_seen": 40062380, - "step": 1905 - }, - { - "epoch": 0.2291829495581074, - "flos": 18340134880680.0, - "grad_norm": 2.415304019028142, - "learning_rate": 3.598213332522895e-06, - "loss": 0.9711, - "num_input_tokens_seen": 40082130, - "step": 1906 - }, - { - "epoch": 0.22930319244874647, - "flos": 22221391045440.0, - "grad_norm": 3.055528919990121, - "learning_rate": 3.597744901916466e-06, - "loss": 1.0106, - "num_input_tokens_seen": 40103135, - "step": 1907 - }, - { - "epoch": 0.22942343533938556, - "flos": 16530229438200.0, - "grad_norm": 8.296368772074088, - "learning_rate": 3.5972762289334058e-06, - "loss": 0.9845, - "num_input_tokens_seen": 40122485, - "step": 1908 - }, - { - "epoch": 0.22954367823002464, - "flos": 10319059458000.0, - "grad_norm": 3.258074925965572, - "learning_rate": 3.5968073136448116e-06, - "loss": 1.08, - "num_input_tokens_seen": 40140225, - "step": 1909 - }, - { - "epoch": 0.22966392112066375, - "flos": 11866170512040.0, - "grad_norm": 2.9442196546274912, - "learning_rate": 3.596338156121818e-06, - "loss": 1.1337, - "num_input_tokens_seen": 40158830, - "step": 1910 - }, - { - "epoch": 0.22978416401130283, - "flos": 48315263501400.0, - "grad_norm": 0.7666550927394256, - "learning_rate": 3.595868756435595e-06, - "loss": 0.8488, - "num_input_tokens_seen": 40226230, - "step": 1911 - }, - { - "epoch": 0.22990440690194192, - "flos": 14095216264320.0, - "grad_norm": 2.727400424164092, - 
"learning_rate": 3.5953991146573504e-06, - "loss": 1.0285, - "num_input_tokens_seen": 40244595, - "step": 1912 - }, - { - "epoch": 0.23002464979258103, - "flos": 9368106074640.0, - "grad_norm": 2.479859861598333, - "learning_rate": 3.5949292308583294e-06, - "loss": 1.0483, - "num_input_tokens_seen": 40257560, - "step": 1913 - }, - { - "epoch": 0.2301448926832201, - "flos": 15747212045880.0, - "grad_norm": 3.4812794435855126, - "learning_rate": 3.594459105109811e-06, - "loss": 1.0314, - "num_input_tokens_seen": 40276460, - "step": 1914 - }, - { - "epoch": 0.2302651355738592, - "flos": 14698456755360.0, - "grad_norm": 2.363949011419, - "learning_rate": 3.593988737483115e-06, - "loss": 1.0392, - "num_input_tokens_seen": 40296120, - "step": 1915 - }, - { - "epoch": 0.23038537846449827, - "flos": 13178149452840.0, - "grad_norm": 3.267039453553731, - "learning_rate": 3.5935181280495947e-06, - "loss": 1.0078, - "num_input_tokens_seen": 40314420, - "step": 1916 - }, - { - "epoch": 0.23050562135513739, - "flos": 45979186712400.0, - "grad_norm": 0.8808517624891922, - "learning_rate": 3.5930472768806412e-06, - "loss": 0.8178, - "num_input_tokens_seen": 40372810, - "step": 1917 - }, - { - "epoch": 0.23062586424577647, - "flos": 12261634549440.0, - "grad_norm": 2.4841412128354574, - "learning_rate": 3.5925761840476826e-06, - "loss": 0.9857, - "num_input_tokens_seen": 40391140, - "step": 1918 - }, - { - "epoch": 0.23074610713641555, - "flos": 19838145681000.0, - "grad_norm": 4.049004778617287, - "learning_rate": 3.592104849622183e-06, - "loss": 1.0488, - "num_input_tokens_seen": 40413115, - "step": 1919 - }, - { - "epoch": 0.23086635002705466, - "flos": 20546730822960.0, - "grad_norm": 2.107377187062398, - "learning_rate": 3.591633273675644e-06, - "loss": 0.9565, - "num_input_tokens_seen": 40435070, - "step": 1920 - }, - { - "epoch": 0.23098659291769374, - "flos": 45041300701680.0, - "grad_norm": 1.081962555725702, - "learning_rate": 3.591161456279602e-06, - "loss": 0.8587, - "num_input_tokens_seen": 40480335, - "step": 1921 - }, - { - "epoch": 0.23110683580833283, - "flos": 16690806654840.0, - "grad_norm": 2.2292481404564444, - "learning_rate": 3.590689397505633e-06, - "loss": 1.0237, - "num_input_tokens_seen": 40500965, - "step": 1922 - }, - { - "epoch": 0.2312270786989719, - "flos": 19417625601000.0, - "grad_norm": 2.2768584530962372, - "learning_rate": 3.590217097425347e-06, - "loss": 1.0914, - "num_input_tokens_seen": 40520585, - "step": 1923 - }, - { - "epoch": 0.23134732158961102, - "flos": 9349213005600.0, - "grad_norm": 3.4326840042839075, - "learning_rate": 3.589744556110391e-06, - "loss": 0.9264, - "num_input_tokens_seen": 40538295, - "step": 1924 - }, - { - "epoch": 0.2314675644802501, - "flos": 26393747766600.0, - "grad_norm": 1.9168047509693373, - "learning_rate": 3.58927177363245e-06, - "loss": 1.0754, - "num_input_tokens_seen": 40560840, - "step": 1925 - }, - { - "epoch": 0.2315878073708892, - "flos": 16953478397040.0, - "grad_norm": 4.097935291604152, - "learning_rate": 3.5887987500632447e-06, - "loss": 0.9651, - "num_input_tokens_seen": 40578565, - "step": 1926 - }, - { - "epoch": 0.2317080502615283, - "flos": 16376520411000.0, - "grad_norm": 2.583691131260734, - "learning_rate": 3.5883254854745325e-06, - "loss": 1.0648, - "num_input_tokens_seen": 40596675, - "step": 1927 - }, - { - "epoch": 0.23182829315216738, - "flos": 7907048002200.0, - "grad_norm": 3.6510627662531516, - "learning_rate": 3.587851979938107e-06, - "loss": 0.9854, - "num_input_tokens_seen": 40613285, - "step": 1928 - 
}, - { - "epoch": 0.23194853604280646, - "flos": 14069240374920.0, - "grad_norm": 3.112777305414023, - "learning_rate": 3.5873782335257985e-06, - "loss": 0.9966, - "num_input_tokens_seen": 40631170, - "step": 1929 - }, - { - "epoch": 0.23206877893344555, - "flos": 10818028452720.0, - "grad_norm": 3.4657436780499737, - "learning_rate": 3.5869042463094744e-06, - "loss": 1.0188, - "num_input_tokens_seen": 40648605, - "step": 1930 - }, - { - "epoch": 0.23218902182408466, - "flos": 16140897712800.0, - "grad_norm": 2.927700660923713, - "learning_rate": 3.586430018361038e-06, - "loss": 1.0041, - "num_input_tokens_seen": 40668095, - "step": 1931 - }, - { - "epoch": 0.23230926471472374, - "flos": 16140253820040.0, - "grad_norm": 6.06193964619988, - "learning_rate": 3.5859555497524283e-06, - "loss": 0.9867, - "num_input_tokens_seen": 40685050, - "step": 1932 - }, - { - "epoch": 0.23242950760536282, - "flos": 14461944871920.0, - "grad_norm": 3.94461349489205, - "learning_rate": 3.5854808405556237e-06, - "loss": 1.1464, - "num_input_tokens_seen": 40704005, - "step": 1933 - }, - { - "epoch": 0.23254975049600193, - "flos": 11970012746520.0, - "grad_norm": 3.595141862768107, - "learning_rate": 3.5850058908426355e-06, - "loss": 0.974, - "num_input_tokens_seen": 40722275, - "step": 1934 - }, - { - "epoch": 0.23266999338664102, - "flos": 16560068684160.0, - "grad_norm": 2.903127325773591, - "learning_rate": 3.584530700685514e-06, - "loss": 1.0735, - "num_input_tokens_seen": 40742255, - "step": 1935 - }, - { - "epoch": 0.2327902362772801, - "flos": 13882656607320.0, - "grad_norm": 2.8509855339890184, - "learning_rate": 3.5840552701563448e-06, - "loss": 1.1174, - "num_input_tokens_seen": 40758175, - "step": 1936 - }, - { - "epoch": 0.2329104791679192, - "flos": 11840286607320.0, - "grad_norm": 2.864237002620023, - "learning_rate": 3.5835795993272513e-06, - "loss": 1.0375, - "num_input_tokens_seen": 40776180, - "step": 1937 - }, - { - "epoch": 0.2330307220585583, - "flos": 15747181384320.0, - "grad_norm": 3.1950182044715016, - "learning_rate": 3.583103688270391e-06, - "loss": 0.9366, - "num_input_tokens_seen": 40795680, - "step": 1938 - }, - { - "epoch": 0.23315096494919738, - "flos": 13701438612720.0, - "grad_norm": 4.054223182861925, - "learning_rate": 3.58262753705796e-06, - "loss": 1.1115, - "num_input_tokens_seen": 40810290, - "step": 1939 - }, - { - "epoch": 0.23327120783983646, - "flos": 37935906772200.0, - "grad_norm": 0.768283316217871, - "learning_rate": 3.5821511457621902e-06, - "loss": 0.8045, - "num_input_tokens_seen": 40867310, - "step": 1940 - }, - { - "epoch": 0.23339145073047557, - "flos": 12127125206880.0, - "grad_norm": 5.550128165474942, - "learning_rate": 3.5816745144553497e-06, - "loss": 1.034, - "num_input_tokens_seen": 40882350, - "step": 1941 - }, - { - "epoch": 0.23351169362111465, - "flos": 9218107096200.0, - "grad_norm": 2.8674386145781785, - "learning_rate": 3.5811976432097424e-06, - "loss": 0.9822, - "num_input_tokens_seen": 40899740, - "step": 1942 - }, - { - "epoch": 0.23363193651175373, - "flos": 11210456995680.0, - "grad_norm": 5.083074323382097, - "learning_rate": 3.58072053209771e-06, - "loss": 1.0709, - "num_input_tokens_seen": 40916015, - "step": 1943 - }, - { - "epoch": 0.23375217940239285, - "flos": 14928407064960.0, - "grad_norm": 2.655946242971917, - "learning_rate": 3.5802431811916296e-06, - "loss": 1.0227, - "num_input_tokens_seen": 40932345, - "step": 1944 - }, - { - "epoch": 0.23387242229303193, - "flos": 14619517255680.0, - "grad_norm": 3.8988690032360065, - 
"learning_rate": 3.579765590563916e-06, - "loss": 1.0294, - "num_input_tokens_seen": 40951465, - "step": 1945 - }, - { - "epoch": 0.233992665183671, - "flos": 17267672656200.0, - "grad_norm": 2.726386209379609, - "learning_rate": 3.579287760287017e-06, - "loss": 1.0448, - "num_input_tokens_seen": 40971935, - "step": 1946 - }, - { - "epoch": 0.2341129080743101, - "flos": 21490141462560.0, - "grad_norm": 2.3012250333029463, - "learning_rate": 3.578809690433421e-06, - "loss": 0.9555, - "num_input_tokens_seen": 40993365, - "step": 1947 - }, - { - "epoch": 0.2342331509649492, - "flos": 16193064122520.0, - "grad_norm": 4.274602253575124, - "learning_rate": 3.578331381075651e-06, - "loss": 1.033, - "num_input_tokens_seen": 41013585, - "step": 1948 - }, - { - "epoch": 0.2343533938555883, - "flos": 16796028659520.0, - "grad_norm": 3.4631562365087194, - "learning_rate": 3.5778528322862646e-06, - "loss": 0.9091, - "num_input_tokens_seen": 41032125, - "step": 1949 - }, - { - "epoch": 0.23447363674622737, - "flos": 17476552926000.0, - "grad_norm": 1.8916512848596996, - "learning_rate": 3.5773740441378585e-06, - "loss": 1.0938, - "num_input_tokens_seen": 41052600, - "step": 1950 - }, - { - "epoch": 0.23459387963686648, - "flos": 38009486046960.0, - "grad_norm": 2.942818230571435, - "learning_rate": 3.5768950167030633e-06, - "loss": 0.9661, - "num_input_tokens_seen": 41077020, - "step": 1951 - }, - { - "epoch": 0.23471412252750556, - "flos": 17031927311760.0, - "grad_norm": 2.226185442134643, - "learning_rate": 3.576415750054548e-06, - "loss": 1.0079, - "num_input_tokens_seen": 41096860, - "step": 1952 - }, - { - "epoch": 0.23483436541814465, - "flos": 11106645422760.0, - "grad_norm": 2.6964784615772475, - "learning_rate": 3.5759362442650172e-06, - "loss": 1.0839, - "num_input_tokens_seen": 41113330, - "step": 1953 - }, - { - "epoch": 0.23495460830878373, - "flos": 17739684591600.0, - "grad_norm": 2.9754675105145965, - "learning_rate": 3.5754564994072113e-06, - "loss": 1.0639, - "num_input_tokens_seen": 41131890, - "step": 1954 - }, - { - "epoch": 0.23507485119942284, - "flos": 21725856145440.0, - "grad_norm": 2.75357369653772, - "learning_rate": 3.5749765155539067e-06, - "loss": 0.841, - "num_input_tokens_seen": 41152095, - "step": 1955 - }, - { - "epoch": 0.23519509409006192, - "flos": 12990829807800.0, - "grad_norm": 3.484943839541507, - "learning_rate": 3.574496292777917e-06, - "loss": 1.1434, - "num_input_tokens_seen": 41170025, - "step": 1956 - }, - { - "epoch": 0.235315336980701, - "flos": 21122523669720.0, - "grad_norm": 2.670129931142327, - "learning_rate": 3.574015831152092e-06, - "loss": 0.9325, - "num_input_tokens_seen": 41190160, - "step": 1957 - }, - { - "epoch": 0.23543557987134012, - "flos": 13151284378200.0, - "grad_norm": 4.293722654030687, - "learning_rate": 3.573535130749316e-06, - "loss": 1.051, - "num_input_tokens_seen": 41207830, - "step": 1958 - }, - { - "epoch": 0.2355558227619792, - "flos": 17554572578880.0, - "grad_norm": 2.401289744328714, - "learning_rate": 3.5730541916425127e-06, - "loss": 0.9608, - "num_input_tokens_seen": 41229030, - "step": 1959 - }, - { - "epoch": 0.23567606565261828, - "flos": 15589823631480.0, - "grad_norm": 2.421193968479541, - "learning_rate": 3.572573013904639e-06, - "loss": 1.0825, - "num_input_tokens_seen": 41248660, - "step": 1960 - }, - { - "epoch": 0.2357963085432574, - "flos": 9584897026920.0, - "grad_norm": 2.7020025844660442, - "learning_rate": 3.572091597608689e-06, - "loss": 1.1505, - "num_input_tokens_seen": 41266505, - "step": 1961 - }, 
- { - "epoch": 0.23591655143389648, - "flos": 15692960650080.0, - "grad_norm": 5.28478596610303, - "learning_rate": 3.571609942827694e-06, - "loss": 0.9477, - "num_input_tokens_seen": 41285340, - "step": 1962 - }, - { - "epoch": 0.23603679432453556, - "flos": 12048308353440.0, - "grad_norm": 2.6829088605883786, - "learning_rate": 3.57112804963472e-06, - "loss": 1.1002, - "num_input_tokens_seen": 41303275, - "step": 1963 - }, - { - "epoch": 0.23615703721517464, - "flos": 13596492562080.0, - "grad_norm": 2.0137529773086333, - "learning_rate": 3.57064591810287e-06, - "loss": 0.9863, - "num_input_tokens_seen": 41320495, - "step": 1964 - }, - { - "epoch": 0.23627728010581375, - "flos": 13544448798600.0, - "grad_norm": 3.4693567243701846, - "learning_rate": 3.570163548305284e-06, - "loss": 1.0318, - "num_input_tokens_seen": 41339145, - "step": 1965 - }, - { - "epoch": 0.23639752299645284, - "flos": 10082792867040.0, - "grad_norm": 3.2369700171089937, - "learning_rate": 3.569680940315135e-06, - "loss": 0.9211, - "num_input_tokens_seen": 41355265, - "step": 1966 - }, - { - "epoch": 0.23651776588709192, - "flos": 16271574360360.0, - "grad_norm": 2.7709275614460314, - "learning_rate": 3.5691980942056356e-06, - "loss": 1.0369, - "num_input_tokens_seen": 41374355, - "step": 1967 - }, - { - "epoch": 0.23663800877773103, - "flos": 13203604095720.0, - "grad_norm": 2.3242533192347765, - "learning_rate": 3.5687150100500332e-06, - "loss": 1.0214, - "num_input_tokens_seen": 41393775, - "step": 1968 - }, - { - "epoch": 0.2367582516683701, - "flos": 18184862113920.0, - "grad_norm": 3.7496467423944115, - "learning_rate": 3.568231687921611e-06, - "loss": 0.9738, - "num_input_tokens_seen": 41413670, - "step": 1969 - }, - { - "epoch": 0.2368784945590092, - "flos": 16560283315080.0, - "grad_norm": 1.7765285760463858, - "learning_rate": 3.5677481278936883e-06, - "loss": 1.0452, - "num_input_tokens_seen": 41432970, - "step": 1970 - }, - { - "epoch": 0.23699873744964828, - "flos": 50031169070160.0, - "grad_norm": 0.8273216525576805, - "learning_rate": 3.5672643300396214e-06, - "loss": 0.8258, - "num_input_tokens_seen": 41501835, - "step": 1971 - }, - { - "epoch": 0.2371189803402874, - "flos": 15510914793360.0, - "grad_norm": 3.2345445376573294, - "learning_rate": 3.566780294432802e-06, - "loss": 0.9119, - "num_input_tokens_seen": 41518730, - "step": 1972 - }, - { - "epoch": 0.23723922323092647, - "flos": 15563755757400.0, - "grad_norm": 7.6953992499869175, - "learning_rate": 3.566296021146657e-06, - "loss": 0.9573, - "num_input_tokens_seen": 41537830, - "step": 1973 - }, - { - "epoch": 0.23735946612156555, - "flos": 23325808163520.0, - "grad_norm": 2.5049374461025407, - "learning_rate": 3.565811510254652e-06, - "loss": 0.9498, - "num_input_tokens_seen": 41558430, - "step": 1974 - }, - { - "epoch": 0.23747970901220466, - "flos": 50524618984080.0, - "grad_norm": 0.8131858791751202, - "learning_rate": 3.5653267618302845e-06, - "loss": 0.8235, - "num_input_tokens_seen": 41625730, - "step": 1975 - }, - { - "epoch": 0.23759995190284375, - "flos": 14802820236360.0, - "grad_norm": 2.485757694898886, - "learning_rate": 3.564841775947093e-06, - "loss": 1.0844, - "num_input_tokens_seen": 41646340, - "step": 1976 - }, - { - "epoch": 0.23772019479348283, - "flos": 23477953451160.0, - "grad_norm": 2.966523952123176, - "learning_rate": 3.5643565526786475e-06, - "loss": 0.9845, - "num_input_tokens_seen": 41666000, - "step": 1977 - }, - { - "epoch": 0.2378404376841219, - "flos": 23062584513240.0, - "grad_norm": 2.4096466465463684, - 
"learning_rate": 3.5638710920985574e-06, - "loss": 0.9976, - "num_input_tokens_seen": 41687180, - "step": 1978 - }, - { - "epoch": 0.23796068057476102, - "flos": 16345761318240.0, - "grad_norm": 3.778808309828859, - "learning_rate": 3.5633853942804655e-06, - "loss": 1.0355, - "num_input_tokens_seen": 41705225, - "step": 1979 - }, - { - "epoch": 0.2380809234654001, - "flos": 9507122666520.0, - "grad_norm": 4.119071167622082, - "learning_rate": 3.5628994592980527e-06, - "loss": 0.9879, - "num_input_tokens_seen": 41722850, - "step": 1980 - }, - { - "epoch": 0.2382011663560392, - "flos": 11943760903080.0, - "grad_norm": 3.797415361296219, - "learning_rate": 3.562413287225034e-06, - "loss": 0.9396, - "num_input_tokens_seen": 41740680, - "step": 1981 - }, - { - "epoch": 0.2383214092466783, - "flos": 13071670324200.0, - "grad_norm": 3.0374811368128922, - "learning_rate": 3.5619268781351623e-06, - "loss": 1.1114, - "num_input_tokens_seen": 41758470, - "step": 1982 - }, - { - "epoch": 0.23844165213731738, - "flos": 14016583380240.0, - "grad_norm": 2.5681335117833113, - "learning_rate": 3.5614402321022256e-06, - "loss": 1.0032, - "num_input_tokens_seen": 41776020, - "step": 1983 - }, - { - "epoch": 0.23856189502795647, - "flos": 16612143109200.0, - "grad_norm": 2.462059419207773, - "learning_rate": 3.5609533492000463e-06, - "loss": 1.0913, - "num_input_tokens_seen": 41794630, - "step": 1984 - }, - { - "epoch": 0.23868213791859555, - "flos": 16689304238400.0, - "grad_norm": 3.494297832195746, - "learning_rate": 3.560466229502485e-06, - "loss": 1.002, - "num_input_tokens_seen": 41813695, - "step": 1985 - }, - { - "epoch": 0.23880238080923466, - "flos": 11761009830480.0, - "grad_norm": 3.6710643116870063, - "learning_rate": 3.5599788730834384e-06, - "loss": 1.1249, - "num_input_tokens_seen": 41831375, - "step": 1986 - }, - { - "epoch": 0.23892262369987374, - "flos": 12286598607360.0, - "grad_norm": 4.938612028032546, - "learning_rate": 3.559491280016836e-06, - "loss": 1.0222, - "num_input_tokens_seen": 41849040, - "step": 1987 - }, - { - "epoch": 0.23904286659051283, - "flos": 15852250081200.0, - "grad_norm": 7.89039125001854, - "learning_rate": 3.5590034503766465e-06, - "loss": 0.9437, - "num_input_tokens_seen": 41868425, - "step": 1988 - }, - { - "epoch": 0.23916310948115194, - "flos": 15038872196400.0, - "grad_norm": 2.9102398137121672, - "learning_rate": 3.558515384236874e-06, - "loss": 1.0387, - "num_input_tokens_seen": 41885575, - "step": 1989 - }, - { - "epoch": 0.23928335237179102, - "flos": 9978827986320.0, - "grad_norm": 2.4150728253427443, - "learning_rate": 3.558027081671556e-06, - "loss": 1.06, - "num_input_tokens_seen": 41902280, - "step": 1990 - }, - { - "epoch": 0.2394035952624301, - "flos": 16901342648880.0, - "grad_norm": 3.2115250766849495, - "learning_rate": 3.557538542754769e-06, - "loss": 0.9135, - "num_input_tokens_seen": 41921695, - "step": 1991 - }, - { - "epoch": 0.2395238381530692, - "flos": 17214954338400.0, - "grad_norm": 3.0680288116116827, - "learning_rate": 3.557049767560623e-06, - "loss": 0.8955, - "num_input_tokens_seen": 41940330, - "step": 1992 - }, - { - "epoch": 0.2396440810437083, - "flos": 17997327837960.0, - "grad_norm": 3.20435023251168, - "learning_rate": 3.5565607561632655e-06, - "loss": 1.0839, - "num_input_tokens_seen": 41958890, - "step": 1993 - }, - { - "epoch": 0.23976432393434738, - "flos": 20331135671520.0, - "grad_norm": 3.244707346270146, - "learning_rate": 3.5560715086368787e-06, - "loss": 1.0133, - "num_input_tokens_seen": 41976480, - "step": 1994 
- }, - { - "epoch": 0.23988456682498646, - "flos": 13827945288120.0, - "grad_norm": 2.2256772071566315, - "learning_rate": 3.5555820250556816e-06, - "loss": 1.0504, - "num_input_tokens_seen": 41993400, - "step": 1995 - }, - { - "epoch": 0.24000480971562557, - "flos": 14383526618760.0, - "grad_norm": 2.83480047125949, - "learning_rate": 3.5550923054939278e-06, - "loss": 0.9185, - "num_input_tokens_seen": 42012575, - "step": 1996 - }, - { - "epoch": 0.24012505260626466, - "flos": 18103469689440.0, - "grad_norm": 2.345328346315814, - "learning_rate": 3.5546023500259083e-06, - "loss": 0.9591, - "num_input_tokens_seen": 42033390, - "step": 1997 - }, - { - "epoch": 0.24024529549690374, - "flos": 10995536398560.0, - "grad_norm": 3.2033014016308146, - "learning_rate": 3.5541121587259477e-06, - "loss": 1.0393, - "num_input_tokens_seen": 42050945, - "step": 1998 - }, - { - "epoch": 0.24036553838754285, - "flos": 40876910438040.0, - "grad_norm": 0.8117570047478695, - "learning_rate": 3.553621731668408e-06, - "loss": 0.8209, - "num_input_tokens_seen": 42113875, - "step": 1999 - }, - { - "epoch": 0.24048578127818193, - "flos": 17763514171800.0, - "grad_norm": 2.4675511213609216, - "learning_rate": 3.553131068927688e-06, - "loss": 1.0615, - "num_input_tokens_seen": 42132000, - "step": 2000 - }, - { - "epoch": 0.24060602416882101, - "flos": 16585983250440.0, - "grad_norm": 2.8374629686266633, - "learning_rate": 3.552640170578219e-06, - "loss": 1.0339, - "num_input_tokens_seen": 42151970, - "step": 2001 - }, - { - "epoch": 0.2407262670594601, - "flos": 10004558583240.0, - "grad_norm": 3.260449410336072, - "learning_rate": 3.5521490366944703e-06, - "loss": 1.0107, - "num_input_tokens_seen": 42169340, - "step": 2002 - }, - { - "epoch": 0.2408465099500992, - "flos": 9638013945000.0, - "grad_norm": 4.486984928929604, - "learning_rate": 3.5516576673509474e-06, - "loss": 1.0309, - "num_input_tokens_seen": 42187060, - "step": 2003 - }, - { - "epoch": 0.2409667528407383, - "flos": 22275795749040.0, - "grad_norm": 3.132626040420186, - "learning_rate": 3.5511660626221896e-06, - "loss": 1.0861, - "num_input_tokens_seen": 42207420, - "step": 2004 - }, - { - "epoch": 0.24108699573137737, - "flos": 15773157273720.0, - "grad_norm": 3.4041764133520838, - "learning_rate": 3.5506742225827744e-06, - "loss": 1.0934, - "num_input_tokens_seen": 42223995, - "step": 2005 - }, - { - "epoch": 0.24120723862201648, - "flos": 18578118519000.0, - "grad_norm": 3.024640772904238, - "learning_rate": 3.5501821473073116e-06, - "loss": 1.1269, - "num_input_tokens_seen": 42240300, - "step": 2006 - }, - { - "epoch": 0.24132748151265557, - "flos": 13203941372880.0, - "grad_norm": 7.496465740346674, - "learning_rate": 3.54968983687045e-06, - "loss": 1.0935, - "num_input_tokens_seen": 42256890, - "step": 2007 - }, - { - "epoch": 0.24144772440329465, - "flos": 10791071393400.0, - "grad_norm": 3.497186756299277, - "learning_rate": 3.549197291346872e-06, - "loss": 1.1147, - "num_input_tokens_seen": 42273135, - "step": 2008 - }, - { - "epoch": 0.24156796729393373, - "flos": 17084277690840.0, - "grad_norm": 4.669952711564333, - "learning_rate": 3.548704510811297e-06, - "loss": 1.0274, - "num_input_tokens_seen": 42292050, - "step": 2009 - }, - { - "epoch": 0.24168821018457284, - "flos": 18710358906120.0, - "grad_norm": 3.9635568118046236, - "learning_rate": 3.5482114953384787e-06, - "loss": 0.9689, - "num_input_tokens_seen": 42311000, - "step": 2010 - }, - { - "epoch": 0.24180845307521193, - "flos": 12915171095040.0, - "grad_norm": 3.3084458787609674, 
- "learning_rate": 3.5477182450032077e-06, - "loss": 1.0663, - "num_input_tokens_seen": 42329320, - "step": 2011 - }, - { - "epoch": 0.241928695965851, - "flos": 14515061790000.0, - "grad_norm": 3.502449677001684, - "learning_rate": 3.5472247598803097e-06, - "loss": 1.0552, - "num_input_tokens_seen": 42348385, - "step": 2012 - }, - { - "epoch": 0.24204893885649012, - "flos": 18185414022000.0, - "grad_norm": 3.9161548009479645, - "learning_rate": 3.546731040044645e-06, - "loss": 1.081, - "num_input_tokens_seen": 42363275, - "step": 2013 - }, - { - "epoch": 0.2421691817471292, - "flos": 21852792082680.0, - "grad_norm": 2.3075729669377085, - "learning_rate": 3.546237085571112e-06, - "loss": 0.9839, - "num_input_tokens_seen": 42381430, - "step": 2014 - }, - { - "epoch": 0.24228942463776829, - "flos": 15590007600840.0, - "grad_norm": 2.398287193732862, - "learning_rate": 3.5457428965346425e-06, - "loss": 0.9436, - "num_input_tokens_seen": 42400090, - "step": 2015 - }, - { - "epoch": 0.2424096675284074, - "flos": 24242445713160.0, - "grad_norm": 2.7836916203708695, - "learning_rate": 3.545248473010205e-06, - "loss": 0.98, - "num_input_tokens_seen": 42422615, - "step": 2016 - }, - { - "epoch": 0.24252991041904648, - "flos": 15380483438280.0, - "grad_norm": 5.975439666788192, - "learning_rate": 3.544753815072802e-06, - "loss": 1.0962, - "num_input_tokens_seen": 42440990, - "step": 2017 - }, - { - "epoch": 0.24265015330968556, - "flos": 15535878851280.0, - "grad_norm": 2.984928738815328, - "learning_rate": 3.544258922797474e-06, - "loss": 1.1204, - "num_input_tokens_seen": 42458830, - "step": 2018 - }, - { - "epoch": 0.24277039620032465, - "flos": 18236844554280.0, - "grad_norm": 2.0970809403326323, - "learning_rate": 3.543763796259295e-06, - "loss": 1.0114, - "num_input_tokens_seen": 42478505, - "step": 2019 - }, - { - "epoch": 0.24289063909096376, - "flos": 18710174936760.0, - "grad_norm": 2.326957768462349, - "learning_rate": 3.5432684355333754e-06, - "loss": 1.1362, - "num_input_tokens_seen": 42496880, - "step": 2020 - }, - { - "epoch": 0.24301088198160284, - "flos": 17838467668680.0, - "grad_norm": 3.2728977532248034, - "learning_rate": 3.5427728406948613e-06, - "loss": 0.9867, - "num_input_tokens_seen": 42515715, - "step": 2021 - }, - { - "epoch": 0.24313112487224192, - "flos": 50061805516680.0, - "grad_norm": 0.7566075921910222, - "learning_rate": 3.542277011818934e-06, - "loss": 0.8348, - "num_input_tokens_seen": 42579270, - "step": 2022 - }, - { - "epoch": 0.24325136776288103, - "flos": 29041811182440.0, - "grad_norm": 2.5696459430580236, - "learning_rate": 3.5417809489808104e-06, - "loss": 0.956, - "num_input_tokens_seen": 42600600, - "step": 2023 - }, - { - "epoch": 0.24337161065352012, - "flos": 17818930706880.0, - "grad_norm": 2.2054823580087954, - "learning_rate": 3.5412846522557422e-06, - "loss": 0.951, - "num_input_tokens_seen": 42621210, - "step": 2024 - }, - { - "epoch": 0.2434918535441592, - "flos": 13229947923840.0, - "grad_norm": 3.874492488974413, - "learning_rate": 3.540788121719018e-06, - "loss": 0.9761, - "num_input_tokens_seen": 42639350, - "step": 2025 - }, - { - "epoch": 0.24361209643479828, - "flos": 17005890099240.0, - "grad_norm": 2.6927410741078504, - "learning_rate": 3.5402913574459604e-06, - "loss": 1.0502, - "num_input_tokens_seen": 42658975, - "step": 2026 - }, - { - "epoch": 0.2437323393254374, - "flos": 20414030512440.0, - "grad_norm": 2.294628056324187, - "learning_rate": 3.5397943595119297e-06, - "loss": 1.0836, - "num_input_tokens_seen": 42680115, - "step": 
2027 - }, - { - "epoch": 0.24385258221607647, - "flos": 16743708942000.0, - "grad_norm": 4.243301048745328, - "learning_rate": 3.5392971279923177e-06, - "loss": 0.9919, - "num_input_tokens_seen": 42698055, - "step": 2028 - }, - { - "epoch": 0.24397282510671556, - "flos": 18026829806760.0, - "grad_norm": 5.025533520668069, - "learning_rate": 3.5387996629625557e-06, - "loss": 1.0556, - "num_input_tokens_seen": 42715365, - "step": 2029 - }, - { - "epoch": 0.24409306799735467, - "flos": 46673355373080.0, - "grad_norm": 0.86257049161914, - "learning_rate": 3.5383019644981083e-06, - "loss": 0.8295, - "num_input_tokens_seen": 42778780, - "step": 2030 - }, - { - "epoch": 0.24421331088799375, - "flos": 13859501581440.0, - "grad_norm": 4.342943953779388, - "learning_rate": 3.5378040326744763e-06, - "loss": 0.9533, - "num_input_tokens_seen": 42797985, - "step": 2031 - }, - { - "epoch": 0.24433355377863283, - "flos": 14960024681400.0, - "grad_norm": 2.9321532592270314, - "learning_rate": 3.5373058675671946e-06, - "loss": 1.0902, - "num_input_tokens_seen": 42815710, - "step": 2032 - }, - { - "epoch": 0.24445379666927192, - "flos": 16087320871320.0, - "grad_norm": 3.3382633150718344, - "learning_rate": 3.536807469251836e-06, - "loss": 0.9497, - "num_input_tokens_seen": 42834585, - "step": 2033 - }, - { - "epoch": 0.24457403955991103, - "flos": 15091651837320.0, - "grad_norm": 3.923541926821463, - "learning_rate": 3.5363088378040055e-06, - "loss": 1.0503, - "num_input_tokens_seen": 42853195, - "step": 2034 - }, - { - "epoch": 0.2446942824505501, - "flos": 47972579104920.0, - "grad_norm": 0.7959606310175857, - "learning_rate": 3.5358099732993463e-06, - "loss": 0.9135, - "num_input_tokens_seen": 42912025, - "step": 2035 - }, - { - "epoch": 0.2448145253411892, - "flos": 14487430176360.0, - "grad_norm": 2.3351637155126985, - "learning_rate": 3.535310875813535e-06, - "loss": 1.112, - "num_input_tokens_seen": 42930140, - "step": 2036 - }, - { - "epoch": 0.2449347682318283, - "flos": 20520019056120.0, - "grad_norm": 2.6462000394996315, - "learning_rate": 3.5348115454222843e-06, - "loss": 1.0472, - "num_input_tokens_seen": 42952445, - "step": 2037 - }, - { - "epoch": 0.2450550111224674, - "flos": 16009730480280.0, - "grad_norm": 2.534971915245608, - "learning_rate": 3.5343119822013425e-06, - "loss": 1.0772, - "num_input_tokens_seen": 42971275, - "step": 2038 - }, - { - "epoch": 0.24517525401310647, - "flos": 15455958181680.0, - "grad_norm": 2.272168411554546, - "learning_rate": 3.533812186226493e-06, - "loss": 1.0081, - "num_input_tokens_seen": 42991705, - "step": 2039 - }, - { - "epoch": 0.24529549690374555, - "flos": 17816293812720.0, - "grad_norm": 2.480836855990833, - "learning_rate": 3.5333121575735545e-06, - "loss": 0.9933, - "num_input_tokens_seen": 43011065, - "step": 2040 - }, - { - "epoch": 0.24541573979438466, - "flos": 22905073452600.0, - "grad_norm": 5.464720543978374, - "learning_rate": 3.532811896318381e-06, - "loss": 0.9834, - "num_input_tokens_seen": 43032855, - "step": 2041 - }, - { - "epoch": 0.24553598268502375, - "flos": 22354520617800.0, - "grad_norm": 3.025703783370836, - "learning_rate": 3.5323114025368615e-06, - "loss": 1.0256, - "num_input_tokens_seen": 43047640, - "step": 2042 - }, - { - "epoch": 0.24565622557566283, - "flos": 9899796501960.0, - "grad_norm": 3.191422386548862, - "learning_rate": 3.53181067630492e-06, - "loss": 1.044, - "num_input_tokens_seen": 43064830, - "step": 2043 - }, - { - "epoch": 0.24577646846630194, - "flos": 11735309895120.0, - "grad_norm": 5.276354444997801, 
- "learning_rate": 3.5313097176985175e-06, - "loss": 0.9846, - "num_input_tokens_seen": 43082860, - "step": 2044 - }, - { - "epoch": 0.24589671135694102, - "flos": 13335108605400.0, - "grad_norm": 2.6104433533770055, - "learning_rate": 3.5308085267936482e-06, - "loss": 1.0327, - "num_input_tokens_seen": 43100295, - "step": 2045 - }, - { - "epoch": 0.2460169542475801, - "flos": 14148026566800.0, - "grad_norm": 3.046102259531827, - "learning_rate": 3.530307103666342e-06, - "loss": 1.1322, - "num_input_tokens_seen": 43119095, - "step": 2046 - }, - { - "epoch": 0.24613719713821922, - "flos": 17190020942040.0, - "grad_norm": 2.517488441024049, - "learning_rate": 3.5298054483926658e-06, - "loss": 1.0255, - "num_input_tokens_seen": 43139510, - "step": 2047 - }, - { - "epoch": 0.2462574400288583, - "flos": 21537003422400.0, - "grad_norm": 3.1649705125540923, - "learning_rate": 3.5293035610487187e-06, - "loss": 1.0585, - "num_input_tokens_seen": 43158595, - "step": 2048 - }, - { - "epoch": 0.24637768291949738, - "flos": 49372113443760.0, - "grad_norm": 0.7323032537405493, - "learning_rate": 3.5288014417106374e-06, - "loss": 0.8747, - "num_input_tokens_seen": 43224335, - "step": 2049 - }, - { - "epoch": 0.24649792581013646, - "flos": 24529314974280.0, - "grad_norm": 3.4390466158481425, - "learning_rate": 3.528299090454593e-06, - "loss": 0.9862, - "num_input_tokens_seen": 43244590, - "step": 2050 - }, - { - "epoch": 0.24661816870077558, - "flos": 13962178676640.0, - "grad_norm": 3.1586536281966597, - "learning_rate": 3.527796507356792e-06, - "loss": 1.0556, - "num_input_tokens_seen": 43258200, - "step": 2051 - }, - { - "epoch": 0.24673841159141466, - "flos": 14192588909640.0, - "grad_norm": 3.7709359985763333, - "learning_rate": 3.527293692493475e-06, - "loss": 1.1117, - "num_input_tokens_seen": 43273785, - "step": 2052 - }, - { - "epoch": 0.24685865448205374, - "flos": 15374657741880.0, - "grad_norm": 5.324231109268945, - "learning_rate": 3.52679064594092e-06, - "loss": 0.9649, - "num_input_tokens_seen": 43290845, - "step": 2053 - }, - { - "epoch": 0.24697889737269285, - "flos": 12725367863640.0, - "grad_norm": 3.102355394459228, - "learning_rate": 3.5262873677754375e-06, - "loss": 0.9632, - "num_input_tokens_seen": 43308570, - "step": 2054 - }, - { - "epoch": 0.24709914026333193, - "flos": 19470558549720.0, - "grad_norm": 3.0181392012258272, - "learning_rate": 3.5257838580733745e-06, - "loss": 1.0319, - "num_input_tokens_seen": 43327895, - "step": 2055 - }, - { - "epoch": 0.24721938315397102, - "flos": 13670679519960.0, - "grad_norm": 3.6788033559120454, - "learning_rate": 3.5252801169111138e-06, - "loss": 1.0942, - "num_input_tokens_seen": 43345280, - "step": 2056 - }, - { - "epoch": 0.2473396260446101, - "flos": 16481773077240.0, - "grad_norm": 2.628054249794334, - "learning_rate": 3.524776144365072e-06, - "loss": 1.0155, - "num_input_tokens_seen": 43363455, - "step": 2057 - }, - { - "epoch": 0.2474598689352492, - "flos": 15013233584160.0, - "grad_norm": 2.6891344999508084, - "learning_rate": 3.5242719405117016e-06, - "loss": 1.028, - "num_input_tokens_seen": 43382980, - "step": 2058 - }, - { - "epoch": 0.2475801118258883, - "flos": 15376160158320.0, - "grad_norm": 4.122229136769361, - "learning_rate": 3.5237675054274893e-06, - "loss": 0.9617, - "num_input_tokens_seen": 43401900, - "step": 2059 - }, - { - "epoch": 0.24770035471652738, - "flos": 16114247269080.0, - "grad_norm": 3.8510344416074465, - "learning_rate": 3.5232628391889584e-06, - "loss": 1.0257, - "num_input_tokens_seen": 43419910, - 
"step": 2060 - }, - { - "epoch": 0.2478205976071665, - "flos": 15747242707440.0, - "grad_norm": 6.452580833120883, - "learning_rate": 3.522757941872666e-06, - "loss": 0.869, - "num_input_tokens_seen": 43437785, - "step": 2061 - }, - { - "epoch": 0.24794084049780557, - "flos": 17766212389080.0, - "grad_norm": 2.163482037308569, - "learning_rate": 3.5222528135552042e-06, - "loss": 1.054, - "num_input_tokens_seen": 43458965, - "step": 2062 - }, - { - "epoch": 0.24806108338844465, - "flos": 12967797428160.0, - "grad_norm": 3.109859102988734, - "learning_rate": 3.521747454313201e-06, - "loss": 1.0385, - "num_input_tokens_seen": 43477365, - "step": 2063 - }, - { - "epoch": 0.24818132627908374, - "flos": 13676351908560.0, - "grad_norm": 10.351654259616891, - "learning_rate": 3.521241864223319e-06, - "loss": 0.9005, - "num_input_tokens_seen": 43496045, - "step": 2064 - }, - { - "epoch": 0.24830156916972285, - "flos": 50336195522880.0, - "grad_norm": 0.8027927349622862, - "learning_rate": 3.5207360433622552e-06, - "loss": 0.8591, - "num_input_tokens_seen": 43557765, - "step": 2065 - }, - { - "epoch": 0.24842181206036193, - "flos": 28860133264440.0, - "grad_norm": 2.1802678036465326, - "learning_rate": 3.5202299918067437e-06, - "loss": 0.9743, - "num_input_tokens_seen": 43581080, - "step": 2066 - }, - { - "epoch": 0.248542054951001, - "flos": 14251163585400.0, - "grad_norm": 6.774868683052641, - "learning_rate": 3.519723709633551e-06, - "loss": 0.9256, - "num_input_tokens_seen": 43599560, - "step": 2067 - }, - { - "epoch": 0.24866229784164012, - "flos": 16717579744800.0, - "grad_norm": 3.6270447587458583, - "learning_rate": 3.519217196919479e-06, - "loss": 1.0528, - "num_input_tokens_seen": 43618265, - "step": 2068 - }, - { - "epoch": 0.2487825407322792, - "flos": 14095185602760.0, - "grad_norm": 4.064217307175674, - "learning_rate": 3.518710453741367e-06, - "loss": 0.9574, - "num_input_tokens_seen": 43637185, - "step": 2069 - }, - { - "epoch": 0.2489027836229183, - "flos": 15642143349000.0, - "grad_norm": 3.878912888259048, - "learning_rate": 3.518203480176086e-06, - "loss": 0.8957, - "num_input_tokens_seen": 43655835, - "step": 2070 - }, - { - "epoch": 0.2490230265135574, - "flos": 16559670083880.0, - "grad_norm": 2.626097676699852, - "learning_rate": 3.517696276300545e-06, - "loss": 1.0205, - "num_input_tokens_seen": 43677095, - "step": 2071 - }, - { - "epoch": 0.24914326940419648, - "flos": 14067400681320.0, - "grad_norm": 4.795969327737881, - "learning_rate": 3.517188842191685e-06, - "loss": 0.9245, - "num_input_tokens_seen": 43694965, - "step": 2072 - }, - { - "epoch": 0.24926351229483557, - "flos": 14356814851920.0, - "grad_norm": 3.2573058834483395, - "learning_rate": 3.5166811779264837e-06, - "loss": 0.9661, - "num_input_tokens_seen": 43715005, - "step": 2073 - }, - { - "epoch": 0.24938375518547465, - "flos": 16559854053240.0, - "grad_norm": 5.122356942796359, - "learning_rate": 3.5161732835819545e-06, - "loss": 1.0047, - "num_input_tokens_seen": 43734035, - "step": 2074 - }, - { - "epoch": 0.24950399807611376, - "flos": 12260162794560.0, - "grad_norm": 3.981952723791034, - "learning_rate": 3.515665159235143e-06, - "loss": 1.0549, - "num_input_tokens_seen": 43752640, - "step": 2075 - }, - { - "epoch": 0.24962424096675284, - "flos": 13491178572720.0, - "grad_norm": 4.9061936296274, - "learning_rate": 3.5151568049631318e-06, - "loss": 0.9731, - "num_input_tokens_seen": 43771075, - "step": 2076 - }, - { - "epoch": 0.24974448385739192, - "flos": 23821251078840.0, - "grad_norm": 
3.8233700240042294, - "learning_rate": 3.5146482208430385e-06, - "loss": 1.0284, - "num_input_tokens_seen": 43792625, - "step": 2077 - }, - { - "epoch": 0.24986472674803104, - "flos": 21384060934200.0, - "grad_norm": 3.9495459476690513, - "learning_rate": 3.514139406952014e-06, - "loss": 0.9069, - "num_input_tokens_seen": 43814370, - "step": 2078 - }, - { - "epoch": 0.24998496963867012, - "flos": 18944571172560.0, - "grad_norm": 3.8619803464874436, - "learning_rate": 3.5136303633672454e-06, - "loss": 1.0684, - "num_input_tokens_seen": 43834220, - "step": 2079 - }, - { - "epoch": 0.25010521252930923, - "flos": 16746836421120.0, - "grad_norm": 2.7264250362688416, - "learning_rate": 3.5131210901659544e-06, - "loss": 0.9804, - "num_input_tokens_seen": 43855695, - "step": 2080 - }, - { - "epoch": 0.2502254554199483, - "flos": 16665321350400.0, - "grad_norm": 3.6716095524064465, - "learning_rate": 3.5126115874253967e-06, - "loss": 1.0409, - "num_input_tokens_seen": 43874970, - "step": 2081 - }, - { - "epoch": 0.2503456983105874, - "flos": 20488186808760.0, - "grad_norm": 3.2947914807760497, - "learning_rate": 3.5121018552228644e-06, - "loss": 1.0335, - "num_input_tokens_seen": 43893195, - "step": 2082 - }, - { - "epoch": 0.2504659412012265, - "flos": 13308304853880.0, - "grad_norm": 5.240009793570882, - "learning_rate": 3.5115918936356827e-06, - "loss": 1.0032, - "num_input_tokens_seen": 43909670, - "step": 2083 - }, - { - "epoch": 0.25058618409186556, - "flos": 11945110011720.0, - "grad_norm": 5.134657529648627, - "learning_rate": 3.5110817027412123e-06, - "loss": 1.0068, - "num_input_tokens_seen": 43928480, - "step": 2084 - }, - { - "epoch": 0.25070642698250467, - "flos": 17372649368400.0, - "grad_norm": 4.273556221214236, - "learning_rate": 3.5105712826168493e-06, - "loss": 0.9083, - "num_input_tokens_seen": 43947850, - "step": 2085 - }, - { - "epoch": 0.2508266698731437, - "flos": 14378927384760.0, - "grad_norm": 3.134540794604848, - "learning_rate": 3.5100606333400235e-06, - "loss": 0.9351, - "num_input_tokens_seen": 43964705, - "step": 2086 - }, - { - "epoch": 0.25094691276378284, - "flos": 13649824111080.0, - "grad_norm": 12.491963358878792, - "learning_rate": 3.5095497549882006e-06, - "loss": 1.002, - "num_input_tokens_seen": 43982870, - "step": 2087 - }, - { - "epoch": 0.25106715565442195, - "flos": 19182094887480.0, - "grad_norm": 4.428857042621662, - "learning_rate": 3.50903864763888e-06, - "loss": 0.9482, - "num_input_tokens_seen": 44003380, - "step": 2088 - }, - { - "epoch": 0.251187398545061, - "flos": 34573499389200.0, - "grad_norm": 3.93525668226794, - "learning_rate": 3.5085273113695965e-06, - "loss": 0.9858, - "num_input_tokens_seen": 44027670, - "step": 2089 - }, - { - "epoch": 0.2513076414357001, - "flos": 19234445266560.0, - "grad_norm": 7.83126189527042, - "learning_rate": 3.508015746257919e-06, - "loss": 1.0153, - "num_input_tokens_seen": 44046430, - "step": 2090 - }, - { - "epoch": 0.2514278843263392, - "flos": 13806629955840.0, - "grad_norm": 5.168604620626851, - "learning_rate": 3.5075039523814518e-06, - "loss": 1.0584, - "num_input_tokens_seen": 44065340, - "step": 2091 - }, - { - "epoch": 0.2515481272169783, - "flos": 11938885715040.0, - "grad_norm": 5.10978543094214, - "learning_rate": 3.506991929817834e-06, - "loss": 1.0407, - "num_input_tokens_seen": 44081780, - "step": 2092 - }, - { - "epoch": 0.2516683701076174, - "flos": 16874569558920.0, - "grad_norm": 6.150860436267865, - "learning_rate": 3.506479678644738e-06, - "loss": 1.047, - "num_input_tokens_seen": 
44101895, - "step": 2093 - }, - { - "epoch": 0.2517886129982565, - "flos": 19679868081360.0, - "grad_norm": 4.55754279907018, - "learning_rate": 3.505967198939873e-06, - "loss": 0.9604, - "num_input_tokens_seen": 44118655, - "step": 2094 - }, - { - "epoch": 0.25190885588889556, - "flos": 27203108987040.0, - "grad_norm": 4.857186072822464, - "learning_rate": 3.5054544907809813e-06, - "loss": 1.0016, - "num_input_tokens_seen": 44138875, - "step": 2095 - }, - { - "epoch": 0.25202909877953467, - "flos": 15823024066440.0, - "grad_norm": 2.7536879865578876, - "learning_rate": 3.50494155424584e-06, - "loss": 1.0216, - "num_input_tokens_seen": 44157500, - "step": 2096 - }, - { - "epoch": 0.2521493416701738, - "flos": 15458196475560.0, - "grad_norm": 4.381569643645329, - "learning_rate": 3.504428389412262e-06, - "loss": 1.0617, - "num_input_tokens_seen": 44178030, - "step": 2097 - }, - { - "epoch": 0.25226958456081283, - "flos": 19759666104720.0, - "grad_norm": 10.016173063568514, - "learning_rate": 3.5039149963580927e-06, - "loss": 0.9566, - "num_input_tokens_seen": 44197770, - "step": 2098 - }, - { - "epoch": 0.25238982745145194, - "flos": 21904375922760.0, - "grad_norm": 3.2929835200402744, - "learning_rate": 3.503401375161215e-06, - "loss": 0.9171, - "num_input_tokens_seen": 44217235, - "step": 2099 - }, - { - "epoch": 0.252510070342091, - "flos": 14383526618760.0, - "grad_norm": 4.602220264825098, - "learning_rate": 3.502887525899544e-06, - "loss": 1.0756, - "num_input_tokens_seen": 44235935, - "step": 2100 - }, - { - "epoch": 0.2526303132327301, - "flos": 16166628309720.0, - "grad_norm": 2.786110049909206, - "learning_rate": 3.50237344865103e-06, - "loss": 1.0593, - "num_input_tokens_seen": 44256655, - "step": 2101 - }, - { - "epoch": 0.2527505561233692, - "flos": 21568283761680.0, - "grad_norm": 4.007289919329346, - "learning_rate": 3.501859143493658e-06, - "loss": 0.9882, - "num_input_tokens_seen": 44277005, - "step": 2102 - }, - { - "epoch": 0.2528707990140083, - "flos": 41859762939960.0, - "grad_norm": 0.8555770526768288, - "learning_rate": 3.5013446105054488e-06, - "loss": 0.878, - "num_input_tokens_seen": 44329645, - "step": 2103 - }, - { - "epoch": 0.2529910419046474, - "flos": 17530559029320.0, - "grad_norm": 10.462125863116137, - "learning_rate": 3.5008298497644555e-06, - "loss": 0.9856, - "num_input_tokens_seen": 44348410, - "step": 2104 - }, - { - "epoch": 0.2531112847952865, - "flos": 16953417073920.0, - "grad_norm": 7.419245268078175, - "learning_rate": 3.500314861348767e-06, - "loss": 1.1149, - "num_input_tokens_seen": 44368765, - "step": 2105 - }, - { - "epoch": 0.25323152768592555, - "flos": 11420809020360.0, - "grad_norm": 2.4956510864698114, - "learning_rate": 3.499799645336507e-06, - "loss": 1.0043, - "num_input_tokens_seen": 44385380, - "step": 2106 - }, - { - "epoch": 0.25335177057656466, - "flos": 20233088471880.0, - "grad_norm": 2.34669756872609, - "learning_rate": 3.4992842018058336e-06, - "loss": 1.1011, - "num_input_tokens_seen": 44408000, - "step": 2107 - }, - { - "epoch": 0.25347201346720377, - "flos": 13328485708440.0, - "grad_norm": 4.126337470346972, - "learning_rate": 3.4987685308349384e-06, - "loss": 1.0991, - "num_input_tokens_seen": 44425450, - "step": 2108 - }, - { - "epoch": 0.2535922563578428, - "flos": 11184849045000.0, - "grad_norm": 3.6432627016732297, - "learning_rate": 3.4982526325020497e-06, - "loss": 0.8449, - "num_input_tokens_seen": 44442140, - "step": 2109 - }, - { - "epoch": 0.25371249924848194, - "flos": 11545659971520.0, - "grad_norm": 
3.231112691592332, - "learning_rate": 3.4977365068854273e-06, - "loss": 1.0486, - "num_input_tokens_seen": 44457480, - "step": 2110 - }, - { - "epoch": 0.25383274213912105, - "flos": 15458073829320.0, - "grad_norm": 5.457145017995813, - "learning_rate": 3.4972201540633676e-06, - "loss": 0.9462, - "num_input_tokens_seen": 44476555, - "step": 2111 - }, - { - "epoch": 0.2539529850297601, - "flos": 15195953995200.0, - "grad_norm": 3.838435523102433, - "learning_rate": 3.4967035741142008e-06, - "loss": 1.0777, - "num_input_tokens_seen": 44495095, - "step": 2112 - }, - { - "epoch": 0.2540732279203992, - "flos": 17949730000680.0, - "grad_norm": 4.19370163549969, - "learning_rate": 3.4961867671162917e-06, - "loss": 1.0488, - "num_input_tokens_seen": 44514745, - "step": 2113 - }, - { - "epoch": 0.2541934708110383, - "flos": 13780623404880.0, - "grad_norm": 3.4087911104102027, - "learning_rate": 3.4956697331480402e-06, - "loss": 0.9827, - "num_input_tokens_seen": 44533035, - "step": 2114 - }, - { - "epoch": 0.2543137137016774, - "flos": 17030240925960.0, - "grad_norm": 2.950459725717098, - "learning_rate": 3.495152472287879e-06, - "loss": 1.0296, - "num_input_tokens_seen": 44553465, - "step": 2115 - }, - { - "epoch": 0.2544339565923165, - "flos": 18211911157920.0, - "grad_norm": 3.0301982195385873, - "learning_rate": 3.4946349846142766e-06, - "loss": 0.96, - "num_input_tokens_seen": 44572325, - "step": 2116 - }, - { - "epoch": 0.25455419948295555, - "flos": 15406336681440.0, - "grad_norm": 3.318470594350851, - "learning_rate": 3.4941172702057353e-06, - "loss": 0.9862, - "num_input_tokens_seen": 44592105, - "step": 2117 - }, - { - "epoch": 0.25467444237359466, - "flos": 18684168385800.0, - "grad_norm": 3.8840265045597406, - "learning_rate": 3.4935993291407924e-06, - "loss": 1.0275, - "num_input_tokens_seen": 44610650, - "step": 2118 - }, - { - "epoch": 0.25479468526423377, - "flos": 19208285407800.0, - "grad_norm": 5.1230895040282265, - "learning_rate": 3.4930811614980183e-06, - "loss": 0.9372, - "num_input_tokens_seen": 44632065, - "step": 2119 - }, - { - "epoch": 0.2549149281548728, - "flos": 16689948131160.0, - "grad_norm": 6.616723126507662, - "learning_rate": 3.4925627673560198e-06, - "loss": 1.0167, - "num_input_tokens_seen": 44652445, - "step": 2120 - }, - { - "epoch": 0.25503517104551193, - "flos": 18369330233880.0, - "grad_norm": 4.559451822492575, - "learning_rate": 3.4920441467934357e-06, - "loss": 1.1043, - "num_input_tokens_seen": 44672680, - "step": 2121 - }, - { - "epoch": 0.25515541393615104, - "flos": 18966990321000.0, - "grad_norm": 2.920072111519613, - "learning_rate": 3.491525299888941e-06, - "loss": 1.0438, - "num_input_tokens_seen": 44691245, - "step": 2122 - }, - { - "epoch": 0.2552756568267901, - "flos": 47225993193960.0, - "grad_norm": 0.9751076504381887, - "learning_rate": 3.491006226721244e-06, - "loss": 0.9171, - "num_input_tokens_seen": 44755175, - "step": 2123 - }, - { - "epoch": 0.2553958997174292, - "flos": 12705217670640.0, - "grad_norm": 2.9409141990185246, - "learning_rate": 3.4904869273690882e-06, - "loss": 0.9956, - "num_input_tokens_seen": 44772785, - "step": 2124 - }, - { - "epoch": 0.2555161426080683, - "flos": 16612511047920.0, - "grad_norm": 6.473003076574713, - "learning_rate": 3.489967401911251e-06, - "loss": 1.1169, - "num_input_tokens_seen": 44791805, - "step": 2125 - }, - { - "epoch": 0.2556363854987074, - "flos": 29014976769360.0, - "grad_norm": 3.309978893247308, - "learning_rate": 3.4894476504265428e-06, - "loss": 0.9261, - "num_input_tokens_seen": 
44815765, - "step": 2126 - }, - { - "epoch": 0.2557566283893465, - "flos": 48706802859120.0, - "grad_norm": 0.7983404410357093, - "learning_rate": 3.4889276729938104e-06, - "loss": 0.8016, - "num_input_tokens_seen": 44874015, - "step": 2127 - }, - { - "epoch": 0.2558768712799856, - "flos": 16086002424240.0, - "grad_norm": 4.772021394194027, - "learning_rate": 3.488407469691934e-06, - "loss": 1.021, - "num_input_tokens_seen": 44894430, - "step": 2128 - }, - { - "epoch": 0.25599711417062465, - "flos": 18789114436440.0, - "grad_norm": 3.363710382710932, - "learning_rate": 3.487887040599828e-06, - "loss": 1.0255, - "num_input_tokens_seen": 44913950, - "step": 2129 - }, - { - "epoch": 0.25611735706126376, - "flos": 16241183206320.0, - "grad_norm": 4.115464257046268, - "learning_rate": 3.4873663857964407e-06, - "loss": 0.9842, - "num_input_tokens_seen": 44930885, - "step": 2130 - }, - { - "epoch": 0.2562375999519028, - "flos": 16612572371040.0, - "grad_norm": 3.4375883611246345, - "learning_rate": 3.4868455053607556e-06, - "loss": 0.9027, - "num_input_tokens_seen": 44950220, - "step": 2131 - }, - { - "epoch": 0.2563578428425419, - "flos": 16245383840040.0, - "grad_norm": 5.769578854746056, - "learning_rate": 3.486324399371789e-06, - "loss": 0.9283, - "num_input_tokens_seen": 44969240, - "step": 2132 - }, - { - "epoch": 0.25647808573318104, - "flos": 15380882038560.0, - "grad_norm": 3.031929490530226, - "learning_rate": 3.485803067908593e-06, - "loss": 1.006, - "num_input_tokens_seen": 44988470, - "step": 2133 - }, - { - "epoch": 0.2565983286238201, - "flos": 24060154563960.0, - "grad_norm": 4.86219396731365, - "learning_rate": 3.485281511050253e-06, - "loss": 1.0185, - "num_input_tokens_seen": 45010325, - "step": 2134 - }, - { - "epoch": 0.2567185715144592, - "flos": 11472576829800.0, - "grad_norm": 5.406273637841207, - "learning_rate": 3.484759728875889e-06, - "loss": 1.1219, - "num_input_tokens_seen": 45025410, - "step": 2135 - }, - { - "epoch": 0.2568388144050983, - "flos": 12365783399520.0, - "grad_norm": 3.950751636311917, - "learning_rate": 3.4842377214646543e-06, - "loss": 1.0365, - "num_input_tokens_seen": 45043425, - "step": 2136 - }, - { - "epoch": 0.25695905729573737, - "flos": 14830022588160.0, - "grad_norm": 3.984735330482026, - "learning_rate": 3.483715488895737e-06, - "loss": 0.9039, - "num_input_tokens_seen": 45063475, - "step": 2137 - }, - { - "epoch": 0.2570793001863765, - "flos": 17582296177200.0, - "grad_norm": 3.2423085385445125, - "learning_rate": 3.48319303124836e-06, - "loss": 1.011, - "num_input_tokens_seen": 45083575, - "step": 2138 - }, - { - "epoch": 0.2571995430770156, - "flos": 19155996351840.0, - "grad_norm": 3.784859439956074, - "learning_rate": 3.4826703486017798e-06, - "loss": 0.9012, - "num_input_tokens_seen": 45102920, - "step": 2139 - }, - { - "epoch": 0.25731978596765465, - "flos": 14043111177720.0, - "grad_norm": 3.0373387486234, - "learning_rate": 3.4821474410352867e-06, - "loss": 0.9925, - "num_input_tokens_seen": 45121300, - "step": 2140 - }, - { - "epoch": 0.25744002885829376, - "flos": 50535080124120.0, - "grad_norm": 0.92080370839538, - "learning_rate": 3.481624308628205e-06, - "loss": 0.901, - "num_input_tokens_seen": 45182390, - "step": 2141 - }, - { - "epoch": 0.25756027174893287, - "flos": 12781857553320.0, - "grad_norm": 4.873832562851129, - "learning_rate": 3.481100951459893e-06, - "loss": 1.2264, - "num_input_tokens_seen": 45195130, - "step": 2142 - }, - { - "epoch": 0.2576805146395719, - "flos": 16113879330360.0, - "grad_norm": 
2.970416113477543, - "learning_rate": 3.4805773696097453e-06, - "loss": 1.0113, - "num_input_tokens_seen": 45215740, - "step": 2143 - }, - { - "epoch": 0.25780075753021103, - "flos": 11656830318840.0, - "grad_norm": 2.8299714694253066, - "learning_rate": 3.4800535631571874e-06, - "loss": 1.0979, - "num_input_tokens_seen": 45230990, - "step": 2144 - }, - { - "epoch": 0.25792100042085014, - "flos": 16219009350360.0, - "grad_norm": 4.272442968712214, - "learning_rate": 3.4795295321816804e-06, - "loss": 0.9878, - "num_input_tokens_seen": 45249535, - "step": 2145 - }, - { - "epoch": 0.2580412433114892, - "flos": 13255862490120.0, - "grad_norm": 8.458401483972164, - "learning_rate": 3.47900527676272e-06, - "loss": 1.1428, - "num_input_tokens_seen": 45267590, - "step": 2146 - }, - { - "epoch": 0.2581614862021283, - "flos": 10083590067600.0, - "grad_norm": 6.984259482842948, - "learning_rate": 3.478480796979835e-06, - "loss": 1.1134, - "num_input_tokens_seen": 45285195, - "step": 2147 - }, - { - "epoch": 0.25828172909276736, - "flos": 21018497465880.0, - "grad_norm": 1.987938212422741, - "learning_rate": 3.4779560929125894e-06, - "loss": 1.0036, - "num_input_tokens_seen": 45306460, - "step": 2148 - }, - { - "epoch": 0.2584019719834065, - "flos": 48057313639440.0, - "grad_norm": 0.6971203949290368, - "learning_rate": 3.4774311646405783e-06, - "loss": 0.815, - "num_input_tokens_seen": 45376085, - "step": 2149 - }, - { - "epoch": 0.2585222148740456, - "flos": 16271697006600.0, - "grad_norm": 5.96305439608871, - "learning_rate": 3.476906012243435e-06, - "loss": 1.0609, - "num_input_tokens_seen": 45394715, - "step": 2150 - }, - { - "epoch": 0.25864245776468464, - "flos": 20594420644920.0, - "grad_norm": 3.001161191643991, - "learning_rate": 3.476380635800824e-06, - "loss": 1.0424, - "num_input_tokens_seen": 45415635, - "step": 2151 - }, - { - "epoch": 0.25876270065532375, - "flos": 10450042721160.0, - "grad_norm": 4.437362024015815, - "learning_rate": 3.475855035392444e-06, - "loss": 1.0763, - "num_input_tokens_seen": 45430675, - "step": 2152 - }, - { - "epoch": 0.25888294354596286, - "flos": 43274792462760.0, - "grad_norm": 3.340570667420943, - "learning_rate": 3.475329211098029e-06, - "loss": 0.9317, - "num_input_tokens_seen": 45453550, - "step": 2153 - }, - { - "epoch": 0.2590031864366019, - "flos": 19834037031960.0, - "grad_norm": 2.5708983892765227, - "learning_rate": 3.4748031629973453e-06, - "loss": 1.0486, - "num_input_tokens_seen": 45474000, - "step": 2154 - }, - { - "epoch": 0.25912342932724103, - "flos": 44687291093400.0, - "grad_norm": 0.9297869973674793, - "learning_rate": 3.4742768911701944e-06, - "loss": 0.8018, - "num_input_tokens_seen": 45536415, - "step": 2155 - }, - { - "epoch": 0.25924367221788014, - "flos": 8714048282520.0, - "grad_norm": 3.7841948084375314, - "learning_rate": 3.4737503956964113e-06, - "loss": 0.9196, - "num_input_tokens_seen": 45548440, - "step": 2156 - }, - { - "epoch": 0.2593639151085192, - "flos": 10293512830440.0, - "grad_norm": 2.7534648568588165, - "learning_rate": 3.473223676655865e-06, - "loss": 0.8865, - "num_input_tokens_seen": 45566160, - "step": 2157 - }, - { - "epoch": 0.2594841579991583, - "flos": 10764635580600.0, - "grad_norm": 3.085334042806497, - "learning_rate": 3.472696734128459e-06, - "loss": 1.014, - "num_input_tokens_seen": 45583745, - "step": 2158 - }, - { - "epoch": 0.2596044008897974, - "flos": 16796396598240.0, - "grad_norm": 2.547166304515928, - "learning_rate": 3.4721695681941286e-06, - "loss": 0.9813, - "num_input_tokens_seen": 
45602505, - "step": 2159 - }, - { - "epoch": 0.25972464378043647, - "flos": 9716432198160.0, - "grad_norm": 5.066364085213904, - "learning_rate": 3.471642178932845e-06, - "loss": 1.0555, - "num_input_tokens_seen": 45620870, - "step": 2160 - }, - { - "epoch": 0.2598448866710756, - "flos": 13885722763320.0, - "grad_norm": 3.5599520757214003, - "learning_rate": 3.471114566424613e-06, - "loss": 1.1274, - "num_input_tokens_seen": 45639050, - "step": 2161 - }, - { - "epoch": 0.25996512956171464, - "flos": 15380330130480.0, - "grad_norm": 24.699595286648297, - "learning_rate": 3.4705867307494715e-06, - "loss": 0.9854, - "num_input_tokens_seen": 45657840, - "step": 2162 - }, - { - "epoch": 0.26008537245235375, - "flos": 12915416387520.0, - "grad_norm": 3.593959238346614, - "learning_rate": 3.470058671987492e-06, - "loss": 1.0536, - "num_input_tokens_seen": 45675825, - "step": 2163 - }, - { - "epoch": 0.26020561534299286, - "flos": 17530773660240.0, - "grad_norm": 4.153320886543283, - "learning_rate": 3.4695303902187805e-06, - "loss": 1.0643, - "num_input_tokens_seen": 45695100, - "step": 2164 - }, - { - "epoch": 0.2603258582336319, - "flos": 18341269358400.0, - "grad_norm": 3.6137127592845855, - "learning_rate": 3.469001885523478e-06, - "loss": 1.0104, - "num_input_tokens_seen": 45715540, - "step": 2165 - }, - { - "epoch": 0.260446101124271, - "flos": 20492142150000.0, - "grad_norm": 2.532658408841438, - "learning_rate": 3.4684731579817568e-06, - "loss": 1.0393, - "num_input_tokens_seen": 45736250, - "step": 2166 - }, - { - "epoch": 0.26056634401491013, - "flos": 18264782783520.0, - "grad_norm": 2.2078591203458133, - "learning_rate": 3.4679442076738247e-06, - "loss": 0.9997, - "num_input_tokens_seen": 45755685, - "step": 2167 - }, - { - "epoch": 0.2606865869055492, - "flos": 19260819756240.0, - "grad_norm": 2.601786102674904, - "learning_rate": 3.4674150346799245e-06, - "loss": 1.0603, - "num_input_tokens_seen": 45775105, - "step": 2168 - }, - { - "epoch": 0.2608068297961883, - "flos": 12548258518080.0, - "grad_norm": 3.6462765814773803, - "learning_rate": 3.4668856390803295e-06, - "loss": 1.0094, - "num_input_tokens_seen": 45792705, - "step": 2169 - }, - { - "epoch": 0.2609270726868274, - "flos": 13151223055080.0, - "grad_norm": 2.7463202566224525, - "learning_rate": 3.4663560209553495e-06, - "loss": 1.1273, - "num_input_tokens_seen": 45810490, - "step": 2170 - }, - { - "epoch": 0.26104731557746647, - "flos": 15511466701440.0, - "grad_norm": 2.214166554527677, - "learning_rate": 3.4658261803853267e-06, - "loss": 1.0037, - "num_input_tokens_seen": 45828135, - "step": 2171 - }, - { - "epoch": 0.2611675584681056, - "flos": 15406489989240.0, - "grad_norm": 3.274850908918904, - "learning_rate": 3.4652961174506383e-06, - "loss": 1.037, - "num_input_tokens_seen": 45847725, - "step": 2172 - }, - { - "epoch": 0.2612878013587447, - "flos": 51470022625080.0, - "grad_norm": 1.0513649492773234, - "learning_rate": 3.464765832231694e-06, - "loss": 0.8373, - "num_input_tokens_seen": 45901610, - "step": 2173 - }, - { - "epoch": 0.26140804424938374, - "flos": 14355312435480.0, - "grad_norm": 3.136974129547951, - "learning_rate": 3.4642353248089373e-06, - "loss": 0.9334, - "num_input_tokens_seen": 45920090, - "step": 2174 - }, - { - "epoch": 0.26152828714002285, - "flos": 18185935268520.0, - "grad_norm": 2.731309665752746, - "learning_rate": 3.463704595262846e-06, - "loss": 1.0315, - "num_input_tokens_seen": 45940690, - "step": 2175 - }, - { - "epoch": 0.26164853003066196, - "flos": 18106321214520.0, - "grad_norm": 
2.288454567443054, - "learning_rate": 3.463173643673931e-06, - "loss": 0.9278, - "num_input_tokens_seen": 45962935, - "step": 2176 - }, - { - "epoch": 0.261768772921301, - "flos": 38592632120040.0, - "grad_norm": 0.9401980993343764, - "learning_rate": 3.4626424701227387e-06, - "loss": 0.8939, - "num_input_tokens_seen": 46017715, - "step": 2177 - }, - { - "epoch": 0.26188901581194013, - "flos": 50624996462280.0, - "grad_norm": 0.9901365273190742, - "learning_rate": 3.4621110746898452e-06, - "loss": 0.8498, - "num_input_tokens_seen": 46085295, - "step": 2178 - }, - { - "epoch": 0.2620092587025792, - "flos": 14960883205080.0, - "grad_norm": 2.206766460367251, - "learning_rate": 3.4615794574558654e-06, - "loss": 0.9683, - "num_input_tokens_seen": 46104025, - "step": 2179 - }, - { - "epoch": 0.2621295015932183, - "flos": 13020454422840.0, - "grad_norm": 4.58633613478508, - "learning_rate": 3.4610476185014436e-06, - "loss": 1.0506, - "num_input_tokens_seen": 46121005, - "step": 2180 - }, - { - "epoch": 0.2622497444838574, - "flos": 16822556457000.0, - "grad_norm": 3.0918001137558053, - "learning_rate": 3.4605155579072597e-06, - "loss": 1.0247, - "num_input_tokens_seen": 46140580, - "step": 2181 - }, - { - "epoch": 0.26236998737449646, - "flos": 15718016692680.0, - "grad_norm": 3.4246983137707527, - "learning_rate": 3.459983275754027e-06, - "loss": 0.9453, - "num_input_tokens_seen": 46159195, - "step": 2182 - }, - { - "epoch": 0.26249023026513557, - "flos": 12679487073720.0, - "grad_norm": 4.0198287631330905, - "learning_rate": 3.4594507721224918e-06, - "loss": 1.0225, - "num_input_tokens_seen": 46177565, - "step": 2183 - }, - { - "epoch": 0.2626104731557747, - "flos": 12994079933160.0, - "grad_norm": 2.390766314150794, - "learning_rate": 3.4589180470934353e-06, - "loss": 1.0453, - "num_input_tokens_seen": 46197150, - "step": 2184 - }, - { - "epoch": 0.26273071604641374, - "flos": 13701775889880.0, - "grad_norm": 5.284780371946588, - "learning_rate": 3.4583851007476713e-06, - "loss": 0.9841, - "num_input_tokens_seen": 46215340, - "step": 2185 - }, - { - "epoch": 0.26285095893705285, - "flos": 12989419376040.0, - "grad_norm": 3.4659411080668123, - "learning_rate": 3.4578519331660464e-06, - "loss": 0.9116, - "num_input_tokens_seen": 46232055, - "step": 2186 - }, - { - "epoch": 0.26297120182769196, - "flos": 14331482855280.0, - "grad_norm": 6.021747594494216, - "learning_rate": 3.4573185444294426e-06, - "loss": 1.0459, - "num_input_tokens_seen": 46250140, - "step": 2187 - }, - { - "epoch": 0.263091444718331, - "flos": 15930116426280.0, - "grad_norm": 2.696573775035006, - "learning_rate": 3.456784934618774e-06, - "loss": 1.0136, - "num_input_tokens_seen": 46271025, - "step": 2188 - }, - { - "epoch": 0.2632116876089701, - "flos": 13491147911160.0, - "grad_norm": 3.1079228523037026, - "learning_rate": 3.4562511038149897e-06, - "loss": 1.0257, - "num_input_tokens_seen": 46286240, - "step": 2189 - }, - { - "epoch": 0.26333193049960923, - "flos": 48198016617120.0, - "grad_norm": 1.0407853149096442, - "learning_rate": 3.4557170520990705e-06, - "loss": 0.8329, - "num_input_tokens_seen": 46346635, - "step": 2190 - }, - { - "epoch": 0.2634521733902483, - "flos": 17820985031400.0, - "grad_norm": 2.486133169245644, - "learning_rate": 3.4551827795520324e-06, - "loss": 1.0896, - "num_input_tokens_seen": 46369240, - "step": 2191 - }, - { - "epoch": 0.2635724162808874, - "flos": 14619762548160.0, - "grad_norm": 3.638927014630832, - "learning_rate": 3.4546482862549226e-06, - "loss": 1.0768, - 
"num_input_tokens_seen": 46389275, - "step": 2192 - }, - { - "epoch": 0.2636926591715265, - "flos": 13649241541440.0, - "grad_norm": 5.826083482819251, - "learning_rate": 3.4541135722888253e-06, - "loss": 1.0183, - "num_input_tokens_seen": 46405585, - "step": 2193 - }, - { - "epoch": 0.26381290206216557, - "flos": 20520141702360.0, - "grad_norm": 2.712911541214815, - "learning_rate": 3.453578637734854e-06, - "loss": 1.0315, - "num_input_tokens_seen": 46426495, - "step": 2194 - }, - { - "epoch": 0.2639331449528047, - "flos": 17791851001320.0, - "grad_norm": 3.5806990114734023, - "learning_rate": 3.4530434826741605e-06, - "loss": 1.0125, - "num_input_tokens_seen": 46447155, - "step": 2195 - }, - { - "epoch": 0.26405338784344373, - "flos": 33262838895480.0, - "grad_norm": 4.143112184162606, - "learning_rate": 3.452508107187926e-06, - "loss": 0.9132, - "num_input_tokens_seen": 46470250, - "step": 2196 - }, - { - "epoch": 0.26417363073408284, - "flos": 15039914689440.0, - "grad_norm": 2.6448274838122083, - "learning_rate": 3.451972511357366e-06, - "loss": 0.9859, - "num_input_tokens_seen": 46489515, - "step": 2197 - }, - { - "epoch": 0.26429387362472195, - "flos": 16114461900000.0, - "grad_norm": 2.2684840611315544, - "learning_rate": 3.45143669526373e-06, - "loss": 1.0902, - "num_input_tokens_seen": 46508995, - "step": 2198 - }, - { - "epoch": 0.264414116515361, - "flos": 48103930306800.0, - "grad_norm": 0.7537845256356747, - "learning_rate": 3.450900658988302e-06, - "loss": 0.8625, - "num_input_tokens_seen": 46570265, - "step": 2199 - }, - { - "epoch": 0.2645343594060001, - "flos": 18263035074600.0, - "grad_norm": 3.8321362713373865, - "learning_rate": 3.450364402612397e-06, - "loss": 0.9979, - "num_input_tokens_seen": 46587140, - "step": 2200 - }, - { - "epoch": 0.26465460229663923, - "flos": 15982926728760.0, - "grad_norm": 5.355372410637398, - "learning_rate": 3.449827926217366e-06, - "loss": 1.0525, - "num_input_tokens_seen": 46606295, - "step": 2201 - }, - { - "epoch": 0.2647748451872783, - "flos": 20939159365920.0, - "grad_norm": 4.687537434746355, - "learning_rate": 3.449291229884591e-06, - "loss": 1.0287, - "num_input_tokens_seen": 46627255, - "step": 2202 - }, - { - "epoch": 0.2648950880779174, - "flos": 19077639421800.0, - "grad_norm": 3.0062711655472434, - "learning_rate": 3.4487543136954887e-06, - "loss": 1.0846, - "num_input_tokens_seen": 46646595, - "step": 2203 - }, - { - "epoch": 0.2650153309685565, - "flos": 20546056268640.0, - "grad_norm": 2.7420550800748527, - "learning_rate": 3.448217177731509e-06, - "loss": 1.1333, - "num_input_tokens_seen": 46666800, - "step": 2204 - }, - { - "epoch": 0.26513557385919556, - "flos": 14410115739360.0, - "grad_norm": 3.42262503708341, - "learning_rate": 3.4476798220741348e-06, - "loss": 0.996, - "num_input_tokens_seen": 46685400, - "step": 2205 - }, - { - "epoch": 0.26525581674983467, - "flos": 12522159982440.0, - "grad_norm": 2.7945991505731254, - "learning_rate": 3.4471422468048826e-06, - "loss": 1.0091, - "num_input_tokens_seen": 46703845, - "step": 2206 - }, - { - "epoch": 0.2653760596404738, - "flos": 19103400680280.0, - "grad_norm": 4.640469429670878, - "learning_rate": 3.4466044520053022e-06, - "loss": 0.9601, - "num_input_tokens_seen": 46722570, - "step": 2207 - }, - { - "epoch": 0.26549630253111284, - "flos": 16191377736720.0, - "grad_norm": 2.6897307607921945, - "learning_rate": 3.446066437756977e-06, - "loss": 0.8298, - "num_input_tokens_seen": 46741495, - "step": 2208 - }, - { - "epoch": 0.26561654542175195, - "flos": 
16743432987960.0, - "grad_norm": 3.0720255103740857, - "learning_rate": 3.4455282041415224e-06, - "loss": 0.9745, - "num_input_tokens_seen": 46760425, - "step": 2209 - }, - { - "epoch": 0.265736788312391, - "flos": 19155812382480.0, - "grad_norm": 5.052564879863927, - "learning_rate": 3.4449897512405894e-06, - "loss": 1.0977, - "num_input_tokens_seen": 46779295, - "step": 2210 - }, - { - "epoch": 0.2658570312030301, - "flos": 16691082608880.0, - "grad_norm": 4.796081278490056, - "learning_rate": 3.444451079135859e-06, - "loss": 0.9622, - "num_input_tokens_seen": 46798525, - "step": 2211 - }, - { - "epoch": 0.2659772740936692, - "flos": 15532751372160.0, - "grad_norm": 4.125936921961943, - "learning_rate": 3.4439121879090493e-06, - "loss": 0.9697, - "num_input_tokens_seen": 46816025, - "step": 2212 - }, - { - "epoch": 0.2660975169843083, - "flos": 14043571101120.0, - "grad_norm": 3.031763878749745, - "learning_rate": 3.4433730776419082e-06, - "loss": 1.0639, - "num_input_tokens_seen": 46834670, - "step": 2213 - }, - { - "epoch": 0.2662177598749474, - "flos": 20673360144600.0, - "grad_norm": 4.013297627396868, - "learning_rate": 3.4428337484162183e-06, - "loss": 1.0281, - "num_input_tokens_seen": 46855200, - "step": 2214 - }, - { - "epoch": 0.2663380027655865, - "flos": 15458871029880.0, - "grad_norm": 5.09420549661315, - "learning_rate": 3.442294200313797e-06, - "loss": 1.0714, - "num_input_tokens_seen": 46872950, - "step": 2215 - }, - { - "epoch": 0.26645824565622556, - "flos": 47962577888280.0, - "grad_norm": 0.8638641667857648, - "learning_rate": 3.4417544334164916e-06, - "loss": 0.813, - "num_input_tokens_seen": 46936815, - "step": 2216 - }, - { - "epoch": 0.26657848854686467, - "flos": 17975307289800.0, - "grad_norm": 2.6843608886916277, - "learning_rate": 3.4412144478061854e-06, - "loss": 0.9935, - "num_input_tokens_seen": 46958945, - "step": 2217 - }, - { - "epoch": 0.2666987314375038, - "flos": 16849022931360.0, - "grad_norm": 2.592489280862453, - "learning_rate": 3.4406742435647925e-06, - "loss": 0.9827, - "num_input_tokens_seen": 46978730, - "step": 2218 - }, - { - "epoch": 0.26681897432814283, - "flos": 19257569630880.0, - "grad_norm": 2.8082481458140442, - "learning_rate": 3.440133820774263e-06, - "loss": 1.0163, - "num_input_tokens_seen": 46998260, - "step": 2219 - }, - { - "epoch": 0.26693921721878194, - "flos": 20650082472480.0, - "grad_norm": 4.211750015824869, - "learning_rate": 3.439593179516578e-06, - "loss": 1.0302, - "num_input_tokens_seen": 47017890, - "step": 2220 - }, - { - "epoch": 0.26705946010942105, - "flos": 15275905326360.0, - "grad_norm": 2.789731840283603, - "learning_rate": 3.4390523198737524e-06, - "loss": 1.0377, - "num_input_tokens_seen": 47036770, - "step": 2221 - }, - { - "epoch": 0.2671797030000601, - "flos": 15249622821360.0, - "grad_norm": 2.2718293575020447, - "learning_rate": 3.4385112419278333e-06, - "loss": 0.9643, - "num_input_tokens_seen": 47057715, - "step": 2222 - }, - { - "epoch": 0.2672999458906992, - "flos": 45954805224120.0, - "grad_norm": 0.825951657356111, - "learning_rate": 3.4379699457609033e-06, - "loss": 0.923, - "num_input_tokens_seen": 47115260, - "step": 2223 - }, - { - "epoch": 0.26742018878133833, - "flos": 11970625977720.0, - "grad_norm": 6.101680974261923, - "learning_rate": 3.4374284314550755e-06, - "loss": 1.124, - "num_input_tokens_seen": 47134020, - "step": 2224 - }, - { - "epoch": 0.2675404316719774, - "flos": 14671867634760.0, - "grad_norm": 2.3230735714412614, - "learning_rate": 3.436886699092498e-06, - "loss": 1.0282, 
- "num_input_tokens_seen": 47152255, - "step": 2225 - }, - { - "epoch": 0.2676606745626165, - "flos": 12383511329280.0, - "grad_norm": 4.907942558928656, - "learning_rate": 3.4363447487553502e-06, - "loss": 0.9398, - "num_input_tokens_seen": 47165290, - "step": 2226 - }, - { - "epoch": 0.26778091745325555, - "flos": 19835324817480.0, - "grad_norm": 2.8736897675721753, - "learning_rate": 3.4358025805258455e-06, - "loss": 1.0083, - "num_input_tokens_seen": 47184715, - "step": 2227 - }, - { - "epoch": 0.26790116034389466, - "flos": 14878693580040.0, - "grad_norm": 2.416131771567798, - "learning_rate": 3.435260194486232e-06, - "loss": 1.0544, - "num_input_tokens_seen": 47202405, - "step": 2228 - }, - { - "epoch": 0.2680214032345338, - "flos": 12783973200960.0, - "grad_norm": 21.78992454708318, - "learning_rate": 3.4347175907187875e-06, - "loss": 1.0427, - "num_input_tokens_seen": 47219115, - "step": 2229 - }, - { - "epoch": 0.26814164612517283, - "flos": 15930974949960.0, - "grad_norm": 2.2723762683316133, - "learning_rate": 3.4341747693058254e-06, - "loss": 1.0977, - "num_input_tokens_seen": 47237310, - "step": 2230 - }, - { - "epoch": 0.26826188901581194, - "flos": 25423288083000.0, - "grad_norm": 3.107621048083439, - "learning_rate": 3.4336317303296916e-06, - "loss": 0.9994, - "num_input_tokens_seen": 47258005, - "step": 2231 - }, - { - "epoch": 0.26838213190645105, - "flos": 12495938800560.0, - "grad_norm": 3.0556262981047655, - "learning_rate": 3.4330884738727635e-06, - "loss": 0.9714, - "num_input_tokens_seen": 47275900, - "step": 2232 - }, - { - "epoch": 0.2685023747970901, - "flos": 16114523223120.0, - "grad_norm": 3.8893043084258707, - "learning_rate": 3.4325450000174535e-06, - "loss": 0.9421, - "num_input_tokens_seen": 47292260, - "step": 2233 - }, - { - "epoch": 0.2686226176877292, - "flos": 14278764537480.0, - "grad_norm": 3.4676422549814325, - "learning_rate": 3.4320013088462067e-06, - "loss": 0.9644, - "num_input_tokens_seen": 47309340, - "step": 2234 - }, - { - "epoch": 0.2687428605783683, - "flos": 15537565237080.0, - "grad_norm": 2.15848049953295, - "learning_rate": 3.431457400441499e-06, - "loss": 1.036, - "num_input_tokens_seen": 47329455, - "step": 2235 - }, - { - "epoch": 0.2688631034690074, - "flos": 50088124231320.0, - "grad_norm": 1.0178441964667013, - "learning_rate": 3.4309132748858424e-06, - "loss": 0.9141, - "num_input_tokens_seen": 47390165, - "step": 2236 - }, - { - "epoch": 0.2689833463596465, - "flos": 16245107886000.0, - "grad_norm": 2.3731258426290025, - "learning_rate": 3.430368932261779e-06, - "loss": 1.0683, - "num_input_tokens_seen": 47410240, - "step": 2237 - }, - { - "epoch": 0.2691035892502856, - "flos": 12179812863120.0, - "grad_norm": 2.759546615430802, - "learning_rate": 3.429824372651886e-06, - "loss": 0.9792, - "num_input_tokens_seen": 47428110, - "step": 2238 - }, - { - "epoch": 0.26922383214092466, - "flos": 12573099929760.0, - "grad_norm": 5.54680175732894, - "learning_rate": 3.4292795961387732e-06, - "loss": 1.0585, - "num_input_tokens_seen": 47445730, - "step": 2239 - }, - { - "epoch": 0.26934407503156377, - "flos": 11441112521160.0, - "grad_norm": 3.6256125360125706, - "learning_rate": 3.4287346028050818e-06, - "loss": 1.1016, - "num_input_tokens_seen": 47461520, - "step": 2240 - }, - { - "epoch": 0.2694643179222028, - "flos": 16874937497640.0, - "grad_norm": 3.3289298217196723, - "learning_rate": 3.4281893927334866e-06, - "loss": 1.0256, - "num_input_tokens_seen": 47481150, - "step": 2241 - }, - { - "epoch": 0.26958456081284193, - "flos": 
17583032054640.0, - "grad_norm": 2.567722116759616, - "learning_rate": 3.4276439660066963e-06, - "loss": 0.9756, - "num_input_tokens_seen": 47500570, - "step": 2242 - }, - { - "epoch": 0.26970480370348104, - "flos": 12835526379480.0, - "grad_norm": 6.934482227405809, - "learning_rate": 3.427098322707452e-06, - "loss": 1.0671, - "num_input_tokens_seen": 47516255, - "step": 2243 - }, - { - "epoch": 0.2698250465941201, - "flos": 7592148527160.0, - "grad_norm": 3.649733384728541, - "learning_rate": 3.426552462918526e-06, - "loss": 1.1099, - "num_input_tokens_seen": 47533910, - "step": 2244 - }, - { - "epoch": 0.2699452894847592, - "flos": 12257127300120.0, - "grad_norm": 3.7852400228168452, - "learning_rate": 3.426006386722726e-06, - "loss": 0.9667, - "num_input_tokens_seen": 47551690, - "step": 2245 - }, - { - "epoch": 0.2700655323753983, - "flos": 12811052906520.0, - "grad_norm": 4.186341082634841, - "learning_rate": 3.4254600942028914e-06, - "loss": 1.1471, - "num_input_tokens_seen": 47569285, - "step": 2246 - }, - { - "epoch": 0.2701857752660374, - "flos": 12888888590040.0, - "grad_norm": 4.0293245670552285, - "learning_rate": 3.424913585441893e-06, - "loss": 1.0392, - "num_input_tokens_seen": 47586840, - "step": 2247 - }, - { - "epoch": 0.2703060181566765, - "flos": 11546150556480.0, - "grad_norm": 4.669961901229554, - "learning_rate": 3.4243668605226374e-06, - "loss": 1.1011, - "num_input_tokens_seen": 47603585, - "step": 2248 - }, - { - "epoch": 0.2704262610473156, - "flos": 13884680270280.0, - "grad_norm": 3.61190509055522, - "learning_rate": 3.423819919528061e-06, - "loss": 1.0572, - "num_input_tokens_seen": 47621390, - "step": 2249 - }, - { - "epoch": 0.27054650393795465, - "flos": 14724493967880.0, - "grad_norm": 3.1650256539460653, - "learning_rate": 3.4232727625411355e-06, - "loss": 1.0098, - "num_input_tokens_seen": 47640215, - "step": 2250 - }, - { - "epoch": 0.27066674682859376, - "flos": 13227525660600.0, - "grad_norm": 3.154916160424724, - "learning_rate": 3.4227253896448626e-06, - "loss": 1.0855, - "num_input_tokens_seen": 47657795, - "step": 2251 - }, - { - "epoch": 0.2707869897192329, - "flos": 16350145921320.0, - "grad_norm": 3.040114978071556, - "learning_rate": 3.42217780092228e-06, - "loss": 1.0271, - "num_input_tokens_seen": 47675855, - "step": 2252 - }, - { - "epoch": 0.27090723260987193, - "flos": 41739541884360.0, - "grad_norm": 0.838761879258378, - "learning_rate": 3.421629996456456e-06, - "loss": 0.8673, - "num_input_tokens_seen": 47734195, - "step": 2253 - }, - { - "epoch": 0.27102747550051104, - "flos": 8431624947600.0, - "grad_norm": 3.695142165405628, - "learning_rate": 3.421081976330491e-06, - "loss": 1.0459, - "num_input_tokens_seen": 47752430, - "step": 2254 - }, - { - "epoch": 0.27114771839115015, - "flos": 14120394953160.0, - "grad_norm": 3.938108124774762, - "learning_rate": 3.4205337406275207e-06, - "loss": 1.098, - "num_input_tokens_seen": 47772270, - "step": 2255 - }, - { - "epoch": 0.2712679612817892, - "flos": 12993252071040.0, - "grad_norm": 5.225920542250525, - "learning_rate": 3.4199852894307114e-06, - "loss": 0.9782, - "num_input_tokens_seen": 47788740, - "step": 2256 - }, - { - "epoch": 0.2713882041724283, - "flos": 17397398795400.0, - "grad_norm": 3.169967002335898, - "learning_rate": 3.419436622823262e-06, - "loss": 1.0118, - "num_input_tokens_seen": 47809180, - "step": 2257 - }, - { - "epoch": 0.27150844706306737, - "flos": 16376428426320.0, - "grad_norm": 4.178826073979077, - "learning_rate": 3.4188877408884063e-06, - "loss": 0.9711, - 
"num_input_tokens_seen": 47829605, - "step": 2258 - }, - { - "epoch": 0.2716286899537065, - "flos": 16034387922600.0, - "grad_norm": 7.896302973352318, - "learning_rate": 3.4183386437094088e-06, - "loss": 0.8852, - "num_input_tokens_seen": 47845990, - "step": 2259 - }, - { - "epoch": 0.2717489328443456, - "flos": 9244021662480.0, - "grad_norm": 4.278206739069733, - "learning_rate": 3.417789331369565e-06, - "loss": 1.0359, - "num_input_tokens_seen": 47861500, - "step": 2260 - }, - { - "epoch": 0.27186917573498465, - "flos": 20860587804960.0, - "grad_norm": 3.087506400036067, - "learning_rate": 3.4172398039522088e-06, - "loss": 1.1307, - "num_input_tokens_seen": 47882505, - "step": 2261 - }, - { - "epoch": 0.27198941862562376, - "flos": 18527515848840.0, - "grad_norm": 2.3811704887672214, - "learning_rate": 3.4166900615407e-06, - "loss": 1.0241, - "num_input_tokens_seen": 47900140, - "step": 2262 - }, - { - "epoch": 0.27210966151626287, - "flos": 23377575972960.0, - "grad_norm": 4.697329578241874, - "learning_rate": 3.416140104218436e-06, - "loss": 0.9652, - "num_input_tokens_seen": 47919225, - "step": 2263 - }, - { - "epoch": 0.2722299044069019, - "flos": 46876072669320.0, - "grad_norm": 0.8504736001673444, - "learning_rate": 3.4155899320688437e-06, - "loss": 0.9738, - "num_input_tokens_seen": 47985020, - "step": 2264 - }, - { - "epoch": 0.27235014729754103, - "flos": 10837964014800.0, - "grad_norm": 2.9171391422327604, - "learning_rate": 3.415039545175384e-06, - "loss": 0.9694, - "num_input_tokens_seen": 48000465, - "step": 2265 - }, - { - "epoch": 0.27247039018818014, - "flos": 15537841191120.0, - "grad_norm": 4.880568625043578, - "learning_rate": 3.414488943621551e-06, - "loss": 0.8792, - "num_input_tokens_seen": 48018850, - "step": 2266 - }, - { - "epoch": 0.2725906330788192, - "flos": 13254329412120.0, - "grad_norm": 2.8620262986740284, - "learning_rate": 3.41393812749087e-06, - "loss": 0.9672, - "num_input_tokens_seen": 48036615, - "step": 2267 - }, - { - "epoch": 0.2727108759694583, - "flos": 12671729699040.0, - "grad_norm": 6.220214671266629, - "learning_rate": 3.4133870968668984e-06, - "loss": 0.9564, - "num_input_tokens_seen": 48051135, - "step": 2268 - }, - { - "epoch": 0.2728311188600974, - "flos": 17398563934680.0, - "grad_norm": 3.0344674250535197, - "learning_rate": 3.412835851833229e-06, - "loss": 1.0001, - "num_input_tokens_seen": 48073050, - "step": 2269 - }, - { - "epoch": 0.2729513617507365, - "flos": 22092830045520.0, - "grad_norm": 4.383377773122492, - "learning_rate": 3.4122843924734834e-06, - "loss": 1.0005, - "num_input_tokens_seen": 48095070, - "step": 2270 - }, - { - "epoch": 0.2730716046413756, - "flos": 13540892057640.0, - "grad_norm": 3.2526947000740245, - "learning_rate": 3.411732718871319e-06, - "loss": 1.1083, - "num_input_tokens_seen": 48110630, - "step": 2271 - }, - { - "epoch": 0.27319184753201464, - "flos": 19183229365200.0, - "grad_norm": 2.0750803047175332, - "learning_rate": 3.4111808311104227e-06, - "loss": 1.0114, - "num_input_tokens_seen": 48132665, - "step": 2272 - }, - { - "epoch": 0.27331209042265375, - "flos": 22643750819040.0, - "grad_norm": 2.5158391825795503, - "learning_rate": 3.410628729274517e-06, - "loss": 0.9198, - "num_input_tokens_seen": 48153905, - "step": 2273 - }, - { - "epoch": 0.27343233331329286, - "flos": 18316673239200.0, - "grad_norm": 3.9427616144340245, - "learning_rate": 3.4100764134473546e-06, - "loss": 1.0496, - "num_input_tokens_seen": 48172910, - "step": 2274 - }, - { - "epoch": 0.2735525762039319, - "flos": 
17346734802120.0, - "grad_norm": 3.670993780352697, - "learning_rate": 3.4095238837127215e-06, - "loss": 1.0693, - "num_input_tokens_seen": 48191770, - "step": 2275 - }, - { - "epoch": 0.27367281909457103, - "flos": 10214512007640.0, - "grad_norm": 2.884320286059307, - "learning_rate": 3.4089711401544355e-06, - "loss": 1.0144, - "num_input_tokens_seen": 48209085, - "step": 2276 - }, - { - "epoch": 0.27379306198521014, - "flos": 16690959962640.0, - "grad_norm": 3.8131683264113683, - "learning_rate": 3.4084181828563486e-06, - "loss": 0.8944, - "num_input_tokens_seen": 48225525, - "step": 2277 - }, - { - "epoch": 0.2739133048758492, - "flos": 12365507445480.0, - "grad_norm": 2.1413433790806575, - "learning_rate": 3.4078650119023428e-06, - "loss": 0.9298, - "num_input_tokens_seen": 48243560, - "step": 2278 - }, - { - "epoch": 0.2740335477664883, - "flos": 13668625195440.0, - "grad_norm": 4.060974069264091, - "learning_rate": 3.4073116273763337e-06, - "loss": 0.9762, - "num_input_tokens_seen": 48257725, - "step": 2279 - }, - { - "epoch": 0.2741537906571274, - "flos": 18579559612320.0, - "grad_norm": 2.9335950503492456, - "learning_rate": 3.40675802936227e-06, - "loss": 1.0348, - "num_input_tokens_seen": 48278230, - "step": 2280 - }, - { - "epoch": 0.27427403354776647, - "flos": 24371221344000.0, - "grad_norm": 3.8982757962925048, - "learning_rate": 3.4062042179441318e-06, - "loss": 0.9415, - "num_input_tokens_seen": 48298420, - "step": 2281 - }, - { - "epoch": 0.2743942764384056, - "flos": 13305514651920.0, - "grad_norm": 5.6316675300217405, - "learning_rate": 3.4056501932059314e-06, - "loss": 1.0376, - "num_input_tokens_seen": 48316215, - "step": 2282 - }, - { - "epoch": 0.2745145193290447, - "flos": 46465921744680.0, - "grad_norm": 0.8316223142515454, - "learning_rate": 3.405095955231715e-06, - "loss": 0.8462, - "num_input_tokens_seen": 48367590, - "step": 2283 - }, - { - "epoch": 0.27463476221968375, - "flos": 11414922000840.0, - "grad_norm": 8.58531990641629, - "learning_rate": 3.4045415041055585e-06, - "loss": 1.1648, - "num_input_tokens_seen": 48382950, - "step": 2284 - }, - { - "epoch": 0.27475500511032286, - "flos": 7275777297240.0, - "grad_norm": 4.47583449270948, - "learning_rate": 3.4039868399115728e-06, - "loss": 1.0033, - "num_input_tokens_seen": 48397310, - "step": 2285 - }, - { - "epoch": 0.27487524800096197, - "flos": 12259641548040.0, - "grad_norm": 2.5831688756790756, - "learning_rate": 3.4034319627339003e-06, - "loss": 1.0217, - "num_input_tokens_seen": 48413895, - "step": 2286 - }, - { - "epoch": 0.274995490891601, - "flos": 19308540239760.0, - "grad_norm": 7.590557416267163, - "learning_rate": 3.402876872656715e-06, - "loss": 0.9256, - "num_input_tokens_seen": 48431935, - "step": 2287 - }, - { - "epoch": 0.27511573378224013, - "flos": 16660967408880.0, - "grad_norm": 3.878675993724461, - "learning_rate": 3.402321569764223e-06, - "loss": 1.1152, - "num_input_tokens_seen": 48450960, - "step": 2288 - }, - { - "epoch": 0.2752359766728792, - "flos": 11835595388640.0, - "grad_norm": 2.5159628504995846, - "learning_rate": 3.4017660541406635e-06, - "loss": 1.055, - "num_input_tokens_seen": 48466745, - "step": 2289 - }, - { - "epoch": 0.2753562195635183, - "flos": 17999014223760.0, - "grad_norm": 2.647267130839637, - "learning_rate": 3.4012103258703092e-06, - "loss": 0.9723, - "num_input_tokens_seen": 48485220, - "step": 2290 - }, - { - "epoch": 0.2754764624541574, - "flos": 19465744684800.0, - "grad_norm": 2.142979795487962, - "learning_rate": 3.4006543850374616e-06, - "loss": 
1.0629, - "num_input_tokens_seen": 48499990, - "step": 2291 - }, - { - "epoch": 0.27559670534479647, - "flos": 12207444476760.0, - "grad_norm": 3.3094583617923794, - "learning_rate": 3.400098231726458e-06, - "loss": 0.9861, - "num_input_tokens_seen": 48516810, - "step": 2292 - }, - { - "epoch": 0.2757169482354356, - "flos": 15584948443440.0, - "grad_norm": 2.6637385107013705, - "learning_rate": 3.3995418660216657e-06, - "loss": 1.104, - "num_input_tokens_seen": 48533985, - "step": 2293 - }, - { - "epoch": 0.2758371911260747, - "flos": 14803556113800.0, - "grad_norm": 2.486336084009993, - "learning_rate": 3.3989852880074848e-06, - "loss": 1.0364, - "num_input_tokens_seen": 48555135, - "step": 2294 - }, - { - "epoch": 0.27595743401671374, - "flos": 49606111079280.0, - "grad_norm": 0.7804227645508158, - "learning_rate": 3.398428497768348e-06, - "loss": 0.8742, - "num_input_tokens_seen": 48620025, - "step": 2295 - }, - { - "epoch": 0.27607767690735285, - "flos": 15065338670760.0, - "grad_norm": 3.1304517874509243, - "learning_rate": 3.3978714953887205e-06, - "loss": 0.9383, - "num_input_tokens_seen": 48639500, - "step": 2296 - }, - { - "epoch": 0.27619791979799196, - "flos": 17659763922000.0, - "grad_norm": 2.3206102584752837, - "learning_rate": 3.397314280953098e-06, - "loss": 1.0894, - "num_input_tokens_seen": 48660045, - "step": 2297 - }, - { - "epoch": 0.276318162688631, - "flos": 17608333389720.0, - "grad_norm": 3.042551269099283, - "learning_rate": 3.3967568545460108e-06, - "loss": 1.0263, - "num_input_tokens_seen": 48679305, - "step": 2298 - }, - { - "epoch": 0.27643840557927013, - "flos": 12862544761920.0, - "grad_norm": 3.3290942576096887, - "learning_rate": 3.3961992162520185e-06, - "loss": 1.023, - "num_input_tokens_seen": 48697650, - "step": 2299 - }, - { - "epoch": 0.27655864846990924, - "flos": 17658568121160.0, - "grad_norm": 4.8181727280048445, - "learning_rate": 3.3956413661557156e-06, - "loss": 0.9453, - "num_input_tokens_seen": 48717545, - "step": 2300 - }, - { - "epoch": 0.2766788913605483, - "flos": 14383741249680.0, - "grad_norm": 2.936013840052859, - "learning_rate": 3.3950833043417273e-06, - "loss": 0.8836, - "num_input_tokens_seen": 48735410, - "step": 2301 - }, - { - "epoch": 0.2767991342511874, - "flos": 15248764297680.0, - "grad_norm": 3.3129194461921356, - "learning_rate": 3.3945250308947105e-06, - "loss": 0.9522, - "num_input_tokens_seen": 48751435, - "step": 2302 - }, - { - "epoch": 0.2769193771418265, - "flos": 47255991295800.0, - "grad_norm": 1.250456011563203, - "learning_rate": 3.3939665458993556e-06, - "loss": 0.9341, - "num_input_tokens_seen": 48805575, - "step": 2303 - }, - { - "epoch": 0.27703962003246557, - "flos": 14698426093800.0, - "grad_norm": 7.6405738251577375, - "learning_rate": 3.3934078494403843e-06, - "loss": 0.9813, - "num_input_tokens_seen": 48824870, - "step": 2304 - }, - { - "epoch": 0.2771598629231047, - "flos": 16297795542240.0, - "grad_norm": 2.7852317510913878, - "learning_rate": 3.3928489416025495e-06, - "loss": 1.0341, - "num_input_tokens_seen": 48845435, - "step": 2305 - }, - { - "epoch": 0.27728010581374374, - "flos": 13020331776600.0, - "grad_norm": 4.23271253544519, - "learning_rate": 3.392289822470638e-06, - "loss": 1.0072, - "num_input_tokens_seen": 48863135, - "step": 2306 - }, - { - "epoch": 0.27740034870438285, - "flos": 13780838035800.0, - "grad_norm": 3.250460958802943, - "learning_rate": 3.3917304921294674e-06, - "loss": 0.9793, - "num_input_tokens_seen": 48881020, - "step": 2307 - }, - { - "epoch": 0.27752059159502196, - 
"flos": 15352422562800.0, - "grad_norm": 2.036908996616699, - "learning_rate": 3.3911709506638876e-06, - "loss": 1.0255, - "num_input_tokens_seen": 48900050, - "step": 2308 - }, - { - "epoch": 0.277640834485661, - "flos": 18940247892600.0, - "grad_norm": 3.159402160829637, - "learning_rate": 3.390611198158781e-06, - "loss": 1.0365, - "num_input_tokens_seen": 48917645, - "step": 2309 - }, - { - "epoch": 0.2777610773763001, - "flos": 13826626841040.0, - "grad_norm": 2.7436307494506353, - "learning_rate": 3.3900512346990612e-06, - "loss": 1.1251, - "num_input_tokens_seen": 48933355, - "step": 2310 - }, - { - "epoch": 0.27788132026693924, - "flos": 27337311714000.0, - "grad_norm": 2.420672407289319, - "learning_rate": 3.389491060369674e-06, - "loss": 0.8755, - "num_input_tokens_seen": 48958750, - "step": 2311 - }, - { - "epoch": 0.2780015631575783, - "flos": 15904416490920.0, - "grad_norm": 4.6144311103369935, - "learning_rate": 3.388930675255598e-06, - "loss": 1.1095, - "num_input_tokens_seen": 48978320, - "step": 2312 - }, - { - "epoch": 0.2781218060482174, - "flos": 8588737407960.0, - "grad_norm": 3.6114923781264756, - "learning_rate": 3.388370079441843e-06, - "loss": 0.9963, - "num_input_tokens_seen": 48993555, - "step": 2313 - }, - { - "epoch": 0.2782420489388565, - "flos": 12831295084200.0, - "grad_norm": 2.5516219571924754, - "learning_rate": 3.3878092730134505e-06, - "loss": 1.1621, - "num_input_tokens_seen": 49011260, - "step": 2314 - }, - { - "epoch": 0.27836229182949557, - "flos": 13124664596040.0, - "grad_norm": 2.1422469405616393, - "learning_rate": 3.3872482560554947e-06, - "loss": 1.0289, - "num_input_tokens_seen": 49029755, - "step": 2315 - }, - { - "epoch": 0.2784825347201347, - "flos": 48032226935280.0, - "grad_norm": 0.8121410824235912, - "learning_rate": 3.386687028653082e-06, - "loss": 0.8257, - "num_input_tokens_seen": 49092320, - "step": 2316 - }, - { - "epoch": 0.2786027776107738, - "flos": 16082139067680.0, - "grad_norm": 3.1421386714956108, - "learning_rate": 3.386125590891349e-06, - "loss": 1.0732, - "num_input_tokens_seen": 49108915, - "step": 2317 - }, - { - "epoch": 0.27872302050141284, - "flos": 11158474555320.0, - "grad_norm": 2.6588978752905055, - "learning_rate": 3.3855639428554657e-06, - "loss": 1.0473, - "num_input_tokens_seen": 49126165, - "step": 2318 - }, - { - "epoch": 0.27884326339205195, - "flos": 15720714909960.0, - "grad_norm": 2.7204642317610985, - "learning_rate": 3.385002084630635e-06, - "loss": 1.0357, - "num_input_tokens_seen": 49144855, - "step": 2319 - }, - { - "epoch": 0.278963506282691, - "flos": 14593725335640.0, - "grad_norm": 2.8787740731402613, - "learning_rate": 3.384440016302088e-06, - "loss": 1.0667, - "num_input_tokens_seen": 49163250, - "step": 2320 - }, - { - "epoch": 0.2790837491733301, - "flos": 15588229230360.0, - "grad_norm": 3.165652782020782, - "learning_rate": 3.3838777379550923e-06, - "loss": 0.8406, - "num_input_tokens_seen": 49182415, - "step": 2321 - }, - { - "epoch": 0.27920399206396923, - "flos": 18710358906120.0, - "grad_norm": 2.605680397050545, - "learning_rate": 3.383315249674944e-06, - "loss": 1.0262, - "num_input_tokens_seen": 49200700, - "step": 2322 - }, - { - "epoch": 0.2793242349546083, - "flos": 18072373319520.0, - "grad_norm": 3.5691895169181227, - "learning_rate": 3.3827525515469715e-06, - "loss": 1.0936, - "num_input_tokens_seen": 49215325, - "step": 2323 - }, - { - "epoch": 0.2794444778452474, - "flos": 14803280159760.0, - "grad_norm": 2.6349063323888093, - "learning_rate": 3.3821896436565367e-06, - 
"loss": 0.9326, - "num_input_tokens_seen": 49234705, - "step": 2324 - }, - { - "epoch": 0.2795647207358865, - "flos": 15324453672000.0, - "grad_norm": 2.8732908392977525, - "learning_rate": 3.381626526089032e-06, - "loss": 0.9319, - "num_input_tokens_seen": 49253990, - "step": 2325 - }, - { - "epoch": 0.27968496362652556, - "flos": 15250021421640.0, - "grad_norm": 2.7473316251414204, - "learning_rate": 3.3810631989298815e-06, - "loss": 1.0175, - "num_input_tokens_seen": 49273320, - "step": 2326 - }, - { - "epoch": 0.2798052065171647, - "flos": 16534154117880.0, - "grad_norm": 5.123511679793792, - "learning_rate": 3.3804996622645423e-06, - "loss": 1.0764, - "num_input_tokens_seen": 49291040, - "step": 2327 - }, - { - "epoch": 0.2799254494078038, - "flos": 15297864551400.0, - "grad_norm": 2.6080269443664696, - "learning_rate": 3.3799359161785015e-06, - "loss": 1.1138, - "num_input_tokens_seen": 49310410, - "step": 2328 - }, - { - "epoch": 0.28004569229844284, - "flos": 18787029450360.0, - "grad_norm": 1.739583005924156, - "learning_rate": 3.3793719607572798e-06, - "loss": 1.0809, - "num_input_tokens_seen": 49331095, - "step": 2329 - }, - { - "epoch": 0.28016593518908195, - "flos": 23928650054280.0, - "grad_norm": 2.6291866521383045, - "learning_rate": 3.378807796086428e-06, - "loss": 0.9969, - "num_input_tokens_seen": 49353675, - "step": 2330 - }, - { - "epoch": 0.28028617807972106, - "flos": 10843667064960.0, - "grad_norm": 3.1054785770284767, - "learning_rate": 3.37824342225153e-06, - "loss": 0.9893, - "num_input_tokens_seen": 49369815, - "step": 2331 - }, - { - "epoch": 0.2804064209703601, - "flos": 18159438132600.0, - "grad_norm": 2.4835856401251672, - "learning_rate": 3.3776788393382006e-06, - "loss": 0.9952, - "num_input_tokens_seen": 49389015, - "step": 2332 - }, - { - "epoch": 0.2805266638609992, - "flos": 20913122153400.0, - "grad_norm": 2.942949475335906, - "learning_rate": 3.3771140474320872e-06, - "loss": 0.9921, - "num_input_tokens_seen": 49408685, - "step": 2333 - }, - { - "epoch": 0.28064690675163834, - "flos": 15243183893760.0, - "grad_norm": 2.620468770089086, - "learning_rate": 3.3765490466188664e-06, - "loss": 1.011, - "num_input_tokens_seen": 49425805, - "step": 2334 - }, - { - "epoch": 0.2807671496422774, - "flos": 14907612979200.0, - "grad_norm": 3.5181243828173034, - "learning_rate": 3.3759838369842508e-06, - "loss": 0.9483, - "num_input_tokens_seen": 49443600, - "step": 2335 - }, - { - "epoch": 0.2808873925329165, - "flos": 15274832171760.0, - "grad_norm": 2.396688911147295, - "learning_rate": 3.375418418613981e-06, - "loss": 0.9568, - "num_input_tokens_seen": 49462345, - "step": 2336 - }, - { - "epoch": 0.28100763542355556, - "flos": 11368335995040.0, - "grad_norm": 3.14752350596164, - "learning_rate": 3.374852791593831e-06, - "loss": 1.0631, - "num_input_tokens_seen": 49478265, - "step": 2337 - }, - { - "epoch": 0.28112787831419467, - "flos": 13518472909200.0, - "grad_norm": 6.507657455667341, - "learning_rate": 3.374286956009605e-06, - "loss": 0.7467, - "num_input_tokens_seen": 49496550, - "step": 2338 - }, - { - "epoch": 0.2812481212048338, - "flos": 9034650807720.0, - "grad_norm": 3.167206433789607, - "learning_rate": 3.3737209119471405e-06, - "loss": 0.9882, - "num_input_tokens_seen": 49512780, - "step": 2339 - }, - { - "epoch": 0.28136836409547283, - "flos": 11053743135600.0, - "grad_norm": 14.638634219639695, - "learning_rate": 3.373154659492306e-06, - "loss": 0.8779, - "num_input_tokens_seen": 49530640, - "step": 2340 - }, - { - "epoch": 0.28148860698611194, 
- "flos": 14143917917760.0, - "grad_norm": 2.34691457092815, - "learning_rate": 3.3725881987310016e-06, - "loss": 1.0723, - "num_input_tokens_seen": 49547895, - "step": 2341 - }, - { - "epoch": 0.28160884987675106, - "flos": 12364710244920.0, - "grad_norm": 2.3643229561063532, - "learning_rate": 3.372021529749159e-06, - "loss": 1.0967, - "num_input_tokens_seen": 49566675, - "step": 2342 - }, - { - "epoch": 0.2817290927673901, - "flos": 11917049136240.0, - "grad_norm": 3.1558595925779724, - "learning_rate": 3.3714546526327405e-06, - "loss": 1.1484, - "num_input_tokens_seen": 49584395, - "step": 2343 - }, - { - "epoch": 0.2818493356580292, - "flos": 10896354721200.0, - "grad_norm": 2.596403579981429, - "learning_rate": 3.3708875674677423e-06, - "loss": 1.095, - "num_input_tokens_seen": 49602090, - "step": 2344 - }, - { - "epoch": 0.28196957854866833, - "flos": 14488564654080.0, - "grad_norm": 5.5274917852305325, - "learning_rate": 3.37032027434019e-06, - "loss": 1.0515, - "num_input_tokens_seen": 49621330, - "step": 2345 - }, - { - "epoch": 0.2820898214393074, - "flos": 14172622686000.0, - "grad_norm": 3.190879526176714, - "learning_rate": 3.369752773336141e-06, - "loss": 1.0609, - "num_input_tokens_seen": 49640530, - "step": 2346 - }, - { - "epoch": 0.2822100643299465, - "flos": 16009147910640.0, - "grad_norm": 2.204722481388573, - "learning_rate": 3.3691850645416864e-06, - "loss": 1.0019, - "num_input_tokens_seen": 49659960, - "step": 2347 - }, - { - "epoch": 0.2823303072205856, - "flos": 8116940103480.0, - "grad_norm": 2.4653518080602823, - "learning_rate": 3.368617148042945e-06, - "loss": 1.058, - "num_input_tokens_seen": 49677350, - "step": 2348 - }, - { - "epoch": 0.28245055011122466, - "flos": 12941576246280.0, - "grad_norm": 2.5965300219537664, - "learning_rate": 3.368049023926071e-06, - "loss": 1.0743, - "num_input_tokens_seen": 49696065, - "step": 2349 - }, - { - "epoch": 0.2825707930018638, - "flos": 17503877924040.0, - "grad_norm": 1.8265539738543386, - "learning_rate": 3.3674806922772476e-06, - "loss": 1.0617, - "num_input_tokens_seen": 49716670, - "step": 2350 - }, - { - "epoch": 0.28269103589250283, - "flos": 17948472876720.0, - "grad_norm": 2.887926316269708, - "learning_rate": 3.3669121531826904e-06, - "loss": 0.9675, - "num_input_tokens_seen": 49737370, - "step": 2351 - }, - { - "epoch": 0.28281127878314194, - "flos": 13675830662040.0, - "grad_norm": 2.6716083594337494, - "learning_rate": 3.366343406728647e-06, - "loss": 1.0554, - "num_input_tokens_seen": 49756540, - "step": 2352 - }, - { - "epoch": 0.28293152167378105, - "flos": 16979454286440.0, - "grad_norm": 2.9007000176942404, - "learning_rate": 3.3657744530013946e-06, - "loss": 0.9133, - "num_input_tokens_seen": 49775495, - "step": 2353 - }, - { - "epoch": 0.2830517645644201, - "flos": 31344093384240.0, - "grad_norm": 2.5713578623177904, - "learning_rate": 3.3652052920872437e-06, - "loss": 0.94, - "num_input_tokens_seen": 49798080, - "step": 2354 - }, - { - "epoch": 0.2831720074550592, - "flos": 18972662709600.0, - "grad_norm": 2.7236234460275033, - "learning_rate": 3.3646359240725355e-06, - "loss": 1.0811, - "num_input_tokens_seen": 49816990, - "step": 2355 - }, - { - "epoch": 0.2832922503456983, - "flos": 22119204535200.0, - "grad_norm": 2.8120556798281675, - "learning_rate": 3.364066349043643e-06, - "loss": 0.9075, - "num_input_tokens_seen": 49837915, - "step": 2356 - }, - { - "epoch": 0.2834124932363374, - "flos": 14482800280800.0, - "grad_norm": 2.8093035017341856, - "learning_rate": 3.363496567086969e-06, - 
"loss": 1.0546, - "num_input_tokens_seen": 49854730, - "step": 2357 - }, - { - "epoch": 0.2835327361269765, - "flos": 28123640554800.0, - "grad_norm": 3.018248686619004, - "learning_rate": 3.3629265782889506e-06, - "loss": 0.9797, - "num_input_tokens_seen": 49876275, - "step": 2358 - }, - { - "epoch": 0.2836529790176156, - "flos": 21566689360560.0, - "grad_norm": 3.100313718365081, - "learning_rate": 3.362356382736054e-06, - "loss": 0.9286, - "num_input_tokens_seen": 49896600, - "step": 2359 - }, - { - "epoch": 0.28377322190825466, - "flos": 8929858064880.0, - "grad_norm": 3.190430434792918, - "learning_rate": 3.361785980514777e-06, - "loss": 1.1414, - "num_input_tokens_seen": 49912520, - "step": 2360 - }, - { - "epoch": 0.28389346479889377, - "flos": 12968042720640.0, - "grad_norm": 3.134892636396953, - "learning_rate": 3.361215371711649e-06, - "loss": 0.9945, - "num_input_tokens_seen": 49931335, - "step": 2361 - }, - { - "epoch": 0.2840137076895329, - "flos": 14483904096960.0, - "grad_norm": 2.2319256355312342, - "learning_rate": 3.3606445564132326e-06, - "loss": 1.0565, - "num_input_tokens_seen": 49948350, - "step": 2362 - }, - { - "epoch": 0.28413395058017193, - "flos": 14226720774000.0, - "grad_norm": 2.4850659639134776, - "learning_rate": 3.360073534706118e-06, - "loss": 1.0487, - "num_input_tokens_seen": 49965225, - "step": 2363 - }, - { - "epoch": 0.28425419347081105, - "flos": 26885756587200.0, - "grad_norm": 3.627532074653058, - "learning_rate": 3.35950230667693e-06, - "loss": 0.9885, - "num_input_tokens_seen": 49986640, - "step": 2364 - }, - { - "epoch": 0.28437443636145016, - "flos": 9768721254120.0, - "grad_norm": 3.289937462487711, - "learning_rate": 3.358930872412323e-06, - "loss": 1.0838, - "num_input_tokens_seen": 50003525, - "step": 2365 - }, - { - "epoch": 0.2844946792520892, - "flos": 16166965586880.0, - "grad_norm": 2.0979445843573714, - "learning_rate": 3.3583592319989825e-06, - "loss": 1.0389, - "num_input_tokens_seen": 50022615, - "step": 2366 - }, - { - "epoch": 0.2846149221427283, - "flos": 23115241507920.0, - "grad_norm": 6.455056850380978, - "learning_rate": 3.357787385523627e-06, - "loss": 0.9098, - "num_input_tokens_seen": 50043740, - "step": 2367 - }, - { - "epoch": 0.2847351650333674, - "flos": 20283691142040.0, - "grad_norm": 3.9066330378607206, - "learning_rate": 3.3572153330730048e-06, - "loss": 1.0638, - "num_input_tokens_seen": 50064555, - "step": 2368 - }, - { - "epoch": 0.2848554079240065, - "flos": 39890506743240.0, - "grad_norm": 0.8097450058663231, - "learning_rate": 3.3566430747338956e-06, - "loss": 0.8998, - "num_input_tokens_seen": 50119480, - "step": 2369 - }, - { - "epoch": 0.2849756508146456, - "flos": 8325268465200.0, - "grad_norm": 3.6736012398375437, - "learning_rate": 3.35607061059311e-06, - "loss": 1.0947, - "num_input_tokens_seen": 50134130, - "step": 2370 - }, - { - "epoch": 0.28509589370528465, - "flos": 17897042344440.0, - "grad_norm": 2.408935435530839, - "learning_rate": 3.3554979407374917e-06, - "loss": 0.9704, - "num_input_tokens_seen": 50155960, - "step": 2371 - }, - { - "epoch": 0.28521613659592376, - "flos": 14174063779320.0, - "grad_norm": 2.6629841431486927, - "learning_rate": 3.3549250652539134e-06, - "loss": 0.9674, - "num_input_tokens_seen": 50174775, - "step": 2372 - }, - { - "epoch": 0.2853363794865629, - "flos": 16612664355720.0, - "grad_norm": 3.53542128775989, - "learning_rate": 3.3543519842292794e-06, - "loss": 1.0476, - "num_input_tokens_seen": 50194150, - "step": 2373 - }, - { - "epoch": 0.28545662237720193, - 
"flos": 14092487385480.0, - "grad_norm": 2.4414352537339687, - "learning_rate": 3.353778697750527e-06, - "loss": 1.0661, - "num_input_tokens_seen": 50212275, - "step": 2374 - }, - { - "epoch": 0.28557686526784104, - "flos": 16979975532960.0, - "grad_norm": 2.3496452956652614, - "learning_rate": 3.353205205904622e-06, - "loss": 1.1135, - "num_input_tokens_seen": 50231105, - "step": 2375 - }, - { - "epoch": 0.28569710815848015, - "flos": 32079850216440.0, - "grad_norm": 3.1552247752360185, - "learning_rate": 3.3526315087785637e-06, - "loss": 0.953, - "num_input_tokens_seen": 50251940, - "step": 2376 - }, - { - "epoch": 0.2858173510491192, - "flos": 19099997247120.0, - "grad_norm": 3.4699509749673347, - "learning_rate": 3.3520576064593805e-06, - "loss": 1.034, - "num_input_tokens_seen": 50271615, - "step": 2377 - }, - { - "epoch": 0.2859375939397583, - "flos": 16455214618200.0, - "grad_norm": 2.352547778172419, - "learning_rate": 3.3514834990341337e-06, - "loss": 1.0493, - "num_input_tokens_seen": 50291660, - "step": 2378 - }, - { - "epoch": 0.2860578368303974, - "flos": 8535927105480.0, - "grad_norm": 4.686846471369785, - "learning_rate": 3.3509091865899144e-06, - "loss": 1.1773, - "num_input_tokens_seen": 50306570, - "step": 2379 - }, - { - "epoch": 0.2861780797210365, - "flos": 14147597304960.0, - "grad_norm": 2.952358731759007, - "learning_rate": 3.350334669213846e-06, - "loss": 0.9331, - "num_input_tokens_seen": 50323695, - "step": 2380 - }, - { - "epoch": 0.2862983226116756, - "flos": 19627548363840.0, - "grad_norm": 4.576917886268715, - "learning_rate": 3.3497599469930816e-06, - "loss": 1.0015, - "num_input_tokens_seen": 50341625, - "step": 2381 - }, - { - "epoch": 0.28641856550231465, - "flos": 15668241884640.0, - "grad_norm": 6.625789091512427, - "learning_rate": 3.349185020014807e-06, - "loss": 1.0559, - "num_input_tokens_seen": 50358610, - "step": 2382 - }, - { - "epoch": 0.28653880839295376, - "flos": 15901197027120.0, - "grad_norm": 3.0642444546843786, - "learning_rate": 3.348609888366237e-06, - "loss": 0.9771, - "num_input_tokens_seen": 50377345, - "step": 2383 - }, - { - "epoch": 0.28665905128359287, - "flos": 16612664355720.0, - "grad_norm": 4.863574602096307, - "learning_rate": 3.348034552134619e-06, - "loss": 0.8539, - "num_input_tokens_seen": 50396470, - "step": 2384 - }, - { - "epoch": 0.2867792941742319, - "flos": 14825116738560.0, - "grad_norm": 2.4890583926715952, - "learning_rate": 3.3474590114072316e-06, - "loss": 1.0738, - "num_input_tokens_seen": 50414190, - "step": 2385 - }, - { - "epoch": 0.28689953706487104, - "flos": 14668832140320.0, - "grad_norm": 2.24647425869989, - "learning_rate": 3.3468832662713836e-06, - "loss": 1.0523, - "num_input_tokens_seen": 50432155, - "step": 2386 - }, - { - "epoch": 0.28701977995551015, - "flos": 8927926386600.0, - "grad_norm": 2.500465254406642, - "learning_rate": 3.346307316814415e-06, - "loss": 1.0648, - "num_input_tokens_seen": 50447045, - "step": 2387 - }, - { - "epoch": 0.2871400228461492, - "flos": 15092295730080.0, - "grad_norm": 3.1437238142314627, - "learning_rate": 3.3457311631236965e-06, - "loss": 0.9682, - "num_input_tokens_seen": 50467750, - "step": 2388 - }, - { - "epoch": 0.2872602657367883, - "flos": 17871281085960.0, - "grad_norm": 3.0977827818380277, - "learning_rate": 3.345154805286631e-06, - "loss": 1.0687, - "num_input_tokens_seen": 50487730, - "step": 2389 - }, - { - "epoch": 0.2873805086274274, - "flos": 11781068038800.0, - "grad_norm": 3.3768524454970885, - "learning_rate": 3.344578243390651e-06, - "loss": 
0.9816, - "num_input_tokens_seen": 50503010, - "step": 2390 - }, - { - "epoch": 0.2875007515180665, - "flos": 12337967816520.0, - "grad_norm": 3.642443157896675, - "learning_rate": 3.3440014775232206e-06, - "loss": 1.0041, - "num_input_tokens_seen": 50520785, - "step": 2391 - }, - { - "epoch": 0.2876209944087056, - "flos": 16659342346200.0, - "grad_norm": 3.262168405569956, - "learning_rate": 3.343424507771834e-06, - "loss": 0.9499, - "num_input_tokens_seen": 50538715, - "step": 2392 - }, - { - "epoch": 0.2877412372993447, - "flos": 9689965723800.0, - "grad_norm": 3.0110250384082473, - "learning_rate": 3.342847334224018e-06, - "loss": 1.0999, - "num_input_tokens_seen": 50555835, - "step": 2393 - }, - { - "epoch": 0.28786148018998375, - "flos": 41566111926000.0, - "grad_norm": 0.873553954466456, - "learning_rate": 3.342269956967329e-06, - "loss": 0.8948, - "num_input_tokens_seen": 50617460, - "step": 2394 - }, - { - "epoch": 0.28798172308062286, - "flos": 16660078223640.0, - "grad_norm": 3.1576660303484134, - "learning_rate": 3.341692376089355e-06, - "loss": 0.9423, - "num_input_tokens_seen": 50632735, - "step": 2395 - }, - { - "epoch": 0.288101965971262, - "flos": 17863401065040.0, - "grad_norm": 3.8471917351265676, - "learning_rate": 3.3411145916777146e-06, - "loss": 1.0544, - "num_input_tokens_seen": 50646615, - "step": 2396 - }, - { - "epoch": 0.28822220886190103, - "flos": 11498859334800.0, - "grad_norm": 3.439221592593863, - "learning_rate": 3.3405366038200566e-06, - "loss": 1.1247, - "num_input_tokens_seen": 50665270, - "step": 2397 - }, - { - "epoch": 0.28834245175254014, - "flos": 17451864822120.0, - "grad_norm": 3.6601631547176736, - "learning_rate": 3.3399584126040617e-06, - "loss": 1.0711, - "num_input_tokens_seen": 50684490, - "step": 2398 - }, - { - "epoch": 0.2884626946431792, - "flos": 17477503434360.0, - "grad_norm": 2.424601933163825, - "learning_rate": 3.339380018117441e-06, - "loss": 1.129, - "num_input_tokens_seen": 50705045, - "step": 2399 - }, - { - "epoch": 0.2885829375338183, - "flos": 11708812759200.0, - "grad_norm": 3.9649450129463935, - "learning_rate": 3.3388014204479366e-06, - "loss": 0.999, - "num_input_tokens_seen": 50722570, - "step": 2400 - }, - { - "epoch": 0.2887031804244574, - "flos": 17110897473000.0, - "grad_norm": 2.8592704378614857, - "learning_rate": 3.338222619683321e-06, - "loss": 1.1412, - "num_input_tokens_seen": 50742255, - "step": 2401 - }, - { - "epoch": 0.2888234233150965, - "flos": 16585768619520.0, - "grad_norm": 4.213713627887139, - "learning_rate": 3.337643615911398e-06, - "loss": 0.9505, - "num_input_tokens_seen": 50761600, - "step": 2402 - }, - { - "epoch": 0.2889436662057356, - "flos": 15825476991240.0, - "grad_norm": 3.297081963429447, - "learning_rate": 3.3370644092200026e-06, - "loss": 1.0098, - "num_input_tokens_seen": 50778595, - "step": 2403 - }, - { - "epoch": 0.2890639090963747, - "flos": 15354262256400.0, - "grad_norm": 3.605774243473579, - "learning_rate": 3.3364849996969985e-06, - "loss": 1.0156, - "num_input_tokens_seen": 50798335, - "step": 2404 - }, - { - "epoch": 0.28918415198701375, - "flos": 20362446672360.0, - "grad_norm": 2.8459087184976606, - "learning_rate": 3.335905387430283e-06, - "loss": 1.0795, - "num_input_tokens_seen": 50819490, - "step": 2405 - }, - { - "epoch": 0.28930439487765286, - "flos": 15589731646800.0, - "grad_norm": 2.7854959887953474, - "learning_rate": 3.335325572507782e-06, - "loss": 1.0476, - "num_input_tokens_seen": 50839710, - "step": 2406 - }, - { - "epoch": 0.28942463776829197, - "flos": 
13676137277640.0, - "grad_norm": 4.073029473475103, - "learning_rate": 3.3347455550174537e-06, - "loss": 0.966, - "num_input_tokens_seen": 50858770, - "step": 2407 - }, - { - "epoch": 0.289544880658931, - "flos": 10343839546560.0, - "grad_norm": 4.434579022198835, - "learning_rate": 3.3341653350472864e-06, - "loss": 0.9092, - "num_input_tokens_seen": 50875320, - "step": 2408 - }, - { - "epoch": 0.28966512354957014, - "flos": 20388545208000.0, - "grad_norm": 3.3925715190008368, - "learning_rate": 3.333584912685298e-06, - "loss": 0.9274, - "num_input_tokens_seen": 50893660, - "step": 2409 - }, - { - "epoch": 0.28978536644020925, - "flos": 46331345530920.0, - "grad_norm": 0.8369322588977761, - "learning_rate": 3.3330042880195385e-06, - "loss": 0.8184, - "num_input_tokens_seen": 50947730, - "step": 2410 - }, - { - "epoch": 0.2899056093308483, - "flos": 13203696080400.0, - "grad_norm": 2.536178594329509, - "learning_rate": 3.3324234611380888e-06, - "loss": 1.0123, - "num_input_tokens_seen": 50966180, - "step": 2411 - }, - { - "epoch": 0.2900258522214874, - "flos": 16271850314400.0, - "grad_norm": 3.312083955944593, - "learning_rate": 3.3318424321290596e-06, - "loss": 1.0494, - "num_input_tokens_seen": 50985615, - "step": 2412 - }, - { - "epoch": 0.2901460951121265, - "flos": 50923093402440.0, - "grad_norm": 0.83795638815578, - "learning_rate": 3.3312612010805917e-06, - "loss": 0.8764, - "num_input_tokens_seen": 51044910, - "step": 2413 - }, - { - "epoch": 0.2902663380027656, - "flos": 22932030511920.0, - "grad_norm": 3.0999133436335136, - "learning_rate": 3.330679768080858e-06, - "loss": 0.9344, - "num_input_tokens_seen": 51068515, - "step": 2414 - }, - { - "epoch": 0.2903865808934047, - "flos": 20913030168720.0, - "grad_norm": 18.8013408426226, - "learning_rate": 3.3300981332180627e-06, - "loss": 1.0625, - "num_input_tokens_seen": 51087440, - "step": 2415 - }, - { - "epoch": 0.29050682378404374, - "flos": 12099646901040.0, - "grad_norm": 2.840731517443917, - "learning_rate": 3.3295162965804373e-06, - "loss": 1.0369, - "num_input_tokens_seen": 51105655, - "step": 2416 - }, - { - "epoch": 0.29062706667468285, - "flos": 12652867291560.0, - "grad_norm": 3.3767302987321886, - "learning_rate": 3.328934258256247e-06, - "loss": 0.9973, - "num_input_tokens_seen": 51123440, - "step": 2417 - }, - { - "epoch": 0.29074730956532197, - "flos": 17267948610240.0, - "grad_norm": 2.277047453722989, - "learning_rate": 3.3283520183337856e-06, - "loss": 0.8936, - "num_input_tokens_seen": 51142865, - "step": 2418 - }, - { - "epoch": 0.290867552455961, - "flos": 15873841367520.0, - "grad_norm": 2.196421495840481, - "learning_rate": 3.3277695769013797e-06, - "loss": 0.9234, - "num_input_tokens_seen": 51162030, - "step": 2419 - }, - { - "epoch": 0.29098779534660013, - "flos": 16482079692840.0, - "grad_norm": 2.878453543898585, - "learning_rate": 3.327186934047385e-06, - "loss": 1.0027, - "num_input_tokens_seen": 51180445, - "step": 2420 - }, - { - "epoch": 0.29110803823723924, - "flos": 10817323236840.0, - "grad_norm": 5.210196789852496, - "learning_rate": 3.3266040898601877e-06, - "loss": 0.8686, - "num_input_tokens_seen": 51198000, - "step": 2421 - }, - { - "epoch": 0.2912282811278783, - "flos": 16056531117000.0, - "grad_norm": 3.001186930777428, - "learning_rate": 3.3260210444282045e-06, - "loss": 1.0003, - "num_input_tokens_seen": 51215675, - "step": 2422 - }, - { - "epoch": 0.2913485240185174, - "flos": 17424570485640.0, - "grad_norm": 3.678128764783412, - "learning_rate": 3.325437797839883e-06, - "loss": 0.9522, 
- "num_input_tokens_seen": 51233765, - "step": 2423 - }, - { - "epoch": 0.2914687669091565, - "flos": 12705156347520.0, - "grad_norm": 3.429124095060745, - "learning_rate": 3.3248543501837015e-06, - "loss": 0.9629, - "num_input_tokens_seen": 51250690, - "step": 2424 - }, - { - "epoch": 0.2915890097997956, - "flos": 16010006434320.0, - "grad_norm": 2.758728221121148, - "learning_rate": 3.3242707015481684e-06, - "loss": 1.0044, - "num_input_tokens_seen": 51270345, - "step": 2425 - }, - { - "epoch": 0.2917092526904347, - "flos": 9768843900360.0, - "grad_norm": 2.6675627414864764, - "learning_rate": 3.323686852021823e-06, - "loss": 1.0356, - "num_input_tokens_seen": 51287575, - "step": 2426 - }, - { - "epoch": 0.2918294955810738, - "flos": 16114553884680.0, - "grad_norm": 4.939281673329264, - "learning_rate": 3.323102801693235e-06, - "loss": 1.0237, - "num_input_tokens_seen": 51306060, - "step": 2427 - }, - { - "epoch": 0.29194973847171285, - "flos": 16662837764040.0, - "grad_norm": 3.034481096229875, - "learning_rate": 3.322518550651003e-06, - "loss": 1.0173, - "num_input_tokens_seen": 51325090, - "step": 2428 - }, - { - "epoch": 0.29206998136235196, - "flos": 15564246342360.0, - "grad_norm": 2.6324946271039105, - "learning_rate": 3.3219340989837586e-06, - "loss": 1.034, - "num_input_tokens_seen": 51344800, - "step": 2429 - }, - { - "epoch": 0.292190224252991, - "flos": 16502567163000.0, - "grad_norm": 2.5547912763230136, - "learning_rate": 3.3213494467801625e-06, - "loss": 1.0267, - "num_input_tokens_seen": 51363695, - "step": 2430 - }, - { - "epoch": 0.2923104671436301, - "flos": 14724555291000.0, - "grad_norm": 4.681729439603948, - "learning_rate": 3.3207645941289063e-06, - "loss": 0.9417, - "num_input_tokens_seen": 51381760, - "step": 2431 - }, - { - "epoch": 0.29243071003426924, - "flos": 25553811422760.0, - "grad_norm": 2.7964899234900518, - "learning_rate": 3.320179541118711e-06, - "loss": 1.0293, - "num_input_tokens_seen": 51403980, - "step": 2432 - }, - { - "epoch": 0.2925509529249083, - "flos": 41567736988680.0, - "grad_norm": 1.0337400717760163, - "learning_rate": 3.3195942878383293e-06, - "loss": 0.8749, - "num_input_tokens_seen": 51459800, - "step": 2433 - }, - { - "epoch": 0.2926711958155474, - "flos": 15196168626120.0, - "grad_norm": 5.650710116972008, - "learning_rate": 3.319008834376543e-06, - "loss": 1.0022, - "num_input_tokens_seen": 51479210, - "step": 2434 - }, - { - "epoch": 0.2927914387061865, - "flos": 16481773077240.0, - "grad_norm": 4.830943053686709, - "learning_rate": 3.3184231808221654e-06, - "loss": 1.1114, - "num_input_tokens_seen": 51493255, - "step": 2435 - }, - { - "epoch": 0.29291168159682557, - "flos": 15956889516240.0, - "grad_norm": 3.1981078514249175, - "learning_rate": 3.3178373272640394e-06, - "loss": 0.8519, - "num_input_tokens_seen": 51512070, - "step": 2436 - }, - { - "epoch": 0.2930319244874647, - "flos": 15032371945680.0, - "grad_norm": 3.3858982310990777, - "learning_rate": 3.3172512737910387e-06, - "loss": 1.0905, - "num_input_tokens_seen": 51529300, - "step": 2437 - }, - { - "epoch": 0.2931521673781038, - "flos": 22354765910280.0, - "grad_norm": 3.0202422692845916, - "learning_rate": 3.3166650204920674e-06, - "loss": 1.1027, - "num_input_tokens_seen": 51550190, - "step": 2438 - }, - { - "epoch": 0.29327241026874284, - "flos": 17210508412200.0, - "grad_norm": 2.0470316586713513, - "learning_rate": 3.316078567456059e-06, - "loss": 1.0486, - "num_input_tokens_seen": 51567750, - "step": 2439 - }, - { - "epoch": 0.29339265315938196, - "flos": 
17241114197160.0, - "grad_norm": 3.039941028223759, - "learning_rate": 3.3154919147719786e-06, - "loss": 0.9914, - "num_input_tokens_seen": 51588485, - "step": 2440 - }, - { - "epoch": 0.29351289605002107, - "flos": 11997797667960.0, - "grad_norm": 5.383382958024991, - "learning_rate": 3.31490506252882e-06, - "loss": 1.0971, - "num_input_tokens_seen": 51607585, - "step": 2441 - }, - { - "epoch": 0.2936331389406601, - "flos": 14069363021160.0, - "grad_norm": 2.888073098913947, - "learning_rate": 3.31431801081561e-06, - "loss": 1.0766, - "num_input_tokens_seen": 51626240, - "step": 2442 - }, - { - "epoch": 0.29375338183129923, - "flos": 51149021499600.0, - "grad_norm": 0.9406307080878248, - "learning_rate": 3.313730759721402e-06, - "loss": 0.9181, - "num_input_tokens_seen": 51688890, - "step": 2443 - }, - { - "epoch": 0.29387362472193834, - "flos": 15668701808040.0, - "grad_norm": 3.6787443794936014, - "learning_rate": 3.313143309335282e-06, - "loss": 1.085, - "num_input_tokens_seen": 51707100, - "step": 2444 - }, - { - "epoch": 0.2939938676125774, - "flos": 16324047385680.0, - "grad_norm": 4.16522326581973, - "learning_rate": 3.3125556597463665e-06, - "loss": 1.0632, - "num_input_tokens_seen": 51726125, - "step": 2445 - }, - { - "epoch": 0.2941141105032165, - "flos": 22355501787720.0, - "grad_norm": 2.8484264111996174, - "learning_rate": 3.311967811043801e-06, - "loss": 0.8834, - "num_input_tokens_seen": 51747765, - "step": 2446 - }, - { - "epoch": 0.29423435339385556, - "flos": 16507871612880.0, - "grad_norm": 8.393377836360415, - "learning_rate": 3.3113797633167617e-06, - "loss": 1.0238, - "num_input_tokens_seen": 51765780, - "step": 2447 - }, - { - "epoch": 0.2943545962844947, - "flos": 19124348073840.0, - "grad_norm": 4.875428792240035, - "learning_rate": 3.310791516654455e-06, - "loss": 0.9076, - "num_input_tokens_seen": 51782560, - "step": 2448 - }, - { - "epoch": 0.2944748391751338, - "flos": 14358869176440.0, - "grad_norm": 3.847683266481928, - "learning_rate": 3.3102030711461177e-06, - "loss": 1.0206, - "num_input_tokens_seen": 51801855, - "step": 2449 - }, - { - "epoch": 0.29459508206577284, - "flos": 11289335172240.0, - "grad_norm": 3.5978060182758047, - "learning_rate": 3.3096144268810156e-06, - "loss": 0.904, - "num_input_tokens_seen": 51820335, - "step": 2450 - }, - { - "epoch": 0.29471532495641195, - "flos": 14488932592800.0, - "grad_norm": 3.284180558652873, - "learning_rate": 3.3090255839484462e-06, - "loss": 0.9505, - "num_input_tokens_seen": 51838050, - "step": 2451 - }, - { - "epoch": 0.29483556784705106, - "flos": 14462864718720.0, - "grad_norm": 7.864959855992716, - "learning_rate": 3.3084365424377366e-06, - "loss": 1.076, - "num_input_tokens_seen": 51856535, - "step": 2452 - }, - { - "epoch": 0.2949558107376901, - "flos": 49090676826840.0, - "grad_norm": 0.7944053077468575, - "learning_rate": 3.307847302438245e-06, - "loss": 0.8147, - "num_input_tokens_seen": 51910235, - "step": 2453 - }, - { - "epoch": 0.2950760536283292, - "flos": 11394373207560.0, - "grad_norm": 10.66443272880682, - "learning_rate": 3.3072578640393562e-06, - "loss": 0.9927, - "num_input_tokens_seen": 51927290, - "step": 2454 - }, - { - "epoch": 0.29519629651896834, - "flos": 14539473939840.0, - "grad_norm": 13.932709422280475, - "learning_rate": 3.3066682273304886e-06, - "loss": 1.0178, - "num_input_tokens_seen": 51944655, - "step": 2455 - }, - { - "epoch": 0.2953165394096074, - "flos": 13413434873880.0, - "grad_norm": 7.176526790424575, - "learning_rate": 3.3060783924010904e-06, - "loss": 0.9932, 
- "num_input_tokens_seen": 51962300, - "step": 2456 - }, - { - "epoch": 0.2954367823002465, - "flos": 14640004725840.0, - "grad_norm": 3.6147468522578734, - "learning_rate": 3.3054883593406387e-06, - "loss": 1.084, - "num_input_tokens_seen": 51976770, - "step": 2457 - }, - { - "epoch": 0.2955570251908856, - "flos": 22223751985560.0, - "grad_norm": 3.7356696657851955, - "learning_rate": 3.3048981282386404e-06, - "loss": 0.8632, - "num_input_tokens_seen": 51997800, - "step": 2458 - }, - { - "epoch": 0.29567726808152467, - "flos": 15377846544120.0, - "grad_norm": 7.341143985751216, - "learning_rate": 3.304307699184634e-06, - "loss": 1.052, - "num_input_tokens_seen": 52016110, - "step": 2459 - }, - { - "epoch": 0.2957975109721638, - "flos": 17242187351760.0, - "grad_norm": 2.8974403660901853, - "learning_rate": 3.3037170722681866e-06, - "loss": 1.0236, - "num_input_tokens_seen": 52036665, - "step": 2460 - }, - { - "epoch": 0.29591775386280283, - "flos": 9506080173480.0, - "grad_norm": 3.677624036822331, - "learning_rate": 3.3031262475788956e-06, - "loss": 0.9027, - "num_input_tokens_seen": 52053325, - "step": 2461 - }, - { - "epoch": 0.29603799675344195, - "flos": 12575062269600.0, - "grad_norm": 7.037366859707914, - "learning_rate": 3.3025352252063897e-06, - "loss": 0.9665, - "num_input_tokens_seen": 52071740, - "step": 2462 - }, - { - "epoch": 0.29615823964408106, - "flos": 16192880153160.0, - "grad_norm": 4.498936209506856, - "learning_rate": 3.3019440052403252e-06, - "loss": 0.9753, - "num_input_tokens_seen": 52091325, - "step": 2463 - }, - { - "epoch": 0.2962784825347201, - "flos": 16717794375720.0, - "grad_norm": 3.481154933980595, - "learning_rate": 3.30135258777039e-06, - "loss": 0.9335, - "num_input_tokens_seen": 52110415, - "step": 2464 - }, - { - "epoch": 0.2963987254253592, - "flos": 11578166773200.0, - "grad_norm": 3.7155255354906136, - "learning_rate": 3.3007609728863024e-06, - "loss": 0.926, - "num_input_tokens_seen": 52128225, - "step": 2465 - }, - { - "epoch": 0.29651896831599833, - "flos": 23874122704440.0, - "grad_norm": 3.685825937740787, - "learning_rate": 3.300169160677809e-06, - "loss": 0.9561, - "num_input_tokens_seen": 52151860, - "step": 2466 - }, - { - "epoch": 0.2966392112066374, - "flos": 16926950599560.0, - "grad_norm": 3.8820920463863566, - "learning_rate": 3.2995771512346878e-06, - "loss": 0.9751, - "num_input_tokens_seen": 52169930, - "step": 2467 - }, - { - "epoch": 0.2967594540972765, - "flos": 14147965243680.0, - "grad_norm": 5.290510376681665, - "learning_rate": 3.298984944646746e-06, - "loss": 0.9516, - "num_input_tokens_seen": 52188330, - "step": 2468 - }, - { - "epoch": 0.2968796969879156, - "flos": 16874876174520.0, - "grad_norm": 6.083603456674869, - "learning_rate": 3.298392541003822e-06, - "loss": 1.0504, - "num_input_tokens_seen": 52207455, - "step": 2469 - }, - { - "epoch": 0.29699993987855466, - "flos": 16271819652840.0, - "grad_norm": 3.5378332297879718, - "learning_rate": 3.2977999403957806e-06, - "loss": 1.1185, - "num_input_tokens_seen": 52225935, - "step": 2470 - }, - { - "epoch": 0.2971201827691938, - "flos": 24132440505120.0, - "grad_norm": 186.22019476281298, - "learning_rate": 3.2972071429125207e-06, - "loss": 0.8943, - "num_input_tokens_seen": 52246875, - "step": 2471 - }, - { - "epoch": 0.2972404256598329, - "flos": 15668579161800.0, - "grad_norm": 6.320546073982836, - "learning_rate": 3.2966141486439682e-06, - "loss": 1.1081, - "num_input_tokens_seen": 52265785, - "step": 2472 - }, - { - "epoch": 0.29736066855047194, - "flos": 
22800955264080.0, - "grad_norm": 5.799497109019824, - "learning_rate": 3.29602095768008e-06, - "loss": 0.881, - "num_input_tokens_seen": 52286020, - "step": 2473 - }, - { - "epoch": 0.29748091144111105, - "flos": 23901754318080.0, - "grad_norm": 6.744000912613089, - "learning_rate": 3.2954275701108437e-06, - "loss": 0.8695, - "num_input_tokens_seen": 52306920, - "step": 2474 - }, - { - "epoch": 0.29760115433175016, - "flos": 29488245828720.0, - "grad_norm": 4.399960404998181, - "learning_rate": 3.294833986026275e-06, - "loss": 0.913, - "num_input_tokens_seen": 52329880, - "step": 2475 - }, - { - "epoch": 0.2977213972223892, - "flos": 17420676467520.0, - "grad_norm": 3.4536799334481816, - "learning_rate": 3.29424020551642e-06, - "loss": 1.0701, - "num_input_tokens_seen": 52348235, - "step": 2476 - }, - { - "epoch": 0.2978416401130283, - "flos": 15115328109720.0, - "grad_norm": 10.125048225193794, - "learning_rate": 3.2936462286713546e-06, - "loss": 0.9382, - "num_input_tokens_seen": 52366305, - "step": 2477 - }, - { - "epoch": 0.2979618830036674, - "flos": 18339981572880.0, - "grad_norm": 3.0855372688442375, - "learning_rate": 3.2930520555811846e-06, - "loss": 0.9952, - "num_input_tokens_seen": 52385650, - "step": 2478 - }, - { - "epoch": 0.2980821258943065, - "flos": 16690959962640.0, - "grad_norm": 4.308694902271946, - "learning_rate": 3.292457686336046e-06, - "loss": 1.0201, - "num_input_tokens_seen": 52404690, - "step": 2479 - }, - { - "epoch": 0.2982023687849456, - "flos": 49954621172160.0, - "grad_norm": 0.8319870126632175, - "learning_rate": 3.291863121026105e-06, - "loss": 0.8813, - "num_input_tokens_seen": 52468190, - "step": 2480 - }, - { - "epoch": 0.29832261167558466, - "flos": 21253874871600.0, - "grad_norm": 4.922304746243631, - "learning_rate": 3.2912683597415547e-06, - "loss": 0.9934, - "num_input_tokens_seen": 52491995, - "step": 2481 - }, - { - "epoch": 0.29844285456622377, - "flos": 24188838210120.0, - "grad_norm": 3.6047213059631322, - "learning_rate": 3.2906734025726213e-06, - "loss": 0.9984, - "num_input_tokens_seen": 52510980, - "step": 2482 - }, - { - "epoch": 0.2985630974568629, - "flos": 16978197162480.0, - "grad_norm": 5.232283171395538, - "learning_rate": 3.290078249609559e-06, - "loss": 1.0926, - "num_input_tokens_seen": 52530120, - "step": 2483 - }, - { - "epoch": 0.29868334034750194, - "flos": 15485153534880.0, - "grad_norm": 3.7200920183839727, - "learning_rate": 3.2894829009426514e-06, - "loss": 1.0887, - "num_input_tokens_seen": 52547675, - "step": 2484 - }, - { - "epoch": 0.29880358323814105, - "flos": 18264690798840.0, - "grad_norm": 5.1089020023082705, - "learning_rate": 3.288887356662213e-06, - "loss": 0.9978, - "num_input_tokens_seen": 52568730, - "step": 2485 - }, - { - "epoch": 0.29892382612878016, - "flos": 50852340539280.0, - "grad_norm": 0.7884296107492806, - "learning_rate": 3.288291616858588e-06, - "loss": 0.8423, - "num_input_tokens_seen": 52623840, - "step": 2486 - }, - { - "epoch": 0.2990440690194192, - "flos": 18130947995280.0, - "grad_norm": 3.1943378780852405, - "learning_rate": 3.287695681622149e-06, - "loss": 1.0039, - "num_input_tokens_seen": 52642910, - "step": 2487 - }, - { - "epoch": 0.2991643119100583, - "flos": 16874477574240.0, - "grad_norm": 2.7178513368057957, - "learning_rate": 3.2870995510432982e-06, - "loss": 1.0381, - "num_input_tokens_seen": 52661110, - "step": 2488 - }, - { - "epoch": 0.29928455480069743, - "flos": 19522970251920.0, - "grad_norm": 4.375877728289987, - "learning_rate": 3.2865032252124697e-06, - "loss": 
0.9898, - "num_input_tokens_seen": 52681345, - "step": 2489 - }, - { - "epoch": 0.2994047976913365, - "flos": 24032952212160.0, - "grad_norm": 3.1760984968483155, - "learning_rate": 3.2859067042201243e-06, - "loss": 1.0043, - "num_input_tokens_seen": 52703105, - "step": 2490 - }, - { - "epoch": 0.2995250405819756, - "flos": 11866477127640.0, - "grad_norm": 4.546383336282774, - "learning_rate": 3.2853099881567544e-06, - "loss": 1.0001, - "num_input_tokens_seen": 52721225, - "step": 2491 - }, - { - "epoch": 0.29964528347261465, - "flos": 16321962399600.0, - "grad_norm": 5.629010275647897, - "learning_rate": 3.284713077112881e-06, - "loss": 1.0115, - "num_input_tokens_seen": 52740375, - "step": 2492 - }, - { - "epoch": 0.29976552636325376, - "flos": 11990438893560.0, - "grad_norm": 6.525733479750032, - "learning_rate": 3.284115971179056e-06, - "loss": 1.064, - "num_input_tokens_seen": 52754125, - "step": 2493 - }, - { - "epoch": 0.2998857692538929, - "flos": 12076767829200.0, - "grad_norm": 4.088110613929573, - "learning_rate": 3.283518670445859e-06, - "loss": 1.0212, - "num_input_tokens_seen": 52771755, - "step": 2494 - }, - { - "epoch": 0.30000601214453193, - "flos": 49291702189200.0, - "grad_norm": 3.3026862972827553, - "learning_rate": 3.2829211750038995e-06, - "loss": 0.8191, - "num_input_tokens_seen": 52840105, - "step": 2495 - }, - { - "epoch": 0.30012625503517104, - "flos": 12469686957120.0, - "grad_norm": 3.7499331150447963, - "learning_rate": 3.2823234849438183e-06, - "loss": 1.1052, - "num_input_tokens_seen": 52857860, - "step": 2496 - }, - { - "epoch": 0.30024649792581015, - "flos": 15092479699440.0, - "grad_norm": 10.527083571692144, - "learning_rate": 3.2817256003562836e-06, - "loss": 0.9624, - "num_input_tokens_seen": 52877955, - "step": 2497 - }, - { - "epoch": 0.3003667408164492, - "flos": 16350575183160.0, - "grad_norm": 3.3960619110377057, - "learning_rate": 3.281127521331995e-06, - "loss": 0.8898, - "num_input_tokens_seen": 52898855, - "step": 2498 - }, - { - "epoch": 0.3004869837070883, - "flos": 45986085563400.0, - "grad_norm": 0.880450033075238, - "learning_rate": 3.2805292479616798e-06, - "loss": 0.8741, - "num_input_tokens_seen": 52957440, - "step": 2499 - }, - { - "epoch": 0.30060722659772743, - "flos": 18682727292480.0, - "grad_norm": 7.711294426817105, - "learning_rate": 3.2799307803360955e-06, - "loss": 1.131, - "num_input_tokens_seen": 52973090, - "step": 2500 - }, - { - "epoch": 0.3007274694883665, - "flos": 17764311372360.0, - "grad_norm": 2.7874859967756715, - "learning_rate": 3.27933211854603e-06, - "loss": 1.0438, - "num_input_tokens_seen": 52991865, - "step": 2501 - }, - { - "epoch": 0.3008477123790056, - "flos": 12076246582680.0, - "grad_norm": 4.999935912675219, - "learning_rate": 3.278733262682299e-06, - "loss": 1.0899, - "num_input_tokens_seen": 53009440, - "step": 2502 - }, - { - "epoch": 0.3009679552696447, - "flos": 15274678863960.0, - "grad_norm": 5.484498807808382, - "learning_rate": 3.2781342128357484e-06, - "loss": 1.0355, - "num_input_tokens_seen": 53028515, - "step": 2503 - }, - { - "epoch": 0.30108819816028376, - "flos": 15006028117560.0, - "grad_norm": 6.321772346662551, - "learning_rate": 3.2775349690972547e-06, - "loss": 1.0193, - "num_input_tokens_seen": 53042385, - "step": 2504 - }, - { - "epoch": 0.30120844105092287, - "flos": 50940509168520.0, - "grad_norm": 0.7950045755842479, - "learning_rate": 3.276935531557722e-06, - "loss": 0.7929, - "num_input_tokens_seen": 53107325, - "step": 2505 - }, - { - "epoch": 0.301328683941562, - "flos": 
14382024202320.0, - "grad_norm": 6.846395738856841, - "learning_rate": 3.2763359003080837e-06, - "loss": 1.0322, - "num_input_tokens_seen": 53124000, - "step": 2506 - }, - { - "epoch": 0.30144892683220104, - "flos": 50596291694040.0, - "grad_norm": 0.9081138649013, - "learning_rate": 3.2757360754393047e-06, - "loss": 0.9086, - "num_input_tokens_seen": 53187790, - "step": 2507 - }, - { - "epoch": 0.30156916972284015, - "flos": 16219223981280.0, - "grad_norm": 9.310590100133037, - "learning_rate": 3.2751360570423767e-06, - "loss": 0.8452, - "num_input_tokens_seen": 53205895, - "step": 2508 - }, - { - "epoch": 0.3016894126134792, - "flos": 21306562527840.0, - "grad_norm": 4.880501015380779, - "learning_rate": 3.2745358452083236e-06, - "loss": 0.9842, - "num_input_tokens_seen": 53228515, - "step": 2509 - }, - { - "epoch": 0.3018096555041183, - "flos": 15303843555600.0, - "grad_norm": 4.863146725674587, - "learning_rate": 3.2739354400281955e-06, - "loss": 1.0445, - "num_input_tokens_seen": 53249455, - "step": 2510 - }, - { - "epoch": 0.3019298983947574, - "flos": 42325269076560.0, - "grad_norm": 0.8710295649261648, - "learning_rate": 3.2733348415930744e-06, - "loss": 0.9195, - "num_input_tokens_seen": 53311045, - "step": 2511 - }, - { - "epoch": 0.3020501412853965, - "flos": 24557743788480.0, - "grad_norm": 9.13500511666224, - "learning_rate": 3.27273404999407e-06, - "loss": 1.0308, - "num_input_tokens_seen": 53332985, - "step": 2512 - }, - { - "epoch": 0.3021703841760356, - "flos": 50854425525360.0, - "grad_norm": 0.8165144050177755, - "learning_rate": 3.272133065322322e-06, - "loss": 0.8568, - "num_input_tokens_seen": 53390975, - "step": 2513 - }, - { - "epoch": 0.3022906270666747, - "flos": 15278204943360.0, - "grad_norm": 7.9660146768345195, - "learning_rate": 3.271531887669e-06, - "loss": 1.0114, - "num_input_tokens_seen": 53410755, - "step": 2514 - }, - { - "epoch": 0.30241086995731375, - "flos": 22191521137920.0, - "grad_norm": 6.1108178193582825, - "learning_rate": 3.2709305171253015e-06, - "loss": 0.8685, - "num_input_tokens_seen": 53430595, - "step": 2515 - }, - { - "epoch": 0.30253111284795287, - "flos": 16715525420280.0, - "grad_norm": 3.9703966464358817, - "learning_rate": 3.2703289537824536e-06, - "loss": 1.0053, - "num_input_tokens_seen": 53450115, - "step": 2516 - }, - { - "epoch": 0.302651355738592, - "flos": 13274908866960.0, - "grad_norm": 5.894787842845424, - "learning_rate": 3.269727197731714e-06, - "loss": 1.012, - "num_input_tokens_seen": 53462600, - "step": 2517 - }, - { - "epoch": 0.30277159862923103, - "flos": 15930515026560.0, - "grad_norm": 4.150814776120956, - "learning_rate": 3.269125249064367e-06, - "loss": 1.0056, - "num_input_tokens_seen": 53482015, - "step": 2518 - }, - { - "epoch": 0.30289184151987014, - "flos": 15825783606840.0, - "grad_norm": 6.003083708192679, - "learning_rate": 3.2685231078717297e-06, - "loss": 1.0498, - "num_input_tokens_seen": 53501925, - "step": 2519 - }, - { - "epoch": 0.30301208441050925, - "flos": 17947277075880.0, - "grad_norm": 10.032321575601983, - "learning_rate": 3.267920774245145e-06, - "loss": 0.9535, - "num_input_tokens_seen": 53521050, - "step": 2520 - }, - { - "epoch": 0.3031323273011483, - "flos": 16376367103200.0, - "grad_norm": 75.11761610967164, - "learning_rate": 3.2673182482759876e-06, - "loss": 1.0754, - "num_input_tokens_seen": 53539885, - "step": 2521 - }, - { - "epoch": 0.3032525701917874, - "flos": 13384392828480.0, - "grad_norm": 3.6585610413703114, - "learning_rate": 3.266715530055659e-06, - "loss": 0.875, - 
"num_input_tokens_seen": 53557755, - "step": 2522 - }, - { - "epoch": 0.30337281308242653, - "flos": 12598186633920.0, - "grad_norm": 4.220377064351907, - "learning_rate": 3.2661126196755927e-06, - "loss": 1.0339, - "num_input_tokens_seen": 53576585, - "step": 2523 - }, - { - "epoch": 0.3034930559730656, - "flos": 41382563652840.0, - "grad_norm": 0.8201924554090413, - "learning_rate": 3.265509517227248e-06, - "loss": 0.8367, - "num_input_tokens_seen": 53633120, - "step": 2524 - }, - { - "epoch": 0.3036132988637047, - "flos": 10422871030920.0, - "grad_norm": 4.998098393271998, - "learning_rate": 3.264906222802115e-06, - "loss": 1.0287, - "num_input_tokens_seen": 53650690, - "step": 2525 - }, - { - "epoch": 0.30373354175434375, - "flos": 14934754007880.0, - "grad_norm": 6.4841905682544105, - "learning_rate": 3.264302736491715e-06, - "loss": 0.9957, - "num_input_tokens_seen": 53670530, - "step": 2526 - }, - { - "epoch": 0.30385378464498286, - "flos": 15013908138480.0, - "grad_norm": 5.7932828482362195, - "learning_rate": 3.263699058387594e-06, - "loss": 1.0885, - "num_input_tokens_seen": 53687685, - "step": 2527 - }, - { - "epoch": 0.30397402753562197, - "flos": 14644082713320.0, - "grad_norm": 8.666598637631767, - "learning_rate": 3.2630951885813315e-06, - "loss": 1.1306, - "num_input_tokens_seen": 53704800, - "step": 2528 - }, - { - "epoch": 0.304094270426261, - "flos": 10660333422720.0, - "grad_norm": 4.6953334204894785, - "learning_rate": 3.262491127164533e-06, - "loss": 0.9985, - "num_input_tokens_seen": 53723335, - "step": 2529 - }, - { - "epoch": 0.30421451331690014, - "flos": 9768843900360.0, - "grad_norm": 10.817853539413228, - "learning_rate": 3.2618868742288337e-06, - "loss": 1.028, - "num_input_tokens_seen": 53739980, - "step": 2530 - }, - { - "epoch": 0.30433475620753925, - "flos": 12312237219600.0, - "grad_norm": 5.181888164613506, - "learning_rate": 3.261282429865899e-06, - "loss": 0.9509, - "num_input_tokens_seen": 53757705, - "step": 2531 - }, - { - "epoch": 0.3044549990981783, - "flos": 13413833474160.0, - "grad_norm": 4.449207389622788, - "learning_rate": 3.2606777941674225e-06, - "loss": 0.9554, - "num_input_tokens_seen": 53776080, - "step": 2532 - }, - { - "epoch": 0.3045752419888174, - "flos": 14959902035160.0, - "grad_norm": 50.20616674958864, - "learning_rate": 3.2600729672251276e-06, - "loss": 1.0707, - "num_input_tokens_seen": 53793515, - "step": 2533 - }, - { - "epoch": 0.3046954848794565, - "flos": 20729604541800.0, - "grad_norm": 3.8750872448359575, - "learning_rate": 3.259467949130765e-06, - "loss": 0.8752, - "num_input_tokens_seen": 53814645, - "step": 2534 - }, - { - "epoch": 0.3048157277700956, - "flos": 14403370196160.0, - "grad_norm": 9.30038749954009, - "learning_rate": 3.2588627399761164e-06, - "loss": 1.0588, - "num_input_tokens_seen": 53830360, - "step": 2535 - }, - { - "epoch": 0.3049359706607347, - "flos": 16160250705240.0, - "grad_norm": 4.516087914204962, - "learning_rate": 3.2582573398529903e-06, - "loss": 0.9423, - "num_input_tokens_seen": 53847435, - "step": 2536 - }, - { - "epoch": 0.3050562135513738, - "flos": 13066120581840.0, - "grad_norm": 8.897743847799546, - "learning_rate": 3.2576517488532265e-06, - "loss": 0.9629, - "num_input_tokens_seen": 53863505, - "step": 2537 - }, - { - "epoch": 0.30517645644201286, - "flos": 14457529607280.0, - "grad_norm": 4.660702413576202, - "learning_rate": 3.257045967068692e-06, - "loss": 1.0883, - "num_input_tokens_seen": 53882480, - "step": 2538 - }, - { - "epoch": 0.30529669933265197, - "flos": 
15590099585520.0, - "grad_norm": 4.967168784308776, - "learning_rate": 3.2564399945912848e-06, - "loss": 1.0348, - "num_input_tokens_seen": 53901990, - "step": 2539 - }, - { - "epoch": 0.305416942223291, - "flos": 15511221408960.0, - "grad_norm": 7.669930081403173, - "learning_rate": 3.2558338315129287e-06, - "loss": 1.0519, - "num_input_tokens_seen": 53919855, - "step": 2540 - }, - { - "epoch": 0.30553718511393013, - "flos": 24190248641880.0, - "grad_norm": 7.8645349631600965, - "learning_rate": 3.2552274779255785e-06, - "loss": 0.9816, - "num_input_tokens_seen": 53940505, - "step": 2541 - }, - { - "epoch": 0.30565742800456924, - "flos": 15821858927160.0, - "grad_norm": 12.173904426050393, - "learning_rate": 3.2546209339212184e-06, - "loss": 0.9901, - "num_input_tokens_seen": 53959245, - "step": 2542 - }, - { - "epoch": 0.3057776708952083, - "flos": 16035583723440.0, - "grad_norm": 5.001943973293073, - "learning_rate": 3.25401419959186e-06, - "loss": 0.9935, - "num_input_tokens_seen": 53979575, - "step": 2543 - }, - { - "epoch": 0.3058979137858474, - "flos": 15485245519560.0, - "grad_norm": 4.142308306865975, - "learning_rate": 3.253407275029545e-06, - "loss": 0.9926, - "num_input_tokens_seen": 53998200, - "step": 2544 - }, - { - "epoch": 0.3060181566764865, - "flos": 19208070776880.0, - "grad_norm": 6.337824417027065, - "learning_rate": 3.2528001603263425e-06, - "loss": 1.0258, - "num_input_tokens_seen": 54019990, - "step": 2545 - }, - { - "epoch": 0.3061383995671256, - "flos": 14095185602760.0, - "grad_norm": 3.6828228011248196, - "learning_rate": 3.2521928555743514e-06, - "loss": 1.0357, - "num_input_tokens_seen": 54037055, - "step": 2546 - }, - { - "epoch": 0.3062586424577647, - "flos": 15721174833360.0, - "grad_norm": 4.556035099272795, - "learning_rate": 3.2515853608657e-06, - "loss": 0.8925, - "num_input_tokens_seen": 54054775, - "step": 2547 - }, - { - "epoch": 0.3063788853484038, - "flos": 14799140849160.0, - "grad_norm": 7.174078044074423, - "learning_rate": 3.250977676292545e-06, - "loss": 0.9563, - "num_input_tokens_seen": 54072735, - "step": 2548 - }, - { - "epoch": 0.30649912823904285, - "flos": 11467211056800.0, - "grad_norm": 4.454728115703457, - "learning_rate": 3.2503698019470712e-06, - "loss": 1.0139, - "num_input_tokens_seen": 54088225, - "step": 2549 - }, - { - "epoch": 0.30661937112968196, - "flos": 13197870384000.0, - "grad_norm": 3.4630721077479625, - "learning_rate": 3.249761737921492e-06, - "loss": 0.9973, - "num_input_tokens_seen": 54104475, - "step": 2550 - }, - { - "epoch": 0.30673961402032107, - "flos": 22377982259280.0, - "grad_norm": 7.067315004705282, - "learning_rate": 3.249153484308051e-06, - "loss": 0.9685, - "num_input_tokens_seen": 54122810, - "step": 2551 - }, - { - "epoch": 0.3068598569109601, - "flos": 14357274775320.0, - "grad_norm": 4.999791713190957, - "learning_rate": 3.2485450411990194e-06, - "loss": 0.9989, - "num_input_tokens_seen": 54141885, - "step": 2552 - }, - { - "epoch": 0.30698009980159924, - "flos": 21092255161920.0, - "grad_norm": 6.689491617133781, - "learning_rate": 3.2479364086866983e-06, - "loss": 1.0351, - "num_input_tokens_seen": 54161860, - "step": 2553 - }, - { - "epoch": 0.30710034269223835, - "flos": 16953662366400.0, - "grad_norm": 3.70452339391315, - "learning_rate": 3.247327586863416e-06, - "loss": 1.0361, - "num_input_tokens_seen": 54182460, - "step": 2554 - }, - { - "epoch": 0.3072205855828774, - "flos": 18421527305160.0, - "grad_norm": 5.044930381634814, - "learning_rate": 3.2467185758215304e-06, - "loss": 1.0028, - 
"num_input_tokens_seen": 54201920, - "step": 2555 - }, - { - "epoch": 0.3073408284735165, - "flos": 15799347794040.0, - "grad_norm": 4.217241483768967, - "learning_rate": 3.246109375653428e-06, - "loss": 1.0645, - "num_input_tokens_seen": 54218405, - "step": 2556 - }, - { - "epoch": 0.30746107136415557, - "flos": 13833004445520.0, - "grad_norm": 4.896879736356995, - "learning_rate": 3.2454999864515243e-06, - "loss": 1.0143, - "num_input_tokens_seen": 54237500, - "step": 2557 - }, - { - "epoch": 0.3075813142547947, - "flos": 15431576693400.0, - "grad_norm": 6.845158401562411, - "learning_rate": 3.244890408308263e-06, - "loss": 0.9156, - "num_input_tokens_seen": 54257925, - "step": 2558 - }, - { - "epoch": 0.3077015571454338, - "flos": 17137333285800.0, - "grad_norm": 5.208851089174027, - "learning_rate": 3.2442806413161165e-06, - "loss": 0.8304, - "num_input_tokens_seen": 54277290, - "step": 2559 - }, - { - "epoch": 0.30782180003607285, - "flos": 13046154358200.0, - "grad_norm": 8.15345733889302, - "learning_rate": 3.243670685567586e-06, - "loss": 0.9998, - "num_input_tokens_seen": 54294410, - "step": 2560 - }, - { - "epoch": 0.30794204292671196, - "flos": 16979576932680.0, - "grad_norm": 5.97900723589255, - "learning_rate": 3.2430605411552012e-06, - "loss": 1.023, - "num_input_tokens_seen": 54314245, - "step": 2561 - }, - { - "epoch": 0.30806228581735107, - "flos": 48643347447240.0, - "grad_norm": 0.9255394277742329, - "learning_rate": 3.2424502081715205e-06, - "loss": 0.9483, - "num_input_tokens_seen": 54377080, - "step": 2562 - }, - { - "epoch": 0.3081825287079901, - "flos": 16954336920720.0, - "grad_norm": 5.096015473191082, - "learning_rate": 3.241839686709132e-06, - "loss": 1.0028, - "num_input_tokens_seen": 54397735, - "step": 2563 - }, - { - "epoch": 0.30830277159862923, - "flos": 11467425687720.0, - "grad_norm": 3.870992741965504, - "learning_rate": 3.2412289768606495e-06, - "loss": 1.0375, - "num_input_tokens_seen": 54414025, - "step": 2564 - }, - { - "epoch": 0.30842301448926834, - "flos": 20911190475120.0, - "grad_norm": 4.944652736338862, - "learning_rate": 3.240618078718718e-06, - "loss": 1.0444, - "num_input_tokens_seen": 54435205, - "step": 2565 - }, - { - "epoch": 0.3085432573799074, - "flos": 15590160908640.0, - "grad_norm": 7.723402343274855, - "learning_rate": 3.240006992376011e-06, - "loss": 0.9613, - "num_input_tokens_seen": 54454550, - "step": 2566 - }, - { - "epoch": 0.3086635002705465, - "flos": 15668885777400.0, - "grad_norm": 7.508157489960035, - "learning_rate": 3.2393957179252284e-06, - "loss": 0.9855, - "num_input_tokens_seen": 54470805, - "step": 2567 - }, - { - "epoch": 0.3087837431611856, - "flos": 23294129223960.0, - "grad_norm": 8.6934245374299, - "learning_rate": 3.2387842554591016e-06, - "loss": 1.0338, - "num_input_tokens_seen": 54491340, - "step": 2568 - }, - { - "epoch": 0.3089039860518247, - "flos": 12466252862400.0, - "grad_norm": 6.7739018299793265, - "learning_rate": 3.238172605070388e-06, - "loss": 1.0865, - "num_input_tokens_seen": 54506475, - "step": 2569 - }, - { - "epoch": 0.3090242289424638, - "flos": 10154772192600.0, - "grad_norm": 5.337294577726044, - "learning_rate": 3.2375607668518745e-06, - "loss": 0.9981, - "num_input_tokens_seen": 54519230, - "step": 2570 - }, - { - "epoch": 0.30914447183310284, - "flos": 11366465639880.0, - "grad_norm": 11.03698857851743, - "learning_rate": 3.236948740896377e-06, - "loss": 1.1216, - "num_input_tokens_seen": 54533750, - "step": 2571 - }, - { - "epoch": 0.30926471472374195, - "flos": 22981284073440.0, 
- "grad_norm": 22.825325482858673, - "learning_rate": 3.2363365272967384e-06, - "loss": 1.0674, - "num_input_tokens_seen": 54556040, - "step": 2572 - }, - { - "epoch": 0.30938495761438106, - "flos": 14457866884440.0, - "grad_norm": 2.9853156240169985, - "learning_rate": 3.235724126145832e-06, - "loss": 1.0381, - "num_input_tokens_seen": 54571795, - "step": 2573 - }, - { - "epoch": 0.3095052005050201, - "flos": 17110836149880.0, - "grad_norm": 5.966834594807125, - "learning_rate": 3.235111537536558e-06, - "loss": 0.9932, - "num_input_tokens_seen": 54592330, - "step": 2574 - }, - { - "epoch": 0.30962544339565923, - "flos": 16636401951240.0, - "grad_norm": 5.247529493070015, - "learning_rate": 3.2344987615618456e-06, - "loss": 1.0614, - "num_input_tokens_seen": 54611885, - "step": 2575 - }, - { - "epoch": 0.30974568628629834, - "flos": 24109561433280.0, - "grad_norm": 7.3478737798408496, - "learning_rate": 3.2338857983146533e-06, - "loss": 0.9967, - "num_input_tokens_seen": 54633105, - "step": 2576 - }, - { - "epoch": 0.3098659291769374, - "flos": 14357090805960.0, - "grad_norm": 3.925285406730572, - "learning_rate": 3.233272647887966e-06, - "loss": 0.9822, - "num_input_tokens_seen": 54651715, - "step": 2577 - }, - { - "epoch": 0.3099861720675765, - "flos": 17604071432880.0, - "grad_norm": 17.02337385973648, - "learning_rate": 3.2326593103747985e-06, - "loss": 1.1196, - "num_input_tokens_seen": 54670450, - "step": 2578 - }, - { - "epoch": 0.3101064149582156, - "flos": 8274175210080.0, - "grad_norm": 18.277006886054693, - "learning_rate": 3.2320457858681936e-06, - "loss": 1.0669, - "num_input_tokens_seen": 54688560, - "step": 2579 - }, - { - "epoch": 0.31022665784885467, - "flos": 16371246622680.0, - "grad_norm": 4.2639485512755435, - "learning_rate": 3.2314320744612228e-06, - "loss": 1.0863, - "num_input_tokens_seen": 54703580, - "step": 2580 - }, - { - "epoch": 0.3103469007394938, - "flos": 11525203162920.0, - "grad_norm": 5.188394676178355, - "learning_rate": 3.2308181762469854e-06, - "loss": 0.9901, - "num_input_tokens_seen": 54721245, - "step": 2581 - }, - { - "epoch": 0.3104671436301329, - "flos": 21748765878840.0, - "grad_norm": 4.3241208279901935, - "learning_rate": 3.230204091318609e-06, - "loss": 1.0106, - "num_input_tokens_seen": 54741505, - "step": 2582 - }, - { - "epoch": 0.31058738652077195, - "flos": 14226046219680.0, - "grad_norm": 6.078526657679886, - "learning_rate": 3.2295898197692503e-06, - "loss": 1.0701, - "num_input_tokens_seen": 54760185, - "step": 2583 - }, - { - "epoch": 0.31070762941141106, - "flos": 19995258141360.0, - "grad_norm": 3.498245616998696, - "learning_rate": 3.228975361692094e-06, - "loss": 1.0133, - "num_input_tokens_seen": 54780925, - "step": 2584 - }, - { - "epoch": 0.31082787230205017, - "flos": 14567228199720.0, - "grad_norm": 9.751676757612556, - "learning_rate": 3.228360717180352e-06, - "loss": 1.0279, - "num_input_tokens_seen": 54798170, - "step": 2585 - }, - { - "epoch": 0.3109481151926892, - "flos": 44702412790560.0, - "grad_norm": 0.9205742727626685, - "learning_rate": 3.227745886327266e-06, - "loss": 0.8743, - "num_input_tokens_seen": 54856585, - "step": 2586 - }, - { - "epoch": 0.31106835808332833, - "flos": 31981858791840.0, - "grad_norm": 0.8165187484401447, - "learning_rate": 3.227130869226105e-06, - "loss": 0.8182, - "num_input_tokens_seen": 54913465, - "step": 2587 - }, - { - "epoch": 0.3111886009739674, - "flos": 16637689736760.0, - "grad_norm": 25.55026497003627, - "learning_rate": 3.226515665970167e-06, - "loss": 1.0404, - 
"num_input_tokens_seen": 54930725, - "step": 2588 - }, - { - "epoch": 0.3113088438646065, - "flos": 12417612532080.0, - "grad_norm": 5.443102526345125, - "learning_rate": 3.225900276652777e-06, - "loss": 1.0778, - "num_input_tokens_seen": 54947220, - "step": 2589 - }, - { - "epoch": 0.3114290867552456, - "flos": 20204414365200.0, - "grad_norm": 5.240696611182428, - "learning_rate": 3.2252847013672906e-06, - "loss": 0.9754, - "num_input_tokens_seen": 54969600, - "step": 2590 - }, - { - "epoch": 0.31154932964588467, - "flos": 19495124007360.0, - "grad_norm": 9.037973191574597, - "learning_rate": 3.224668940207089e-06, - "loss": 0.9812, - "num_input_tokens_seen": 54988305, - "step": 2591 - }, - { - "epoch": 0.3116695725365238, - "flos": 18893631225240.0, - "grad_norm": 6.13775490039193, - "learning_rate": 3.2240529932655828e-06, - "loss": 1.0858, - "num_input_tokens_seen": 55007290, - "step": 2592 - }, - { - "epoch": 0.3117898154271629, - "flos": 15038320288320.0, - "grad_norm": 5.151260273623942, - "learning_rate": 3.223436860636211e-06, - "loss": 1.0895, - "num_input_tokens_seen": 55022645, - "step": 2593 - }, - { - "epoch": 0.31191005831780194, - "flos": 19418790740280.0, - "grad_norm": 5.7162596903405385, - "learning_rate": 3.2228205424124403e-06, - "loss": 0.9652, - "num_input_tokens_seen": 55045520, - "step": 2594 - }, - { - "epoch": 0.31203030120844105, - "flos": 9134353731600.0, - "grad_norm": 14.095514089810797, - "learning_rate": 3.222204038687765e-06, - "loss": 0.9692, - "num_input_tokens_seen": 55058375, - "step": 2595 - }, - { - "epoch": 0.31215054409908016, - "flos": 19626597855480.0, - "grad_norm": 3.439717635165417, - "learning_rate": 3.221587349555709e-06, - "loss": 1.1116, - "num_input_tokens_seen": 55078355, - "step": 2596 - }, - { - "epoch": 0.3122707869897192, - "flos": 15274617540840.0, - "grad_norm": 3.049173124198265, - "learning_rate": 3.2209704751098236e-06, - "loss": 0.9159, - "num_input_tokens_seen": 55097105, - "step": 2597 - }, - { - "epoch": 0.31239102988035833, - "flos": 10732067455800.0, - "grad_norm": 4.140695247414135, - "learning_rate": 3.2203534154436875e-06, - "loss": 1.0426, - "num_input_tokens_seen": 55111180, - "step": 2598 - }, - { - "epoch": 0.31251127277099744, - "flos": 15668027253720.0, - "grad_norm": 4.528518379557409, - "learning_rate": 3.2197361706509084e-06, - "loss": 0.9915, - "num_input_tokens_seen": 55131655, - "step": 2599 - }, - { - "epoch": 0.3126315156616365, - "flos": 10738015798440.0, - "grad_norm": 5.119341261627606, - "learning_rate": 3.2191187408251228e-06, - "loss": 1.0386, - "num_input_tokens_seen": 55148535, - "step": 2600 - }, - { - "epoch": 0.3127517585522756, - "flos": 12858558759120.0, - "grad_norm": 6.964110331021237, - "learning_rate": 3.218501126059993e-06, - "loss": 0.9959, - "num_input_tokens_seen": 55163650, - "step": 2601 - }, - { - "epoch": 0.31287200144291466, - "flos": 15563878403640.0, - "grad_norm": 5.24488963313757, - "learning_rate": 3.2178833264492116e-06, - "loss": 1.0377, - "num_input_tokens_seen": 55182075, - "step": 2602 - }, - { - "epoch": 0.31299224433355377, - "flos": 21304692172680.0, - "grad_norm": 3.2808842046383786, - "learning_rate": 3.217265342086498e-06, - "loss": 0.9874, - "num_input_tokens_seen": 55202285, - "step": 2603 - }, - { - "epoch": 0.3131124872241929, - "flos": 8195389018200.0, - "grad_norm": 5.7695811688556455, - "learning_rate": 3.216647173065599e-06, - "loss": 0.9532, - "num_input_tokens_seen": 55217470, - "step": 2604 - }, - { - "epoch": 0.31323273011483194, - "flos": 
35642864796120.0, - "grad_norm": 5.303098010667179, - "learning_rate": 3.216028819480292e-06, - "loss": 0.9533, - "num_input_tokens_seen": 55238530, - "step": 2605 - }, - { - "epoch": 0.31335297300547105, - "flos": 16061590274400.0, - "grad_norm": 3.611887400074968, - "learning_rate": 3.2154102814243793e-06, - "loss": 0.9837, - "num_input_tokens_seen": 55257390, - "step": 2606 - }, - { - "epoch": 0.31347321589611016, - "flos": 24764017825680.0, - "grad_norm": 4.2453803592294586, - "learning_rate": 3.2147915589916937e-06, - "loss": 0.8992, - "num_input_tokens_seen": 55278670, - "step": 2607 - }, - { - "epoch": 0.3135934587867492, - "flos": 14016644703360.0, - "grad_norm": 3.355055618166228, - "learning_rate": 3.2141726522760938e-06, - "loss": 1.0493, - "num_input_tokens_seen": 55296450, - "step": 2608 - }, - { - "epoch": 0.3137137016773883, - "flos": 47125830346680.0, - "grad_norm": 0.8845675754146803, - "learning_rate": 3.213553561371469e-06, - "loss": 0.789, - "num_input_tokens_seen": 55359905, - "step": 2609 - }, - { - "epoch": 0.31383394456802743, - "flos": 11499012642600.0, - "grad_norm": 5.514815403467842, - "learning_rate": 3.212934286371733e-06, - "loss": 1.1929, - "num_input_tokens_seen": 55376335, - "step": 2610 - }, - { - "epoch": 0.3139541874586665, - "flos": 27699349102920.0, - "grad_norm": 3.9232442828767367, - "learning_rate": 3.2123148273708304e-06, - "loss": 1.0399, - "num_input_tokens_seen": 55396245, - "step": 2611 - }, - { - "epoch": 0.3140744303493056, - "flos": 17818869383760.0, - "grad_norm": 4.319805205156599, - "learning_rate": 3.211695184462733e-06, - "loss": 0.9876, - "num_input_tokens_seen": 55417140, - "step": 2612 - }, - { - "epoch": 0.3141946732399447, - "flos": 51929555305560.0, - "grad_norm": 0.8705704832295009, - "learning_rate": 3.2110753577414383e-06, - "loss": 0.8768, - "num_input_tokens_seen": 55478440, - "step": 2613 - }, - { - "epoch": 0.31431491613058377, - "flos": 13645102230840.0, - "grad_norm": 10.192158950141105, - "learning_rate": 3.2104553473009757e-06, - "loss": 1.0141, - "num_input_tokens_seen": 55496280, - "step": 2614 - }, - { - "epoch": 0.3144351590212229, - "flos": 25842367069680.0, - "grad_norm": 3.324734675785073, - "learning_rate": 3.209835153235399e-06, - "loss": 0.9037, - "num_input_tokens_seen": 55517555, - "step": 2615 - }, - { - "epoch": 0.314555401911862, - "flos": 13151315039760.0, - "grad_norm": 3.2298781236738052, - "learning_rate": 3.2092147756387916e-06, - "loss": 0.9047, - "num_input_tokens_seen": 55537600, - "step": 2616 - }, - { - "epoch": 0.31467564480250104, - "flos": 11578258757880.0, - "grad_norm": 5.26387461199316, - "learning_rate": 3.208594214605264e-06, - "loss": 1.0621, - "num_input_tokens_seen": 55555865, - "step": 2617 - }, - { - "epoch": 0.31479588769314015, - "flos": 15379594253040.0, - "grad_norm": 5.512438320930714, - "learning_rate": 3.2079734702289553e-06, - "loss": 1.0011, - "num_input_tokens_seen": 55574480, - "step": 2618 - }, - { - "epoch": 0.3149161305837792, - "flos": 47292422777160.0, - "grad_norm": 0.8185432609865567, - "learning_rate": 3.207352542604031e-06, - "loss": 0.8732, - "num_input_tokens_seen": 55636535, - "step": 2619 - }, - { - "epoch": 0.3150363734744183, - "flos": 20651155627080.0, - "grad_norm": 2.5368885299498505, - "learning_rate": 3.2067314318246864e-06, - "loss": 1.005, - "num_input_tokens_seen": 55656970, - "step": 2620 - }, - { - "epoch": 0.31515661636505743, - "flos": 19680419989440.0, - "grad_norm": 3.4322907250518355, - "learning_rate": 3.206110137985143e-06, - "loss": 
0.9931, - "num_input_tokens_seen": 55676895, - "step": 2621 - }, - { - "epoch": 0.3152768592556965, - "flos": 17501762276400.0, - "grad_norm": 3.8144788765611803, - "learning_rate": 3.2054886611796505e-06, - "loss": 1.1413, - "num_input_tokens_seen": 55695610, - "step": 2622 - }, - { - "epoch": 0.3153971021463356, - "flos": 49033297951920.0, - "grad_norm": 0.9423798081740485, - "learning_rate": 3.204867001502487e-06, - "loss": 0.9222, - "num_input_tokens_seen": 55753985, - "step": 2623 - }, - { - "epoch": 0.3155173450369747, - "flos": 18212279096640.0, - "grad_norm": 3.3280569436721943, - "learning_rate": 3.2042451590479567e-06, - "loss": 1.0228, - "num_input_tokens_seen": 55774220, - "step": 2624 - }, - { - "epoch": 0.31563758792761376, - "flos": 17288221449480.0, - "grad_norm": 2.3078864187129504, - "learning_rate": 3.203623133910394e-06, - "loss": 1.0821, - "num_input_tokens_seen": 55792245, - "step": 2625 - }, - { - "epoch": 0.31575783081825287, - "flos": 22747010483880.0, - "grad_norm": 4.548693945262312, - "learning_rate": 3.203000926184158e-06, - "loss": 0.9861, - "num_input_tokens_seen": 55810890, - "step": 2626 - }, - { - "epoch": 0.315878073708892, - "flos": 21961785459240.0, - "grad_norm": 12.560902660030672, - "learning_rate": 3.202378535963639e-06, - "loss": 0.9978, - "num_input_tokens_seen": 55831525, - "step": 2627 - }, - { - "epoch": 0.31599831659953104, - "flos": 15773433227760.0, - "grad_norm": 5.440289012300129, - "learning_rate": 3.2017559633432516e-06, - "loss": 1.0621, - "num_input_tokens_seen": 55850875, - "step": 2628 - }, - { - "epoch": 0.31611855949017015, - "flos": 18212064465720.0, - "grad_norm": 3.721937464422892, - "learning_rate": 3.2011332084174398e-06, - "loss": 0.88, - "num_input_tokens_seen": 55871465, - "step": 2629 - }, - { - "epoch": 0.31623880238080926, - "flos": 14618996009160.0, - "grad_norm": 3.355862364591702, - "learning_rate": 3.2005102712806756e-06, - "loss": 1.1132, - "num_input_tokens_seen": 55890015, - "step": 2630 - }, - { - "epoch": 0.3163590452714483, - "flos": 9006528609120.0, - "grad_norm": 4.068144647477856, - "learning_rate": 3.1998871520274575e-06, - "loss": 0.9477, - "num_input_tokens_seen": 55905070, - "step": 2631 - }, - { - "epoch": 0.3164792881620874, - "flos": 16378114812120.0, - "grad_norm": 3.9084777583025847, - "learning_rate": 3.199263850752312e-06, - "loss": 1.0617, - "num_input_tokens_seen": 55925625, - "step": 2632 - }, - { - "epoch": 0.31659953105272653, - "flos": 12967705443480.0, - "grad_norm": 6.924728597562676, - "learning_rate": 3.198640367549795e-06, - "loss": 1.0703, - "num_input_tokens_seen": 55944240, - "step": 2633 - }, - { - "epoch": 0.3167197739433656, - "flos": 18291157273200.0, - "grad_norm": 3.6685725518485075, - "learning_rate": 3.198016702514487e-06, - "loss": 1.0879, - "num_input_tokens_seen": 55964240, - "step": 2634 - }, - { - "epoch": 0.3168400168340047, - "flos": 16740029554800.0, - "grad_norm": 2.4332020354012047, - "learning_rate": 3.1973928557409972e-06, - "loss": 1.0739, - "num_input_tokens_seen": 55982000, - "step": 2635 - }, - { - "epoch": 0.31696025972464376, - "flos": 20204966273280.0, - "grad_norm": 3.312462140712673, - "learning_rate": 3.1967688273239636e-06, - "loss": 0.9245, - "num_input_tokens_seen": 56001525, - "step": 2636 - }, - { - "epoch": 0.31708050261528287, - "flos": 11604265308840.0, - "grad_norm": 3.9447064529105065, - "learning_rate": 3.1961446173580503e-06, - "loss": 1.0422, - "num_input_tokens_seen": 56018185, - "step": 2637 - }, - { - "epoch": 0.317200745505922, - "flos": 
18656659418400.0, - "grad_norm": 3.993936353463838, - "learning_rate": 3.1955202259379502e-06, - "loss": 0.997, - "num_input_tokens_seen": 56039635, - "step": 2638 - }, - { - "epoch": 0.31732098839656103, - "flos": 22348817567640.0, - "grad_norm": 5.924768873125024, - "learning_rate": 3.194895653158381e-06, - "loss": 1.0461, - "num_input_tokens_seen": 56058295, - "step": 2639 - }, - { - "epoch": 0.31744123128720014, - "flos": 42219249871320.0, - "grad_norm": 0.7720385528756399, - "learning_rate": 3.194270899114093e-06, - "loss": 0.8244, - "num_input_tokens_seen": 56123810, - "step": 2640 - }, - { - "epoch": 0.31756147417783925, - "flos": 12335790845760.0, - "grad_norm": 3.27488629092765, - "learning_rate": 3.193645963899858e-06, - "loss": 1.0515, - "num_input_tokens_seen": 56141145, - "step": 2641 - }, - { - "epoch": 0.3176817170684783, - "flos": 18131407918680.0, - "grad_norm": 3.2292407283698585, - "learning_rate": 3.193020847610479e-06, - "loss": 1.0598, - "num_input_tokens_seen": 56161185, - "step": 2642 - }, - { - "epoch": 0.3178019599591174, - "flos": 17765783127240.0, - "grad_norm": 4.46110702700261, - "learning_rate": 3.192395550340787e-06, - "loss": 0.932, - "num_input_tokens_seen": 56178855, - "step": 2643 - }, - { - "epoch": 0.31792220284975653, - "flos": 8746156483920.0, - "grad_norm": 4.287290001889624, - "learning_rate": 3.191770072185638e-06, - "loss": 0.9907, - "num_input_tokens_seen": 56195570, - "step": 2644 - }, - { - "epoch": 0.3180424457403956, - "flos": 10947693268800.0, - "grad_norm": 6.184853960932821, - "learning_rate": 3.191144413239916e-06, - "loss": 0.9488, - "num_input_tokens_seen": 56211860, - "step": 2645 - }, - { - "epoch": 0.3181626886310347, - "flos": 18629671697520.0, - "grad_norm": 3.479614152893268, - "learning_rate": 3.190518573598534e-06, - "loss": 1.1015, - "num_input_tokens_seen": 56228185, - "step": 2646 - }, - { - "epoch": 0.3182829315216738, - "flos": 18132634381080.0, - "grad_norm": 2.63342640225408, - "learning_rate": 3.1898925533564308e-06, - "loss": 1.0108, - "num_input_tokens_seen": 56249375, - "step": 2647 - }, - { - "epoch": 0.31840317441231286, - "flos": 12836476887840.0, - "grad_norm": 3.9775323490767063, - "learning_rate": 3.1892663526085733e-06, - "loss": 0.8713, - "num_input_tokens_seen": 56267470, - "step": 2648 - }, - { - "epoch": 0.31852341730295197, - "flos": 46352170278240.0, - "grad_norm": 0.7685548813854782, - "learning_rate": 3.188639971449956e-06, - "loss": 0.8328, - "num_input_tokens_seen": 56333240, - "step": 2649 - }, - { - "epoch": 0.318643660193591, - "flos": 14672358219720.0, - "grad_norm": 25.50633968757204, - "learning_rate": 3.1880134099756e-06, - "loss": 0.9464, - "num_input_tokens_seen": 56352595, - "step": 2650 - }, - { - "epoch": 0.31876390308423014, - "flos": 19182186872160.0, - "grad_norm": 3.233506385524397, - "learning_rate": 3.1873866682805535e-06, - "loss": 0.933, - "num_input_tokens_seen": 56373010, - "step": 2651 - }, - { - "epoch": 0.31888414597486925, - "flos": 12785046355560.0, - "grad_norm": 3.2058870641814776, - "learning_rate": 3.186759746459894e-06, - "loss": 1.1072, - "num_input_tokens_seen": 56391840, - "step": 2652 - }, - { - "epoch": 0.3190043888655083, - "flos": 17892013848600.0, - "grad_norm": 6.1233714527209635, - "learning_rate": 3.1861326446087246e-06, - "loss": 1.0268, - "num_input_tokens_seen": 56410300, - "step": 2653 - }, - { - "epoch": 0.3191246317561474, - "flos": 15668180561520.0, - "grad_norm": 3.4680969723318102, - "learning_rate": 3.1855053628221763e-06, - "loss": 0.9372, - 
"num_input_tokens_seen": 56429275, - "step": 2654 - }, - { - "epoch": 0.3192448746467865, - "flos": 10528001050920.0, - "grad_norm": 4.615746391006957, - "learning_rate": 3.184877901195407e-06, - "loss": 1.1108, - "num_input_tokens_seen": 56445690, - "step": 2655 - }, - { - "epoch": 0.3193651175374256, - "flos": 48145359622440.0, - "grad_norm": 0.8539433924073763, - "learning_rate": 3.184250259823602e-06, - "loss": 0.9005, - "num_input_tokens_seen": 56507940, - "step": 2656 - }, - { - "epoch": 0.3194853604280647, - "flos": 8609316862800.0, - "grad_norm": 5.5966329124997465, - "learning_rate": 3.183622438801974e-06, - "loss": 1.0357, - "num_input_tokens_seen": 56522950, - "step": 2657 - }, - { - "epoch": 0.3196056033187038, - "flos": 10555602003000.0, - "grad_norm": 2.9139671003255656, - "learning_rate": 3.1829944382257637e-06, - "loss": 0.9873, - "num_input_tokens_seen": 56540800, - "step": 2658 - }, - { - "epoch": 0.31972584620934286, - "flos": 16900729417680.0, - "grad_norm": 3.7657278213075784, - "learning_rate": 3.1823662581902373e-06, - "loss": 1.0395, - "num_input_tokens_seen": 56558205, - "step": 2659 - }, - { - "epoch": 0.31984608909998197, - "flos": 15091590514200.0, - "grad_norm": 4.48254995391635, - "learning_rate": 3.1817378987906896e-06, - "loss": 0.9639, - "num_input_tokens_seen": 56577430, - "step": 2660 - }, - { - "epoch": 0.3199663319906211, - "flos": 12967674781920.0, - "grad_norm": 3.9426534926893497, - "learning_rate": 3.181109360122442e-06, - "loss": 1.0332, - "num_input_tokens_seen": 56594740, - "step": 2661 - }, - { - "epoch": 0.32008657488126013, - "flos": 13281961025760.0, - "grad_norm": 4.107397960061739, - "learning_rate": 3.1804806422808445e-06, - "loss": 1.0057, - "num_input_tokens_seen": 56611595, - "step": 2662 - }, - { - "epoch": 0.32020681777189924, - "flos": 14620069163760.0, - "grad_norm": 4.083445151737804, - "learning_rate": 3.1798517453612714e-06, - "loss": 0.948, - "num_input_tokens_seen": 56631120, - "step": 2663 - }, - { - "epoch": 0.32032706066253835, - "flos": 25160371048320.0, - "grad_norm": 3.7836393263389745, - "learning_rate": 3.1792226694591265e-06, - "loss": 0.9853, - "num_input_tokens_seen": 56652225, - "step": 2664 - }, - { - "epoch": 0.3204473035531774, - "flos": 10817353898400.0, - "grad_norm": 3.617712522431612, - "learning_rate": 3.178593414669841e-06, - "loss": 1.0257, - "num_input_tokens_seen": 56670530, - "step": 2665 - }, - { - "epoch": 0.3205675464438165, - "flos": 17399330473680.0, - "grad_norm": 3.4729882543943527, - "learning_rate": 3.1779639810888707e-06, - "loss": 0.9315, - "num_input_tokens_seen": 56689845, - "step": 2666 - }, - { - "epoch": 0.3206877893344556, - "flos": 15957472085880.0, - "grad_norm": 3.155130622203154, - "learning_rate": 3.1773343688117013e-06, - "loss": 0.984, - "num_input_tokens_seen": 56710475, - "step": 2667 - }, - { - "epoch": 0.3208080322250947, - "flos": 14488380684720.0, - "grad_norm": 4.15009465856489, - "learning_rate": 3.1767045779338445e-06, - "loss": 1.0615, - "num_input_tokens_seen": 56727855, - "step": 2668 - }, - { - "epoch": 0.3209282751157338, - "flos": 15458503091160.0, - "grad_norm": 4.568758917302771, - "learning_rate": 3.176074608550839e-06, - "loss": 1.1286, - "num_input_tokens_seen": 56743395, - "step": 2669 - }, - { - "epoch": 0.32104851800637285, - "flos": 15669161731440.0, - "grad_norm": 2.4311260123128418, - "learning_rate": 3.17544446075825e-06, - "loss": 1.053, - "num_input_tokens_seen": 56762280, - "step": 2670 - }, - { - "epoch": 0.32116876089701196, - "flos": 
26418405208920.0, - "grad_norm": 2.6971947331265795, - "learning_rate": 3.174814134651671e-06, - "loss": 0.9431, - "num_input_tokens_seen": 56784550, - "step": 2671 - }, - { - "epoch": 0.3212890037876511, - "flos": 15614297104440.0, - "grad_norm": 3.1458153797841546, - "learning_rate": 3.1741836303267215e-06, - "loss": 1.0405, - "num_input_tokens_seen": 56803805, - "step": 2672 - }, - { - "epoch": 0.32140924667829013, - "flos": 7251365147400.0, - "grad_norm": 2.8786554731982124, - "learning_rate": 3.1735529478790496e-06, - "loss": 0.97, - "num_input_tokens_seen": 56821515, - "step": 2673 - }, - { - "epoch": 0.32152948956892924, - "flos": 36325903310520.0, - "grad_norm": 2.932750839861528, - "learning_rate": 3.172922087404328e-06, - "loss": 1.0264, - "num_input_tokens_seen": 56843495, - "step": 2674 - }, - { - "epoch": 0.32164973245956835, - "flos": 45720255680520.0, - "grad_norm": 0.7787843706435753, - "learning_rate": 3.1722910489982586e-06, - "loss": 0.8144, - "num_input_tokens_seen": 56903575, - "step": 2675 - }, - { - "epoch": 0.3217699753502074, - "flos": 16717579744800.0, - "grad_norm": 4.7600761753686776, - "learning_rate": 3.1716598327565694e-06, - "loss": 1.0313, - "num_input_tokens_seen": 56922935, - "step": 2676 - }, - { - "epoch": 0.3218902182408465, - "flos": 13518411586080.0, - "grad_norm": 2.751360210064048, - "learning_rate": 3.171028438775015e-06, - "loss": 1.068, - "num_input_tokens_seen": 56941850, - "step": 2677 - }, - { - "epoch": 0.3220104611314856, - "flos": 14462006195040.0, - "grad_norm": 3.2806087775618744, - "learning_rate": 3.170396867149377e-06, - "loss": 1.0667, - "num_input_tokens_seen": 56959575, - "step": 2678 - }, - { - "epoch": 0.3221307040221247, - "flos": 14275759704600.0, - "grad_norm": 3.2597243738964212, - "learning_rate": 3.1697651179754653e-06, - "loss": 1.0752, - "num_input_tokens_seen": 56977955, - "step": 2679 - }, - { - "epoch": 0.3222509469127638, - "flos": 17058056508960.0, - "grad_norm": 4.041212915072797, - "learning_rate": 3.1691331913491153e-06, - "loss": 0.9647, - "num_input_tokens_seen": 57000245, - "step": 2680 - }, - { - "epoch": 0.32237118980340285, - "flos": 12521700059040.0, - "grad_norm": 3.594473061176619, - "learning_rate": 3.1685010873661898e-06, - "loss": 1.0614, - "num_input_tokens_seen": 57019140, - "step": 2681 - }, - { - "epoch": 0.32249143269404196, - "flos": 16454417417640.0, - "grad_norm": 2.965128084603677, - "learning_rate": 3.167868806122578e-06, - "loss": 1.0135, - "num_input_tokens_seen": 57037910, - "step": 2682 - }, - { - "epoch": 0.32261167558468107, - "flos": 17369889828000.0, - "grad_norm": 3.048090296241325, - "learning_rate": 3.1672363477141968e-06, - "loss": 0.896, - "num_input_tokens_seen": 57056925, - "step": 2683 - }, - { - "epoch": 0.3227319184753201, - "flos": 21642041457720.0, - "grad_norm": 3.3447830173155273, - "learning_rate": 3.1666037122369903e-06, - "loss": 1.072, - "num_input_tokens_seen": 57077305, - "step": 2684 - }, - { - "epoch": 0.32285216136595923, - "flos": 11997399067680.0, - "grad_norm": 4.1474221921386, - "learning_rate": 3.165970899786928e-06, - "loss": 1.0814, - "num_input_tokens_seen": 57094940, - "step": 2685 - }, - { - "epoch": 0.32297240425659834, - "flos": 15616259444280.0, - "grad_norm": 6.754255578100865, - "learning_rate": 3.1653379104600067e-06, - "loss": 0.957, - "num_input_tokens_seen": 57114805, - "step": 2686 - }, - { - "epoch": 0.3230926471472374, - "flos": 16167180217800.0, - "grad_norm": 4.762951310368029, - "learning_rate": 3.164704744352251e-06, - "loss": 0.9207, - 
"num_input_tokens_seen": 57135330, - "step": 2687 - }, - { - "epoch": 0.3232128900378765, - "flos": 11994670188840.0, - "grad_norm": 3.842226051984175, - "learning_rate": 3.164071401559713e-06, - "loss": 1.0345, - "num_input_tokens_seen": 57152450, - "step": 2688 - }, - { - "epoch": 0.3233331329285156, - "flos": 17083817767440.0, - "grad_norm": 3.14854215159316, - "learning_rate": 3.1634378821784674e-06, - "loss": 0.9328, - "num_input_tokens_seen": 57172385, - "step": 2689 - }, - { - "epoch": 0.3234533758191547, - "flos": 12836599534080.0, - "grad_norm": 8.66969373608659, - "learning_rate": 3.1628041863046208e-06, - "loss": 0.9546, - "num_input_tokens_seen": 57189520, - "step": 2690 - }, - { - "epoch": 0.3235736187097938, - "flos": 11997368406120.0, - "grad_norm": 3.106653297079021, - "learning_rate": 3.162170314034304e-06, - "loss": 1.1336, - "num_input_tokens_seen": 57206655, - "step": 2691 - }, - { - "epoch": 0.3236938616004329, - "flos": 15720868217760.0, - "grad_norm": 8.555572603327073, - "learning_rate": 3.1615362654636738e-06, - "loss": 1.031, - "num_input_tokens_seen": 57227115, - "step": 2692 - }, - { - "epoch": 0.32381410449107195, - "flos": 12154327558680.0, - "grad_norm": 3.091393700517079, - "learning_rate": 3.1609020406889163e-06, - "loss": 1.1021, - "num_input_tokens_seen": 57244270, - "step": 2693 - }, - { - "epoch": 0.32393434738171106, - "flos": 11732458370040.0, - "grad_norm": 3.2527010037636805, - "learning_rate": 3.1602676398062416e-06, - "loss": 1.0752, - "num_input_tokens_seen": 57262900, - "step": 2694 - }, - { - "epoch": 0.3240545902723502, - "flos": 18133063642920.0, - "grad_norm": 4.180909393533134, - "learning_rate": 3.1596330629118886e-06, - "loss": 0.8414, - "num_input_tokens_seen": 57282590, - "step": 2695 - }, - { - "epoch": 0.32417483316298923, - "flos": 25580247235560.0, - "grad_norm": 2.96241099549843, - "learning_rate": 3.1589983101021223e-06, - "loss": 0.9586, - "num_input_tokens_seen": 57300940, - "step": 2696 - }, - { - "epoch": 0.32429507605362834, - "flos": 21436717928880.0, - "grad_norm": 3.5532725441929487, - "learning_rate": 3.1583633814732337e-06, - "loss": 1.0515, - "num_input_tokens_seen": 57320180, - "step": 2697 - }, - { - "epoch": 0.3244153189442674, - "flos": 12915232418160.0, - "grad_norm": 4.9090222344267, - "learning_rate": 3.157728277121541e-06, - "loss": 0.9297, - "num_input_tokens_seen": 57338075, - "step": 2698 - }, - { - "epoch": 0.3245355618349065, - "flos": 12546296178240.0, - "grad_norm": 6.803101942858263, - "learning_rate": 3.1570929971433897e-06, - "loss": 1.0122, - "num_input_tokens_seen": 57353580, - "step": 2699 - }, - { - "epoch": 0.3246558047255456, - "flos": 16664984073240.0, - "grad_norm": 3.2678664696446416, - "learning_rate": 3.1564575416351504e-06, - "loss": 1.0485, - "num_input_tokens_seen": 57372000, - "step": 2700 - }, - { - "epoch": 0.32477604761618467, - "flos": 15457429936560.0, - "grad_norm": 3.0152704692626857, - "learning_rate": 3.155821910693221e-06, - "loss": 0.9639, - "num_input_tokens_seen": 57391135, - "step": 2701 - }, - { - "epoch": 0.3248962905068238, - "flos": 14068719128400.0, - "grad_norm": 2.7948706920103823, - "learning_rate": 3.1551861044140275e-06, - "loss": 1.0789, - "num_input_tokens_seen": 57410490, - "step": 2702 - }, - { - "epoch": 0.3250165333974629, - "flos": 17029198432920.0, - "grad_norm": 2.861139805556254, - "learning_rate": 3.15455012289402e-06, - "loss": 0.9923, - "num_input_tokens_seen": 57429855, - "step": 2703 - }, - { - "epoch": 0.32513677628810195, - "flos": 
17059374956040.0, - "grad_norm": 3.946108656518816, - "learning_rate": 3.153913966229677e-06, - "loss": 1.0627, - "num_input_tokens_seen": 57448695, - "step": 2704 - }, - { - "epoch": 0.32525701917874106, - "flos": 50602761283200.0, - "grad_norm": 0.6531036884536054, - "learning_rate": 3.1532776345175027e-06, - "loss": 0.7442, - "num_input_tokens_seen": 57513560, - "step": 2705 - }, - { - "epoch": 0.32537726206938017, - "flos": 13964049031800.0, - "grad_norm": 4.766430595972284, - "learning_rate": 3.1526411278540285e-06, - "loss": 1.0016, - "num_input_tokens_seen": 57531710, - "step": 2706 - }, - { - "epoch": 0.3254975049600192, - "flos": 20487604239120.0, - "grad_norm": 5.331773152235512, - "learning_rate": 3.1520044463358116e-06, - "loss": 1.0378, - "num_input_tokens_seen": 57548160, - "step": 2707 - }, - { - "epoch": 0.32561774785065833, - "flos": 13384975398120.0, - "grad_norm": 2.872186528059299, - "learning_rate": 3.151367590059436e-06, - "loss": 1.0251, - "num_input_tokens_seen": 57566305, - "step": 2708 - }, - { - "epoch": 0.32573799074129745, - "flos": 16428717482280.0, - "grad_norm": 2.9162324491349616, - "learning_rate": 3.1507305591215117e-06, - "loss": 1.0873, - "num_input_tokens_seen": 57583935, - "step": 2709 - }, - { - "epoch": 0.3258582336319365, - "flos": 51020675130600.0, - "grad_norm": 0.9407989632574033, - "learning_rate": 3.150093353618677e-06, - "loss": 0.8128, - "num_input_tokens_seen": 57648385, - "step": 2710 - }, - { - "epoch": 0.3259784765225756, - "flos": 15957288116520.0, - "grad_norm": 7.457238683518114, - "learning_rate": 3.149455973647596e-06, - "loss": 1.0944, - "num_input_tokens_seen": 57666165, - "step": 2711 - }, - { - "epoch": 0.32609871941321467, - "flos": 14540485771320.0, - "grad_norm": 5.559876927173163, - "learning_rate": 3.1488184193049563e-06, - "loss": 0.9855, - "num_input_tokens_seen": 57685420, - "step": 2712 - }, - { - "epoch": 0.3262189623038538, - "flos": 15928736656080.0, - "grad_norm": 4.524969549761582, - "learning_rate": 3.1481806906874767e-06, - "loss": 0.9535, - "num_input_tokens_seen": 57706450, - "step": 2713 - }, - { - "epoch": 0.3263392051944929, - "flos": 14855875831320.0, - "grad_norm": 3.5718595462708405, - "learning_rate": 3.147542787891899e-06, - "loss": 1.1044, - "num_input_tokens_seen": 57725515, - "step": 2714 - }, - { - "epoch": 0.32645944808513194, - "flos": 17085044229840.0, - "grad_norm": 3.1051086600881765, - "learning_rate": 3.1469047110149926e-06, - "loss": 0.9636, - "num_input_tokens_seen": 57743975, - "step": 2715 - }, - { - "epoch": 0.32657969097577105, - "flos": 14934079453560.0, - "grad_norm": 3.1277350092106264, - "learning_rate": 3.146266460153554e-06, - "loss": 1.0763, - "num_input_tokens_seen": 57763405, - "step": 2716 - }, - { - "epoch": 0.32669993386641016, - "flos": 16139855219760.0, - "grad_norm": 2.9457058833571823, - "learning_rate": 3.145628035404404e-06, - "loss": 1.0341, - "num_input_tokens_seen": 57782325, - "step": 2717 - }, - { - "epoch": 0.3268201767570492, - "flos": 51643513906560.0, - "grad_norm": 0.8435760954613227, - "learning_rate": 3.1449894368643922e-06, - "loss": 0.8304, - "num_input_tokens_seen": 57844360, - "step": 2718 - }, - { - "epoch": 0.32694041964768833, - "flos": 17451466221840.0, - "grad_norm": 1.9701096214738354, - "learning_rate": 3.1443506646303934e-06, - "loss": 0.9464, - "num_input_tokens_seen": 57865380, - "step": 2719 - }, - { - "epoch": 0.32706066253832744, - "flos": 23666591543280.0, - "grad_norm": 3.3503206526610216, - "learning_rate": 3.1437117187993086e-06, - 
"loss": 0.8987, - "num_input_tokens_seen": 57887420, - "step": 2720 - }, - { - "epoch": 0.3271809054289665, - "flos": 17111234750160.0, - "grad_norm": 2.635827033821013, - "learning_rate": 3.143072599468065e-06, - "loss": 1.0255, - "num_input_tokens_seen": 57906965, - "step": 2721 - }, - { - "epoch": 0.3273011483196056, - "flos": 27310753254960.0, - "grad_norm": 3.21759130862602, - "learning_rate": 3.1424333067336174e-06, - "loss": 0.9854, - "num_input_tokens_seen": 57929450, - "step": 2722 - }, - { - "epoch": 0.3274213912102447, - "flos": 20698661479680.0, - "grad_norm": 11.887940754225543, - "learning_rate": 3.141793840692945e-06, - "loss": 0.9943, - "num_input_tokens_seen": 57949920, - "step": 2723 - }, - { - "epoch": 0.32754163410088377, - "flos": 20756132339280.0, - "grad_norm": 3.051620465659297, - "learning_rate": 3.1411542014430553e-06, - "loss": 0.8383, - "num_input_tokens_seen": 57970720, - "step": 2724 - }, - { - "epoch": 0.3276618769915229, - "flos": 14645646452880.0, - "grad_norm": 3.9828745690856056, - "learning_rate": 3.1405143890809804e-06, - "loss": 1.0503, - "num_input_tokens_seen": 57989735, - "step": 2725 - }, - { - "epoch": 0.327782119882162, - "flos": 13226759121600.0, - "grad_norm": 3.4799463156143813, - "learning_rate": 3.1398744037037796e-06, - "loss": 0.9323, - "num_input_tokens_seen": 58008790, - "step": 2726 - }, - { - "epoch": 0.32790236277280105, - "flos": 15482332671360.0, - "grad_norm": 3.926789506330377, - "learning_rate": 3.139234245408538e-06, - "loss": 1.0616, - "num_input_tokens_seen": 58027390, - "step": 2727 - }, - { - "epoch": 0.32802260566344016, - "flos": 16586381850720.0, - "grad_norm": 2.7386402977649316, - "learning_rate": 3.1385939142923666e-06, - "loss": 0.9898, - "num_input_tokens_seen": 58049500, - "step": 2728 - }, - { - "epoch": 0.3281428485540792, - "flos": 17215628892720.0, - "grad_norm": 5.909313880608372, - "learning_rate": 3.137953410452405e-06, - "loss": 0.9993, - "num_input_tokens_seen": 58069490, - "step": 2729 - }, - { - "epoch": 0.3282630914447183, - "flos": 24346073316720.0, - "grad_norm": 2.305164869622422, - "learning_rate": 3.1373127339858146e-06, - "loss": 0.9752, - "num_input_tokens_seen": 58091810, - "step": 2730 - }, - { - "epoch": 0.32838333433535744, - "flos": 19654720054080.0, - "grad_norm": 4.0166757596705756, - "learning_rate": 3.136671884989787e-06, - "loss": 0.9589, - "num_input_tokens_seen": 58111440, - "step": 2731 - }, - { - "epoch": 0.3285035772259965, - "flos": 8693928751080.0, - "grad_norm": 3.082255578912195, - "learning_rate": 3.1360308635615383e-06, - "loss": 1.0828, - "num_input_tokens_seen": 58129700, - "step": 2732 - }, - { - "epoch": 0.3286238201166356, - "flos": 17293985822760.0, - "grad_norm": 3.340089594844263, - "learning_rate": 3.135389669798311e-06, - "loss": 1.0158, - "num_input_tokens_seen": 58147480, - "step": 2733 - }, - { - "epoch": 0.3287440630072747, - "flos": 15192489238920.0, - "grad_norm": 3.6565971535746558, - "learning_rate": 3.134748303797373e-06, - "loss": 1.0305, - "num_input_tokens_seen": 58164570, - "step": 2734 - }, - { - "epoch": 0.32886430589791377, - "flos": 16874630882040.0, - "grad_norm": 2.8447369550101964, - "learning_rate": 3.1341067656560203e-06, - "loss": 1.0338, - "num_input_tokens_seen": 58182135, - "step": 2735 - }, - { - "epoch": 0.3289845487885529, - "flos": 15930361718760.0, - "grad_norm": 2.5396977495597093, - "learning_rate": 3.133465055471572e-06, - "loss": 1.0749, - "num_input_tokens_seen": 58201640, - "step": 2736 - }, - { - "epoch": 0.329104791679192, - 
"flos": 13964018370240.0, - "grad_norm": 3.509032990799257, - "learning_rate": 3.1328231733413767e-06, - "loss": 0.8782, - "num_input_tokens_seen": 58218000, - "step": 2737 - }, - { - "epoch": 0.32922503456983104, - "flos": 11315372384760.0, - "grad_norm": 3.3163096275677333, - "learning_rate": 3.1321811193628067e-06, - "loss": 1.1234, - "num_input_tokens_seen": 58235865, - "step": 2738 - }, - { - "epoch": 0.32934527746047015, - "flos": 19103676634320.0, - "grad_norm": 4.983945248053564, - "learning_rate": 3.131538893633261e-06, - "loss": 0.9248, - "num_input_tokens_seen": 58255145, - "step": 2739 - }, - { - "epoch": 0.32946552035110926, - "flos": 16638302967960.0, - "grad_norm": 4.157217730530024, - "learning_rate": 3.130896496250165e-06, - "loss": 0.9985, - "num_input_tokens_seen": 58274690, - "step": 2740 - }, - { - "epoch": 0.3295857632417483, - "flos": 10003884028920.0, - "grad_norm": 3.790960532588483, - "learning_rate": 3.1302539273109693e-06, - "loss": 1.0848, - "num_input_tokens_seen": 58291235, - "step": 2741 - }, - { - "epoch": 0.32970600613238743, - "flos": 15770397733320.0, - "grad_norm": 2.1652901262582764, - "learning_rate": 3.1296111869131513e-06, - "loss": 1.0404, - "num_input_tokens_seen": 58308380, - "step": 2742 - }, - { - "epoch": 0.32982624902302654, - "flos": 15668088576840.0, - "grad_norm": 2.9485030562689176, - "learning_rate": 3.1289682751542153e-06, - "loss": 1.0728, - "num_input_tokens_seen": 58327660, - "step": 2743 - }, - { - "epoch": 0.3299464919136656, - "flos": 13230346524120.0, - "grad_norm": 4.788860571930716, - "learning_rate": 3.1283251921316883e-06, - "loss": 0.9428, - "num_input_tokens_seen": 58345125, - "step": 2744 - }, - { - "epoch": 0.3300667348043047, - "flos": 9454159056240.0, - "grad_norm": 3.1604925667473625, - "learning_rate": 3.1276819379431277e-06, - "loss": 1.0435, - "num_input_tokens_seen": 58362935, - "step": 2745 - }, - { - "epoch": 0.33018697769494376, - "flos": 11132437342800.0, - "grad_norm": 4.255687498321984, - "learning_rate": 3.1270385126861134e-06, - "loss": 0.972, - "num_input_tokens_seen": 58380640, - "step": 2746 - }, - { - "epoch": 0.3303072205855829, - "flos": 12940472430120.0, - "grad_norm": 3.846287579779014, - "learning_rate": 3.1263949164582533e-06, - "loss": 1.0451, - "num_input_tokens_seen": 58400010, - "step": 2747 - }, - { - "epoch": 0.330427463476222, - "flos": 12574755654000.0, - "grad_norm": 4.223954444943044, - "learning_rate": 3.1257511493571797e-06, - "loss": 0.996, - "num_input_tokens_seen": 58418235, - "step": 2748 - }, - { - "epoch": 0.33054770636686104, - "flos": 19339789917480.0, - "grad_norm": 3.255061975876477, - "learning_rate": 3.125107211480552e-06, - "loss": 1.0107, - "num_input_tokens_seen": 58437890, - "step": 2749 - }, - { - "epoch": 0.33066794925750015, - "flos": 14275913012400.0, - "grad_norm": 2.7109938733838295, - "learning_rate": 3.124463102926054e-06, - "loss": 1.0136, - "num_input_tokens_seen": 58456945, - "step": 2750 - }, - { - "epoch": 0.33078819214813926, - "flos": 50591508490680.0, - "grad_norm": 0.7516115676184614, - "learning_rate": 3.1238188237913984e-06, - "loss": 0.8568, - "num_input_tokens_seen": 58521205, - "step": 2751 - }, - { - "epoch": 0.3309084350387783, - "flos": 15012804322320.0, - "grad_norm": 6.764245154650076, - "learning_rate": 3.1231743741743202e-06, - "loss": 0.9835, - "num_input_tokens_seen": 58540430, - "step": 2752 - }, - { - "epoch": 0.3310286779294174, - "flos": 10030442487960.0, - "grad_norm": 4.0581559650950005, - "learning_rate": 3.122529754172582e-06, - 
"loss": 1.0575, - "num_input_tokens_seen": 58557035, - "step": 2753 - }, - { - "epoch": 0.33114892082005654, - "flos": 14751481688760.0, - "grad_norm": 11.816523759746284, - "learning_rate": 3.1218849638839736e-06, - "loss": 0.953, - "num_input_tokens_seen": 58576015, - "step": 2754 - }, - { - "epoch": 0.3312691637106956, - "flos": 12101118655920.0, - "grad_norm": 3.7499498999745766, - "learning_rate": 3.121240003406307e-06, - "loss": 1.0095, - "num_input_tokens_seen": 58594585, - "step": 2755 - }, - { - "epoch": 0.3313894066013347, - "flos": 20987523742200.0, - "grad_norm": 3.0558586509730064, - "learning_rate": 3.120594872837425e-06, - "loss": 0.9426, - "num_input_tokens_seen": 58612975, - "step": 2756 - }, - { - "epoch": 0.3315096494919738, - "flos": 43963865756400.0, - "grad_norm": 0.858535772974646, - "learning_rate": 3.1199495722751906e-06, - "loss": 0.878, - "num_input_tokens_seen": 58672225, - "step": 2757 - }, - { - "epoch": 0.33162989238261287, - "flos": 15380176822680.0, - "grad_norm": 3.7635826808969224, - "learning_rate": 3.1193041018174972e-06, - "loss": 1.0719, - "num_input_tokens_seen": 58692660, - "step": 2758 - }, - { - "epoch": 0.331750135273252, - "flos": 16114492561560.0, - "grad_norm": 2.822466167993908, - "learning_rate": 3.118658461562261e-06, - "loss": 1.1656, - "num_input_tokens_seen": 58708480, - "step": 2759 - }, - { - "epoch": 0.33187037816389103, - "flos": 16166199047880.0, - "grad_norm": 2.3616181882496443, - "learning_rate": 3.118012651607426e-06, - "loss": 1.0758, - "num_input_tokens_seen": 58729805, - "step": 2760 - }, - { - "epoch": 0.33199062105453014, - "flos": 13619279649240.0, - "grad_norm": 4.654576762592619, - "learning_rate": 3.1173666720509603e-06, - "loss": 1.0537, - "num_input_tokens_seen": 58746460, - "step": 2761 - }, - { - "epoch": 0.33211086394516925, - "flos": 22512307632480.0, - "grad_norm": 3.6709493566068128, - "learning_rate": 3.116720522990859e-06, - "loss": 0.9073, - "num_input_tokens_seen": 58767055, - "step": 2762 - }, - { - "epoch": 0.3322311068358083, - "flos": 12706413471480.0, - "grad_norm": 3.346017656834924, - "learning_rate": 3.116074204525142e-06, - "loss": 0.8431, - "num_input_tokens_seen": 58784950, - "step": 2763 - }, - { - "epoch": 0.3323513497264474, - "flos": 23010172811040.0, - "grad_norm": 2.051769548879153, - "learning_rate": 3.1154277167518553e-06, - "loss": 1.0543, - "num_input_tokens_seen": 58806285, - "step": 2764 - }, - { - "epoch": 0.33247159261708653, - "flos": 37675810601040.0, - "grad_norm": 0.8564186570511672, - "learning_rate": 3.114781059769072e-06, - "loss": 0.8564, - "num_input_tokens_seen": 58857330, - "step": 2765 - }, - { - "epoch": 0.3325918355077256, - "flos": 19312832858160.0, - "grad_norm": 7.748905473759488, - "learning_rate": 3.1141342336748874e-06, - "loss": 0.9054, - "num_input_tokens_seen": 58876610, - "step": 2766 - }, - { - "epoch": 0.3327120783983647, - "flos": 16826327828880.0, - "grad_norm": 1.7843592761627975, - "learning_rate": 3.1134872385674253e-06, - "loss": 1.0375, - "num_input_tokens_seen": 58900485, - "step": 2767 - }, - { - "epoch": 0.3328323212890038, - "flos": 13596553885200.0, - "grad_norm": 2.666982800897753, - "learning_rate": 3.1128400745448353e-06, - "loss": 1.0948, - "num_input_tokens_seen": 58919585, - "step": 2768 - }, - { - "epoch": 0.33295256417964286, - "flos": 26917312880520.0, - "grad_norm": 2.9861788266411935, - "learning_rate": 3.11219274170529e-06, - "loss": 0.8547, - "num_input_tokens_seen": 58941115, - "step": 2769 - }, - { - "epoch": 0.333072807070282, - 
"flos": 18867931289880.0, - "grad_norm": 4.693906551345964, - "learning_rate": 3.1115452401469903e-06, - "loss": 1.044, - "num_input_tokens_seen": 58961235, - "step": 2770 - }, - { - "epoch": 0.3331930499609211, - "flos": 15220090191000.0, - "grad_norm": 5.149210986092615, - "learning_rate": 3.1108975699681613e-06, - "loss": 1.0938, - "num_input_tokens_seen": 58978350, - "step": 2771 - }, - { - "epoch": 0.33331329285156014, - "flos": 14665367384040.0, - "grad_norm": 2.710415660751434, - "learning_rate": 3.1102497312670542e-06, - "loss": 0.943, - "num_input_tokens_seen": 58996075, - "step": 2772 - }, - { - "epoch": 0.33343353574219925, - "flos": 19942754454480.0, - "grad_norm": 2.976275092239059, - "learning_rate": 3.109601724141946e-06, - "loss": 1.0188, - "num_input_tokens_seen": 59014790, - "step": 2773 - }, - { - "epoch": 0.33355377863283836, - "flos": 16897387307640.0, - "grad_norm": 2.8194612607992084, - "learning_rate": 3.108953548691138e-06, - "loss": 0.9176, - "num_input_tokens_seen": 59034595, - "step": 2774 - }, - { - "epoch": 0.3336740215234774, - "flos": 26970061859880.0, - "grad_norm": 3.8187250064712472, - "learning_rate": 3.108305205012959e-06, - "loss": 0.9458, - "num_input_tokens_seen": 59055010, - "step": 2775 - }, - { - "epoch": 0.3337942644141165, - "flos": 18158395639560.0, - "grad_norm": 3.8621765346943997, - "learning_rate": 3.107656693205761e-06, - "loss": 1.1102, - "num_input_tokens_seen": 59074170, - "step": 2776 - }, - { - "epoch": 0.3339145073047556, - "flos": 18496266171120.0, - "grad_norm": 4.923172262075573, - "learning_rate": 3.107008013367924e-06, - "loss": 0.9119, - "num_input_tokens_seen": 59092685, - "step": 2777 - }, - { - "epoch": 0.3340347501953947, - "flos": 13518442247640.0, - "grad_norm": 9.35533011379534, - "learning_rate": 3.1063591655978507e-06, - "loss": 1.0837, - "num_input_tokens_seen": 59108355, - "step": 2778 - }, - { - "epoch": 0.3341549930860338, - "flos": 12833073454680.0, - "grad_norm": 4.5802623980121, - "learning_rate": 3.105710149993972e-06, - "loss": 1.0209, - "num_input_tokens_seen": 59127405, - "step": 2779 - }, - { - "epoch": 0.33427523597667286, - "flos": 16114645869360.0, - "grad_norm": 4.923679565948585, - "learning_rate": 3.1050609666547427e-06, - "loss": 1.0803, - "num_input_tokens_seen": 59146685, - "step": 2780 - }, - { - "epoch": 0.33439547886731197, - "flos": 16088302041240.0, - "grad_norm": 4.694274279778925, - "learning_rate": 3.104411615678644e-06, - "loss": 0.9982, - "num_input_tokens_seen": 59165255, - "step": 2781 - }, - { - "epoch": 0.3345157217579511, - "flos": 17135984177160.0, - "grad_norm": 4.447399219488271, - "learning_rate": 3.1037620971641803e-06, - "loss": 0.9604, - "num_input_tokens_seen": 59184765, - "step": 2782 - }, - { - "epoch": 0.33463596464859013, - "flos": 13465907899200.0, - "grad_norm": 5.5690793334987365, - "learning_rate": 3.1031124112098844e-06, - "loss": 0.878, - "num_input_tokens_seen": 59202695, - "step": 2783 - }, - { - "epoch": 0.33475620753922924, - "flos": 14462036856600.0, - "grad_norm": 3.127492139823178, - "learning_rate": 3.1024625579143127e-06, - "loss": 0.9463, - "num_input_tokens_seen": 59219935, - "step": 2784 - }, - { - "epoch": 0.33487645042986836, - "flos": 12885423833760.0, - "grad_norm": 2.5810679891115815, - "learning_rate": 3.101812537376048e-06, - "loss": 0.9497, - "num_input_tokens_seen": 59238675, - "step": 2785 - }, - { - "epoch": 0.3349966933205074, - "flos": 18395582077320.0, - "grad_norm": 3.6833858492000147, - "learning_rate": 3.1011623496936973e-06, - "loss": 
1.0594, - "num_input_tokens_seen": 59256690, - "step": 2786 - }, - { - "epoch": 0.3351169362111465, - "flos": 20021755277280.0, - "grad_norm": 2.462533681416479, - "learning_rate": 3.100511994965893e-06, - "loss": 0.9209, - "num_input_tokens_seen": 59276365, - "step": 2787 - }, - { - "epoch": 0.33523717910178563, - "flos": 16113235437600.0, - "grad_norm": 3.1745184045024692, - "learning_rate": 3.0998614732912947e-06, - "loss": 1.0851, - "num_input_tokens_seen": 59295460, - "step": 2788 - }, - { - "epoch": 0.3353574219924247, - "flos": 11080178948400.0, - "grad_norm": 4.087933317308722, - "learning_rate": 3.0992107847685855e-06, - "loss": 0.8967, - "num_input_tokens_seen": 59312895, - "step": 2789 - }, - { - "epoch": 0.3354776648830638, - "flos": 17635075818120.0, - "grad_norm": 4.854045533509326, - "learning_rate": 3.0985599294964736e-06, - "loss": 1.0232, - "num_input_tokens_seen": 59332170, - "step": 2790 - }, - { - "epoch": 0.33559790777370285, - "flos": 20440987571760.0, - "grad_norm": 5.0302540047976905, - "learning_rate": 3.097908907573695e-06, - "loss": 0.9325, - "num_input_tokens_seen": 59349870, - "step": 2791 - }, - { - "epoch": 0.33571815066434196, - "flos": 15798826547520.0, - "grad_norm": 4.520243358643681, - "learning_rate": 3.0972577190990067e-06, - "loss": 1.1175, - "num_input_tokens_seen": 59368070, - "step": 2792 - }, - { - "epoch": 0.3358383935549811, - "flos": 16953539720160.0, - "grad_norm": 5.355267877797408, - "learning_rate": 3.096606364171196e-06, - "loss": 1.0213, - "num_input_tokens_seen": 59387580, - "step": 2793 - }, - { - "epoch": 0.33595863644562013, - "flos": 15820816434120.0, - "grad_norm": 5.953508499889116, - "learning_rate": 3.0959548428890703e-06, - "loss": 1.0684, - "num_input_tokens_seen": 59406170, - "step": 2794 - }, - { - "epoch": 0.33607887933625924, - "flos": 14278120644720.0, - "grad_norm": 6.452610983708849, - "learning_rate": 3.095303155351468e-06, - "loss": 1.0558, - "num_input_tokens_seen": 59426095, - "step": 2795 - }, - { - "epoch": 0.33619912222689835, - "flos": 13702113167040.0, - "grad_norm": 4.490603491108458, - "learning_rate": 3.0946513016572464e-06, - "loss": 1.0148, - "num_input_tokens_seen": 59444720, - "step": 2796 - }, - { - "epoch": 0.3363193651175374, - "flos": 11892882278880.0, - "grad_norm": 3.7004877026779726, - "learning_rate": 3.0939992819052938e-06, - "loss": 0.9845, - "num_input_tokens_seen": 59461950, - "step": 2797 - }, - { - "epoch": 0.3364396080081765, - "flos": 16743678280440.0, - "grad_norm": 6.522211126303274, - "learning_rate": 3.0933470961945193e-06, - "loss": 1.0299, - "num_input_tokens_seen": 59479965, - "step": 2798 - }, - { - "epoch": 0.3365598508988156, - "flos": 19968822328560.0, - "grad_norm": 2.889371858067406, - "learning_rate": 3.0926947446238597e-06, - "loss": 0.917, - "num_input_tokens_seen": 59499255, - "step": 2799 - }, - { - "epoch": 0.3366800937894547, - "flos": 12023712234240.0, - "grad_norm": 4.014702004363199, - "learning_rate": 3.092042227292276e-06, - "loss": 1.0477, - "num_input_tokens_seen": 59515810, - "step": 2800 - }, - { - "epoch": 0.3368003366800938, - "flos": 17006043407040.0, - "grad_norm": 3.11496466064001, - "learning_rate": 3.0913895442987557e-06, - "loss": 1.1108, - "num_input_tokens_seen": 59536495, - "step": 2801 - }, - { - "epoch": 0.3369205795707329, - "flos": 17635566403080.0, - "grad_norm": 3.6270803861626155, - "learning_rate": 3.090736695742308e-06, - "loss": 1.0781, - "num_input_tokens_seen": 59557345, - "step": 2802 - }, - { - "epoch": 0.33704082246137196, - "flos": 
12705217670640.0, - "grad_norm": 6.745216370004289, - "learning_rate": 3.0900836817219713e-06, - "loss": 0.7314, - "num_input_tokens_seen": 59573495, - "step": 2803 - }, - { - "epoch": 0.33716106535201107, - "flos": 15116523910560.0, - "grad_norm": 2.851121675044438, - "learning_rate": 3.089430502336807e-06, - "loss": 1.0666, - "num_input_tokens_seen": 59593185, - "step": 2804 - }, - { - "epoch": 0.3372813082426502, - "flos": 13044008049000.0, - "grad_norm": 4.34888730781105, - "learning_rate": 3.088777157685902e-06, - "loss": 1.119, - "num_input_tokens_seen": 59608495, - "step": 2805 - }, - { - "epoch": 0.33740155113328923, - "flos": 12180794033040.0, - "grad_norm": 3.5968858388941274, - "learning_rate": 3.088123647868367e-06, - "loss": 1.0822, - "num_input_tokens_seen": 59624765, - "step": 2806 - }, - { - "epoch": 0.33752179402392835, - "flos": 20702555497800.0, - "grad_norm": 3.3561956930473076, - "learning_rate": 3.0874699729833405e-06, - "loss": 1.0336, - "num_input_tokens_seen": 59645855, - "step": 2807 - }, - { - "epoch": 0.3376420369145674, - "flos": 17843312195160.0, - "grad_norm": 3.9612488031791786, - "learning_rate": 3.086816133129983e-06, - "loss": 1.0263, - "num_input_tokens_seen": 59665835, - "step": 2808 - }, - { - "epoch": 0.3377622798052065, - "flos": 19575504600360.0, - "grad_norm": 3.387326794239688, - "learning_rate": 3.0861621284074826e-06, - "loss": 0.9911, - "num_input_tokens_seen": 59686080, - "step": 2809 - }, - { - "epoch": 0.3378825226958456, - "flos": 15610863009720.0, - "grad_norm": 3.128918441767758, - "learning_rate": 3.085507958915051e-06, - "loss": 0.9528, - "num_input_tokens_seen": 59704230, - "step": 2810 - }, - { - "epoch": 0.3380027655864847, - "flos": 30379183443000.0, - "grad_norm": 6.5764123132278, - "learning_rate": 3.084853624751925e-06, - "loss": 0.9345, - "num_input_tokens_seen": 59725535, - "step": 2811 - }, - { - "epoch": 0.3381230084771238, - "flos": 19025411688960.0, - "grad_norm": 4.157035122478698, - "learning_rate": 3.0841991260173668e-06, - "loss": 1.0719, - "num_input_tokens_seen": 59745160, - "step": 2812 - }, - { - "epoch": 0.3382432513677629, - "flos": 16139947204440.0, - "grad_norm": 3.6645856211389307, - "learning_rate": 3.0835444628106634e-06, - "loss": 1.0324, - "num_input_tokens_seen": 59763860, - "step": 2813 - }, - { - "epoch": 0.33836349425840195, - "flos": 15716820891840.0, - "grad_norm": 5.102891267001181, - "learning_rate": 3.082889635231126e-06, - "loss": 1.0665, - "num_input_tokens_seen": 59782240, - "step": 2814 - }, - { - "epoch": 0.33848373714904106, - "flos": 19444306706280.0, - "grad_norm": 3.6490836051297264, - "learning_rate": 3.0822346433780925e-06, - "loss": 0.9702, - "num_input_tokens_seen": 59802685, - "step": 2815 - }, - { - "epoch": 0.3386039800396802, - "flos": 18395490092640.0, - "grad_norm": 4.1840074692581055, - "learning_rate": 3.0815794873509237e-06, - "loss": 1.0737, - "num_input_tokens_seen": 59820690, - "step": 2816 - }, - { - "epoch": 0.33872422293031923, - "flos": 13387765600080.0, - "grad_norm": 5.8787475970079015, - "learning_rate": 3.0809241672490066e-06, - "loss": 0.9484, - "num_input_tokens_seen": 59838580, - "step": 2817 - }, - { - "epoch": 0.33884446582095834, - "flos": 16453558893960.0, - "grad_norm": 2.77264874484975, - "learning_rate": 3.080268683171753e-06, - "loss": 1.0721, - "num_input_tokens_seen": 59858590, - "step": 2818 - }, - { - "epoch": 0.33896470871159745, - "flos": 11315464369440.0, - "grad_norm": 5.051501158634776, - "learning_rate": 3.0796130352185985e-06, - "loss": 1.1118, 
- "num_input_tokens_seen": 59875165, - "step": 2819 - }, - { - "epoch": 0.3390849516022365, - "flos": 24609940859760.0, - "grad_norm": 9.065151063614568, - "learning_rate": 3.0789572234890057e-06, - "loss": 0.9048, - "num_input_tokens_seen": 59896525, - "step": 2820 - }, - { - "epoch": 0.3392051944928756, - "flos": 11447030202240.0, - "grad_norm": 7.200045574891702, - "learning_rate": 3.0783012480824596e-06, - "loss": 1.0014, - "num_input_tokens_seen": 59915390, - "step": 2821 - }, - { - "epoch": 0.33932543738351467, - "flos": 12097623238080.0, - "grad_norm": 4.356266567904739, - "learning_rate": 3.077645109098471e-06, - "loss": 0.9677, - "num_input_tokens_seen": 59931380, - "step": 2822 - }, - { - "epoch": 0.3394456802741538, - "flos": 15720714909960.0, - "grad_norm": 3.4087499841089253, - "learning_rate": 3.076988806636577e-06, - "loss": 0.9391, - "num_input_tokens_seen": 59948860, - "step": 2823 - }, - { - "epoch": 0.3395659231647929, - "flos": 17948380892040.0, - "grad_norm": 2.882628892463447, - "learning_rate": 3.0763323407963377e-06, - "loss": 1.104, - "num_input_tokens_seen": 59968190, - "step": 2824 - }, - { - "epoch": 0.33968616605543195, - "flos": 20729665864920.0, - "grad_norm": 5.284528267846798, - "learning_rate": 3.075675711677337e-06, - "loss": 1.0208, - "num_input_tokens_seen": 59988005, - "step": 2825 - }, - { - "epoch": 0.33980640894607106, - "flos": 15222696423600.0, - "grad_norm": 4.02332751670065, - "learning_rate": 3.0750189193791865e-06, - "loss": 1.0023, - "num_input_tokens_seen": 60007310, - "step": 2826 - }, - { - "epoch": 0.33992665183671017, - "flos": 23168603718480.0, - "grad_norm": 3.841600887262257, - "learning_rate": 3.0743619640015203e-06, - "loss": 0.934, - "num_input_tokens_seen": 60027280, - "step": 2827 - }, - { - "epoch": 0.3400468947273492, - "flos": 12075878643960.0, - "grad_norm": 3.1128941849353, - "learning_rate": 3.073704845643999e-06, - "loss": 1.146, - "num_input_tokens_seen": 60044125, - "step": 2828 - }, - { - "epoch": 0.34016713761798834, - "flos": 11944772734560.0, - "grad_norm": 6.492698089315713, - "learning_rate": 3.0730475644063063e-06, - "loss": 1.0075, - "num_input_tokens_seen": 60058945, - "step": 2829 - }, - { - "epoch": 0.34028738050862745, - "flos": 15562621279680.0, - "grad_norm": 3.645162742341123, - "learning_rate": 3.072390120388151e-06, - "loss": 0.8809, - "num_input_tokens_seen": 60076990, - "step": 2830 - }, - { - "epoch": 0.3404076233992665, - "flos": 16165463170440.0, - "grad_norm": 7.638480605186515, - "learning_rate": 3.071732513689267e-06, - "loss": 0.9167, - "num_input_tokens_seen": 60095245, - "step": 2831 - }, - { - "epoch": 0.3405278662899056, - "flos": 12073272411360.0, - "grad_norm": 6.404649488043609, - "learning_rate": 3.0710747444094134e-06, - "loss": 0.8971, - "num_input_tokens_seen": 60112995, - "step": 2832 - }, - { - "epoch": 0.3406481091805447, - "flos": 30587297173800.0, - "grad_norm": 7.271045525049596, - "learning_rate": 3.070416812648372e-06, - "loss": 0.8828, - "num_input_tokens_seen": 60136165, - "step": 2833 - }, - { - "epoch": 0.3407683520711838, - "flos": 19051540886160.0, - "grad_norm": 3.5963048726689757, - "learning_rate": 3.069758718505951e-06, - "loss": 0.8648, - "num_input_tokens_seen": 60157625, - "step": 2834 - }, - { - "epoch": 0.3408885949618229, - "flos": 20493276627720.0, - "grad_norm": 3.987908685004164, - "learning_rate": 3.0691004620819836e-06, - "loss": 1.0267, - "num_input_tokens_seen": 60177475, - "step": 2835 - }, - { - "epoch": 0.341008837852462, - "flos": 45514656197640.0, - 
"grad_norm": 0.8030676973435824, - "learning_rate": 3.0684420434763254e-06, - "loss": 0.8639, - "num_input_tokens_seen": 60243380, - "step": 2836 - }, - { - "epoch": 0.34112908074310105, - "flos": 14775617884560.0, - "grad_norm": 3.8138171562851184, - "learning_rate": 3.06778346278886e-06, - "loss": 0.9819, - "num_input_tokens_seen": 60261935, - "step": 2837 - }, - { - "epoch": 0.34124932363374016, - "flos": 17769063914160.0, - "grad_norm": 2.1894039043028943, - "learning_rate": 3.0671247201194906e-06, - "loss": 1.0191, - "num_input_tokens_seen": 60283790, - "step": 2838 - }, - { - "epoch": 0.3413695665243792, - "flos": 20231218116720.0, - "grad_norm": 10.614663874676545, - "learning_rate": 3.066465815568151e-06, - "loss": 0.9727, - "num_input_tokens_seen": 60304340, - "step": 2839 - }, - { - "epoch": 0.34148980941501833, - "flos": 18002662949400.0, - "grad_norm": 4.127540499640419, - "learning_rate": 3.0658067492347947e-06, - "loss": 0.9229, - "num_input_tokens_seen": 60326700, - "step": 2840 - }, - { - "epoch": 0.34161005230565744, - "flos": 12417060624000.0, - "grad_norm": 5.2216496435513555, - "learning_rate": 3.065147521219402e-06, - "loss": 0.8942, - "num_input_tokens_seen": 60345675, - "step": 2841 - }, - { - "epoch": 0.3417302951962965, - "flos": 31188514001880.0, - "grad_norm": 5.827533927173937, - "learning_rate": 3.064488131621977e-06, - "loss": 0.9782, - "num_input_tokens_seen": 60368720, - "step": 2842 - }, - { - "epoch": 0.3418505380869356, - "flos": 22012756068120.0, - "grad_norm": 3.198061331957063, - "learning_rate": 3.063828580542549e-06, - "loss": 0.9576, - "num_input_tokens_seen": 60389635, - "step": 2843 - }, - { - "epoch": 0.3419707809775747, - "flos": 13806844586760.0, - "grad_norm": 3.7074814292972507, - "learning_rate": 3.0631688680811706e-06, - "loss": 0.9402, - "num_input_tokens_seen": 60408980, - "step": 2844 - }, - { - "epoch": 0.3420910238682138, - "flos": 20464203920760.0, - "grad_norm": 5.692320070233439, - "learning_rate": 3.062508994337921e-06, - "loss": 0.9859, - "num_input_tokens_seen": 60428305, - "step": 2845 - }, - { - "epoch": 0.3422112667588529, - "flos": 15196291272360.0, - "grad_norm": 3.1538256408353535, - "learning_rate": 3.0618489594129013e-06, - "loss": 1.003, - "num_input_tokens_seen": 60446165, - "step": 2846 - }, - { - "epoch": 0.342331509649492, - "flos": 9794850451320.0, - "grad_norm": 2.9755803287504574, - "learning_rate": 3.061188763406239e-06, - "loss": 0.9335, - "num_input_tokens_seen": 60462030, - "step": 2847 - }, - { - "epoch": 0.34245175254013105, - "flos": 20388177269280.0, - "grad_norm": 6.504753722001066, - "learning_rate": 3.060528406418085e-06, - "loss": 1.0459, - "num_input_tokens_seen": 60481600, - "step": 2848 - }, - { - "epoch": 0.34257199543077016, - "flos": 24344448254040.0, - "grad_norm": 3.0766695715937704, - "learning_rate": 3.0598678885486145e-06, - "loss": 0.8423, - "num_input_tokens_seen": 60503860, - "step": 2849 - }, - { - "epoch": 0.34269223832140927, - "flos": 14173389225000.0, - "grad_norm": 4.670153654785007, - "learning_rate": 3.0592072098980282e-06, - "loss": 0.9646, - "num_input_tokens_seen": 60523240, - "step": 2850 - }, - { - "epoch": 0.3428124812120483, - "flos": 19392324265920.0, - "grad_norm": 4.126481917091721, - "learning_rate": 3.0585463705665514e-06, - "loss": 0.9406, - "num_input_tokens_seen": 60543335, - "step": 2851 - }, - { - "epoch": 0.34293272410268744, - "flos": 17477012849400.0, - "grad_norm": 75.00524721227842, - "learning_rate": 3.0578853706544304e-06, - "loss": 0.9272, - 
"num_input_tokens_seen": 60560445, - "step": 2852 - }, - { - "epoch": 0.34305296699332655, - "flos": 15275077464240.0, - "grad_norm": 4.808404542251345, - "learning_rate": 3.0572242102619404e-06, - "loss": 0.8692, - "num_input_tokens_seen": 60577320, - "step": 2853 - }, - { - "epoch": 0.3431732098839656, - "flos": 17107003454880.0, - "grad_norm": 3.400863448064743, - "learning_rate": 3.0565628894893784e-06, - "loss": 1.042, - "num_input_tokens_seen": 60597675, - "step": 2854 - }, - { - "epoch": 0.3432934527746047, - "flos": 11893066248240.0, - "grad_norm": 3.2693938633209227, - "learning_rate": 3.0559014084370655e-06, - "loss": 0.9744, - "num_input_tokens_seen": 60615920, - "step": 2855 - }, - { - "epoch": 0.34341369566524377, - "flos": 16664064226440.0, - "grad_norm": 3.3823836761795287, - "learning_rate": 3.055239767205349e-06, - "loss": 1.009, - "num_input_tokens_seen": 60637390, - "step": 2856 - }, - { - "epoch": 0.3435339385558829, - "flos": 12227349377280.0, - "grad_norm": 3.9848481738796613, - "learning_rate": 3.054577965894599e-06, - "loss": 1.0012, - "num_input_tokens_seen": 60653255, - "step": 2857 - }, - { - "epoch": 0.343654181446522, - "flos": 15772666688760.0, - "grad_norm": 3.086067293552108, - "learning_rate": 3.0539160046052094e-06, - "loss": 0.9465, - "num_input_tokens_seen": 60672675, - "step": 2858 - }, - { - "epoch": 0.34377442433716104, - "flos": 14121590754000.0, - "grad_norm": 3.7331463057355325, - "learning_rate": 3.0532538834376003e-06, - "loss": 0.9203, - "num_input_tokens_seen": 60691955, - "step": 2859 - }, - { - "epoch": 0.34389466722780015, - "flos": 15771225595440.0, - "grad_norm": 4.7943645456394295, - "learning_rate": 3.0525916024922143e-06, - "loss": 1.0059, - "num_input_tokens_seen": 60710860, - "step": 2860 - }, - { - "epoch": 0.34401491011843927, - "flos": 13203634757280.0, - "grad_norm": 7.243571884690704, - "learning_rate": 3.0519291618695193e-06, - "loss": 1.0599, - "num_input_tokens_seen": 60727980, - "step": 2861 - }, - { - "epoch": 0.3441351530090783, - "flos": 12652836630000.0, - "grad_norm": 4.224564677728179, - "learning_rate": 3.0512665616700065e-06, - "loss": 0.9691, - "num_input_tokens_seen": 60746765, - "step": 2862 - }, - { - "epoch": 0.34425539589971743, - "flos": 16429269390360.0, - "grad_norm": 5.293944141561621, - "learning_rate": 3.0506038019941933e-06, - "loss": 1.1131, - "num_input_tokens_seen": 60766495, - "step": 2863 - }, - { - "epoch": 0.34437563879035654, - "flos": 15563081203080.0, - "grad_norm": 5.064406989461572, - "learning_rate": 3.049940882942617e-06, - "loss": 0.8897, - "num_input_tokens_seen": 60785000, - "step": 2864 - }, - { - "epoch": 0.3444958816809956, - "flos": 16403600116560.0, - "grad_norm": 3.495295359910967, - "learning_rate": 3.0492778046158448e-06, - "loss": 1.0187, - "num_input_tokens_seen": 60806140, - "step": 2865 - }, - { - "epoch": 0.3446161245716347, - "flos": 15563878403640.0, - "grad_norm": 3.947912828482152, - "learning_rate": 3.0486145671144633e-06, - "loss": 0.9918, - "num_input_tokens_seen": 60825650, - "step": 2866 - }, - { - "epoch": 0.3447363674622738, - "flos": 17865792666720.0, - "grad_norm": 7.344556658655922, - "learning_rate": 3.047951170539086e-06, - "loss": 0.9672, - "num_input_tokens_seen": 60844995, - "step": 2867 - }, - { - "epoch": 0.3448566103529129, - "flos": 8431747593840.0, - "grad_norm": 2.748237912303014, - "learning_rate": 3.047287614990349e-06, - "loss": 1.0686, - "num_input_tokens_seen": 60862635, - "step": 2868 - }, - { - "epoch": 0.344976853243552, - "flos": 
28700813171760.0, - "grad_norm": 4.962930402179732, - "learning_rate": 3.046623900568914e-06, - "loss": 0.8185, - "num_input_tokens_seen": 60884920, - "step": 2869 - }, - { - "epoch": 0.34509709613419104, - "flos": 20460647179800.0, - "grad_norm": 9.877972891727687, - "learning_rate": 3.045960027375465e-06, - "loss": 0.9167, - "num_input_tokens_seen": 60902475, - "step": 2870 - }, - { - "epoch": 0.34521733902483015, - "flos": 21355110873480.0, - "grad_norm": 16.532486809829532, - "learning_rate": 3.045295995510711e-06, - "loss": 1.0448, - "num_input_tokens_seen": 60919165, - "step": 2871 - }, - { - "epoch": 0.34533758191546926, - "flos": 19885559548920.0, - "grad_norm": 3.605000408664633, - "learning_rate": 3.0446318050753865e-06, - "loss": 0.9596, - "num_input_tokens_seen": 60939365, - "step": 2872 - }, - { - "epoch": 0.3454578248061083, - "flos": 19313384766240.0, - "grad_norm": 6.158066844551372, - "learning_rate": 3.0439674561702474e-06, - "loss": 1.007, - "num_input_tokens_seen": 60958585, - "step": 2873 - }, - { - "epoch": 0.3455780676967474, - "flos": 13491209234280.0, - "grad_norm": 3.4465078066647936, - "learning_rate": 3.043302948896076e-06, - "loss": 1.0927, - "num_input_tokens_seen": 60976910, - "step": 2874 - }, - { - "epoch": 0.34569831058738654, - "flos": 24610155490680.0, - "grad_norm": 6.003921498842054, - "learning_rate": 3.0426382833536756e-06, - "loss": 0.8243, - "num_input_tokens_seen": 60999985, - "step": 2875 - }, - { - "epoch": 0.3458185534780256, - "flos": 22197224188080.0, - "grad_norm": 3.7584807818825894, - "learning_rate": 3.041973459643877e-06, - "loss": 1.0127, - "num_input_tokens_seen": 61019160, - "step": 2876 - }, - { - "epoch": 0.3459387963686647, - "flos": 23141493351360.0, - "grad_norm": 6.038889419819235, - "learning_rate": 3.0413084778675334e-06, - "loss": 0.8993, - "num_input_tokens_seen": 61040130, - "step": 2877 - }, - { - "epoch": 0.3460590392593038, - "flos": 17551690392240.0, - "grad_norm": 2.808028927010108, - "learning_rate": 3.0406433381255214e-06, - "loss": 1.0582, - "num_input_tokens_seen": 61057885, - "step": 2878 - }, - { - "epoch": 0.34617928214994287, - "flos": 13335016620720.0, - "grad_norm": 3.4777334920022325, - "learning_rate": 3.0399780405187425e-06, - "loss": 1.0482, - "num_input_tokens_seen": 61076600, - "step": 2879 - }, - { - "epoch": 0.346299525040582, - "flos": 17682428362920.0, - "grad_norm": 3.445219769603132, - "learning_rate": 3.0393125851481216e-06, - "loss": 1.0039, - "num_input_tokens_seen": 61096195, - "step": 2880 - }, - { - "epoch": 0.3464197679312211, - "flos": 11629903921080.0, - "grad_norm": 4.57927785685774, - "learning_rate": 3.038646972114608e-06, - "loss": 1.0776, - "num_input_tokens_seen": 61112240, - "step": 2881 - }, - { - "epoch": 0.34654001082186014, - "flos": 15904355167800.0, - "grad_norm": 2.7512124596564593, - "learning_rate": 3.037981201519174e-06, - "loss": 0.9044, - "num_input_tokens_seen": 61132560, - "step": 2882 - }, - { - "epoch": 0.34666025371249926, - "flos": 13885998717360.0, - "grad_norm": 13.634411842074336, - "learning_rate": 3.0373152734628175e-06, - "loss": 0.9335, - "num_input_tokens_seen": 61150560, - "step": 2883 - }, - { - "epoch": 0.34678049660313837, - "flos": 10791040731840.0, - "grad_norm": 4.483784600181003, - "learning_rate": 3.0366491880465584e-06, - "loss": 0.9768, - "num_input_tokens_seen": 61168300, - "step": 2884 - }, - { - "epoch": 0.3469007394937774, - "flos": 15041693059920.0, - "grad_norm": 2.942243277111698, - "learning_rate": 3.035982945371443e-06, - "loss": 
1.0469, - "num_input_tokens_seen": 61189715, - "step": 2885 - }, - { - "epoch": 0.34702098238441653, - "flos": 15898376163600.0, - "grad_norm": 4.145596725894249, - "learning_rate": 3.035316545538537e-06, - "loss": 1.0849, - "num_input_tokens_seen": 61208230, - "step": 2886 - }, - { - "epoch": 0.3471412252750556, - "flos": 16297396941960.0, - "grad_norm": 3.510470668177387, - "learning_rate": 3.034649988648935e-06, - "loss": 1.0063, - "num_input_tokens_seen": 61227715, - "step": 2887 - }, - { - "epoch": 0.3472614681656947, - "flos": 15143818247040.0, - "grad_norm": 3.6464914363717114, - "learning_rate": 3.033983274803752e-06, - "loss": 1.0357, - "num_input_tokens_seen": 61247225, - "step": 2888 - }, - { - "epoch": 0.3473817110563338, - "flos": 16689886808040.0, - "grad_norm": 14.934993006803895, - "learning_rate": 3.0333164041041283e-06, - "loss": 0.9379, - "num_input_tokens_seen": 61263260, - "step": 2889 - }, - { - "epoch": 0.34750195394697286, - "flos": 15878164647480.0, - "grad_norm": 3.2274137764072126, - "learning_rate": 3.032649376651228e-06, - "loss": 0.945, - "num_input_tokens_seen": 61282400, - "step": 2890 - }, - { - "epoch": 0.347622196837612, - "flos": 20728408740960.0, - "grad_norm": 5.9027450899702565, - "learning_rate": 3.031982192546238e-06, - "loss": 0.9801, - "num_input_tokens_seen": 61305215, - "step": 2891 - }, - { - "epoch": 0.3477424397282511, - "flos": 15957042824040.0, - "grad_norm": 3.101529906747825, - "learning_rate": 3.0313148518903696e-06, - "loss": 1.1655, - "num_input_tokens_seen": 61324760, - "step": 2892 - }, - { - "epoch": 0.34786268261889014, - "flos": 11158443893760.0, - "grad_norm": 3.174040858255143, - "learning_rate": 3.030647354784859e-06, - "loss": 1.0238, - "num_input_tokens_seen": 61341520, - "step": 2893 - }, - { - "epoch": 0.34798292550952925, - "flos": 14751052426920.0, - "grad_norm": 7.173048534246663, - "learning_rate": 3.029979701330964e-06, - "loss": 0.9965, - "num_input_tokens_seen": 61360665, - "step": 2894 - }, - { - "epoch": 0.34810316840016836, - "flos": 14147413335600.0, - "grad_norm": 8.023766976103955, - "learning_rate": 3.029311891629966e-06, - "loss": 1.0269, - "num_input_tokens_seen": 61378840, - "step": 2895 - }, - { - "epoch": 0.3482234112908074, - "flos": 16796151305760.0, - "grad_norm": 3.251007070767784, - "learning_rate": 3.0286439257831744e-06, - "loss": 0.9639, - "num_input_tokens_seen": 61398030, - "step": 2896 - }, - { - "epoch": 0.3483436541814465, - "flos": 17057136662160.0, - "grad_norm": 7.470926518310309, - "learning_rate": 3.0279758038919156e-06, - "loss": 0.9284, - "num_input_tokens_seen": 61415975, - "step": 2897 - }, - { - "epoch": 0.34846389707208564, - "flos": 16088302041240.0, - "grad_norm": 3.0492567876602794, - "learning_rate": 3.0273075260575455e-06, - "loss": 1.0019, - "num_input_tokens_seen": 61434595, - "step": 2898 - }, - { - "epoch": 0.3485841399627247, - "flos": 15479818423440.0, - "grad_norm": 3.908669172768199, - "learning_rate": 3.0266390923814396e-06, - "loss": 1.0231, - "num_input_tokens_seen": 61452375, - "step": 2899 - }, - { - "epoch": 0.3487043828533638, - "flos": 12049749446760.0, - "grad_norm": 4.087122320522722, - "learning_rate": 3.0259705029650008e-06, - "loss": 1.047, - "num_input_tokens_seen": 61470025, - "step": 2900 - }, - { - "epoch": 0.34882462574400286, - "flos": 16061467628160.0, - "grad_norm": 2.9916513625012957, - "learning_rate": 3.025301757909652e-06, - "loss": 0.9573, - "num_input_tokens_seen": 61489940, - "step": 2901 - }, - { - "epoch": 0.34894486863464197, - "flos": 
21279329514480.0, - "grad_norm": 2.9108772767123945, - "learning_rate": 3.024632857316842e-06, - "loss": 1.0379, - "num_input_tokens_seen": 61510975, - "step": 2902 - }, - { - "epoch": 0.3490651115252811, - "flos": 15716698245600.0, - "grad_norm": 5.689742432247043, - "learning_rate": 3.0239638012880412e-06, - "loss": 0.9966, - "num_input_tokens_seen": 61530590, - "step": 2903 - }, - { - "epoch": 0.34918535441592014, - "flos": 8928907556520.0, - "grad_norm": 8.622068929893564, - "learning_rate": 3.0232945899247466e-06, - "loss": 1.0384, - "num_input_tokens_seen": 61547245, - "step": 2904 - }, - { - "epoch": 0.34930559730655925, - "flos": 16481619769440.0, - "grad_norm": 22.77035651328691, - "learning_rate": 3.022625223328476e-06, - "loss": 0.9992, - "num_input_tokens_seen": 61568705, - "step": 2905 - }, - { - "epoch": 0.34942584019719836, - "flos": 16243728115800.0, - "grad_norm": 2.375055714003948, - "learning_rate": 3.0219557016007723e-06, - "loss": 0.922, - "num_input_tokens_seen": 61588555, - "step": 2906 - }, - { - "epoch": 0.3495460830878374, - "flos": 17373078630240.0, - "grad_norm": 3.1764260701361406, - "learning_rate": 3.021286024843202e-06, - "loss": 0.9278, - "num_input_tokens_seen": 61606470, - "step": 2907 - }, - { - "epoch": 0.3496663259784765, - "flos": 50134127667480.0, - "grad_norm": 1.1140447842552355, - "learning_rate": 3.0206161931573526e-06, - "loss": 0.9353, - "num_input_tokens_seen": 61658740, - "step": 2908 - }, - { - "epoch": 0.34978656886911563, - "flos": 20439239862840.0, - "grad_norm": 4.892344797018607, - "learning_rate": 3.0199462066448388e-06, - "loss": 1.161, - "num_input_tokens_seen": 61680655, - "step": 2909 - }, - { - "epoch": 0.3499068117597547, - "flos": 15012988291680.0, - "grad_norm": 4.147881364632248, - "learning_rate": 3.019276065407296e-06, - "loss": 0.9103, - "num_input_tokens_seen": 61699495, - "step": 2910 - }, - { - "epoch": 0.3500270546503938, - "flos": 16191837660120.0, - "grad_norm": 5.055940876530565, - "learning_rate": 3.018605769546385e-06, - "loss": 1.0335, - "num_input_tokens_seen": 61719770, - "step": 2911 - }, - { - "epoch": 0.3501472975410329, - "flos": 16272126268440.0, - "grad_norm": 4.96297805751152, - "learning_rate": 3.017935319163788e-06, - "loss": 1.0259, - "num_input_tokens_seen": 61738450, - "step": 2912 - }, - { - "epoch": 0.35026754043167196, - "flos": 18106382537640.0, - "grad_norm": 2.5927463106219815, - "learning_rate": 3.017264714361213e-06, - "loss": 0.9339, - "num_input_tokens_seen": 61757820, - "step": 2913 - }, - { - "epoch": 0.3503877833223111, - "flos": 13885876071120.0, - "grad_norm": 3.0243287657995417, - "learning_rate": 3.016593955240389e-06, - "loss": 1.0442, - "num_input_tokens_seen": 61776230, - "step": 2914 - }, - { - "epoch": 0.3505080262129502, - "flos": 46589663331600.0, - "grad_norm": 0.8173203894548053, - "learning_rate": 3.015923041903071e-06, - "loss": 0.886, - "num_input_tokens_seen": 61842075, - "step": 2915 - }, - { - "epoch": 0.35062826910358924, - "flos": 20886287740320.0, - "grad_norm": 3.7244845500927233, - "learning_rate": 3.0152519744510347e-06, - "loss": 1.0579, - "num_input_tokens_seen": 61861595, - "step": 2916 - }, - { - "epoch": 0.35074851199422835, - "flos": 17057995185840.0, - "grad_norm": 2.8094706008820345, - "learning_rate": 3.014580752986081e-06, - "loss": 1.058, - "num_input_tokens_seen": 61880190, - "step": 2917 - }, - { - "epoch": 0.3508687548848674, - "flos": 10759085838240.0, - "grad_norm": 6.405384433236527, - "learning_rate": 3.0139093776100345e-06, - "loss": 1.0065, - 
"num_input_tokens_seen": 61896500, - "step": 2918 - }, - { - "epoch": 0.3509889977755065, - "flos": 15170897952600.0, - "grad_norm": 2.7445853419393305, - "learning_rate": 3.013237848424741e-06, - "loss": 0.9757, - "num_input_tokens_seen": 61915605, - "step": 2919 - }, - { - "epoch": 0.35110924066614563, - "flos": 13570731303600.0, - "grad_norm": 5.748342192124932, - "learning_rate": 3.012566165532072e-06, - "loss": 0.9808, - "num_input_tokens_seen": 61934115, - "step": 2920 - }, - { - "epoch": 0.3512294835567847, - "flos": 15615707536200.0, - "grad_norm": 3.701629465446655, - "learning_rate": 3.0118943290339207e-06, - "loss": 0.9768, - "num_input_tokens_seen": 61954045, - "step": 2921 - }, - { - "epoch": 0.3513497264474238, - "flos": 12622844076240.0, - "grad_norm": 2.820377507609736, - "learning_rate": 3.011222339032204e-06, - "loss": 0.8929, - "num_input_tokens_seen": 61971915, - "step": 2922 - }, - { - "epoch": 0.3514699693380629, - "flos": 19182248195280.0, - "grad_norm": 3.0896324731713483, - "learning_rate": 3.0105501956288626e-06, - "loss": 0.9076, - "num_input_tokens_seen": 61992105, - "step": 2923 - }, - { - "epoch": 0.35159021222870196, - "flos": 10790979408720.0, - "grad_norm": 3.475678818081435, - "learning_rate": 3.0098778989258602e-06, - "loss": 0.9524, - "num_input_tokens_seen": 62010435, - "step": 2924 - }, - { - "epoch": 0.35171045511934107, - "flos": 9867749623680.0, - "grad_norm": 3.3601308992410073, - "learning_rate": 3.009205449025183e-06, - "loss": 1.1006, - "num_input_tokens_seen": 62026350, - "step": 2925 - }, - { - "epoch": 0.3518306980099802, - "flos": 10083835360080.0, - "grad_norm": 3.2714090184884492, - "learning_rate": 3.008532846028842e-06, - "loss": 0.8473, - "num_input_tokens_seen": 62042830, - "step": 2926 - }, - { - "epoch": 0.35195094090061924, - "flos": 19260911740920.0, - "grad_norm": 3.784507294286834, - "learning_rate": 3.0078600900388694e-06, - "loss": 0.926, - "num_input_tokens_seen": 62062855, - "step": 2927 - }, - { - "epoch": 0.35207118379125835, - "flos": 18237703077960.0, - "grad_norm": 7.2969032303388595, - "learning_rate": 3.007187181157323e-06, - "loss": 0.9709, - "num_input_tokens_seen": 62082585, - "step": 2928 - }, - { - "epoch": 0.35219142668189746, - "flos": 12758211942480.0, - "grad_norm": 6.915823122850434, - "learning_rate": 3.006514119486282e-06, - "loss": 0.9081, - "num_input_tokens_seen": 62099135, - "step": 2929 - }, - { - "epoch": 0.3523116695725365, - "flos": 9900348410040.0, - "grad_norm": 4.826373822793231, - "learning_rate": 3.005840905127849e-06, - "loss": 0.9188, - "num_input_tokens_seen": 62115760, - "step": 2930 - }, - { - "epoch": 0.3524319124631756, - "flos": 15222696423600.0, - "grad_norm": 3.3696373614791217, - "learning_rate": 3.0051675381841516e-06, - "loss": 1.0899, - "num_input_tokens_seen": 62132790, - "step": 2931 - }, - { - "epoch": 0.3525521553538147, - "flos": 18736150826160.0, - "grad_norm": 3.1701688640876933, - "learning_rate": 3.0044940187573363e-06, - "loss": 0.9872, - "num_input_tokens_seen": 62153520, - "step": 2932 - }, - { - "epoch": 0.3526723982444538, - "flos": 15301267984560.0, - "grad_norm": 3.3380334777798883, - "learning_rate": 3.003820346949578e-06, - "loss": 0.8936, - "num_input_tokens_seen": 62171320, - "step": 2933 - }, - { - "epoch": 0.3527926411350929, - "flos": 16875581390400.0, - "grad_norm": 5.249396334864912, - "learning_rate": 3.003146522863071e-06, - "loss": 1.0154, - "num_input_tokens_seen": 62191925, - "step": 2934 - }, - { - "epoch": 0.35291288402573195, - "flos": 
21699021732360.0, - "grad_norm": 3.3280133868920236, - "learning_rate": 3.0024725466000345e-06, - "loss": 1.073, - "num_input_tokens_seen": 62211600, - "step": 2935 - }, - { - "epoch": 0.35303312691637107, - "flos": 16428778805400.0, - "grad_norm": 3.2804593541482494, - "learning_rate": 3.0017984182627087e-06, - "loss": 1.0078, - "num_input_tokens_seen": 62230645, - "step": 2936 - }, - { - "epoch": 0.3531533698070102, - "flos": 15511466701440.0, - "grad_norm": 4.672794519472884, - "learning_rate": 3.00112413795336e-06, - "loss": 1.0363, - "num_input_tokens_seen": 62250200, - "step": 2937 - }, - { - "epoch": 0.35327361269764923, - "flos": 11158597201560.0, - "grad_norm": 4.578063756122991, - "learning_rate": 3.000449705774275e-06, - "loss": 1.0344, - "num_input_tokens_seen": 62268160, - "step": 2938 - }, - { - "epoch": 0.35339385558828834, - "flos": 15694432404960.0, - "grad_norm": 3.108294121816223, - "learning_rate": 2.9997751218277654e-06, - "loss": 0.9359, - "num_input_tokens_seen": 62286035, - "step": 2939 - }, - { - "epoch": 0.35351409847892745, - "flos": 17184716492160.0, - "grad_norm": 3.463853283777072, - "learning_rate": 2.999100386216166e-06, - "loss": 0.9958, - "num_input_tokens_seen": 62304695, - "step": 2940 - }, - { - "epoch": 0.3536343413695665, - "flos": 19260421155960.0, - "grad_norm": 3.5106582483011284, - "learning_rate": 2.998425499041831e-06, - "loss": 0.9671, - "num_input_tokens_seen": 62324930, - "step": 2941 - }, - { - "epoch": 0.3537545842602056, - "flos": 47251049236560.0, - "grad_norm": 1.0124881131451637, - "learning_rate": 2.997750460407142e-06, - "loss": 0.8589, - "num_input_tokens_seen": 62386005, - "step": 2942 - }, - { - "epoch": 0.35387482715084473, - "flos": 13068052260120.0, - "grad_norm": 3.6265168798125607, - "learning_rate": 2.997075270414501e-06, - "loss": 0.9276, - "num_input_tokens_seen": 62402940, - "step": 2943 - }, - { - "epoch": 0.3539950700414838, - "flos": 46961635065960.0, - "grad_norm": 0.757070945431251, - "learning_rate": 2.9963999291663347e-06, - "loss": 0.8308, - "num_input_tokens_seen": 62468440, - "step": 2944 - }, - { - "epoch": 0.3541153129321229, - "flos": 14567013568800.0, - "grad_norm": 6.136240706834033, - "learning_rate": 2.9957244367650915e-06, - "loss": 0.9664, - "num_input_tokens_seen": 62484405, - "step": 2945 - }, - { - "epoch": 0.354235555822762, - "flos": 13885722763320.0, - "grad_norm": 5.177697168977036, - "learning_rate": 2.9950487933132425e-06, - "loss": 1.0621, - "num_input_tokens_seen": 62501540, - "step": 2946 - }, - { - "epoch": 0.35435579871340106, - "flos": 14750745811320.0, - "grad_norm": 4.282170774472691, - "learning_rate": 2.994372998913283e-06, - "loss": 0.9328, - "num_input_tokens_seen": 62519765, - "step": 2947 - }, - { - "epoch": 0.35447604160404017, - "flos": 17032264588920.0, - "grad_norm": 5.409695271707942, - "learning_rate": 2.99369705366773e-06, - "loss": 0.8527, - "num_input_tokens_seen": 62539730, - "step": 2948 - }, - { - "epoch": 0.3545962844946792, - "flos": 16660599470160.0, - "grad_norm": 3.354231902884056, - "learning_rate": 2.9930209576791244e-06, - "loss": 1.0359, - "num_input_tokens_seen": 62557925, - "step": 2949 - }, - { - "epoch": 0.35471652738531834, - "flos": 15689925155640.0, - "grad_norm": 3.9720388887389793, - "learning_rate": 2.9923447110500285e-06, - "loss": 0.8765, - "num_input_tokens_seen": 62576390, - "step": 2950 - }, - { - "epoch": 0.35483677027595745, - "flos": 19468228271160.0, - "grad_norm": 2.8134862230287982, - "learning_rate": 2.9916683138830295e-06, - "loss": 
0.9772, - "num_input_tokens_seen": 62596775, - "step": 2951 - }, - { - "epoch": 0.3549570131665965, - "flos": 9532270693800.0, - "grad_norm": 2.6139711572680087, - "learning_rate": 2.9909917662807353e-06, - "loss": 1.02, - "num_input_tokens_seen": 62614295, - "step": 2952 - }, - { - "epoch": 0.3550772560572356, - "flos": 14830114572840.0, - "grad_norm": 7.09355963228871, - "learning_rate": 2.9903150683457783e-06, - "loss": 0.9056, - "num_input_tokens_seen": 62632560, - "step": 2953 - }, - { - "epoch": 0.3551974989478747, - "flos": 14331666824640.0, - "grad_norm": 9.408786773792224, - "learning_rate": 2.9896382201808126e-06, - "loss": 0.8711, - "num_input_tokens_seen": 62649680, - "step": 2954 - }, - { - "epoch": 0.3553177418385138, - "flos": 14069148390240.0, - "grad_norm": 5.254564934604724, - "learning_rate": 2.988961221888516e-06, - "loss": 1.0226, - "num_input_tokens_seen": 62666075, - "step": 2955 - }, - { - "epoch": 0.3554379847291529, - "flos": 10475803979640.0, - "grad_norm": 7.167079677065974, - "learning_rate": 2.988284073571589e-06, - "loss": 1.0122, - "num_input_tokens_seen": 62681880, - "step": 2956 - }, - { - "epoch": 0.355558227619792, - "flos": 14540945694720.0, - "grad_norm": 7.061697585171983, - "learning_rate": 2.9876067753327528e-06, - "loss": 0.9393, - "num_input_tokens_seen": 62699330, - "step": 2957 - }, - { - "epoch": 0.35567847051043106, - "flos": 26885756587200.0, - "grad_norm": 3.246142372106897, - "learning_rate": 2.986929327274754e-06, - "loss": 1.0167, - "num_input_tokens_seen": 62719630, - "step": 2958 - }, - { - "epoch": 0.35579871340107017, - "flos": 19181941579680.0, - "grad_norm": 2.723213653207031, - "learning_rate": 2.9862517295003617e-06, - "loss": 1.0081, - "num_input_tokens_seen": 62739765, - "step": 2959 - }, - { - "epoch": 0.3559189562917093, - "flos": 20152401263280.0, - "grad_norm": 10.772050819478242, - "learning_rate": 2.9855739821123654e-06, - "loss": 0.9603, - "num_input_tokens_seen": 62761065, - "step": 2960 - }, - { - "epoch": 0.35603919918234833, - "flos": 18262483166520.0, - "grad_norm": 4.342963936575404, - "learning_rate": 2.98489608521358e-06, - "loss": 1.0441, - "num_input_tokens_seen": 62780725, - "step": 2961 - }, - { - "epoch": 0.35615944207298744, - "flos": 16348520858640.0, - "grad_norm": 5.1997508429477035, - "learning_rate": 2.9842180389068425e-06, - "loss": 1.0164, - "num_input_tokens_seen": 62797755, - "step": 2962 - }, - { - "epoch": 0.35627968496362655, - "flos": 48875168112000.0, - "grad_norm": 1.713057437025098, - "learning_rate": 2.98353984329501e-06, - "loss": 0.8516, - "num_input_tokens_seen": 62861820, - "step": 2963 - }, - { - "epoch": 0.3563999278542656, - "flos": 16091184227880.0, - "grad_norm": 2.7070055323228224, - "learning_rate": 2.982861498480965e-06, - "loss": 0.9338, - "num_input_tokens_seen": 62883920, - "step": 2964 - }, - { - "epoch": 0.3565201707449047, - "flos": 18469339773360.0, - "grad_norm": 3.4012269947601634, - "learning_rate": 2.9821830045676122e-06, - "loss": 1.0459, - "num_input_tokens_seen": 62903340, - "step": 2965 - }, - { - "epoch": 0.3566404136355438, - "flos": 20283476511120.0, - "grad_norm": 6.1842399254726965, - "learning_rate": 2.9815043616578793e-06, - "loss": 0.935, - "num_input_tokens_seen": 62923855, - "step": 2966 - }, - { - "epoch": 0.3567606565261829, - "flos": 27389324815920.0, - "grad_norm": 6.560168042705549, - "learning_rate": 2.9808255698547145e-06, - "loss": 1.0013, - "num_input_tokens_seen": 62946375, - "step": 2967 - }, - { - "epoch": 0.356880899416822, - "flos": 
15615830182440.0, - "grad_norm": 7.826769682917314, - "learning_rate": 2.9801466292610913e-06, - "loss": 1.0147, - "num_input_tokens_seen": 62965980, - "step": 2968 - }, - { - "epoch": 0.35700114230746105, - "flos": 13465601283600.0, - "grad_norm": 4.10223441296997, - "learning_rate": 2.979467539980003e-06, - "loss": 1.0266, - "num_input_tokens_seen": 62982490, - "step": 2969 - }, - { - "epoch": 0.35712138519810016, - "flos": 14016920657400.0, - "grad_norm": 3.4245973670586216, - "learning_rate": 2.978788302114468e-06, - "loss": 0.9889, - "num_input_tokens_seen": 62999325, - "step": 2970 - }, - { - "epoch": 0.35724162808873927, - "flos": 25103053496520.0, - "grad_norm": 5.5702561727407165, - "learning_rate": 2.9781089157675255e-06, - "loss": 1.0361, - "num_input_tokens_seen": 63017505, - "step": 2971 - }, - { - "epoch": 0.3573618709793783, - "flos": 18185628652920.0, - "grad_norm": 3.2790499623207188, - "learning_rate": 2.977429381042238e-06, - "loss": 1.1059, - "num_input_tokens_seen": 63037900, - "step": 2972 - }, - { - "epoch": 0.35748211387001744, - "flos": 20755365800280.0, - "grad_norm": 5.325517928317769, - "learning_rate": 2.9767496980416913e-06, - "loss": 1.1162, - "num_input_tokens_seen": 63056915, - "step": 2973 - }, - { - "epoch": 0.35760235676065655, - "flos": 9847078184160.0, - "grad_norm": 5.6476959121584205, - "learning_rate": 2.9760698668689914e-06, - "loss": 1.0353, - "num_input_tokens_seen": 63072860, - "step": 2974 - }, - { - "epoch": 0.3577225996512956, - "flos": 31453914622920.0, - "grad_norm": 5.450200474402904, - "learning_rate": 2.975389887627269e-06, - "loss": 0.9373, - "num_input_tokens_seen": 63095180, - "step": 2975 - }, - { - "epoch": 0.3578428425419347, - "flos": 12075694674600.0, - "grad_norm": 5.0007941569021845, - "learning_rate": 2.9747097604196764e-06, - "loss": 1.1212, - "num_input_tokens_seen": 63111545, - "step": 2976 - }, - { - "epoch": 0.3579630854325738, - "flos": 51336249159960.0, - "grad_norm": 0.6859797445162291, - "learning_rate": 2.9740294853493875e-06, - "loss": 0.8277, - "num_input_tokens_seen": 63182825, - "step": 2977 - }, - { - "epoch": 0.3580833283232129, - "flos": 17819145337800.0, - "grad_norm": 5.501737493555836, - "learning_rate": 2.9733490625196008e-06, - "loss": 0.9064, - "num_input_tokens_seen": 63202405, - "step": 2978 - }, - { - "epoch": 0.358203571213852, - "flos": 9846188998920.0, - "grad_norm": 25.133674604424513, - "learning_rate": 2.9726684920335353e-06, - "loss": 0.9631, - "num_input_tokens_seen": 63219990, - "step": 2979 - }, - { - "epoch": 0.35832381410449105, - "flos": 14409441185040.0, - "grad_norm": 4.224210694751428, - "learning_rate": 2.971987773994432e-06, - "loss": 1.0456, - "num_input_tokens_seen": 63235895, - "step": 2980 - }, - { - "epoch": 0.35844405699513016, - "flos": 12024110834520.0, - "grad_norm": 4.230065662063781, - "learning_rate": 2.9713069085055566e-06, - "loss": 1.0475, - "num_input_tokens_seen": 63253925, - "step": 2981 - }, - { - "epoch": 0.35856429988576927, - "flos": 16503364363560.0, - "grad_norm": 4.1276798213278925, - "learning_rate": 2.9706258956701958e-06, - "loss": 1.0179, - "num_input_tokens_seen": 63273635, - "step": 2982 - }, - { - "epoch": 0.3586845427764083, - "flos": 16372381100400.0, - "grad_norm": 6.070914495166044, - "learning_rate": 2.9699447355916575e-06, - "loss": 0.9884, - "num_input_tokens_seen": 63292165, - "step": 2983 - }, - { - "epoch": 0.35880478566704743, - "flos": 14199763714680.0, - "grad_norm": 3.783442216446286, - "learning_rate": 2.969263428373275e-06, - "loss": 
0.9622, - "num_input_tokens_seen": 63310235, - "step": 2984 - }, - { - "epoch": 0.35892502855768654, - "flos": 9663805865040.0, - "grad_norm": 3.788346370389843, - "learning_rate": 2.9685819741184007e-06, - "loss": 1.0006, - "num_input_tokens_seen": 63328395, - "step": 2985 - }, - { - "epoch": 0.3590452714483256, - "flos": 12836814165000.0, - "grad_norm": 5.039631372322432, - "learning_rate": 2.967900372930411e-06, - "loss": 0.903, - "num_input_tokens_seen": 63346625, - "step": 2986 - }, - { - "epoch": 0.3591655143389647, - "flos": 12574694330880.0, - "grad_norm": 11.704785951061055, - "learning_rate": 2.9672186249127046e-06, - "loss": 1.0082, - "num_input_tokens_seen": 63365810, - "step": 2987 - }, - { - "epoch": 0.3592857572296038, - "flos": 17946265244400.0, - "grad_norm": 5.2978016090674895, - "learning_rate": 2.9665367301687014e-06, - "loss": 1.0088, - "num_input_tokens_seen": 63383775, - "step": 2988 - }, - { - "epoch": 0.3594060001202429, - "flos": 20936062548360.0, - "grad_norm": 3.5956011441183864, - "learning_rate": 2.965854688801845e-06, - "loss": 0.9831, - "num_input_tokens_seen": 63405555, - "step": 2989 - }, - { - "epoch": 0.359526243010882, - "flos": 12073732334760.0, - "grad_norm": 4.011583721559121, - "learning_rate": 2.9651725009156005e-06, - "loss": 0.9851, - "num_input_tokens_seen": 63423020, - "step": 2990 - }, - { - "epoch": 0.3596464859015211, - "flos": 16322912907960.0, - "grad_norm": 23.71115758752881, - "learning_rate": 2.964490166613454e-06, - "loss": 0.9681, - "num_input_tokens_seen": 63442665, - "step": 2991 - }, - { - "epoch": 0.35976672879216015, - "flos": 39162844562880.0, - "grad_norm": 0.8313276154271269, - "learning_rate": 2.963807685998917e-06, - "loss": 0.8311, - "num_input_tokens_seen": 63498250, - "step": 2992 - }, - { - "epoch": 0.35988697168279926, - "flos": 30821172163080.0, - "grad_norm": 2.790697062246492, - "learning_rate": 2.9631250591755196e-06, - "loss": 1.0073, - "num_input_tokens_seen": 63520685, - "step": 2993 - }, - { - "epoch": 0.36000721457343837, - "flos": 25580032604640.0, - "grad_norm": 8.628394744204533, - "learning_rate": 2.962442286246817e-06, - "loss": 0.8086, - "num_input_tokens_seen": 63543235, - "step": 2994 - }, - { - "epoch": 0.3601274574640774, - "flos": 12963290178840.0, - "grad_norm": 3.8727337763084937, - "learning_rate": 2.9617593673163853e-06, - "loss": 0.9279, - "num_input_tokens_seen": 63561775, - "step": 2995 - }, - { - "epoch": 0.36024770035471654, - "flos": 9401348753760.0, - "grad_norm": 8.952017138044573, - "learning_rate": 2.9610763024878216e-06, - "loss": 0.9919, - "num_input_tokens_seen": 63577000, - "step": 2996 - }, - { - "epoch": 0.3603679432453556, - "flos": 14384109188400.0, - "grad_norm": 5.632635910080841, - "learning_rate": 2.960393091864747e-06, - "loss": 1.1406, - "num_input_tokens_seen": 63595100, - "step": 2997 - }, - { - "epoch": 0.3604881861359947, - "flos": 15953118144360.0, - "grad_norm": 8.240287380947674, - "learning_rate": 2.959709735550804e-06, - "loss": 0.9669, - "num_input_tokens_seen": 63614415, - "step": 2998 - }, - { - "epoch": 0.3606084290266338, - "flos": 15668487177120.0, - "grad_norm": 10.924411113589187, - "learning_rate": 2.9590262336496575e-06, - "loss": 0.9806, - "num_input_tokens_seen": 63633865, - "step": 2999 - }, - { - "epoch": 0.36072867191727287, - "flos": 11053620489360.0, - "grad_norm": 5.070006656679979, - "learning_rate": 2.9583425862649936e-06, - "loss": 1.0706, - "num_input_tokens_seen": 63651720, - "step": 3000 - }, - { - "epoch": 0.360848914807912, - "flos": 
13959235166880.0, - "grad_norm": 8.55132684060569, - "learning_rate": 2.9576587935005215e-06, - "loss": 0.9683, - "num_input_tokens_seen": 63669520, - "step": 3001 - }, - { - "epoch": 0.3609691576985511, - "flos": 13385343336840.0, - "grad_norm": 4.033503885577737, - "learning_rate": 2.9569748554599713e-06, - "loss": 0.9389, - "num_input_tokens_seen": 63684850, - "step": 3002 - }, - { - "epoch": 0.36108940058919015, - "flos": 30163251014400.0, - "grad_norm": 7.324307551545002, - "learning_rate": 2.956290772247097e-06, - "loss": 0.9554, - "num_input_tokens_seen": 63703245, - "step": 3003 - }, - { - "epoch": 0.36120964347982926, - "flos": 16481221169160.0, - "grad_norm": 12.465444791212764, - "learning_rate": 2.9556065439656724e-06, - "loss": 0.9642, - "num_input_tokens_seen": 63722015, - "step": 3004 - }, - { - "epoch": 0.36132988637046837, - "flos": 12835924979760.0, - "grad_norm": 5.250459741716877, - "learning_rate": 2.9549221707194952e-06, - "loss": 1.0436, - "num_input_tokens_seen": 63740585, - "step": 3005 - }, - { - "epoch": 0.3614501292611074, - "flos": 19806466741440.0, - "grad_norm": 5.739949606431662, - "learning_rate": 2.954237652612384e-06, - "loss": 0.9648, - "num_input_tokens_seen": 63759355, - "step": 3006 - }, - { - "epoch": 0.36157037215174653, - "flos": 16085389193040.0, - "grad_norm": 5.230910153669876, - "learning_rate": 2.9535529897481796e-06, - "loss": 1.0728, - "num_input_tokens_seen": 63776620, - "step": 3007 - }, - { - "epoch": 0.36169061504238564, - "flos": 8874012267960.0, - "grad_norm": 6.803567051752011, - "learning_rate": 2.9528681822307446e-06, - "loss": 0.9826, - "num_input_tokens_seen": 63793190, - "step": 3008 - }, - { - "epoch": 0.3618108579330247, - "flos": 18995817735480.0, - "grad_norm": 11.467555844038118, - "learning_rate": 2.952183230163964e-06, - "loss": 1.0467, - "num_input_tokens_seen": 63812485, - "step": 3009 - }, - { - "epoch": 0.3619311008236638, - "flos": 16216219148400.0, - "grad_norm": 7.529919265375055, - "learning_rate": 2.9514981336517448e-06, - "loss": 0.9634, - "num_input_tokens_seen": 63831975, - "step": 3010 - }, - { - "epoch": 0.36205134371430286, - "flos": 18446154085920.0, - "grad_norm": 3.659677187123563, - "learning_rate": 2.950812892798015e-06, - "loss": 1.0331, - "num_input_tokens_seen": 63852590, - "step": 3011 - }, - { - "epoch": 0.362171586604942, - "flos": 18604401024000.0, - "grad_norm": 3.575123156580315, - "learning_rate": 2.9501275077067256e-06, - "loss": 1.0946, - "num_input_tokens_seen": 63872930, - "step": 3012 - }, - { - "epoch": 0.3622918294955811, - "flos": 19994890202640.0, - "grad_norm": 4.1027788375018615, - "learning_rate": 2.949441978481848e-06, - "loss": 1.1148, - "num_input_tokens_seen": 63893550, - "step": 3013 - }, - { - "epoch": 0.36241207238622014, - "flos": 14068443174360.0, - "grad_norm": 4.8734210465065715, - "learning_rate": 2.9487563052273778e-06, - "loss": 1.026, - "num_input_tokens_seen": 63910030, - "step": 3014 - }, - { - "epoch": 0.36253231527685925, - "flos": 15196199287680.0, - "grad_norm": 4.654990621161923, - "learning_rate": 2.94807048804733e-06, - "loss": 1.0893, - "num_input_tokens_seen": 63929370, - "step": 3015 - }, - { - "epoch": 0.36265255816749836, - "flos": 13014720711120.0, - "grad_norm": 4.75703812233912, - "learning_rate": 2.9473845270457434e-06, - "loss": 1.1191, - "num_input_tokens_seen": 63945905, - "step": 3016 - }, - { - "epoch": 0.3627728010581374, - "flos": 13228997415480.0, - "grad_norm": 5.7622351463504105, - "learning_rate": 2.946698422326677e-06, - "loss": 0.9122, 
- "num_input_tokens_seen": 63963085, - "step": 3017 - }, - { - "epoch": 0.36289304394877653, - "flos": 19599303519000.0, - "grad_norm": 4.258265144016183, - "learning_rate": 2.946012173994213e-06, - "loss": 1.0232, - "num_input_tokens_seen": 63982590, - "step": 3018 - }, - { - "epoch": 0.36301328683941564, - "flos": 24637020565320.0, - "grad_norm": 3.3973962350568585, - "learning_rate": 2.945325782152454e-06, - "loss": 0.9117, - "num_input_tokens_seen": 64005345, - "step": 3019 - }, - { - "epoch": 0.3631335297300547, - "flos": 13492129081080.0, - "grad_norm": 6.03203992661434, - "learning_rate": 2.9446392469055257e-06, - "loss": 1.0184, - "num_input_tokens_seen": 64023100, - "step": 3020 - }, - { - "epoch": 0.3632537726206938, - "flos": 13859225627400.0, - "grad_norm": 3.5760565363619916, - "learning_rate": 2.9439525683575745e-06, - "loss": 1.0243, - "num_input_tokens_seen": 64041740, - "step": 3021 - }, - { - "epoch": 0.3633740155113329, - "flos": 14960055342960.0, - "grad_norm": 4.173466411049126, - "learning_rate": 2.9432657466127694e-06, - "loss": 0.9785, - "num_input_tokens_seen": 64061030, - "step": 3022 - }, - { - "epoch": 0.36349425840197197, - "flos": 14406252382800.0, - "grad_norm": 2.9099032503280124, - "learning_rate": 2.9425787817753007e-06, - "loss": 0.9937, - "num_input_tokens_seen": 64079410, - "step": 3023 - }, - { - "epoch": 0.3636145012926111, - "flos": 21175456618440.0, - "grad_norm": 2.4844118511417848, - "learning_rate": 2.94189167394938e-06, - "loss": 0.931, - "num_input_tokens_seen": 64101565, - "step": 3024 - }, - { - "epoch": 0.3637347441832502, - "flos": 15220887391560.0, - "grad_norm": 3.313256663145997, - "learning_rate": 2.941204423239241e-06, - "loss": 1.0384, - "num_input_tokens_seen": 64120160, - "step": 3025 - }, - { - "epoch": 0.36385498707388925, - "flos": 21044013431880.0, - "grad_norm": 2.972747199813914, - "learning_rate": 2.9405170297491395e-06, - "loss": 0.9861, - "num_input_tokens_seen": 64139875, - "step": 3026 - }, - { - "epoch": 0.36397522996452836, - "flos": 15799501101840.0, - "grad_norm": 3.5567992257248227, - "learning_rate": 2.939829493583353e-06, - "loss": 1.0233, - "num_input_tokens_seen": 64156240, - "step": 3027 - }, - { - "epoch": 0.3640954728551674, - "flos": 15274770848640.0, - "grad_norm": 4.370909604309264, - "learning_rate": 2.939141814846179e-06, - "loss": 1.0452, - "num_input_tokens_seen": 64173375, - "step": 3028 - }, - { - "epoch": 0.3642157157458065, - "flos": 12548473149000.0, - "grad_norm": 2.90533514902946, - "learning_rate": 2.938453993641938e-06, - "loss": 1.0527, - "num_input_tokens_seen": 64191470, - "step": 3029 - }, - { - "epoch": 0.36433595863644563, - "flos": 12495724169640.0, - "grad_norm": 6.3340822412844835, - "learning_rate": 2.937766030074973e-06, - "loss": 0.9187, - "num_input_tokens_seen": 64208445, - "step": 3030 - }, - { - "epoch": 0.3644562015270847, - "flos": 19103002080000.0, - "grad_norm": 2.8395277264627277, - "learning_rate": 2.937077924249646e-06, - "loss": 1.0386, - "num_input_tokens_seen": 64230755, - "step": 3031 - }, - { - "epoch": 0.3645764444177238, - "flos": 10083222128880.0, - "grad_norm": 4.103178625054186, - "learning_rate": 2.9363896762703443e-06, - "loss": 0.9736, - "num_input_tokens_seen": 64247540, - "step": 3032 - }, - { - "epoch": 0.3646966873083629, - "flos": 14671806311640.0, - "grad_norm": 4.085947302957886, - "learning_rate": 2.9357012862414725e-06, - "loss": 1.068, - "num_input_tokens_seen": 64266620, - "step": 3033 - }, - { - "epoch": 0.36481693019900197, - "flos": 
19785366040080.0, - "grad_norm": 3.6732019502252626, - "learning_rate": 2.9350127542674593e-06, - "loss": 0.9427, - "num_input_tokens_seen": 64288550, - "step": 3034 - }, - { - "epoch": 0.3649371730896411, - "flos": 13984689809760.0, - "grad_norm": 4.969831049175478, - "learning_rate": 2.934324080452755e-06, - "loss": 0.9929, - "num_input_tokens_seen": 64306060, - "step": 3035 - }, - { - "epoch": 0.3650574159802802, - "flos": 17605727157120.0, - "grad_norm": 3.135468002017371, - "learning_rate": 2.9336352649018307e-06, - "loss": 1.0069, - "num_input_tokens_seen": 64325850, - "step": 3036 - }, - { - "epoch": 0.36517765887091924, - "flos": 23429619736440.0, - "grad_norm": 3.499293119709534, - "learning_rate": 2.9329463077191783e-06, - "loss": 0.9329, - "num_input_tokens_seen": 64348945, - "step": 3037 - }, - { - "epoch": 0.36529790176155835, - "flos": 14278580568120.0, - "grad_norm": 5.069113849793469, - "learning_rate": 2.9322572090093135e-06, - "loss": 0.8666, - "num_input_tokens_seen": 64367370, - "step": 3038 - }, - { - "epoch": 0.36541814465219746, - "flos": 12495754831200.0, - "grad_norm": 8.968468425730244, - "learning_rate": 2.9315679688767713e-06, - "loss": 0.9682, - "num_input_tokens_seen": 64385100, - "step": 3039 - }, - { - "epoch": 0.3655383875428365, - "flos": 16114124622840.0, - "grad_norm": 3.5159004612135454, - "learning_rate": 2.9308785874261085e-06, - "loss": 0.8879, - "num_input_tokens_seen": 64405010, - "step": 3040 - }, - { - "epoch": 0.36565863043347563, - "flos": 15615952828680.0, - "grad_norm": 3.5664003670885216, - "learning_rate": 2.9301890647619045e-06, - "loss": 1.0303, - "num_input_tokens_seen": 64424025, - "step": 3041 - }, - { - "epoch": 0.36577887332411474, - "flos": 17661726261840.0, - "grad_norm": 3.7251312898374307, - "learning_rate": 2.929499400988759e-06, - "loss": 1.008, - "num_input_tokens_seen": 64444905, - "step": 3042 - }, - { - "epoch": 0.3658991162147538, - "flos": 20152278617040.0, - "grad_norm": 5.351170613217513, - "learning_rate": 2.9288095962112927e-06, - "loss": 0.8749, - "num_input_tokens_seen": 64465330, - "step": 3043 - }, - { - "epoch": 0.3660193591053929, - "flos": 12600670220280.0, - "grad_norm": 4.753273689106257, - "learning_rate": 2.9281196505341503e-06, - "loss": 1.0677, - "num_input_tokens_seen": 64482220, - "step": 3044 - }, - { - "epoch": 0.36613960199603196, - "flos": 7251733086120.0, - "grad_norm": 4.768148785810687, - "learning_rate": 2.9274295640619946e-06, - "loss": 1.0197, - "num_input_tokens_seen": 64499070, - "step": 3045 - }, - { - "epoch": 0.36625984488667107, - "flos": 14016491395560.0, - "grad_norm": 4.828742817975341, - "learning_rate": 2.9267393368995103e-06, - "loss": 1.0113, - "num_input_tokens_seen": 64518020, - "step": 3046 - }, - { - "epoch": 0.3663800877773102, - "flos": 12520442935080.0, - "grad_norm": 4.0857743473192345, - "learning_rate": 2.926048969151407e-06, - "loss": 0.9569, - "num_input_tokens_seen": 64535025, - "step": 3047 - }, - { - "epoch": 0.36650033066794924, - "flos": 14410422354960.0, - "grad_norm": 4.7217989538647, - "learning_rate": 2.92535846092241e-06, - "loss": 0.8976, - "num_input_tokens_seen": 64553760, - "step": 3048 - }, - { - "epoch": 0.36662057355858835, - "flos": 17581406991960.0, - "grad_norm": 4.279569198460759, - "learning_rate": 2.9246678123172704e-06, - "loss": 1.0478, - "num_input_tokens_seen": 64573570, - "step": 3049 - }, - { - "epoch": 0.36674081644922746, - "flos": 8719536701760.0, - "grad_norm": 6.532079032592694, - "learning_rate": 2.9239770234407596e-06, - "loss": 
0.9484, - "num_input_tokens_seen": 64591595, - "step": 3050 - }, - { - "epoch": 0.3668610593398665, - "flos": 14986981740720.0, - "grad_norm": 3.826098594723216, - "learning_rate": 2.9232860943976686e-06, - "loss": 0.9042, - "num_input_tokens_seen": 64612050, - "step": 3051 - }, - { - "epoch": 0.3669813022305056, - "flos": 19048658699520.0, - "grad_norm": 4.438956008380342, - "learning_rate": 2.9225950252928115e-06, - "loss": 1.0675, - "num_input_tokens_seen": 64632620, - "step": 3052 - }, - { - "epoch": 0.36710154512114473, - "flos": 13545062029800.0, - "grad_norm": 5.189783352131901, - "learning_rate": 2.9219038162310217e-06, - "loss": 1.027, - "num_input_tokens_seen": 64650540, - "step": 3053 - }, - { - "epoch": 0.3672217880117838, - "flos": 14775495238320.0, - "grad_norm": 8.939333427549633, - "learning_rate": 2.921212467317157e-06, - "loss": 1.0447, - "num_input_tokens_seen": 64669705, - "step": 3054 - }, - { - "epoch": 0.3673420309024229, - "flos": 9585786212160.0, - "grad_norm": 3.939161283229643, - "learning_rate": 2.920520978656093e-06, - "loss": 1.0264, - "num_input_tokens_seen": 64686390, - "step": 3055 - }, - { - "epoch": 0.367462273793062, - "flos": 20650879673040.0, - "grad_norm": 3.7175221700206076, - "learning_rate": 2.919829350352729e-06, - "loss": 0.9887, - "num_input_tokens_seen": 64707715, - "step": 3056 - }, - { - "epoch": 0.36758251668370107, - "flos": 44841220299600.0, - "grad_norm": 0.7534784476925916, - "learning_rate": 2.919137582511983e-06, - "loss": 0.8459, - "num_input_tokens_seen": 64763875, - "step": 3057 - }, - { - "epoch": 0.3677027595743402, - "flos": 8955741969600.0, - "grad_norm": 4.304744879310753, - "learning_rate": 2.918445675238797e-06, - "loss": 0.852, - "num_input_tokens_seen": 64780520, - "step": 3058 - }, - { - "epoch": 0.36782300246497923, - "flos": 17819084014680.0, - "grad_norm": 3.6258593888874673, - "learning_rate": 2.917753628638132e-06, - "loss": 0.9091, - "num_input_tokens_seen": 64800545, - "step": 3059 - }, - { - "epoch": 0.36794324535561834, - "flos": 12337446570000.0, - "grad_norm": 5.973498308140173, - "learning_rate": 2.9170614428149716e-06, - "loss": 0.9193, - "num_input_tokens_seen": 64818600, - "step": 3060 - }, - { - "epoch": 0.36806348824625745, - "flos": 17129790542040.0, - "grad_norm": 6.3579111874867165, - "learning_rate": 2.9163691178743195e-06, - "loss": 1.0836, - "num_input_tokens_seen": 64836970, - "step": 3061 - }, - { - "epoch": 0.3681837311368965, - "flos": 14567197538160.0, - "grad_norm": 7.4456739628121005, - "learning_rate": 2.9156766539212006e-06, - "loss": 1.0064, - "num_input_tokens_seen": 64854335, - "step": 3062 - }, - { - "epoch": 0.3683039740275356, - "flos": 15245667480120.0, - "grad_norm": 6.639725639925668, - "learning_rate": 2.9149840510606614e-06, - "loss": 0.941, - "num_input_tokens_seen": 64872710, - "step": 3063 - }, - { - "epoch": 0.36842421691817473, - "flos": 50402655767640.0, - "grad_norm": 1.0114115244873596, - "learning_rate": 2.914291309397769e-06, - "loss": 0.913, - "num_input_tokens_seen": 64929900, - "step": 3064 - }, - { - "epoch": 0.3685444598088138, - "flos": 16586351189160.0, - "grad_norm": 9.742529721585486, - "learning_rate": 2.9135984290376117e-06, - "loss": 1.0069, - "num_input_tokens_seen": 64948485, - "step": 3065 - }, - { - "epoch": 0.3686647026994529, - "flos": 16397927727960.0, - "grad_norm": 6.7905586446582555, - "learning_rate": 2.9129054100853e-06, - "loss": 1.0629, - "num_input_tokens_seen": 64967045, - "step": 3066 - }, - { - "epoch": 0.368784945590092, - "flos": 
17871832994040.0, - "grad_norm": 3.9221412598704206, - "learning_rate": 2.912212252645963e-06, - "loss": 0.9792, - "num_input_tokens_seen": 64989350, - "step": 3067 - }, - { - "epoch": 0.36890518848073106, - "flos": 13072651494120.0, - "grad_norm": 5.360965592820229, - "learning_rate": 2.9115189568247523e-06, - "loss": 0.9806, - "num_input_tokens_seen": 65006630, - "step": 3068 - }, - { - "epoch": 0.36902543137137017, - "flos": 11577890819160.0, - "grad_norm": 4.9158920496859, - "learning_rate": 2.910825522726841e-06, - "loss": 1.1342, - "num_input_tokens_seen": 65023875, - "step": 3069 - }, - { - "epoch": 0.3691456742620093, - "flos": 8640689186760.0, - "grad_norm": 4.483037130977682, - "learning_rate": 2.9101319504574215e-06, - "loss": 0.9965, - "num_input_tokens_seen": 65040035, - "step": 3070 - }, - { - "epoch": 0.36926591715264834, - "flos": 12601406097720.0, - "grad_norm": 3.2556653162304863, - "learning_rate": 2.909438240121709e-06, - "loss": 0.9859, - "num_input_tokens_seen": 65060030, - "step": 3071 - }, - { - "epoch": 0.36938616004328745, - "flos": 20622880120680.0, - "grad_norm": 4.02681944104662, - "learning_rate": 2.908744391824939e-06, - "loss": 0.9308, - "num_input_tokens_seen": 65080770, - "step": 3072 - }, - { - "epoch": 0.36950640293392656, - "flos": 20807869487160.0, - "grad_norm": 13.56868029997788, - "learning_rate": 2.908050405672367e-06, - "loss": 1.0042, - "num_input_tokens_seen": 65100035, - "step": 3073 - }, - { - "epoch": 0.3696266458245656, - "flos": 17661848908080.0, - "grad_norm": 3.4651518709442586, - "learning_rate": 2.9073562817692703e-06, - "loss": 1.0135, - "num_input_tokens_seen": 65118440, - "step": 3074 - }, - { - "epoch": 0.3697468887152047, - "flos": 42862913394600.0, - "grad_norm": 0.7946613456571398, - "learning_rate": 2.9066620202209468e-06, - "loss": 0.8405, - "num_input_tokens_seen": 65180650, - "step": 3075 - }, - { - "epoch": 0.3698671316058438, - "flos": 18602683976640.0, - "grad_norm": 3.3001344306614255, - "learning_rate": 2.905967621132716e-06, - "loss": 1.0133, - "num_input_tokens_seen": 65197980, - "step": 3076 - }, - { - "epoch": 0.3699873744964829, - "flos": 17503326015960.0, - "grad_norm": 3.3905752151982993, - "learning_rate": 2.9052730846099172e-06, - "loss": 0.9671, - "num_input_tokens_seen": 65219045, - "step": 3077 - }, - { - "epoch": 0.370107617387122, - "flos": 46454878035000.0, - "grad_norm": 0.8803265772443154, - "learning_rate": 2.9045784107579123e-06, - "loss": 0.8794, - "num_input_tokens_seen": 65278870, - "step": 3078 - }, - { - "epoch": 0.37022786027776106, - "flos": 11289948403440.0, - "grad_norm": 3.22101313462136, - "learning_rate": 2.9038835996820807e-06, - "loss": 0.9039, - "num_input_tokens_seen": 65296200, - "step": 3079 - }, - { - "epoch": 0.37034810316840017, - "flos": 13147206390720.0, - "grad_norm": 3.4165327619659123, - "learning_rate": 2.903188651487826e-06, - "loss": 1.0163, - "num_input_tokens_seen": 65314475, - "step": 3080 - }, - { - "epoch": 0.3704683460590393, - "flos": 12626799417480.0, - "grad_norm": 3.4339117569069293, - "learning_rate": 2.902493566280571e-06, - "loss": 1.0906, - "num_input_tokens_seen": 65332300, - "step": 3081 - }, - { - "epoch": 0.37058858894967833, - "flos": 9976037784360.0, - "grad_norm": 5.583128493617634, - "learning_rate": 2.9017983441657595e-06, - "loss": 1.0344, - "num_input_tokens_seen": 65349350, - "step": 3082 - }, - { - "epoch": 0.37070883184031744, - "flos": 9847354138200.0, - "grad_norm": 5.428671850265157, - "learning_rate": 2.9011029852488564e-06, - "loss": 0.9845, 
- "num_input_tokens_seen": 65366305, - "step": 3083 - }, - { - "epoch": 0.37082907473095655, - "flos": 37418571503040.0, - "grad_norm": 1.004806827085682, - "learning_rate": 2.9004074896353465e-06, - "loss": 0.9077, - "num_input_tokens_seen": 65420025, - "step": 3084 - }, - { - "epoch": 0.3709493176215956, - "flos": 11316108262200.0, - "grad_norm": 9.80778516492427, - "learning_rate": 2.8997118574307362e-06, - "loss": 1.0344, - "num_input_tokens_seen": 65436700, - "step": 3085 - }, - { - "epoch": 0.3710695605122347, - "flos": 14881974366960.0, - "grad_norm": 3.358777370078721, - "learning_rate": 2.899016088740553e-06, - "loss": 0.9738, - "num_input_tokens_seen": 65454530, - "step": 3086 - }, - { - "epoch": 0.37118980340287383, - "flos": 10135572507960.0, - "grad_norm": 4.153160380951155, - "learning_rate": 2.898320183670344e-06, - "loss": 1.0132, - "num_input_tokens_seen": 65471665, - "step": 3087 - }, - { - "epoch": 0.3713100462935129, - "flos": 18421864582320.0, - "grad_norm": 3.0923498819438913, - "learning_rate": 2.8976241423256767e-06, - "loss": 1.1094, - "num_input_tokens_seen": 65491480, - "step": 3088 - }, - { - "epoch": 0.371430289184152, - "flos": 21751893357960.0, - "grad_norm": 3.321701369677721, - "learning_rate": 2.896927964812142e-06, - "loss": 0.9077, - "num_input_tokens_seen": 65511765, - "step": 3089 - }, - { - "epoch": 0.37155053207479105, - "flos": 11132161388760.0, - "grad_norm": 7.732547675157521, - "learning_rate": 2.8962316512353465e-06, - "loss": 0.9593, - "num_input_tokens_seen": 65529030, - "step": 3090 - }, - { - "epoch": 0.37167077496543016, - "flos": 16638548260440.0, - "grad_norm": 2.8184822039892503, - "learning_rate": 2.8955352017009233e-06, - "loss": 0.9776, - "num_input_tokens_seen": 65547995, - "step": 3091 - }, - { - "epoch": 0.3717910178560693, - "flos": 15692929988520.0, - "grad_norm": 5.5066954000676285, - "learning_rate": 2.8948386163145212e-06, - "loss": 0.9926, - "num_input_tokens_seen": 65566925, - "step": 3092 - }, - { - "epoch": 0.3719112607467083, - "flos": 19179304685520.0, - "grad_norm": 4.017495992263286, - "learning_rate": 2.8941418951818135e-06, - "loss": 1.0136, - "num_input_tokens_seen": 65586205, - "step": 3093 - }, - { - "epoch": 0.37203150363734744, - "flos": 8562332256720.0, - "grad_norm": 4.0503120113796625, - "learning_rate": 2.8934450384084903e-06, - "loss": 0.9358, - "num_input_tokens_seen": 65603440, - "step": 3094 - }, - { - "epoch": 0.37215174652798655, - "flos": 16849728147240.0, - "grad_norm": 4.295392144700855, - "learning_rate": 2.8927480461002653e-06, - "loss": 0.9357, - "num_input_tokens_seen": 65623130, - "step": 3095 - }, - { - "epoch": 0.3722719894186256, - "flos": 12673140130800.0, - "grad_norm": 5.573928408422706, - "learning_rate": 2.892050918362872e-06, - "loss": 1.0536, - "num_input_tokens_seen": 65637905, - "step": 3096 - }, - { - "epoch": 0.3723922323092647, - "flos": 44680183159560.0, - "grad_norm": 0.9066409550263126, - "learning_rate": 2.8913536553020626e-06, - "loss": 0.838, - "num_input_tokens_seen": 65691680, - "step": 3097 - }, - { - "epoch": 0.3725124751999038, - "flos": 16376796365040.0, - "grad_norm": 5.092327644828557, - "learning_rate": 2.8906562570236137e-06, - "loss": 1.0804, - "num_input_tokens_seen": 65709310, - "step": 3098 - }, - { - "epoch": 0.3726327180905429, - "flos": 14853361583400.0, - "grad_norm": 3.023995596857246, - "learning_rate": 2.889958723633318e-06, - "loss": 0.9873, - "num_input_tokens_seen": 65727970, - "step": 3099 - }, - { - "epoch": 0.372752960981182, - "flos": 
21804550352640.0, - "grad_norm": 3.096240341793639, - "learning_rate": 2.889261055236992e-06, - "loss": 0.9596, - "num_input_tokens_seen": 65749905, - "step": 3100 - }, - { - "epoch": 0.3728732038718211, - "flos": 17869134776760.0, - "grad_norm": 3.18111213430315, - "learning_rate": 2.8885632519404704e-06, - "loss": 1.0527, - "num_input_tokens_seen": 65769895, - "step": 3101 - }, - { - "epoch": 0.37299344676246016, - "flos": 18002693610960.0, - "grad_norm": 10.851242950546142, - "learning_rate": 2.8878653138496107e-06, - "loss": 0.9807, - "num_input_tokens_seen": 65790110, - "step": 3102 - }, - { - "epoch": 0.37311368965309927, - "flos": 16953754351080.0, - "grad_norm": 7.383093320965406, - "learning_rate": 2.8871672410702878e-06, - "loss": 0.9692, - "num_input_tokens_seen": 65807190, - "step": 3103 - }, - { - "epoch": 0.3732339325437384, - "flos": 18369023618280.0, - "grad_norm": 6.8400230536713735, - "learning_rate": 2.8864690337084008e-06, - "loss": 1.0437, - "num_input_tokens_seen": 65826185, - "step": 3104 - }, - { - "epoch": 0.37335417543437743, - "flos": 18653317308360.0, - "grad_norm": 3.9558418195938962, - "learning_rate": 2.885770691869866e-06, - "loss": 1.0049, - "num_input_tokens_seen": 65846785, - "step": 3105 - }, - { - "epoch": 0.37347441832501654, - "flos": 17083940413680.0, - "grad_norm": 3.7384445170721157, - "learning_rate": 2.8850722156606207e-06, - "loss": 0.9587, - "num_input_tokens_seen": 65864895, - "step": 3106 - }, - { - "epoch": 0.3735946612156556, - "flos": 13986682811160.0, - "grad_norm": 4.770846148922073, - "learning_rate": 2.8843736051866252e-06, - "loss": 0.8882, - "num_input_tokens_seen": 65883540, - "step": 3107 - }, - { - "epoch": 0.3737149041062947, - "flos": 16376673718800.0, - "grad_norm": 6.850423267959091, - "learning_rate": 2.8836748605538557e-06, - "loss": 0.9238, - "num_input_tokens_seen": 65904900, - "step": 3108 - }, - { - "epoch": 0.3738351469969338, - "flos": 24740985446040.0, - "grad_norm": 7.328641373064997, - "learning_rate": 2.882975981868313e-06, - "loss": 0.8448, - "num_input_tokens_seen": 65925005, - "step": 3109 - }, - { - "epoch": 0.3739553898875729, - "flos": 31086879399720.0, - "grad_norm": 5.066177727715327, - "learning_rate": 2.882276969236016e-06, - "loss": 0.8949, - "num_input_tokens_seen": 65946085, - "step": 3110 - }, - { - "epoch": 0.374075632778212, - "flos": 9057775172040.0, - "grad_norm": 4.596873979289592, - "learning_rate": 2.881577822763005e-06, - "loss": 0.9665, - "num_input_tokens_seen": 65963755, - "step": 3111 - }, - { - "epoch": 0.3741958756688511, - "flos": 18520525013160.0, - "grad_norm": 2.8104076655879755, - "learning_rate": 2.880878542555338e-06, - "loss": 1.101, - "num_input_tokens_seen": 65981240, - "step": 3112 - }, - { - "epoch": 0.37431611855949015, - "flos": 15222635100480.0, - "grad_norm": 5.677722281333869, - "learning_rate": 2.8801791287190976e-06, - "loss": 1.034, - "num_input_tokens_seen": 65998955, - "step": 3113 - }, - { - "epoch": 0.37443636145012926, - "flos": 17215720877400.0, - "grad_norm": 4.179366910529291, - "learning_rate": 2.8794795813603817e-06, - "loss": 1.0654, - "num_input_tokens_seen": 66014140, - "step": 3114 - }, - { - "epoch": 0.3745566043407684, - "flos": 10870992063000.0, - "grad_norm": 4.33991802437004, - "learning_rate": 2.878779900585314e-06, - "loss": 1.0359, - "num_input_tokens_seen": 66031700, - "step": 3115 - }, - { - "epoch": 0.37467684723140743, - "flos": 17503969908720.0, - "grad_norm": 3.275825157753747, - "learning_rate": 2.8780800865000336e-06, - "loss": 0.9904, - 
"num_input_tokens_seen": 66052730, - "step": 3116 - }, - { - "epoch": 0.37479709012204654, - "flos": 46090295736600.0, - "grad_norm": 1.035135232963972, - "learning_rate": 2.877380139210702e-06, - "loss": 0.8908, - "num_input_tokens_seen": 66111120, - "step": 3117 - }, - { - "epoch": 0.37491733301268565, - "flos": 16925754798720.0, - "grad_norm": 4.459477762001963, - "learning_rate": 2.876680058823501e-06, - "loss": 0.9746, - "num_input_tokens_seen": 66131240, - "step": 3118 - }, - { - "epoch": 0.3750375759033247, - "flos": 22931999850360.0, - "grad_norm": 5.951941024508603, - "learning_rate": 2.8759798454446314e-06, - "loss": 0.8864, - "num_input_tokens_seen": 66154125, - "step": 3119 - }, - { - "epoch": 0.3751578187939638, - "flos": 16612511047920.0, - "grad_norm": 3.9622267331021424, - "learning_rate": 2.8752794991803173e-06, - "loss": 1.0348, - "num_input_tokens_seen": 66171530, - "step": 3120 - }, - { - "epoch": 0.37527806168460287, - "flos": 10314644193360.0, - "grad_norm": 3.939088289145423, - "learning_rate": 2.8745790201367976e-06, - "loss": 0.9784, - "num_input_tokens_seen": 66187005, - "step": 3121 - }, - { - "epoch": 0.375398304575242, - "flos": 18784392556200.0, - "grad_norm": 3.1566337125589454, - "learning_rate": 2.8738784084203373e-06, - "loss": 1.0747, - "num_input_tokens_seen": 66206800, - "step": 3122 - }, - { - "epoch": 0.3755185474658811, - "flos": 15799531763400.0, - "grad_norm": 3.828314577974826, - "learning_rate": 2.873177664137216e-06, - "loss": 1.0098, - "num_input_tokens_seen": 66227450, - "step": 3123 - }, - { - "epoch": 0.37563879035652015, - "flos": 21963471845040.0, - "grad_norm": 2.626195544513946, - "learning_rate": 2.8724767873937384e-06, - "loss": 0.9158, - "num_input_tokens_seen": 66251290, - "step": 3124 - }, - { - "epoch": 0.37575903324715926, - "flos": 14747557009080.0, - "grad_norm": 5.739327186735086, - "learning_rate": 2.871775778296225e-06, - "loss": 1.0989, - "num_input_tokens_seen": 66268100, - "step": 3125 - }, - { - "epoch": 0.37587927613779837, - "flos": 13256199767280.0, - "grad_norm": 5.505349592010395, - "learning_rate": 2.8710746369510196e-06, - "loss": 1.0134, - "num_input_tokens_seen": 66285805, - "step": 3126 - }, - { - "epoch": 0.3759995190284374, - "flos": 9610229023560.0, - "grad_norm": 9.832663260005804, - "learning_rate": 2.8703733634644846e-06, - "loss": 1.0581, - "num_input_tokens_seen": 66300280, - "step": 3127 - }, - { - "epoch": 0.37611976191907653, - "flos": 14540516432880.0, - "grad_norm": 3.4123174541680643, - "learning_rate": 2.869671957943002e-06, - "loss": 1.0244, - "num_input_tokens_seen": 66319155, - "step": 3128 - }, - { - "epoch": 0.37624000480971564, - "flos": 15012681676080.0, - "grad_norm": 5.696483517837486, - "learning_rate": 2.8689704204929747e-06, - "loss": 0.971, - "num_input_tokens_seen": 66338055, - "step": 3129 - }, - { - "epoch": 0.3763602477003547, - "flos": 16035093138480.0, - "grad_norm": 3.8205471473823085, - "learning_rate": 2.8682687512208253e-06, - "loss": 1.0264, - "num_input_tokens_seen": 66356785, - "step": 3130 - }, - { - "epoch": 0.3764804905909938, - "flos": 19601511151320.0, - "grad_norm": 3.42664245428692, - "learning_rate": 2.8675669502329972e-06, - "loss": 1.032, - "num_input_tokens_seen": 66378035, - "step": 3131 - }, - { - "epoch": 0.3766007334816329, - "flos": 16008963941280.0, - "grad_norm": 4.816715128477732, - "learning_rate": 2.866865017635952e-06, - "loss": 1.0566, - "num_input_tokens_seen": 66395575, - "step": 3132 - }, - { - "epoch": 0.376720976372272, - "flos": 
18473295114600.0, - "grad_norm": 4.087365449796535, - "learning_rate": 2.866162953536174e-06, - "loss": 1.022, - "num_input_tokens_seen": 66416265, - "step": 3133 - }, - { - "epoch": 0.3768412192629111, - "flos": 12784402462800.0, - "grad_norm": 9.63656001275323, - "learning_rate": 2.8654607580401634e-06, - "loss": 0.9763, - "num_input_tokens_seen": 66435720, - "step": 3134 - }, - { - "epoch": 0.3769614621535502, - "flos": 45092541716520.0, - "grad_norm": 0.9300293814724722, - "learning_rate": 2.8647584312544446e-06, - "loss": 0.9143, - "num_input_tokens_seen": 66500645, - "step": 3135 - }, - { - "epoch": 0.37708170504418925, - "flos": 16823997550320.0, - "grad_norm": 8.943842418094532, - "learning_rate": 2.864055973285559e-06, - "loss": 1.0801, - "num_input_tokens_seen": 66522365, - "step": 3136 - }, - { - "epoch": 0.37720194793482836, - "flos": 17370625705440.0, - "grad_norm": 3.0424978261546487, - "learning_rate": 2.8633533842400698e-06, - "loss": 1.0976, - "num_input_tokens_seen": 66542285, - "step": 3137 - }, - { - "epoch": 0.3773221908254674, - "flos": 14908348856640.0, - "grad_norm": 6.631441835150805, - "learning_rate": 2.862650664224558e-06, - "loss": 1.0025, - "num_input_tokens_seen": 66560855, - "step": 3138 - }, - { - "epoch": 0.37744243371610653, - "flos": 26863460085000.0, - "grad_norm": 16.797033974538387, - "learning_rate": 2.861947813345627e-06, - "loss": 0.9248, - "num_input_tokens_seen": 66583275, - "step": 3139 - }, - { - "epoch": 0.37756267660674564, - "flos": 18605198224560.0, - "grad_norm": 3.2042255278804124, - "learning_rate": 2.8612448317098974e-06, - "loss": 0.9382, - "num_input_tokens_seen": 66603330, - "step": 3140 - }, - { - "epoch": 0.3776829194973847, - "flos": 13781052666720.0, - "grad_norm": 4.21789994329413, - "learning_rate": 2.8605417194240114e-06, - "loss": 1.0614, - "num_input_tokens_seen": 66621410, - "step": 3141 - }, - { - "epoch": 0.3778031623880238, - "flos": 12310612156920.0, - "grad_norm": 3.290343408952982, - "learning_rate": 2.8598384765946315e-06, - "loss": 1.0153, - "num_input_tokens_seen": 66639785, - "step": 3142 - }, - { - "epoch": 0.3779234052786629, - "flos": 19313354104680.0, - "grad_norm": 3.7762061914139755, - "learning_rate": 2.8591351033284377e-06, - "loss": 0.9384, - "num_input_tokens_seen": 66659235, - "step": 3143 - }, - { - "epoch": 0.37804364816930197, - "flos": 13964049031800.0, - "grad_norm": 16.10964210799409, - "learning_rate": 2.8584315997321325e-06, - "loss": 1.0469, - "num_input_tokens_seen": 66677960, - "step": 3144 - }, - { - "epoch": 0.3781638910599411, - "flos": 16133569599960.0, - "grad_norm": 4.9743950404110855, - "learning_rate": 2.8577279659124356e-06, - "loss": 0.9988, - "num_input_tokens_seen": 66695355, - "step": 3145 - }, - { - "epoch": 0.3782841339505802, - "flos": 10345464609240.0, - "grad_norm": 3.598763327645112, - "learning_rate": 2.857024201976089e-06, - "loss": 1.0502, - "num_input_tokens_seen": 66712635, - "step": 3146 - }, - { - "epoch": 0.37840437684121925, - "flos": 23404441047600.0, - "grad_norm": 5.28144556168757, - "learning_rate": 2.8563203080298516e-06, - "loss": 0.9479, - "num_input_tokens_seen": 66733130, - "step": 3147 - }, - { - "epoch": 0.37852461973185836, - "flos": 13020117145680.0, - "grad_norm": 3.873823048683588, - "learning_rate": 2.855616284180505e-06, - "loss": 1.1102, - "num_input_tokens_seen": 66749900, - "step": 3148 - }, - { - "epoch": 0.37864486262249747, - "flos": 42587082295080.0, - "grad_norm": 0.9263309826372083, - "learning_rate": 2.8549121305348477e-06, - "loss": 
0.9561, - "num_input_tokens_seen": 66809405, - "step": 3149 - }, - { - "epoch": 0.3787651055131365, - "flos": 16608433060440.0, - "grad_norm": 4.99465905080752, - "learning_rate": 2.8542078471997006e-06, - "loss": 1.0424, - "num_input_tokens_seen": 66826740, - "step": 3150 - }, - { - "epoch": 0.37888534840377563, - "flos": 17498144212320.0, - "grad_norm": 38.06367057415936, - "learning_rate": 2.8535034342819013e-06, - "loss": 0.9819, - "num_input_tokens_seen": 66843870, - "step": 3151 - }, - { - "epoch": 0.37900559129441475, - "flos": 17057289969960.0, - "grad_norm": 3.906397214022863, - "learning_rate": 2.85279889188831e-06, - "loss": 0.9482, - "num_input_tokens_seen": 66863965, - "step": 3152 - }, - { - "epoch": 0.3791258341850538, - "flos": 17530160429040.0, - "grad_norm": 5.270110712788568, - "learning_rate": 2.852094220125805e-06, - "loss": 1.0234, - "num_input_tokens_seen": 66883195, - "step": 3153 - }, - { - "epoch": 0.3792460770756929, - "flos": 12338581047720.0, - "grad_norm": 4.889491525748981, - "learning_rate": 2.8513894191012846e-06, - "loss": 0.9336, - "num_input_tokens_seen": 66901895, - "step": 3154 - }, - { - "epoch": 0.37936631996633197, - "flos": 17215260954000.0, - "grad_norm": 2.8882665962423273, - "learning_rate": 2.8506844889216664e-06, - "loss": 1.0091, - "num_input_tokens_seen": 66921000, - "step": 3155 - }, - { - "epoch": 0.3794865628569711, - "flos": 50345823252720.0, - "grad_norm": 0.8889184295361691, - "learning_rate": 2.849979429693887e-06, - "loss": 0.9017, - "num_input_tokens_seen": 66981705, - "step": 3156 - }, - { - "epoch": 0.3796068057476102, - "flos": 11158934478720.0, - "grad_norm": 5.327012647931363, - "learning_rate": 2.8492742415249042e-06, - "loss": 0.9644, - "num_input_tokens_seen": 66999070, - "step": 3157 - }, - { - "epoch": 0.37972704863824924, - "flos": 17923263526320.0, - "grad_norm": 3.646723277148297, - "learning_rate": 2.848568924521694e-06, - "loss": 0.9852, - "num_input_tokens_seen": 67019570, - "step": 3158 - }, - { - "epoch": 0.37984729152888835, - "flos": 18654973032600.0, - "grad_norm": 11.146917804949176, - "learning_rate": 2.8478634787912526e-06, - "loss": 0.954, - "num_input_tokens_seen": 67037345, - "step": 3159 - }, - { - "epoch": 0.37996753441952746, - "flos": 18237733739520.0, - "grad_norm": 4.990839577721544, - "learning_rate": 2.847157904440596e-06, - "loss": 0.9996, - "num_input_tokens_seen": 67056795, - "step": 3160 - }, - { - "epoch": 0.3800877773101665, - "flos": 14276955505440.0, - "grad_norm": 2.7005988869687996, - "learning_rate": 2.846452201576759e-06, - "loss": 0.9815, - "num_input_tokens_seen": 67075890, - "step": 3161 - }, - { - "epoch": 0.38020802020080563, - "flos": 45137441336520.0, - "grad_norm": 0.8944945348676089, - "learning_rate": 2.845746370306795e-06, - "loss": 0.8868, - "num_input_tokens_seen": 67140800, - "step": 3162 - }, - { - "epoch": 0.38032826309144474, - "flos": 15118148973240.0, - "grad_norm": 4.730431251786761, - "learning_rate": 2.84504041073778e-06, - "loss": 1.0076, - "num_input_tokens_seen": 67158935, - "step": 3163 - }, - { - "epoch": 0.3804485059820838, - "flos": 13440637225680.0, - "grad_norm": 5.370540632031179, - "learning_rate": 2.844334322976806e-06, - "loss": 1.026, - "num_input_tokens_seen": 67178870, - "step": 3164 - }, - { - "epoch": 0.3805687488727229, - "flos": 15509810977200.0, - "grad_norm": 4.943976295587199, - "learning_rate": 2.8436281071309866e-06, - "loss": 1.0581, - "num_input_tokens_seen": 67197130, - "step": 3165 - }, - { - "epoch": 0.380688991763362, - "flos": 
41900333731920.0, - "grad_norm": 0.7512688632190054, - "learning_rate": 2.842921763307455e-06, - "loss": 0.7853, - "num_input_tokens_seen": 67259660, - "step": 3166 - }, - { - "epoch": 0.38080923465400107, - "flos": 16921952765280.0, - "grad_norm": 3.3093864635236563, - "learning_rate": 2.842215291613361e-06, - "loss": 1.0525, - "num_input_tokens_seen": 67277760, - "step": 3167 - }, - { - "epoch": 0.3809294775446402, - "flos": 39329983353360.0, - "grad_norm": 0.8305567539558001, - "learning_rate": 2.8415086921558774e-06, - "loss": 0.8685, - "num_input_tokens_seen": 67340905, - "step": 3168 - }, - { - "epoch": 0.38104972043527924, - "flos": 17529332566920.0, - "grad_norm": 2.768302612670111, - "learning_rate": 2.840801965042194e-06, - "loss": 1.0076, - "num_input_tokens_seen": 67360085, - "step": 3169 - }, - { - "epoch": 0.38116996332591835, - "flos": 16245230532240.0, - "grad_norm": 13.130428100021863, - "learning_rate": 2.840095110379521e-06, - "loss": 1.0563, - "num_input_tokens_seen": 67379325, - "step": 3170 - }, - { - "epoch": 0.38129020621655746, - "flos": 43544909415960.0, - "grad_norm": 0.7595176782515731, - "learning_rate": 2.8393881282750884e-06, - "loss": 0.7839, - "num_input_tokens_seen": 67441875, - "step": 3171 - }, - { - "epoch": 0.3814104491071965, - "flos": 15375945527400.0, - "grad_norm": 26.69971795665347, - "learning_rate": 2.838681018836144e-06, - "loss": 1.0053, - "num_input_tokens_seen": 67458915, - "step": 3172 - }, - { - "epoch": 0.3815306919978356, - "flos": 13544694091080.0, - "grad_norm": 3.437112821101878, - "learning_rate": 2.837973782169955e-06, - "loss": 1.0027, - "num_input_tokens_seen": 67477010, - "step": 3173 - }, - { - "epoch": 0.38165093488847474, - "flos": 48022568543880.0, - "grad_norm": 0.8626388645337604, - "learning_rate": 2.8372664183838096e-06, - "loss": 0.8561, - "num_input_tokens_seen": 67539750, - "step": 3174 - }, - { - "epoch": 0.3817711777791138, - "flos": 15873964013760.0, - "grad_norm": 3.9108392231412754, - "learning_rate": 2.836558927585015e-06, - "loss": 0.905, - "num_input_tokens_seen": 67556440, - "step": 3175 - }, - { - "epoch": 0.3818914206697529, - "flos": 16219285304400.0, - "grad_norm": 3.48058766719547, - "learning_rate": 2.8358513098808957e-06, - "loss": 1.046, - "num_input_tokens_seen": 67576475, - "step": 3176 - }, - { - "epoch": 0.382011663560392, - "flos": 17343883277040.0, - "grad_norm": 4.1756391938243365, - "learning_rate": 2.835143565378798e-06, - "loss": 0.9931, - "num_input_tokens_seen": 67596660, - "step": 3177 - }, - { - "epoch": 0.38213190645103107, - "flos": 15616290105840.0, - "grad_norm": 3.2354759517100784, - "learning_rate": 2.8344356941860847e-06, - "loss": 0.9997, - "num_input_tokens_seen": 67616010, - "step": 3178 - }, - { - "epoch": 0.3822521493416702, - "flos": 25343796675240.0, - "grad_norm": 4.567521976278186, - "learning_rate": 2.8337276964101403e-06, - "loss": 0.8851, - "num_input_tokens_seen": 67636170, - "step": 3179 - }, - { - "epoch": 0.3823723922323093, - "flos": 14961159159120.0, - "grad_norm": 2.843514904323239, - "learning_rate": 2.833019572158367e-06, - "loss": 0.9761, - "num_input_tokens_seen": 67654325, - "step": 3180 - }, - { - "epoch": 0.38249263512294834, - "flos": 14040198329520.0, - "grad_norm": 3.219377771286391, - "learning_rate": 2.8323113215381872e-06, - "loss": 1.0387, - "num_input_tokens_seen": 67672390, - "step": 3181 - }, - { - "epoch": 0.38261287801358745, - "flos": 15222665762040.0, - "grad_norm": 3.1327602924363243, - "learning_rate": 2.831602944657042e-06, - "loss": 
0.9752, - "num_input_tokens_seen": 67690190, - "step": 3182 - }, - { - "epoch": 0.38273312090422656, - "flos": 15616290105840.0, - "grad_norm": 3.2729678306408934, - "learning_rate": 2.830894441622391e-06, - "loss": 0.9679, - "num_input_tokens_seen": 67706560, - "step": 3183 - }, - { - "epoch": 0.3828533637948656, - "flos": 17635290449040.0, - "grad_norm": 6.29044833818784, - "learning_rate": 2.8301858125417134e-06, - "loss": 1.0167, - "num_input_tokens_seen": 67726120, - "step": 3184 - }, - { - "epoch": 0.38297360668550473, - "flos": 15956797531560.0, - "grad_norm": 3.5585625288061666, - "learning_rate": 2.8294770575225082e-06, - "loss": 0.9667, - "num_input_tokens_seen": 67745970, - "step": 3185 - }, - { - "epoch": 0.3830938495761438, - "flos": 17711010484920.0, - "grad_norm": 3.3356963523879797, - "learning_rate": 2.828768176672293e-06, - "loss": 1.0676, - "num_input_tokens_seen": 67764805, - "step": 3186 - }, - { - "epoch": 0.3832140924667829, - "flos": 23561001599880.0, - "grad_norm": 5.6297251881934764, - "learning_rate": 2.8280591700986044e-06, - "loss": 0.9496, - "num_input_tokens_seen": 67786390, - "step": 3187 - }, - { - "epoch": 0.383334335357422, - "flos": 22746887837640.0, - "grad_norm": 5.208167762667769, - "learning_rate": 2.827350037908999e-06, - "loss": 0.9731, - "num_input_tokens_seen": 67805550, - "step": 3188 - }, - { - "epoch": 0.38345457824806106, - "flos": 14042559269640.0, - "grad_norm": 5.4662007520975155, - "learning_rate": 2.8266407802110496e-06, - "loss": 1.0162, - "num_input_tokens_seen": 67823525, - "step": 3189 - }, - { - "epoch": 0.3835748211387002, - "flos": 15930944288400.0, - "grad_norm": 3.6953366300992685, - "learning_rate": 2.8259313971123515e-06, - "loss": 0.9829, - "num_input_tokens_seen": 67844365, - "step": 3190 - }, - { - "epoch": 0.3836950640293393, - "flos": 17870575870080.0, - "grad_norm": 2.41967102179175, - "learning_rate": 2.8252218887205166e-06, - "loss": 1.0038, - "num_input_tokens_seen": 67864775, - "step": 3191 - }, - { - "epoch": 0.38381530691997834, - "flos": 15485245519560.0, - "grad_norm": 5.454462362870308, - "learning_rate": 2.824512255143178e-06, - "loss": 1.0336, - "num_input_tokens_seen": 67883730, - "step": 3192 - }, - { - "epoch": 0.38393554981061745, - "flos": 15092571684120.0, - "grad_norm": 3.280248289247527, - "learning_rate": 2.8238024964879855e-06, - "loss": 1.0179, - "num_input_tokens_seen": 67904345, - "step": 3193 - }, - { - "epoch": 0.38405579270125656, - "flos": 12050025400800.0, - "grad_norm": 4.696373843361634, - "learning_rate": 2.8230926128626095e-06, - "loss": 0.9839, - "num_input_tokens_seen": 67922560, - "step": 3194 - }, - { - "epoch": 0.3841760355918956, - "flos": 15511190747400.0, - "grad_norm": 3.8165486835306797, - "learning_rate": 2.822382604374738e-06, - "loss": 1.0123, - "num_input_tokens_seen": 67941205, - "step": 3195 - }, - { - "epoch": 0.3842962784825347, - "flos": 18442321390920.0, - "grad_norm": 3.586151316133639, - "learning_rate": 2.8216724711320793e-06, - "loss": 0.8816, - "num_input_tokens_seen": 67960050, - "step": 3196 - }, - { - "epoch": 0.38441652137317384, - "flos": 18028025607600.0, - "grad_norm": 18.51944020556085, - "learning_rate": 2.820962213242361e-06, - "loss": 1.0246, - "num_input_tokens_seen": 67979100, - "step": 3197 - }, - { - "epoch": 0.3845367642638129, - "flos": 12836292918480.0, - "grad_norm": 3.7322344530798635, - "learning_rate": 2.8202518308133264e-06, - "loss": 1.0846, - "num_input_tokens_seen": 67996095, - "step": 3198 - }, - { - "epoch": 0.384657007154452, - "flos": 
17949668677560.0, - "grad_norm": 3.5434088218114503, - "learning_rate": 2.8195413239527426e-06, - "loss": 0.9642, - "num_input_tokens_seen": 68015555, - "step": 3199 - }, - { - "epoch": 0.38477725004509106, - "flos": 14095829495520.0, - "grad_norm": 2.8482809336578043, - "learning_rate": 2.8188306927683906e-06, - "loss": 1.0366, - "num_input_tokens_seen": 68034745, - "step": 3200 - }, - { - "epoch": 0.38489749293573017, - "flos": 12941146984440.0, - "grad_norm": 3.199342755533207, - "learning_rate": 2.818119937368074e-06, - "loss": 0.9646, - "num_input_tokens_seen": 68053100, - "step": 3201 - }, - { - "epoch": 0.3850177358263693, - "flos": 17346520171200.0, - "grad_norm": 3.7126850069232002, - "learning_rate": 2.817409057859613e-06, - "loss": 0.8757, - "num_input_tokens_seen": 68071810, - "step": 3202 - }, - { - "epoch": 0.38513797871700833, - "flos": 12518112656520.0, - "grad_norm": 2.503066375846008, - "learning_rate": 2.8166980543508482e-06, - "loss": 1.0066, - "num_input_tokens_seen": 68087420, - "step": 3203 - }, - { - "epoch": 0.38525822160764744, - "flos": 18317102501040.0, - "grad_norm": 2.995787842413566, - "learning_rate": 2.815986926949638e-06, - "loss": 1.0095, - "num_input_tokens_seen": 68105640, - "step": 3204 - }, - { - "epoch": 0.38537846449828655, - "flos": 14331329547480.0, - "grad_norm": 4.865507224635322, - "learning_rate": 2.8152756757638597e-06, - "loss": 1.0227, - "num_input_tokens_seen": 68123860, - "step": 3205 - }, - { - "epoch": 0.3854987073889256, - "flos": 16376459087880.0, - "grad_norm": 5.5339464414794515, - "learning_rate": 2.8145643009014093e-06, - "loss": 1.059, - "num_input_tokens_seen": 68142075, - "step": 3206 - }, - { - "epoch": 0.3856189502795647, - "flos": 14328753976440.0, - "grad_norm": 3.627221051880365, - "learning_rate": 2.813852802470202e-06, - "loss": 1.0177, - "num_input_tokens_seen": 68159690, - "step": 3207 - }, - { - "epoch": 0.38573919317020383, - "flos": 18290482718880.0, - "grad_norm": 4.583130895594445, - "learning_rate": 2.8131411805781717e-06, - "loss": 0.9503, - "num_input_tokens_seen": 68179535, - "step": 3208 - }, - { - "epoch": 0.3858594360608429, - "flos": 21250011515040.0, - "grad_norm": 4.937759972017652, - "learning_rate": 2.8124294353332707e-06, - "loss": 0.8608, - "num_input_tokens_seen": 68197930, - "step": 3209 - }, - { - "epoch": 0.385979678951482, - "flos": 17634891848760.0, - "grad_norm": 4.166707585451747, - "learning_rate": 2.8117175668434713e-06, - "loss": 0.996, - "num_input_tokens_seen": 68217310, - "step": 3210 - }, - { - "epoch": 0.3860999218421211, - "flos": 15117689049840.0, - "grad_norm": 4.219203896300188, - "learning_rate": 2.811005575216762e-06, - "loss": 0.8955, - "num_input_tokens_seen": 68235745, - "step": 3211 - }, - { - "epoch": 0.38622016473276016, - "flos": 17452784668920.0, - "grad_norm": 2.395571011542758, - "learning_rate": 2.8102934605611513e-06, - "loss": 1.0097, - "num_input_tokens_seen": 68257100, - "step": 3212 - }, - { - "epoch": 0.3863404076233993, - "flos": 14593296073800.0, - "grad_norm": 5.060890601265603, - "learning_rate": 2.8095812229846665e-06, - "loss": 0.898, - "num_input_tokens_seen": 68276780, - "step": 3213 - }, - { - "epoch": 0.3864606505140384, - "flos": 15878379278400.0, - "grad_norm": 5.530007950292464, - "learning_rate": 2.808868862595355e-06, - "loss": 0.9214, - "num_input_tokens_seen": 68296745, - "step": 3214 - }, - { - "epoch": 0.38658089340467744, - "flos": 18291310581000.0, - "grad_norm": 3.1043958813998174, - "learning_rate": 2.8081563795012795e-06, - "loss": 
1.0129, - "num_input_tokens_seen": 68316090, - "step": 3215 - }, - { - "epoch": 0.38670113629531655, - "flos": 24111738404040.0, - "grad_norm": 8.414682236200631, - "learning_rate": 2.807443773810524e-06, - "loss": 0.9598, - "num_input_tokens_seen": 68337070, - "step": 3216 - }, - { - "epoch": 0.3868213791859556, - "flos": 16586473835400.0, - "grad_norm": 3.0503252887799697, - "learning_rate": 2.80673104563119e-06, - "loss": 1.1125, - "num_input_tokens_seen": 68357415, - "step": 3217 - }, - { - "epoch": 0.3869416220765947, - "flos": 13072007601360.0, - "grad_norm": 3.742737030901705, - "learning_rate": 2.8060181950713976e-06, - "loss": 1.0077, - "num_input_tokens_seen": 68373925, - "step": 3218 - }, - { - "epoch": 0.3870618649672338, - "flos": 11053681812480.0, - "grad_norm": 4.488470354072172, - "learning_rate": 2.805305222239286e-06, - "loss": 1.0298, - "num_input_tokens_seen": 68390900, - "step": 3219 - }, - { - "epoch": 0.3871821078578729, - "flos": 16717457098560.0, - "grad_norm": 3.1359149287713026, - "learning_rate": 2.8045921272430118e-06, - "loss": 0.9573, - "num_input_tokens_seen": 68410300, - "step": 3220 - }, - { - "epoch": 0.387302350748512, - "flos": 12594445923600.0, - "grad_norm": 3.7689200899069224, - "learning_rate": 2.803878910190753e-06, - "loss": 0.9927, - "num_input_tokens_seen": 68426940, - "step": 3221 - }, - { - "epoch": 0.3874225936391511, - "flos": 8084923886760.0, - "grad_norm": 5.215271664325344, - "learning_rate": 2.8031655711907017e-06, - "loss": 1.049, - "num_input_tokens_seen": 68440365, - "step": 3222 - }, - { - "epoch": 0.38754283652979016, - "flos": 15590130247080.0, - "grad_norm": 3.9337714570863054, - "learning_rate": 2.8024521103510723e-06, - "loss": 1.0293, - "num_input_tokens_seen": 68456855, - "step": 3223 - }, - { - "epoch": 0.38766307942042927, - "flos": 15036940518120.0, - "grad_norm": 4.458333927522899, - "learning_rate": 2.8017385277800952e-06, - "loss": 0.9779, - "num_input_tokens_seen": 68474930, - "step": 3224 - }, - { - "epoch": 0.3877833223110684, - "flos": 19522755621000.0, - "grad_norm": 4.185968602980868, - "learning_rate": 2.8010248235860213e-06, - "loss": 0.963, - "num_input_tokens_seen": 68494765, - "step": 3225 - }, - { - "epoch": 0.38790356520170743, - "flos": 46180120090080.0, - "grad_norm": 0.8402543949170023, - "learning_rate": 2.8003109978771192e-06, - "loss": 0.8978, - "num_input_tokens_seen": 68555650, - "step": 3226 - }, - { - "epoch": 0.38802380809234654, - "flos": 15877643400960.0, - "grad_norm": 3.2383944833282556, - "learning_rate": 2.799597050761674e-06, - "loss": 1.0192, - "num_input_tokens_seen": 68571575, - "step": 3227 - }, - { - "epoch": 0.38814405098298566, - "flos": 17972885026560.0, - "grad_norm": 5.136085987642821, - "learning_rate": 2.7988829823479924e-06, - "loss": 1.0176, - "num_input_tokens_seen": 68589685, - "step": 3228 - }, - { - "epoch": 0.3882642938736247, - "flos": 13358999508720.0, - "grad_norm": 2.8977558331773534, - "learning_rate": 2.7981687927443976e-06, - "loss": 0.8686, - "num_input_tokens_seen": 68606205, - "step": 3229 - }, - { - "epoch": 0.3883845367642638, - "flos": 15379962191760.0, - "grad_norm": 2.8214398188996275, - "learning_rate": 2.797454482059231e-06, - "loss": 1.079, - "num_input_tokens_seen": 68626080, - "step": 3230 - }, - { - "epoch": 0.3885047796549029, - "flos": 14592713504160.0, - "grad_norm": 3.554362053697491, - "learning_rate": 2.7967400504008537e-06, - "loss": 1.0687, - "num_input_tokens_seen": 68645100, - "step": 3231 - }, - { - "epoch": 0.388625022545542, - "flos": 
46050828760800.0, - "grad_norm": 0.8353614961730537, - "learning_rate": 2.7960254978776456e-06, - "loss": 0.8557, - "num_input_tokens_seen": 68706910, - "step": 3232 - }, - { - "epoch": 0.3887452654361811, - "flos": 12836630195640.0, - "grad_norm": 5.323468542386378, - "learning_rate": 2.7953108245980006e-06, - "loss": 1.025, - "num_input_tokens_seen": 68725145, - "step": 3233 - }, - { - "epoch": 0.38886550832682015, - "flos": 17768052082680.0, - "grad_norm": 2.7269059541472402, - "learning_rate": 2.7945960306703365e-06, - "loss": 0.9678, - "num_input_tokens_seen": 68747850, - "step": 3234 - }, - { - "epoch": 0.38898575121745926, - "flos": 19366379038080.0, - "grad_norm": 2.658575139861951, - "learning_rate": 2.7938811162030865e-06, - "loss": 0.8838, - "num_input_tokens_seen": 68767835, - "step": 3235 - }, - { - "epoch": 0.3891059941080984, - "flos": 20489934517680.0, - "grad_norm": 2.9568401562160274, - "learning_rate": 2.793166081304702e-06, - "loss": 1.0518, - "num_input_tokens_seen": 68788050, - "step": 3236 - }, - { - "epoch": 0.38922623699873743, - "flos": 16271605021920.0, - "grad_norm": 3.384926784968858, - "learning_rate": 2.7924509260836543e-06, - "loss": 1.0378, - "num_input_tokens_seen": 68806895, - "step": 3237 - }, - { - "epoch": 0.38934647988937654, - "flos": 14042988531480.0, - "grad_norm": 4.28379601281608, - "learning_rate": 2.791735650648431e-06, - "loss": 0.909, - "num_input_tokens_seen": 68825735, - "step": 3238 - }, - { - "epoch": 0.38946672278001565, - "flos": 13617838555920.0, - "grad_norm": 3.2726028580088293, - "learning_rate": 2.791020255107538e-06, - "loss": 0.9639, - "num_input_tokens_seen": 68842825, - "step": 3239 - }, - { - "epoch": 0.3895869656706547, - "flos": 17739868560960.0, - "grad_norm": 2.5124264872410262, - "learning_rate": 2.7903047395695023e-06, - "loss": 1.0302, - "num_input_tokens_seen": 68862445, - "step": 3240 - }, - { - "epoch": 0.3897072085612938, - "flos": 17162849251800.0, - "grad_norm": 3.105311359759637, - "learning_rate": 2.789589104142865e-06, - "loss": 1.1264, - "num_input_tokens_seen": 68879790, - "step": 3241 - }, - { - "epoch": 0.3898274514519329, - "flos": 12155799313560.0, - "grad_norm": 9.535183054422587, - "learning_rate": 2.7888733489361895e-06, - "loss": 0.9942, - "num_input_tokens_seen": 68897925, - "step": 3242 - }, - { - "epoch": 0.389947694342572, - "flos": 47307759105240.0, - "grad_norm": 0.7834669320505099, - "learning_rate": 2.788157474058054e-06, - "loss": 0.8802, - "num_input_tokens_seen": 68959920, - "step": 3243 - }, - { - "epoch": 0.3900679372332111, - "flos": 18318236978760.0, - "grad_norm": 2.4736628841194226, - "learning_rate": 2.7874414796170555e-06, - "loss": 0.924, - "num_input_tokens_seen": 68981130, - "step": 3244 - }, - { - "epoch": 0.3901881801238502, - "flos": 8300089776360.0, - "grad_norm": 5.9419786126141245, - "learning_rate": 2.7867253657218113e-06, - "loss": 1.0506, - "num_input_tokens_seen": 68994740, - "step": 3245 - }, - { - "epoch": 0.39030842301448926, - "flos": 19445042583720.0, - "grad_norm": 3.2442855481062955, - "learning_rate": 2.7860091324809544e-06, - "loss": 0.9511, - "num_input_tokens_seen": 69015520, - "step": 3246 - }, - { - "epoch": 0.39042866590512837, - "flos": 19340433810240.0, - "grad_norm": 7.188767296563482, - "learning_rate": 2.7852927800031377e-06, - "loss": 1.034, - "num_input_tokens_seen": 69035405, - "step": 3247 - }, - { - "epoch": 0.3905489087957674, - "flos": 21174904710360.0, - "grad_norm": 3.616410965091304, - "learning_rate": 2.7845763083970298e-06, - "loss": 
1.0595, - "num_input_tokens_seen": 69055525, - "step": 3248 - }, - { - "epoch": 0.39066915168640653, - "flos": 17425183716840.0, - "grad_norm": 2.700204053868084, - "learning_rate": 2.7838597177713205e-06, - "loss": 1.0409, - "num_input_tokens_seen": 69076335, - "step": 3249 - }, - { - "epoch": 0.39078939457704565, - "flos": 14593602689400.0, - "grad_norm": 2.5875870436618382, - "learning_rate": 2.7831430082347143e-06, - "loss": 0.9668, - "num_input_tokens_seen": 69095260, - "step": 3250 - }, - { - "epoch": 0.3909096374676847, - "flos": 16192450891320.0, - "grad_norm": 7.288886130469402, - "learning_rate": 2.7824261798959373e-06, - "loss": 1.0509, - "num_input_tokens_seen": 69113160, - "step": 3251 - }, - { - "epoch": 0.3910298803583238, - "flos": 16350452536920.0, - "grad_norm": 2.6733550495429426, - "learning_rate": 2.78170923286373e-06, - "loss": 1.0215, - "num_input_tokens_seen": 69132480, - "step": 3252 - }, - { - "epoch": 0.3911501232489629, - "flos": 17294139130560.0, - "grad_norm": 3.9258061498355246, - "learning_rate": 2.780992167246854e-06, - "loss": 1.068, - "num_input_tokens_seen": 69149725, - "step": 3253 - }, - { - "epoch": 0.391270366139602, - "flos": 43568064441840.0, - "grad_norm": 1.0218429373747806, - "learning_rate": 2.7802749831540883e-06, - "loss": 1.0106, - "num_input_tokens_seen": 69208345, - "step": 3254 - }, - { - "epoch": 0.3913906090302411, - "flos": 15301421292360.0, - "grad_norm": 2.8901869983298556, - "learning_rate": 2.7795576806942268e-06, - "loss": 1.046, - "num_input_tokens_seen": 69226870, - "step": 3255 - }, - { - "epoch": 0.3915108519208802, - "flos": 35640253015440.0, - "grad_norm": 0.7950251181850968, - "learning_rate": 2.778840259976085e-06, - "loss": 0.8058, - "num_input_tokens_seen": 69281820, - "step": 3256 - }, - { - "epoch": 0.39163109481151925, - "flos": 11681119822440.0, - "grad_norm": 5.357848013834983, - "learning_rate": 2.778122721108495e-06, - "loss": 0.9942, - "num_input_tokens_seen": 69299770, - "step": 3257 - }, - { - "epoch": 0.39175133770215836, - "flos": 18552694537680.0, - "grad_norm": 2.4397306750134455, - "learning_rate": 2.7774050642003076e-06, - "loss": 1.111, - "num_input_tokens_seen": 69320300, - "step": 3258 - }, - { - "epoch": 0.3918715805927975, - "flos": 15537595898640.0, - "grad_norm": 4.48935861165251, - "learning_rate": 2.7766872893603896e-06, - "loss": 1.1684, - "num_input_tokens_seen": 69339995, - "step": 3259 - }, - { - "epoch": 0.39199182348343653, - "flos": 14462527441560.0, - "grad_norm": 2.6295184347982983, - "learning_rate": 2.7759693966976275e-06, - "loss": 0.9546, - "num_input_tokens_seen": 69358220, - "step": 3260 - }, - { - "epoch": 0.39211206637407564, - "flos": 15401277524040.0, - "grad_norm": 2.6604965373222647, - "learning_rate": 2.7752513863209242e-06, - "loss": 1.0662, - "num_input_tokens_seen": 69376520, - "step": 3261 - }, - { - "epoch": 0.39223230926471475, - "flos": 14958062341560.0, - "grad_norm": 3.0611739109687646, - "learning_rate": 2.774533258339203e-06, - "loss": 1.0611, - "num_input_tokens_seen": 69393700, - "step": 3262 - }, - { - "epoch": 0.3923525521553538, - "flos": 12469472326200.0, - "grad_norm": 5.241979921782056, - "learning_rate": 2.7738150128614014e-06, - "loss": 1.0173, - "num_input_tokens_seen": 69410825, - "step": 3263 - }, - { - "epoch": 0.3924727950459929, - "flos": 14593357396920.0, - "grad_norm": 2.8138938840583463, - "learning_rate": 2.7730966499964777e-06, - "loss": 1.124, - "num_input_tokens_seen": 69427495, - "step": 3264 - }, - { - "epoch": 0.39259303793663197, - 
"flos": 11472668814480.0, - "grad_norm": 3.7025033936108294, - "learning_rate": 2.772378169853408e-06, - "loss": 1.0237, - "num_input_tokens_seen": 69444785, - "step": 3265 - }, - { - "epoch": 0.3927132808272711, - "flos": 11889754799760.0, - "grad_norm": 3.9160871009296856, - "learning_rate": 2.771659572541183e-06, - "loss": 0.971, - "num_input_tokens_seen": 69462435, - "step": 3266 - }, - { - "epoch": 0.3928335237179102, - "flos": 14384293157760.0, - "grad_norm": 4.414898548451055, - "learning_rate": 2.7709408581688143e-06, - "loss": 1.0945, - "num_input_tokens_seen": 69482140, - "step": 3267 - }, - { - "epoch": 0.39295376660854925, - "flos": 17766335035320.0, - "grad_norm": 4.70915287053379, - "learning_rate": 2.7702220268453307e-06, - "loss": 1.1057, - "num_input_tokens_seen": 69502220, - "step": 3268 - }, - { - "epoch": 0.39307400949918836, - "flos": 13256291751960.0, - "grad_norm": 4.371050768820742, - "learning_rate": 2.7695030786797785e-06, - "loss": 1.0583, - "num_input_tokens_seen": 69517835, - "step": 3269 - }, - { - "epoch": 0.39319425238982747, - "flos": 15927602178360.0, - "grad_norm": 4.921950954275049, - "learning_rate": 2.7687840137812206e-06, - "loss": 0.9682, - "num_input_tokens_seen": 69535640, - "step": 3270 - }, - { - "epoch": 0.3933144952804665, - "flos": 47393842748400.0, - "grad_norm": 0.7962156073360271, - "learning_rate": 2.7680648322587395e-06, - "loss": 0.8631, - "num_input_tokens_seen": 69600235, - "step": 3271 - }, - { - "epoch": 0.39343473817110564, - "flos": 10948889069640.0, - "grad_norm": 3.9638768111641, - "learning_rate": 2.7673455342214334e-06, - "loss": 1.0392, - "num_input_tokens_seen": 69616945, - "step": 3272 - }, - { - "epoch": 0.39355498106174475, - "flos": 15144370155120.0, - "grad_norm": 3.0889201369256356, - "learning_rate": 2.7666261197784198e-06, - "loss": 0.9816, - "num_input_tokens_seen": 69635480, - "step": 3273 - }, - { - "epoch": 0.3936752239523838, - "flos": 9374177063520.0, - "grad_norm": 2.9166608174583275, - "learning_rate": 2.7659065890388336e-06, - "loss": 0.9893, - "num_input_tokens_seen": 69651200, - "step": 3274 - }, - { - "epoch": 0.3937954668430229, - "flos": 11892453017040.0, - "grad_norm": 4.667920125923531, - "learning_rate": 2.7651869421118266e-06, - "loss": 1.0615, - "num_input_tokens_seen": 69667530, - "step": 3275 - }, - { - "epoch": 0.393915709733662, - "flos": 14956314632640.0, - "grad_norm": 2.513760367305249, - "learning_rate": 2.76446717910657e-06, - "loss": 1.051, - "num_input_tokens_seen": 69687955, - "step": 3276 - }, - { - "epoch": 0.3940359526243011, - "flos": 12154879466760.0, - "grad_norm": 3.0208251076071106, - "learning_rate": 2.763747300132249e-06, - "loss": 0.9798, - "num_input_tokens_seen": 69705115, - "step": 3277 - }, - { - "epoch": 0.3941561955149402, - "flos": 14907796948560.0, - "grad_norm": 2.4580568923263524, - "learning_rate": 2.7630273052980704e-06, - "loss": 1.0891, - "num_input_tokens_seen": 69725425, - "step": 3278 - }, - { - "epoch": 0.39427643840557924, - "flos": 13360900525440.0, - "grad_norm": 4.542266712738083, - "learning_rate": 2.7623071947132554e-06, - "loss": 0.9013, - "num_input_tokens_seen": 69742175, - "step": 3279 - }, - { - "epoch": 0.39439668129621835, - "flos": 16534123456320.0, - "grad_norm": 5.4575729811188305, - "learning_rate": 2.7615869684870458e-06, - "loss": 1.0091, - "num_input_tokens_seen": 69761205, - "step": 3280 - }, - { - "epoch": 0.39451692418685746, - "flos": 18973122633000.0, - "grad_norm": 5.827603641539192, - "learning_rate": 2.7608666267286986e-06, - 
"loss": 1.0701, - "num_input_tokens_seen": 69781155, - "step": 3281 - }, - { - "epoch": 0.3946371670774965, - "flos": 12940564414800.0, - "grad_norm": 4.22290377107573, - "learning_rate": 2.760146169547489e-06, - "loss": 1.0801, - "num_input_tokens_seen": 69797640, - "step": 3282 - }, - { - "epoch": 0.39475740996813563, - "flos": 17214893015280.0, - "grad_norm": 3.3579363914131632, - "learning_rate": 2.75942559705271e-06, - "loss": 0.9866, - "num_input_tokens_seen": 69817095, - "step": 3283 - }, - { - "epoch": 0.39487765285877474, - "flos": 13701898536120.0, - "grad_norm": 4.646522345433864, - "learning_rate": 2.7587049093536713e-06, - "loss": 1.1118, - "num_input_tokens_seen": 69833145, - "step": 3284 - }, - { - "epoch": 0.3949978957494138, - "flos": 12260224117680.0, - "grad_norm": 4.21495348740659, - "learning_rate": 2.757984106559701e-06, - "loss": 1.0333, - "num_input_tokens_seen": 69851850, - "step": 3285 - }, - { - "epoch": 0.3951181386400529, - "flos": 25919160260160.0, - "grad_norm": 3.7015166190038533, - "learning_rate": 2.7572631887801446e-06, - "loss": 0.9379, - "num_input_tokens_seen": 69873195, - "step": 3286 - }, - { - "epoch": 0.395238381530692, - "flos": 16427215065840.0, - "grad_norm": 2.719460230170952, - "learning_rate": 2.7565421561243654e-06, - "loss": 0.9811, - "num_input_tokens_seen": 69891080, - "step": 3287 - }, - { - "epoch": 0.3953586244213311, - "flos": 17315975709360.0, - "grad_norm": 4.46642130751955, - "learning_rate": 2.7558210087017413e-06, - "loss": 1.0516, - "num_input_tokens_seen": 69910735, - "step": 3288 - }, - { - "epoch": 0.3954788673119702, - "flos": 16664370842040.0, - "grad_norm": 3.1008176894196113, - "learning_rate": 2.7550997466216724e-06, - "loss": 0.9689, - "num_input_tokens_seen": 69928250, - "step": 3289 - }, - { - "epoch": 0.3955991102026093, - "flos": 12391238042400.0, - "grad_norm": 3.925704363169656, - "learning_rate": 2.7543783699935714e-06, - "loss": 1.0339, - "num_input_tokens_seen": 69946000, - "step": 3290 - }, - { - "epoch": 0.39571935309324835, - "flos": 12913576693920.0, - "grad_norm": 4.344020478314487, - "learning_rate": 2.753656878926872e-06, - "loss": 1.0806, - "num_input_tokens_seen": 69961600, - "step": 3291 - }, - { - "epoch": 0.39583959598388746, - "flos": 12574234407480.0, - "grad_norm": 3.8866491877019413, - "learning_rate": 2.752935273531023e-06, - "loss": 0.9661, - "num_input_tokens_seen": 69979470, - "step": 3292 - }, - { - "epoch": 0.39595983887452657, - "flos": 13726433332200.0, - "grad_norm": 2.7825996467453566, - "learning_rate": 2.752213553915492e-06, - "loss": 1.0126, - "num_input_tokens_seen": 69997545, - "step": 3293 - }, - { - "epoch": 0.3960800817651656, - "flos": 43433248483680.0, - "grad_norm": 0.7994153911655028, - "learning_rate": 2.751491720189762e-06, - "loss": 0.9123, - "num_input_tokens_seen": 70055375, - "step": 3294 - }, - { - "epoch": 0.39620032465580474, - "flos": 11918735522040.0, - "grad_norm": 4.58497705751293, - "learning_rate": 2.7507697724633364e-06, - "loss": 1.1334, - "num_input_tokens_seen": 70071855, - "step": 3295 - }, - { - "epoch": 0.3963205675464438, - "flos": 49462035330000.0, - "grad_norm": 0.7799652216567214, - "learning_rate": 2.7500477108457327e-06, - "loss": 0.7961, - "num_input_tokens_seen": 70123585, - "step": 3296 - }, - { - "epoch": 0.3964408104370829, - "flos": 18265365353160.0, - "grad_norm": 4.238773552917182, - "learning_rate": 2.7493255354464877e-06, - "loss": 1.0337, - "num_input_tokens_seen": 70141115, - "step": 3297 - }, - { - "epoch": 0.396561053327722, - 
"flos": 17265955608840.0, - "grad_norm": 4.304291362501008, - "learning_rate": 2.748603246375156e-06, - "loss": 0.9887, - "num_input_tokens_seen": 70158850, - "step": 3298 - }, - { - "epoch": 0.39668129621836107, - "flos": 14567565476880.0, - "grad_norm": 3.6958982878392006, - "learning_rate": 2.7478808437413055e-06, - "loss": 0.9137, - "num_input_tokens_seen": 70177980, - "step": 3299 - }, - { - "epoch": 0.3968015391090002, - "flos": 19260666448440.0, - "grad_norm": 3.5221532270036446, - "learning_rate": 2.7471583276545263e-06, - "loss": 0.884, - "num_input_tokens_seen": 70198360, - "step": 3300 - }, - { - "epoch": 0.3969217819996393, - "flos": 8824942675800.0, - "grad_norm": 3.503514160537031, - "learning_rate": 2.7464356982244224e-06, - "loss": 0.9165, - "num_input_tokens_seen": 70216080, - "step": 3301 - }, - { - "epoch": 0.39704202489027834, - "flos": 47430274229760.0, - "grad_norm": 0.8618185502584875, - "learning_rate": 2.745712955560617e-06, - "loss": 0.8758, - "num_input_tokens_seen": 70272005, - "step": 3302 - }, - { - "epoch": 0.39716226778091746, - "flos": 12023896203600.0, - "grad_norm": 4.125089239831335, - "learning_rate": 2.7449900997727496e-06, - "loss": 0.9955, - "num_input_tokens_seen": 70289835, - "step": 3303 - }, - { - "epoch": 0.39728251067155657, - "flos": 16691082608880.0, - "grad_norm": 6.261196940496725, - "learning_rate": 2.744267130970476e-06, - "loss": 1.0664, - "num_input_tokens_seen": 70309280, - "step": 3304 - }, - { - "epoch": 0.3974027535621956, - "flos": 14698426093800.0, - "grad_norm": 6.922642374665166, - "learning_rate": 2.7435440492634697e-06, - "loss": 0.9874, - "num_input_tokens_seen": 70328325, - "step": 3305 - }, - { - "epoch": 0.39752299645283473, - "flos": 15301237323000.0, - "grad_norm": 5.827711320774327, - "learning_rate": 2.7428208547614228e-06, - "loss": 0.8904, - "num_input_tokens_seen": 70347540, - "step": 3306 - }, - { - "epoch": 0.39764323934347384, - "flos": 13623143005800.0, - "grad_norm": 4.39906125024679, - "learning_rate": 2.742097547574043e-06, - "loss": 0.9911, - "num_input_tokens_seen": 70365485, - "step": 3307 - }, - { - "epoch": 0.3977634822341129, - "flos": 14803157513520.0, - "grad_norm": 4.400405795718411, - "learning_rate": 2.7413741278110544e-06, - "loss": 0.9958, - "num_input_tokens_seen": 70383895, - "step": 3308 - }, - { - "epoch": 0.397883725124752, - "flos": 28411950909240.0, - "grad_norm": 15.688270823473951, - "learning_rate": 2.7406505955822016e-06, - "loss": 0.9013, - "num_input_tokens_seen": 70404640, - "step": 3309 - }, - { - "epoch": 0.39800396801539106, - "flos": 12308220555240.0, - "grad_norm": 5.247242157786428, - "learning_rate": 2.7399269509972415e-06, - "loss": 0.887, - "num_input_tokens_seen": 70418515, - "step": 3310 - }, - { - "epoch": 0.3981242109060302, - "flos": 13618421125560.0, - "grad_norm": 5.330986619281738, - "learning_rate": 2.7392031941659514e-06, - "loss": 1.0635, - "num_input_tokens_seen": 70436080, - "step": 3311 - }, - { - "epoch": 0.3982444537966693, - "flos": 17472106999800.0, - "grad_norm": 4.029793409632361, - "learning_rate": 2.7384793251981244e-06, - "loss": 1.0906, - "num_input_tokens_seen": 70454785, - "step": 3312 - }, - { - "epoch": 0.39836469668730834, - "flos": 18657947203920.0, - "grad_norm": 6.115878416959837, - "learning_rate": 2.737755344203571e-06, - "loss": 1.0277, - "num_input_tokens_seen": 70474455, - "step": 3313 - }, - { - "epoch": 0.39848493957794745, - "flos": 19680021389160.0, - "grad_norm": 6.217186506011952, - "learning_rate": 2.7370312512921186e-06, - 
"loss": 1.0209, - "num_input_tokens_seen": 70495955, - "step": 3314 - }, - { - "epoch": 0.39860518246858656, - "flos": 8614651974240.0, - "grad_norm": 4.126771719791453, - "learning_rate": 2.736307046573611e-06, - "loss": 0.9808, - "num_input_tokens_seen": 70511545, - "step": 3315 - }, - { - "epoch": 0.3987254253592256, - "flos": 15901503642720.0, - "grad_norm": 3.9556893335596635, - "learning_rate": 2.73558273015791e-06, - "loss": 1.0505, - "num_input_tokens_seen": 70531095, - "step": 3316 - }, - { - "epoch": 0.3988456682498647, - "flos": 16717641067920.0, - "grad_norm": 9.223267977250568, - "learning_rate": 2.734858302154894e-06, - "loss": 0.9201, - "num_input_tokens_seen": 70552315, - "step": 3317 - }, - { - "epoch": 0.39896591114050384, - "flos": 13623541606080.0, - "grad_norm": 3.3464423062879725, - "learning_rate": 2.734133762674457e-06, - "loss": 0.9688, - "num_input_tokens_seen": 70571625, - "step": 3318 - }, - { - "epoch": 0.3990861540311429, - "flos": 20229562392480.0, - "grad_norm": 5.974184128990341, - "learning_rate": 2.7334091118265124e-06, - "loss": 0.9267, - "num_input_tokens_seen": 70593240, - "step": 3319 - }, - { - "epoch": 0.399206396921782, - "flos": 44209208169120.0, - "grad_norm": 0.6641890012638509, - "learning_rate": 2.732684349720989e-06, - "loss": 0.8171, - "num_input_tokens_seen": 70660920, - "step": 3320 - }, - { - "epoch": 0.3993266398124211, - "flos": 19995196818240.0, - "grad_norm": 6.55071952303953, - "learning_rate": 2.7319594764678318e-06, - "loss": 0.9691, - "num_input_tokens_seen": 70682740, - "step": 3321 - }, - { - "epoch": 0.39944688270306017, - "flos": 16510385860800.0, - "grad_norm": 2.903305038273394, - "learning_rate": 2.7312344921770044e-06, - "loss": 1.0525, - "num_input_tokens_seen": 70704160, - "step": 3322 - }, - { - "epoch": 0.3995671255936993, - "flos": 13754003622720.0, - "grad_norm": 3.4974770308115946, - "learning_rate": 2.7305093969584857e-06, - "loss": 1.0112, - "num_input_tokens_seen": 70722705, - "step": 3323 - }, - { - "epoch": 0.3996873684843384, - "flos": 16953846335760.0, - "grad_norm": 6.572605163963329, - "learning_rate": 2.729784190922272e-06, - "loss": 1.0191, - "num_input_tokens_seen": 70743860, - "step": 3324 - }, - { - "epoch": 0.39980761137497745, - "flos": 47672335855560.0, - "grad_norm": 0.7636849872995148, - "learning_rate": 2.729058874178378e-06, - "loss": 0.8213, - "num_input_tokens_seen": 70814260, - "step": 3325 - }, - { - "epoch": 0.39992785426561656, - "flos": 20336317475160.0, - "grad_norm": 4.481000847055108, - "learning_rate": 2.7283334468368315e-06, - "loss": 0.9182, - "num_input_tokens_seen": 70835260, - "step": 3326 - }, - { - "epoch": 0.4000480971562556, - "flos": 10634296210200.0, - "grad_norm": 3.1552176722962098, - "learning_rate": 2.72760790900768e-06, - "loss": 0.9476, - "num_input_tokens_seen": 70851565, - "step": 3327 - }, - { - "epoch": 0.4001683400468947, - "flos": 17006288699520.0, - "grad_norm": 10.089853579892617, - "learning_rate": 2.7268822608009875e-06, - "loss": 1.0254, - "num_input_tokens_seen": 70870660, - "step": 3328 - }, - { - "epoch": 0.40028858293753383, - "flos": 17320298989320.0, - "grad_norm": 10.960946200660302, - "learning_rate": 2.726156502326834e-06, - "loss": 0.9955, - "num_input_tokens_seen": 70891680, - "step": 3329 - }, - { - "epoch": 0.4004088258281729, - "flos": 47822181526200.0, - "grad_norm": 0.7215606219571646, - "learning_rate": 2.725430633695316e-06, - "loss": 0.8602, - "num_input_tokens_seen": 70954480, - "step": 3330 - }, - { - "epoch": 0.400529068718812, - 
"flos": 41935023052440.0, - "grad_norm": 0.9398258603899267, - "learning_rate": 2.7247046550165485e-06, - "loss": 0.8533, - "num_input_tokens_seen": 71006325, - "step": 3331 - }, - { - "epoch": 0.4006493116094511, - "flos": 18057220960800.0, - "grad_norm": 2.9497474045019874, - "learning_rate": 2.7239785664006606e-06, - "loss": 0.9867, - "num_input_tokens_seen": 71029585, - "step": 3332 - }, - { - "epoch": 0.40076955450009016, - "flos": 43144631513640.0, - "grad_norm": 0.8383658601948876, - "learning_rate": 2.7232523679578002e-06, - "loss": 0.8974, - "num_input_tokens_seen": 71092385, - "step": 3333 - }, - { - "epoch": 0.4008897973907293, - "flos": 11760948507360.0, - "grad_norm": 5.419095868926946, - "learning_rate": 2.7225260597981295e-06, - "loss": 1.0215, - "num_input_tokens_seen": 71109810, - "step": 3334 - }, - { - "epoch": 0.4010100402813684, - "flos": 10869918908400.0, - "grad_norm": 4.117861726919248, - "learning_rate": 2.721799642031831e-06, - "loss": 1.0038, - "num_input_tokens_seen": 71125700, - "step": 3335 - }, - { - "epoch": 0.40113028317200744, - "flos": 9376292711160.0, - "grad_norm": 6.864238848071861, - "learning_rate": 2.721073114769101e-06, - "loss": 0.9935, - "num_input_tokens_seen": 71143095, - "step": 3336 - }, - { - "epoch": 0.40125052606264655, - "flos": 14672266235040.0, - "grad_norm": 3.503094667410564, - "learning_rate": 2.7203464781201523e-06, - "loss": 0.9782, - "num_input_tokens_seen": 71162130, - "step": 3337 - }, - { - "epoch": 0.40137076895328566, - "flos": 17503509985320.0, - "grad_norm": 29.73119420224163, - "learning_rate": 2.719619732195215e-06, - "loss": 0.9969, - "num_input_tokens_seen": 71183490, - "step": 3338 - }, - { - "epoch": 0.4014910118439247, - "flos": 17215291615560.0, - "grad_norm": 3.7868269470258595, - "learning_rate": 2.7188928771045377e-06, - "loss": 0.9571, - "num_input_tokens_seen": 71204530, - "step": 3339 - }, - { - "epoch": 0.4016112547345638, - "flos": 19025411688960.0, - "grad_norm": 3.935238870158959, - "learning_rate": 2.7181659129583815e-06, - "loss": 1.0288, - "num_input_tokens_seen": 71223840, - "step": 3340 - }, - { - "epoch": 0.4017314976252029, - "flos": 15117811696080.0, - "grad_norm": 4.16248101124141, - "learning_rate": 2.7174388398670276e-06, - "loss": 0.9855, - "num_input_tokens_seen": 71242740, - "step": 3341 - }, - { - "epoch": 0.401851740515842, - "flos": 18133983489720.0, - "grad_norm": 3.0913694836963446, - "learning_rate": 2.716711657940773e-06, - "loss": 1.1396, - "num_input_tokens_seen": 71263470, - "step": 3342 - }, - { - "epoch": 0.4019719834064811, - "flos": 39634089959280.0, - "grad_norm": 0.8358745016441953, - "learning_rate": 2.7159843672899284e-06, - "loss": 0.8315, - "num_input_tokens_seen": 71327390, - "step": 3343 - }, - { - "epoch": 0.40209222629712016, - "flos": 12883308186120.0, - "grad_norm": 6.260750946243483, - "learning_rate": 2.715256968024825e-06, - "loss": 1.0414, - "num_input_tokens_seen": 71344185, - "step": 3344 - }, - { - "epoch": 0.40221246918775927, - "flos": 18476575901520.0, - "grad_norm": 7.395848176609959, - "learning_rate": 2.7145294602558083e-06, - "loss": 1.0545, - "num_input_tokens_seen": 71364615, - "step": 3345 - }, - { - "epoch": 0.4023327120783984, - "flos": 24137867601240.0, - "grad_norm": 3.541840328371979, - "learning_rate": 2.713801844093241e-06, - "loss": 0.9193, - "num_input_tokens_seen": 71385485, - "step": 3346 - }, - { - "epoch": 0.40245295496903744, - "flos": 19150323963240.0, - "grad_norm": 3.98839985132176, - "learning_rate": 2.7130741196475014e-06, - 
"loss": 1.0907, - "num_input_tokens_seen": 71403335, - "step": 3347 - }, - { - "epoch": 0.40257319785967655, - "flos": 26339649678600.0, - "grad_norm": 3.6912858569983675, - "learning_rate": 2.7123462870289848e-06, - "loss": 1.0175, - "num_input_tokens_seen": 71423105, - "step": 3348 - }, - { - "epoch": 0.40269344075031566, - "flos": 17320728251160.0, - "grad_norm": 3.6291861205486566, - "learning_rate": 2.711618346348102e-06, - "loss": 1.0279, - "num_input_tokens_seen": 71443350, - "step": 3349 - }, - { - "epoch": 0.4028136836409547, - "flos": 10159739365320.0, - "grad_norm": 3.12767860055101, - "learning_rate": 2.7108902977152825e-06, - "loss": 0.8596, - "num_input_tokens_seen": 71460970, - "step": 3350 - }, - { - "epoch": 0.4029339265315938, - "flos": 18601365529560.0, - "grad_norm": 5.960168231407541, - "learning_rate": 2.7101621412409704e-06, - "loss": 0.9701, - "num_input_tokens_seen": 71480175, - "step": 3351 - }, - { - "epoch": 0.40305416942223293, - "flos": 16532130454920.0, - "grad_norm": 4.534717785046237, - "learning_rate": 2.7094338770356256e-06, - "loss": 1.0816, - "num_input_tokens_seen": 71498980, - "step": 3352 - }, - { - "epoch": 0.403174412312872, - "flos": 19287378215280.0, - "grad_norm": 4.299789272733839, - "learning_rate": 2.708705505209726e-06, - "loss": 0.8649, - "num_input_tokens_seen": 71519475, - "step": 3353 - }, - { - "epoch": 0.4032946552035111, - "flos": 15453566580000.0, - "grad_norm": 3.3249463586137282, - "learning_rate": 2.7079770258737646e-06, - "loss": 1.1355, - "num_input_tokens_seen": 71537105, - "step": 3354 - }, - { - "epoch": 0.4034148980941502, - "flos": 12282796573920.0, - "grad_norm": 6.5223354116767185, - "learning_rate": 2.707248439138251e-06, - "loss": 0.9675, - "num_input_tokens_seen": 71553060, - "step": 3355 - }, - { - "epoch": 0.40353514098478926, - "flos": 15642327318360.0, - "grad_norm": 3.2814192955303065, - "learning_rate": 2.7065197451137114e-06, - "loss": 0.8756, - "num_input_tokens_seen": 71574160, - "step": 3356 - }, - { - "epoch": 0.4036553838754284, - "flos": 10057184916360.0, - "grad_norm": 5.619664692497544, - "learning_rate": 2.7057909439106894e-06, - "loss": 0.9055, - "num_input_tokens_seen": 71591735, - "step": 3357 - }, - { - "epoch": 0.40377562676606743, - "flos": 17635075818120.0, - "grad_norm": 3.847236602670638, - "learning_rate": 2.7050620356397417e-06, - "loss": 1.0099, - "num_input_tokens_seen": 71610405, - "step": 3358 - }, - { - "epoch": 0.40389586965670654, - "flos": 17110897473000.0, - "grad_norm": 3.152124409093853, - "learning_rate": 2.7043330204114437e-06, - "loss": 0.9411, - "num_input_tokens_seen": 71628835, - "step": 3359 - }, - { - "epoch": 0.40401611254734565, - "flos": 11604694570680.0, - "grad_norm": 5.700809876311401, - "learning_rate": 2.7036038983363862e-06, - "loss": 1.0769, - "num_input_tokens_seen": 71645160, - "step": 3360 - }, - { - "epoch": 0.4041363554379847, - "flos": 17058424447680.0, - "grad_norm": 2.647992302039214, - "learning_rate": 2.702874669525177e-06, - "loss": 1.0698, - "num_input_tokens_seen": 71663360, - "step": 3361 - }, - { - "epoch": 0.4042565983286238, - "flos": 20230359593040.0, - "grad_norm": 5.87348334032474, - "learning_rate": 2.7021453340884394e-06, - "loss": 0.9209, - "num_input_tokens_seen": 71680805, - "step": 3362 - }, - { - "epoch": 0.40437684121926293, - "flos": 12546848086320.0, - "grad_norm": 4.801190053917663, - "learning_rate": 2.7014158921368125e-06, - "loss": 0.9487, - "num_input_tokens_seen": 71698850, - "step": 3363 - }, - { - "epoch": 0.404497084109902, - 
"flos": 17079739779960.0, - "grad_norm": 4.021455680370551, - "learning_rate": 2.700686343780953e-06, - "loss": 1.0851, - "num_input_tokens_seen": 71718440, - "step": 3364 - }, - { - "epoch": 0.4046173270005411, - "flos": 16297764880680.0, - "grad_norm": 3.9019632239864555, - "learning_rate": 2.699956689131532e-06, - "loss": 1.0972, - "num_input_tokens_seen": 71738145, - "step": 3365 - }, - { - "epoch": 0.4047375698911802, - "flos": 14672358219720.0, - "grad_norm": 4.233558716869259, - "learning_rate": 2.699226928299238e-06, - "loss": 1.0646, - "num_input_tokens_seen": 71755885, - "step": 3366 - }, - { - "epoch": 0.40485781278181926, - "flos": 20597548124040.0, - "grad_norm": 3.9396989525354456, - "learning_rate": 2.698497061394774e-06, - "loss": 1.0074, - "num_input_tokens_seen": 71774090, - "step": 3367 - }, - { - "epoch": 0.40497805567245837, - "flos": 16454754694800.0, - "grad_norm": 4.686956552156395, - "learning_rate": 2.6977670885288627e-06, - "loss": 1.0355, - "num_input_tokens_seen": 71795210, - "step": 3368 - }, - { - "epoch": 0.4050982985630975, - "flos": 11525387132280.0, - "grad_norm": 5.059757950010584, - "learning_rate": 2.6970370098122378e-06, - "loss": 0.9785, - "num_input_tokens_seen": 71811915, - "step": 3369 - }, - { - "epoch": 0.40521854145373654, - "flos": 24583903647240.0, - "grad_norm": 2.7984070231504954, - "learning_rate": 2.6963068253556535e-06, - "loss": 1.081, - "num_input_tokens_seen": 71833020, - "step": 3370 - }, - { - "epoch": 0.40533878434437565, - "flos": 18022567849920.0, - "grad_norm": 6.580546823523258, - "learning_rate": 2.6955765352698763e-06, - "loss": 1.0712, - "num_input_tokens_seen": 71852885, - "step": 3371 - }, - { - "epoch": 0.40545902723501476, - "flos": 10607339150880.0, - "grad_norm": 20.30464767094016, - "learning_rate": 2.6948461396656923e-06, - "loss": 0.9491, - "num_input_tokens_seen": 71870015, - "step": 3372 - }, - { - "epoch": 0.4055792701256538, - "flos": 18160143348480.0, - "grad_norm": 5.997161511707832, - "learning_rate": 2.6941156386539013e-06, - "loss": 0.9718, - "num_input_tokens_seen": 71889685, - "step": 3373 - }, - { - "epoch": 0.4056995130162929, - "flos": 13886581287000.0, - "grad_norm": 7.398188714715577, - "learning_rate": 2.6933850323453203e-06, - "loss": 1.0109, - "num_input_tokens_seen": 71907850, - "step": 3374 - }, - { - "epoch": 0.405819755906932, - "flos": 10896109428720.0, - "grad_norm": 8.211387672750996, - "learning_rate": 2.6926543208507806e-06, - "loss": 0.9672, - "num_input_tokens_seen": 71926250, - "step": 3375 - }, - { - "epoch": 0.4059399987975711, - "flos": 15222665762040.0, - "grad_norm": 4.701157877718568, - "learning_rate": 2.6919235042811316e-06, - "loss": 1.0089, - "num_input_tokens_seen": 71944755, - "step": 3376 - }, - { - "epoch": 0.4060602416882102, - "flos": 18185812622280.0, - "grad_norm": 8.080837480558994, - "learning_rate": 2.691192582747237e-06, - "loss": 0.98, - "num_input_tokens_seen": 71964105, - "step": 3377 - }, - { - "epoch": 0.40618048457884925, - "flos": 16898920385640.0, - "grad_norm": 3.3142853729830413, - "learning_rate": 2.6904615563599765e-06, - "loss": 0.9579, - "num_input_tokens_seen": 71983625, - "step": 3378 - }, - { - "epoch": 0.40630072746948837, - "flos": 12495846815880.0, - "grad_norm": 4.492091965696104, - "learning_rate": 2.6897304252302477e-06, - "loss": 1.0525, - "num_input_tokens_seen": 72000665, - "step": 3379 - }, - { - "epoch": 0.4064209703601275, - "flos": 43544817431280.0, - "grad_norm": 0.8127579646311379, - "learning_rate": 2.688999189468962e-06, - "loss": 
0.7999, - "num_input_tokens_seen": 72056815, - "step": 3380 - }, - { - "epoch": 0.40654121325076653, - "flos": 17084339013960.0, - "grad_norm": 3.2079844759900817, - "learning_rate": 2.6882678491870464e-06, - "loss": 0.9698, - "num_input_tokens_seen": 72076970, - "step": 3381 - }, - { - "epoch": 0.40666145614140564, - "flos": 19470251934120.0, - "grad_norm": 3.329400189882503, - "learning_rate": 2.6875364044954453e-06, - "loss": 0.9342, - "num_input_tokens_seen": 72096920, - "step": 3382 - }, - { - "epoch": 0.40678169903204475, - "flos": 18631174113960.0, - "grad_norm": 3.016815583011639, - "learning_rate": 2.6868048555051185e-06, - "loss": 1.0404, - "num_input_tokens_seen": 72118170, - "step": 3383 - }, - { - "epoch": 0.4069019419226838, - "flos": 20389005131400.0, - "grad_norm": 7.794456458480879, - "learning_rate": 2.686073202327041e-06, - "loss": 1.0773, - "num_input_tokens_seen": 72136890, - "step": 3384 - }, - { - "epoch": 0.4070221848133229, - "flos": 17950189924080.0, - "grad_norm": 3.3415667099561848, - "learning_rate": 2.6853414450722043e-06, - "loss": 0.9608, - "num_input_tokens_seen": 72156275, - "step": 3385 - }, - { - "epoch": 0.40714242770396203, - "flos": 13045939727280.0, - "grad_norm": 3.1374047314287496, - "learning_rate": 2.684609583851616e-06, - "loss": 1.0728, - "num_input_tokens_seen": 72174170, - "step": 3386 - }, - { - "epoch": 0.4072626705946011, - "flos": 21543718304040.0, - "grad_norm": 2.6483656611869146, - "learning_rate": 2.683877618776297e-06, - "loss": 1.0304, - "num_input_tokens_seen": 72196145, - "step": 3387 - }, - { - "epoch": 0.4073829134852402, - "flos": 15510485531520.0, - "grad_norm": 7.879833373036286, - "learning_rate": 2.6831455499572876e-06, - "loss": 0.9653, - "num_input_tokens_seen": 72213800, - "step": 3388 - }, - { - "epoch": 0.40750315637587925, - "flos": 17971903856640.0, - "grad_norm": 3.8186638205835544, - "learning_rate": 2.682413377505641e-06, - "loss": 0.9936, - "num_input_tokens_seen": 72232325, - "step": 3389 - }, - { - "epoch": 0.40762339926651836, - "flos": 13984843117560.0, - "grad_norm": 5.3081234126401355, - "learning_rate": 2.6816811015324284e-06, - "loss": 0.9933, - "num_input_tokens_seen": 72250095, - "step": 3390 - }, - { - "epoch": 0.40774364215715747, - "flos": 51171808586760.0, - "grad_norm": 0.7877095019488144, - "learning_rate": 2.6809487221487343e-06, - "loss": 0.8488, - "num_input_tokens_seen": 72309300, - "step": 3391 - }, - { - "epoch": 0.4078638850477965, - "flos": 10659014975640.0, - "grad_norm": 7.179569812096514, - "learning_rate": 2.6802162394656605e-06, - "loss": 1.0396, - "num_input_tokens_seen": 72325730, - "step": 3392 - }, - { - "epoch": 0.40798412793843564, - "flos": 16954060966680.0, - "grad_norm": 3.7193609425031173, - "learning_rate": 2.679483653594324e-06, - "loss": 0.9384, - "num_input_tokens_seen": 72347220, - "step": 3393 - }, - { - "epoch": 0.40810437082907475, - "flos": 14957633079720.0, - "grad_norm": 3.1923043937772126, - "learning_rate": 2.678750964645857e-06, - "loss": 0.9718, - "num_input_tokens_seen": 72366020, - "step": 3394 - }, - { - "epoch": 0.4082246137197138, - "flos": 7954155254520.0, - "grad_norm": 3.824565992478461, - "learning_rate": 2.6780181727314094e-06, - "loss": 1.0685, - "num_input_tokens_seen": 72380645, - "step": 3395 - }, - { - "epoch": 0.4083448566103529, - "flos": 13518442247640.0, - "grad_norm": 3.68230110537746, - "learning_rate": 2.6772852779621435e-06, - "loss": 1.0055, - "num_input_tokens_seen": 72398225, - "step": 3396 - }, - { - "epoch": 0.408465099500992, - 
"flos": 16743647618880.0, - "grad_norm": 3.132370763062706, - "learning_rate": 2.676552280449239e-06, - "loss": 1.0706, - "num_input_tokens_seen": 72417830, - "step": 3397 - }, - { - "epoch": 0.4085853423916311, - "flos": 8981656535880.0, - "grad_norm": 5.028024528073349, - "learning_rate": 2.6758191803038917e-06, - "loss": 0.9613, - "num_input_tokens_seen": 72436045, - "step": 3398 - }, - { - "epoch": 0.4087055852822702, - "flos": 17320636266480.0, - "grad_norm": 7.039303926821879, - "learning_rate": 2.6750859776373125e-06, - "loss": 1.0521, - "num_input_tokens_seen": 72455220, - "step": 3399 - }, - { - "epoch": 0.4088258281729093, - "flos": 47535680203800.0, - "grad_norm": 0.7731395494330189, - "learning_rate": 2.674352672560727e-06, - "loss": 0.8436, - "num_input_tokens_seen": 72516385, - "step": 3400 - }, - { - "epoch": 0.40894607106354836, - "flos": 14515061790000.0, - "grad_norm": 3.1647963438153726, - "learning_rate": 2.673619265185377e-06, - "loss": 0.993, - "num_input_tokens_seen": 72535945, - "step": 3401 - }, - { - "epoch": 0.40906631395418747, - "flos": 19261555633680.0, - "grad_norm": 2.603363788180753, - "learning_rate": 2.672885755622521e-06, - "loss": 1.0003, - "num_input_tokens_seen": 72558080, - "step": 3402 - }, - { - "epoch": 0.4091865568448266, - "flos": 18133523566320.0, - "grad_norm": 6.033506297517845, - "learning_rate": 2.67215214398343e-06, - "loss": 0.9183, - "num_input_tokens_seen": 72577815, - "step": 3403 - }, - { - "epoch": 0.40930679973546563, - "flos": 20414337128040.0, - "grad_norm": 8.116352350414207, - "learning_rate": 2.671418430379393e-06, - "loss": 0.992, - "num_input_tokens_seen": 72596220, - "step": 3404 - }, - { - "epoch": 0.40942704262610474, - "flos": 14829041418240.0, - "grad_norm": 2.5007528125274625, - "learning_rate": 2.670684614921715e-06, - "loss": 1.0395, - "num_input_tokens_seen": 72614915, - "step": 3405 - }, - { - "epoch": 0.4095472855167438, - "flos": 15355243426320.0, - "grad_norm": 3.2957052894794066, - "learning_rate": 2.6699506977217128e-06, - "loss": 0.9095, - "num_input_tokens_seen": 72634810, - "step": 3406 - }, - { - "epoch": 0.4096675284073829, - "flos": 19888165781520.0, - "grad_norm": 2.8210012170896506, - "learning_rate": 2.6692166788907233e-06, - "loss": 0.915, - "num_input_tokens_seen": 72654725, - "step": 3407 - }, - { - "epoch": 0.409787771298022, - "flos": 13621701912480.0, - "grad_norm": 3.624885820366396, - "learning_rate": 2.6684825585400957e-06, - "loss": 0.9883, - "num_input_tokens_seen": 72673390, - "step": 3408 - }, - { - "epoch": 0.4099080141886611, - "flos": 42419054319360.0, - "grad_norm": 0.9352480863191125, - "learning_rate": 2.6677483367811947e-06, - "loss": 0.9502, - "num_input_tokens_seen": 72733150, - "step": 3409 - }, - { - "epoch": 0.4100282570793002, - "flos": 15562008048480.0, - "grad_norm": 8.672330277322517, - "learning_rate": 2.6670140137254028e-06, - "loss": 0.9819, - "num_input_tokens_seen": 72752345, - "step": 3410 - }, - { - "epoch": 0.4101484999699393, - "flos": 13151192393520.0, - "grad_norm": 4.1157599332177845, - "learning_rate": 2.666279589484115e-06, - "loss": 1.0922, - "num_input_tokens_seen": 72769965, - "step": 3411 - }, - { - "epoch": 0.41026874286057835, - "flos": 13540800072960.0, - "grad_norm": 3.4051401272479005, - "learning_rate": 2.6655450641687435e-06, - "loss": 1.0174, - "num_input_tokens_seen": 72787250, - "step": 3412 - }, - { - "epoch": 0.41038898575121746, - "flos": 22247888181360.0, - "grad_norm": 3.1577318697473764, - "learning_rate": 2.664810437890715e-06, - "loss": 
0.9165, - "num_input_tokens_seen": 72808640, - "step": 3413 - }, - { - "epoch": 0.41050922864185657, - "flos": 10135817800440.0, - "grad_norm": 5.174504006615275, - "learning_rate": 2.6640757107614714e-06, - "loss": 1.0209, - "num_input_tokens_seen": 72826455, - "step": 3414 - }, - { - "epoch": 0.4106294715324956, - "flos": 22066148940240.0, - "grad_norm": 3.3958901866686015, - "learning_rate": 2.6633408828924697e-06, - "loss": 0.9305, - "num_input_tokens_seen": 72845040, - "step": 3415 - }, - { - "epoch": 0.41074971442313474, - "flos": 17394853885920.0, - "grad_norm": 3.8198052129963873, - "learning_rate": 2.662605954395185e-06, - "loss": 0.9257, - "num_input_tokens_seen": 72864720, - "step": 3416 - }, - { - "epoch": 0.41086995731377385, - "flos": 15300992030520.0, - "grad_norm": 14.959906336853244, - "learning_rate": 2.6618709253811027e-06, - "loss": 1.0678, - "num_input_tokens_seen": 72883895, - "step": 3417 - }, - { - "epoch": 0.4109902002044129, - "flos": 14696985000480.0, - "grad_norm": 2.7883236544893886, - "learning_rate": 2.6611357959617277e-06, - "loss": 1.1078, - "num_input_tokens_seen": 72903235, - "step": 3418 - }, - { - "epoch": 0.411110443095052, - "flos": 12883216201440.0, - "grad_norm": 4.063461424696929, - "learning_rate": 2.660400566248578e-06, - "loss": 1.1257, - "num_input_tokens_seen": 72921080, - "step": 3419 - }, - { - "epoch": 0.41123068598569107, - "flos": 10293543492000.0, - "grad_norm": 5.363830493828857, - "learning_rate": 2.6596652363531876e-06, - "loss": 0.8955, - "num_input_tokens_seen": 72936675, - "step": 3420 - }, - { - "epoch": 0.4113509288763302, - "flos": 15038657565480.0, - "grad_norm": 5.208352518682113, - "learning_rate": 2.6589298063871055e-06, - "loss": 1.0111, - "num_input_tokens_seen": 72956570, - "step": 3421 - }, - { - "epoch": 0.4114711717669693, - "flos": 13072590171000.0, - "grad_norm": 5.136714851174851, - "learning_rate": 2.658194276461895e-06, - "loss": 0.9234, - "num_input_tokens_seen": 72974215, - "step": 3422 - }, - { - "epoch": 0.41159141465760835, - "flos": 19390392587640.0, - "grad_norm": 4.802854110633825, - "learning_rate": 2.6574586466891368e-06, - "loss": 0.8974, - "num_input_tokens_seen": 72994410, - "step": 3423 - }, - { - "epoch": 0.41171165754824746, - "flos": 14196513589320.0, - "grad_norm": 3.401524196508976, - "learning_rate": 2.6567229171804247e-06, - "loss": 0.8605, - "num_input_tokens_seen": 73012015, - "step": 3424 - }, - { - "epoch": 0.41183190043888657, - "flos": 12757874665320.0, - "grad_norm": 4.502826862406284, - "learning_rate": 2.655987088047368e-06, - "loss": 1.0878, - "num_input_tokens_seen": 73030080, - "step": 3425 - }, - { - "epoch": 0.4119521433295256, - "flos": 19340464471800.0, - "grad_norm": 4.507473990756214, - "learning_rate": 2.6552511594015912e-06, - "loss": 1.004, - "num_input_tokens_seen": 73050190, - "step": 3426 - }, - { - "epoch": 0.41207238622016473, - "flos": 10686431958360.0, - "grad_norm": 26.29135855500755, - "learning_rate": 2.654515131354735e-06, - "loss": 1.0861, - "num_input_tokens_seen": 73068175, - "step": 3427 - }, - { - "epoch": 0.41219262911080384, - "flos": 19260911740920.0, - "grad_norm": 3.2118180081696, - "learning_rate": 2.653779004018453e-06, - "loss": 1.0805, - "num_input_tokens_seen": 73088460, - "step": 3428 - }, - { - "epoch": 0.4123128720014429, - "flos": 17560827537120.0, - "grad_norm": 3.475495935632236, - "learning_rate": 2.653042777504417e-06, - "loss": 1.0445, - "num_input_tokens_seen": 73110770, - "step": 3429 - }, - { - "epoch": 0.412433114892082, - "flos": 
18679415844000.0, - "grad_norm": 4.355875996936046, - "learning_rate": 2.6523064519243105e-06, - "loss": 1.0276, - "num_input_tokens_seen": 73130060, - "step": 3430 - }, - { - "epoch": 0.4125533577827211, - "flos": 15170284721400.0, - "grad_norm": 6.952835865652736, - "learning_rate": 2.6515700273898333e-06, - "loss": 1.004, - "num_input_tokens_seen": 73147655, - "step": 3431 - }, - { - "epoch": 0.4126736006733602, - "flos": 18552755860800.0, - "grad_norm": 3.9368905700869874, - "learning_rate": 2.6508335040127018e-06, - "loss": 0.9101, - "num_input_tokens_seen": 73167070, - "step": 3432 - }, - { - "epoch": 0.4127938435639993, - "flos": 18264261537000.0, - "grad_norm": 2.2714075418271675, - "learning_rate": 2.6500968819046446e-06, - "loss": 0.9945, - "num_input_tokens_seen": 73187090, - "step": 3433 - }, - { - "epoch": 0.4129140864546384, - "flos": 12750699860280.0, - "grad_norm": 3.9957608170968255, - "learning_rate": 2.649360161177408e-06, - "loss": 0.8074, - "num_input_tokens_seen": 73201870, - "step": 3434 - }, - { - "epoch": 0.41303432934527745, - "flos": 16874937497640.0, - "grad_norm": 3.922921455260177, - "learning_rate": 2.6486233419427504e-06, - "loss": 0.9508, - "num_input_tokens_seen": 73221405, - "step": 3435 - }, - { - "epoch": 0.41315457223591656, - "flos": 14016215441520.0, - "grad_norm": 22.62940759035961, - "learning_rate": 2.6478864243124484e-06, - "loss": 0.972, - "num_input_tokens_seen": 73240790, - "step": 3436 - }, - { - "epoch": 0.4132748151265556, - "flos": 14855722523520.0, - "grad_norm": 4.397118595969579, - "learning_rate": 2.6471494083982903e-06, - "loss": 1.0786, - "num_input_tokens_seen": 73259895, - "step": 3437 - }, - { - "epoch": 0.4133950580171947, - "flos": 22983246413280.0, - "grad_norm": 9.188307123348062, - "learning_rate": 2.6464122943120818e-06, - "loss": 0.9787, - "num_input_tokens_seen": 73279840, - "step": 3438 - }, - { - "epoch": 0.41351530090783384, - "flos": 16558075682760.0, - "grad_norm": 7.567608581711669, - "learning_rate": 2.645675082165642e-06, - "loss": 1.0458, - "num_input_tokens_seen": 73295770, - "step": 3439 - }, - { - "epoch": 0.4136355437984729, - "flos": 18212156450400.0, - "grad_norm": 6.450927064496628, - "learning_rate": 2.644937772070806e-06, - "loss": 0.9868, - "num_input_tokens_seen": 73313935, - "step": 3440 - }, - { - "epoch": 0.413755786689112, - "flos": 14068964420880.0, - "grad_norm": 10.505007321892156, - "learning_rate": 2.6442003641394225e-06, - "loss": 1.0554, - "num_input_tokens_seen": 73331250, - "step": 3441 - }, - { - "epoch": 0.4138760295797511, - "flos": 19129805831520.0, - "grad_norm": 2.2387467943740464, - "learning_rate": 2.643462858483356e-06, - "loss": 1.0604, - "num_input_tokens_seen": 73351255, - "step": 3442 - }, - { - "epoch": 0.41399627247039017, - "flos": 11604387955080.0, - "grad_norm": 5.363313790130255, - "learning_rate": 2.6427252552144856e-06, - "loss": 0.9662, - "num_input_tokens_seen": 73369625, - "step": 3443 - }, - { - "epoch": 0.4141165153610293, - "flos": 16298071496280.0, - "grad_norm": 5.133964699564748, - "learning_rate": 2.6419875544447044e-06, - "loss": 0.9742, - "num_input_tokens_seen": 73390745, - "step": 3444 - }, - { - "epoch": 0.4142367582516684, - "flos": 17923999403760.0, - "grad_norm": 7.916650979545176, - "learning_rate": 2.6412497562859218e-06, - "loss": 0.9399, - "num_input_tokens_seen": 73411745, - "step": 3445 - }, - { - "epoch": 0.41435700114230745, - "flos": 15407195205120.0, - "grad_norm": 4.517924953923231, - "learning_rate": 2.6405118608500617e-06, - "loss": 
0.9881, - "num_input_tokens_seen": 73430290, - "step": 3446 - }, - { - "epoch": 0.41447724403294656, - "flos": 18500804082000.0, - "grad_norm": 5.339899114448721, - "learning_rate": 2.6397738682490613e-06, - "loss": 1.0254, - "num_input_tokens_seen": 73450910, - "step": 3447 - }, - { - "epoch": 0.41459748692358567, - "flos": 12941392276920.0, - "grad_norm": 4.604218058349648, - "learning_rate": 2.6390357785948734e-06, - "loss": 0.9816, - "num_input_tokens_seen": 73467745, - "step": 3448 - }, - { - "epoch": 0.4147177298142247, - "flos": 17187660001920.0, - "grad_norm": 3.36435773504548, - "learning_rate": 2.6382975919994667e-06, - "loss": 1.0186, - "num_input_tokens_seen": 73488040, - "step": 3449 - }, - { - "epoch": 0.41483797270486383, - "flos": 14253064602120.0, - "grad_norm": 4.009004471412652, - "learning_rate": 2.637559308574822e-06, - "loss": 0.9482, - "num_input_tokens_seen": 73507505, - "step": 3450 - }, - { - "epoch": 0.4149582155955029, - "flos": 21437177852280.0, - "grad_norm": 4.662809117274236, - "learning_rate": 2.6368209284329376e-06, - "loss": 0.9468, - "num_input_tokens_seen": 73527855, - "step": 3451 - }, - { - "epoch": 0.415078458486142, - "flos": 11866845066360.0, - "grad_norm": 4.436455187229541, - "learning_rate": 2.636082451685825e-06, - "loss": 0.9714, - "num_input_tokens_seen": 73545775, - "step": 3452 - }, - { - "epoch": 0.4151987013767811, - "flos": 18528772972800.0, - "grad_norm": 6.781901104164667, - "learning_rate": 2.6353438784455094e-06, - "loss": 1.0817, - "num_input_tokens_seen": 73568780, - "step": 3453 - }, - { - "epoch": 0.41531894426742016, - "flos": 17503785939360.0, - "grad_norm": 4.436975152582971, - "learning_rate": 2.6346052088240326e-06, - "loss": 0.9336, - "num_input_tokens_seen": 73588020, - "step": 3454 - }, - { - "epoch": 0.4154391871580593, - "flos": 10581301938360.0, - "grad_norm": 6.956460422784675, - "learning_rate": 2.63386644293345e-06, - "loss": 0.9974, - "num_input_tokens_seen": 73604085, - "step": 3455 - }, - { - "epoch": 0.4155594300486984, - "flos": 10345096670520.0, - "grad_norm": 4.118402927808455, - "learning_rate": 2.633127580885833e-06, - "loss": 1.0579, - "num_input_tokens_seen": 73618305, - "step": 3456 - }, - { - "epoch": 0.41567967293933744, - "flos": 21017638942200.0, - "grad_norm": 20.273282453646043, - "learning_rate": 2.632388622793265e-06, - "loss": 0.8622, - "num_input_tokens_seen": 73637180, - "step": 3457 - }, - { - "epoch": 0.41579991582997655, - "flos": 13643906430000.0, - "grad_norm": 8.554821274638975, - "learning_rate": 2.6316495687678457e-06, - "loss": 0.9143, - "num_input_tokens_seen": 73655550, - "step": 3458 - }, - { - "epoch": 0.41592015872061566, - "flos": 17398901211840.0, - "grad_norm": 4.5659187781135655, - "learning_rate": 2.6309104189216887e-06, - "loss": 0.9775, - "num_input_tokens_seen": 73672835, - "step": 3459 - }, - { - "epoch": 0.4160404016112547, - "flos": 14749366041120.0, - "grad_norm": 3.355648511128187, - "learning_rate": 2.630171173366923e-06, - "loss": 0.9651, - "num_input_tokens_seen": 73688355, - "step": 3460 - }, - { - "epoch": 0.41616064450189383, - "flos": 9819814509240.0, - "grad_norm": 90.60360199965838, - "learning_rate": 2.629431832215691e-06, - "loss": 0.9652, - "num_input_tokens_seen": 73702880, - "step": 3461 - }, - { - "epoch": 0.41628088739253294, - "flos": 14199886360920.0, - "grad_norm": 4.281098968737695, - "learning_rate": 2.628692395580151e-06, - "loss": 1.0843, - "num_input_tokens_seen": 73722690, - "step": 3462 - }, - { - "epoch": 0.416401130283172, - "flos": 
20781556320600.0, - "grad_norm": 2.435813711763592, - "learning_rate": 2.6279528635724747e-06, - "loss": 1.0161, - "num_input_tokens_seen": 73742565, - "step": 3463 - }, - { - "epoch": 0.4165213731738111, - "flos": 11493953485200.0, - "grad_norm": 3.991631456085767, - "learning_rate": 2.627213236304848e-06, - "loss": 1.009, - "num_input_tokens_seen": 73759085, - "step": 3464 - }, - { - "epoch": 0.4166416160644502, - "flos": 24084995975640.0, - "grad_norm": 3.6481972723542095, - "learning_rate": 2.626473513889472e-06, - "loss": 0.9309, - "num_input_tokens_seen": 73781185, - "step": 3465 - }, - { - "epoch": 0.41676185895508927, - "flos": 14850755350800.0, - "grad_norm": 4.936123040296434, - "learning_rate": 2.625733696438562e-06, - "loss": 1.0474, - "num_input_tokens_seen": 73798410, - "step": 3466 - }, - { - "epoch": 0.4168821018457284, - "flos": 13046706266280.0, - "grad_norm": 5.951585305216054, - "learning_rate": 2.6249937840643476e-06, - "loss": 0.9787, - "num_input_tokens_seen": 73816435, - "step": 3467 - }, - { - "epoch": 0.41700234473636744, - "flos": 13256812998480.0, - "grad_norm": 4.870935946847486, - "learning_rate": 2.6242537768790733e-06, - "loss": 0.8958, - "num_input_tokens_seen": 73835310, - "step": 3468 - }, - { - "epoch": 0.41712258762700655, - "flos": 22122025398720.0, - "grad_norm": 6.0296518056366075, - "learning_rate": 2.6235136749949975e-06, - "loss": 0.9162, - "num_input_tokens_seen": 73858480, - "step": 3469 - }, - { - "epoch": 0.41724283051764566, - "flos": 25629224843040.0, - "grad_norm": 3.556298999415289, - "learning_rate": 2.6227734785243924e-06, - "loss": 0.8324, - "num_input_tokens_seen": 73878160, - "step": 3470 - }, - { - "epoch": 0.4173630734082847, - "flos": 18024744820680.0, - "grad_norm": 7.083462014618899, - "learning_rate": 2.6220331875795466e-06, - "loss": 1.0182, - "num_input_tokens_seen": 73897230, - "step": 3471 - }, - { - "epoch": 0.4174833162989238, - "flos": 18996461628240.0, - "grad_norm": 4.261402765267798, - "learning_rate": 2.62129280227276e-06, - "loss": 0.9835, - "num_input_tokens_seen": 73916950, - "step": 3472 - }, - { - "epoch": 0.41760355918956293, - "flos": 53715973993080.0, - "grad_norm": 8.745873486367772, - "learning_rate": 2.62055232271635e-06, - "loss": 0.8931, - "num_input_tokens_seen": 73943855, - "step": 3473 - }, - { - "epoch": 0.417723802080202, - "flos": 10108002217440.0, - "grad_norm": 7.592703036751594, - "learning_rate": 2.619811749022646e-06, - "loss": 1.1127, - "num_input_tokens_seen": 73958885, - "step": 3474 - }, - { - "epoch": 0.4178440449708411, - "flos": 10342214483880.0, - "grad_norm": 7.582494612483404, - "learning_rate": 2.6190710813039917e-06, - "loss": 0.9357, - "num_input_tokens_seen": 73971730, - "step": 3475 - }, - { - "epoch": 0.4179642878614802, - "flos": 15275445402960.0, - "grad_norm": 5.584358728488641, - "learning_rate": 2.618330319672747e-06, - "loss": 1.0388, - "num_input_tokens_seen": 73990870, - "step": 3476 - }, - { - "epoch": 0.41808453075211927, - "flos": 13072406201640.0, - "grad_norm": 4.11517551200783, - "learning_rate": 2.617589464241284e-06, - "loss": 1.1475, - "num_input_tokens_seen": 74004990, - "step": 3477 - }, - { - "epoch": 0.4182047736427584, - "flos": 14408214722640.0, - "grad_norm": 5.132798750813967, - "learning_rate": 2.6168485151219914e-06, - "loss": 0.9604, - "num_input_tokens_seen": 74024330, - "step": 3478 - }, - { - "epoch": 0.4183250165333975, - "flos": 13384331505360.0, - "grad_norm": 11.73959344267163, - "learning_rate": 2.616107472427269e-06, - "loss": 0.9374, - 
"num_input_tokens_seen": 74038745, - "step": 3479 - }, - { - "epoch": 0.41844525942403654, - "flos": 12566752986840.0, - "grad_norm": 4.549166491950407, - "learning_rate": 2.615366336269533e-06, - "loss": 0.9859, - "num_input_tokens_seen": 74052130, - "step": 3480 - }, - { - "epoch": 0.41856550231467565, - "flos": 13013432925600.0, - "grad_norm": 5.047776652477879, - "learning_rate": 2.6146251067612126e-06, - "loss": 1.0122, - "num_input_tokens_seen": 74067325, - "step": 3481 - }, - { - "epoch": 0.41868574520531476, - "flos": 16191071121120.0, - "grad_norm": 3.148086717625398, - "learning_rate": 2.6138837840147525e-06, - "loss": 1.044, - "num_input_tokens_seen": 74086080, - "step": 3482 - }, - { - "epoch": 0.4188059880959538, - "flos": 9663867188160.0, - "grad_norm": 3.772297752222231, - "learning_rate": 2.6131423681426103e-06, - "loss": 0.9914, - "num_input_tokens_seen": 74101715, - "step": 3483 - }, - { - "epoch": 0.41892623098659293, - "flos": 26998092073800.0, - "grad_norm": 5.0632901928997365, - "learning_rate": 2.6124008592572587e-06, - "loss": 0.9549, - "num_input_tokens_seen": 74125420, - "step": 3484 - }, - { - "epoch": 0.419046473877232, - "flos": 16534307425680.0, - "grad_norm": 8.020268513662744, - "learning_rate": 2.6116592574711835e-06, - "loss": 1.0243, - "num_input_tokens_seen": 74143440, - "step": 3485 - }, - { - "epoch": 0.4191667167678711, - "flos": 14724739260360.0, - "grad_norm": 3.8111207061686336, - "learning_rate": 2.6109175628968853e-06, - "loss": 1.057, - "num_input_tokens_seen": 74162925, - "step": 3486 - }, - { - "epoch": 0.4192869596585102, - "flos": 16769592846720.0, - "grad_norm": 3.994130716101706, - "learning_rate": 2.610175775646878e-06, - "loss": 1.0581, - "num_input_tokens_seen": 74181225, - "step": 3487 - }, - { - "epoch": 0.41940720254914926, - "flos": 17843772118560.0, - "grad_norm": 10.976204189547703, - "learning_rate": 2.6094338958336907e-06, - "loss": 0.957, - "num_input_tokens_seen": 74199615, - "step": 3488 - }, - { - "epoch": 0.41952744543978837, - "flos": 10996210952880.0, - "grad_norm": 5.823977388002704, - "learning_rate": 2.608691923569867e-06, - "loss": 1.0412, - "num_input_tokens_seen": 74216210, - "step": 3489 - }, - { - "epoch": 0.4196476883304275, - "flos": 17530252413720.0, - "grad_norm": 3.031110465387454, - "learning_rate": 2.6079498589679616e-06, - "loss": 0.9877, - "num_input_tokens_seen": 74237020, - "step": 3490 - }, - { - "epoch": 0.41976793122106654, - "flos": 17448185434920.0, - "grad_norm": 4.937424478074027, - "learning_rate": 2.6072077021405465e-06, - "loss": 0.9781, - "num_input_tokens_seen": 74255575, - "step": 3491 - }, - { - "epoch": 0.41988817411170565, - "flos": 15036357948480.0, - "grad_norm": 5.178570150741645, - "learning_rate": 2.6064654532002054e-06, - "loss": 0.9229, - "num_input_tokens_seen": 74274305, - "step": 3492 - }, - { - "epoch": 0.42000841700234476, - "flos": 22564259411280.0, - "grad_norm": 5.830812218230628, - "learning_rate": 2.6057231122595375e-06, - "loss": 0.9814, - "num_input_tokens_seen": 74295335, - "step": 3493 - }, - { - "epoch": 0.4201286598929838, - "flos": 15112261953720.0, - "grad_norm": 4.059792188390317, - "learning_rate": 2.604980679431154e-06, - "loss": 0.9603, - "num_input_tokens_seen": 74313295, - "step": 3494 - }, - { - "epoch": 0.4202489027836229, - "flos": 13146869113560.0, - "grad_norm": 4.58961103133428, - "learning_rate": 2.604238154827684e-06, - "loss": 0.9786, - "num_input_tokens_seen": 74329640, - "step": 3495 - }, - { - "epoch": 0.42036914567426203, - "flos": 
13701714566760.0, - "grad_norm": 4.099871535730975, - "learning_rate": 2.6034955385617656e-06, - "loss": 0.9404, - "num_input_tokens_seen": 74347690, - "step": 3496 - }, - { - "epoch": 0.4204893885649011, - "flos": 48580756107120.0, - "grad_norm": 0.7635121288198047, - "learning_rate": 2.6027528307460544e-06, - "loss": 0.8919, - "num_input_tokens_seen": 74411415, - "step": 3497 - }, - { - "epoch": 0.4206096314555402, - "flos": 15564001049880.0, - "grad_norm": 3.8713038875387773, - "learning_rate": 2.602010031493217e-06, - "loss": 1.081, - "num_input_tokens_seen": 74429365, - "step": 3498 - }, - { - "epoch": 0.42072987434617926, - "flos": 20860158543120.0, - "grad_norm": 4.28978552222138, - "learning_rate": 2.6012671409159367e-06, - "loss": 1.0866, - "num_input_tokens_seen": 74450420, - "step": 3499 - }, - { - "epoch": 0.42085011723681837, - "flos": 19654720054080.0, - "grad_norm": 8.642084864101939, - "learning_rate": 2.6005241591269097e-06, - "loss": 1.0534, - "num_input_tokens_seen": 74469510, - "step": 3500 - }, - { - "epoch": 0.4209703601274575, - "flos": 19811188621680.0, - "grad_norm": 3.8732718028720035, - "learning_rate": 2.5997810862388454e-06, - "loss": 1.0259, - "num_input_tokens_seen": 74489070, - "step": 3501 - }, - { - "epoch": 0.42109060301809653, - "flos": 19600376673600.0, - "grad_norm": 4.022379497195411, - "learning_rate": 2.599037922364467e-06, - "loss": 0.9839, - "num_input_tokens_seen": 74507690, - "step": 3502 - }, - { - "epoch": 0.42121084590873564, - "flos": 20886042447840.0, - "grad_norm": 9.896530822801502, - "learning_rate": 2.5982946676165112e-06, - "loss": 0.9895, - "num_input_tokens_seen": 74527180, - "step": 3503 - }, - { - "epoch": 0.42133108879937475, - "flos": 48260736151560.0, - "grad_norm": 0.7623068471062359, - "learning_rate": 2.5975513221077313e-06, - "loss": 0.8297, - "num_input_tokens_seen": 74590870, - "step": 3504 - }, - { - "epoch": 0.4214513316900138, - "flos": 16423535678640.0, - "grad_norm": 5.621624590824358, - "learning_rate": 2.5968078859508897e-06, - "loss": 1.1091, - "num_input_tokens_seen": 74607790, - "step": 3505 - }, - { - "epoch": 0.4215715745806529, - "flos": 10839711723720.0, - "grad_norm": 3.8259350799301135, - "learning_rate": 2.5960643592587673e-06, - "loss": 1.0139, - "num_input_tokens_seen": 74624920, - "step": 3506 - }, - { - "epoch": 0.42169181747129203, - "flos": 15720960202440.0, - "grad_norm": 4.309715145814341, - "learning_rate": 2.5953207421441553e-06, - "loss": 1.0475, - "num_input_tokens_seen": 74643240, - "step": 3507 - }, - { - "epoch": 0.4218120603619311, - "flos": 16081801790520.0, - "grad_norm": 7.9886871458615225, - "learning_rate": 2.5945770347198603e-06, - "loss": 0.9708, - "num_input_tokens_seen": 74661115, - "step": 3508 - }, - { - "epoch": 0.4219323032525702, - "flos": 13964202339600.0, - "grad_norm": 3.5050521898941587, - "learning_rate": 2.593833237098701e-06, - "loss": 1.0576, - "num_input_tokens_seen": 74678435, - "step": 3509 - }, - { - "epoch": 0.4220525461432093, - "flos": 21516056028840.0, - "grad_norm": 4.502115600037235, - "learning_rate": 2.593089349393512e-06, - "loss": 0.8533, - "num_input_tokens_seen": 74698645, - "step": 3510 - }, - { - "epoch": 0.42217278903384836, - "flos": 17293863176520.0, - "grad_norm": 4.890876154194058, - "learning_rate": 2.592345371717141e-06, - "loss": 1.0598, - "num_input_tokens_seen": 74717895, - "step": 3511 - }, - { - "epoch": 0.42229303192448747, - "flos": 12102590410800.0, - "grad_norm": 4.6105467260707, - "learning_rate": 2.591601304182448e-06, - "loss": 
0.9253, - "num_input_tokens_seen": 74735585, - "step": 3512 - }, - { - "epoch": 0.4224132748151266, - "flos": 16193401399680.0, - "grad_norm": 3.0061560094657547, - "learning_rate": 2.5908571469023067e-06, - "loss": 1.0149, - "num_input_tokens_seen": 74754790, - "step": 3513 - }, - { - "epoch": 0.42253351770576564, - "flos": 12624377154240.0, - "grad_norm": 5.568813898167353, - "learning_rate": 2.5901128999896067e-06, - "loss": 0.9778, - "num_input_tokens_seen": 74769940, - "step": 3514 - }, - { - "epoch": 0.42265376059640475, - "flos": 20309851000800.0, - "grad_norm": 3.6230152879350825, - "learning_rate": 2.5893685635572487e-06, - "loss": 0.9119, - "num_input_tokens_seen": 74790510, - "step": 3515 - }, - { - "epoch": 0.4227740034870438, - "flos": 11499533889120.0, - "grad_norm": 3.9771598562102812, - "learning_rate": 2.5886241377181483e-06, - "loss": 0.9132, - "num_input_tokens_seen": 74809100, - "step": 3516 - }, - { - "epoch": 0.4228942463776829, - "flos": 17997266514840.0, - "grad_norm": 5.499794002489931, - "learning_rate": 2.587879622585234e-06, - "loss": 1.0282, - "num_input_tokens_seen": 74827420, - "step": 3517 - }, - { - "epoch": 0.423014489268322, - "flos": 18788623851480.0, - "grad_norm": 6.07727013389017, - "learning_rate": 2.5871350182714486e-06, - "loss": 0.9777, - "num_input_tokens_seen": 74848020, - "step": 3518 - }, - { - "epoch": 0.4231347321589611, - "flos": 12232929781200.0, - "grad_norm": 3.422876080957121, - "learning_rate": 2.586390324889748e-06, - "loss": 1.0223, - "num_input_tokens_seen": 74863640, - "step": 3519 - }, - { - "epoch": 0.4232549750496002, - "flos": 16347478365600.0, - "grad_norm": 3.0008203656369186, - "learning_rate": 2.5856455425531003e-06, - "loss": 0.8896, - "num_input_tokens_seen": 74884835, - "step": 3520 - }, - { - "epoch": 0.4233752179402393, - "flos": 15088861635360.0, - "grad_norm": 4.846626264534051, - "learning_rate": 2.5849006713744902e-06, - "loss": 1.0353, - "num_input_tokens_seen": 74903350, - "step": 3521 - }, - { - "epoch": 0.42349546083087836, - "flos": 14698303447560.0, - "grad_norm": 5.443122500003911, - "learning_rate": 2.5841557114669135e-06, - "loss": 0.9525, - "num_input_tokens_seen": 74919930, - "step": 3522 - }, - { - "epoch": 0.42361570372151747, - "flos": 13174838004360.0, - "grad_norm": 8.93879999696504, - "learning_rate": 2.58341066294338e-06, - "loss": 0.8844, - "num_input_tokens_seen": 74936315, - "step": 3523 - }, - { - "epoch": 0.4237359466121566, - "flos": 14881667751360.0, - "grad_norm": 5.041043624002057, - "learning_rate": 2.5826655259169124e-06, - "loss": 1.0772, - "num_input_tokens_seen": 74954690, - "step": 3524 - }, - { - "epoch": 0.42385618950279563, - "flos": 12781857553320.0, - "grad_norm": 3.614608735243543, - "learning_rate": 2.5819203005005475e-06, - "loss": 1.1216, - "num_input_tokens_seen": 74971745, - "step": 3525 - }, - { - "epoch": 0.42397643239343474, - "flos": 16900944048600.0, - "grad_norm": 3.6061516442596373, - "learning_rate": 2.581174986807336e-06, - "loss": 1.0133, - "num_input_tokens_seen": 74991700, - "step": 3526 - }, - { - "epoch": 0.42409667528407385, - "flos": 11709180697920.0, - "grad_norm": 4.626570563806929, - "learning_rate": 2.580429584950341e-06, - "loss": 1.1382, - "num_input_tokens_seen": 75007170, - "step": 3527 - }, - { - "epoch": 0.4242169181747129, - "flos": 11341992166920.0, - "grad_norm": 9.346109047806799, - "learning_rate": 2.5796840950426397e-06, - "loss": 0.8731, - "num_input_tokens_seen": 75023975, - "step": 3528 - }, - { - "epoch": 0.424337161065352, - "flos": 
14252512694040.0, - "grad_norm": 4.661072352393081, - "learning_rate": 2.578938517197322e-06, - "loss": 0.8872, - "num_input_tokens_seen": 75041790, - "step": 3529 - }, - { - "epoch": 0.4244574039559911, - "flos": 16979423624880.0, - "grad_norm": 3.957396359559562, - "learning_rate": 2.5781928515274916e-06, - "loss": 0.8442, - "num_input_tokens_seen": 75060230, - "step": 3530 - }, - { - "epoch": 0.4245776468466302, - "flos": 12442729897800.0, - "grad_norm": 4.477659081168191, - "learning_rate": 2.577447098146265e-06, - "loss": 0.8985, - "num_input_tokens_seen": 75077125, - "step": 3531 - }, - { - "epoch": 0.4246978897372693, - "flos": 19779938943960.0, - "grad_norm": 2.570594006923574, - "learning_rate": 2.5767012571667724e-06, - "loss": 1.0111, - "num_input_tokens_seen": 75096325, - "step": 3532 - }, - { - "epoch": 0.42481813262790835, - "flos": 11027552615280.0, - "grad_norm": 3.1557417356462505, - "learning_rate": 2.5759553287021587e-06, - "loss": 0.8998, - "num_input_tokens_seen": 75114375, - "step": 3533 - }, - { - "epoch": 0.42493837551854746, - "flos": 17031651357720.0, - "grad_norm": 3.6663855242595536, - "learning_rate": 2.5752093128655786e-06, - "loss": 0.9916, - "num_input_tokens_seen": 75132340, - "step": 3534 - }, - { - "epoch": 0.4250586184091866, - "flos": 14776721700720.0, - "grad_norm": 6.265245992180139, - "learning_rate": 2.574463209770204e-06, - "loss": 0.9518, - "num_input_tokens_seen": 75151375, - "step": 3535 - }, - { - "epoch": 0.42517886129982563, - "flos": 21645322244640.0, - "grad_norm": 4.015148565030636, - "learning_rate": 2.5737170195292165e-06, - "loss": 1.0156, - "num_input_tokens_seen": 75174430, - "step": 3536 - }, - { - "epoch": 0.42529910419046474, - "flos": 14249630507400.0, - "grad_norm": 5.232679223327954, - "learning_rate": 2.572970742255814e-06, - "loss": 1.0007, - "num_input_tokens_seen": 75192640, - "step": 3537 - }, - { - "epoch": 0.42541934708110385, - "flos": 16083273545400.0, - "grad_norm": 2.7453075602122783, - "learning_rate": 2.5722243780632046e-06, - "loss": 1.0429, - "num_input_tokens_seen": 75210625, - "step": 3538 - }, - { - "epoch": 0.4255395899717429, - "flos": 47400434983800.0, - "grad_norm": 0.8217117727664924, - "learning_rate": 2.5714779270646125e-06, - "loss": 0.8885, - "num_input_tokens_seen": 75271115, - "step": 3539 - }, - { - "epoch": 0.425659832862382, - "flos": 12705830901840.0, - "grad_norm": 7.882116099844367, - "learning_rate": 2.5707313893732735e-06, - "loss": 0.9879, - "num_input_tokens_seen": 75289375, - "step": 3540 - }, - { - "epoch": 0.4257800757530211, - "flos": 17082805935960.0, - "grad_norm": 5.122171877716819, - "learning_rate": 2.5699847651024364e-06, - "loss": 0.987, - "num_input_tokens_seen": 75309735, - "step": 3541 - }, - { - "epoch": 0.4259003186436602, - "flos": 16848379038600.0, - "grad_norm": 3.5776008321972665, - "learning_rate": 2.5692380543653627e-06, - "loss": 0.9963, - "num_input_tokens_seen": 75327610, - "step": 3542 - }, - { - "epoch": 0.4260205615342993, - "flos": 10784325850200.0, - "grad_norm": 3.287797770775836, - "learning_rate": 2.5684912572753293e-06, - "loss": 0.9161, - "num_input_tokens_seen": 75343005, - "step": 3543 - }, - { - "epoch": 0.4261408044249384, - "flos": 21857146024200.0, - "grad_norm": 5.5843945913286035, - "learning_rate": 2.5677443739456245e-06, - "loss": 1.0722, - "num_input_tokens_seen": 75364385, - "step": 3544 - }, - { - "epoch": 0.42626104731557746, - "flos": 16533479563560.0, - "grad_norm": 10.057699444467127, - "learning_rate": 2.5669974044895495e-06, - "loss": 
1.0168, - "num_input_tokens_seen": 75380500, - "step": 3545 - }, - { - "epoch": 0.42638129020621657, - "flos": 18421343335800.0, - "grad_norm": 5.9475292429259214, - "learning_rate": 2.5662503490204187e-06, - "loss": 1.015, - "num_input_tokens_seen": 75400385, - "step": 3546 - }, - { - "epoch": 0.4265015330968556, - "flos": 18864834472320.0, - "grad_norm": 3.219176032750304, - "learning_rate": 2.5655032076515603e-06, - "loss": 0.9903, - "num_input_tokens_seen": 75419430, - "step": 3547 - }, - { - "epoch": 0.42662177598749473, - "flos": 17346826786800.0, - "grad_norm": 4.34289015348695, - "learning_rate": 2.5647559804963155e-06, - "loss": 1.043, - "num_input_tokens_seen": 75439080, - "step": 3548 - }, - { - "epoch": 0.42674201887813384, - "flos": 16454969325720.0, - "grad_norm": 3.0302688741203956, - "learning_rate": 2.5640086676680364e-06, - "loss": 1.0101, - "num_input_tokens_seen": 75460295, - "step": 3549 - }, - { - "epoch": 0.4268622617687729, - "flos": 15406459327680.0, - "grad_norm": 8.135735095667448, - "learning_rate": 2.5632612692800923e-06, - "loss": 1.0225, - "num_input_tokens_seen": 75479080, - "step": 3550 - }, - { - "epoch": 0.426982504659412, - "flos": 16664432165160.0, - "grad_norm": 31.196467820767587, - "learning_rate": 2.5625137854458603e-06, - "loss": 0.9689, - "num_input_tokens_seen": 75497815, - "step": 3551 - }, - { - "epoch": 0.4271027475500511, - "flos": 13413496197000.0, - "grad_norm": 3.8089878602178704, - "learning_rate": 2.561766216278735e-06, - "loss": 1.0232, - "num_input_tokens_seen": 75515130, - "step": 3552 - }, - { - "epoch": 0.4272229904406902, - "flos": 19129897816200.0, - "grad_norm": 2.222994116661671, - "learning_rate": 2.561018561892121e-06, - "loss": 1.0325, - "num_input_tokens_seen": 75533990, - "step": 3553 - }, - { - "epoch": 0.4273432333313293, - "flos": 17031896650200.0, - "grad_norm": 3.0665572402615107, - "learning_rate": 2.5602708223994363e-06, - "loss": 0.9877, - "num_input_tokens_seen": 75555575, - "step": 3554 - }, - { - "epoch": 0.4274634762219684, - "flos": 21070173290640.0, - "grad_norm": 6.101754745991075, - "learning_rate": 2.559522997914115e-06, - "loss": 0.8948, - "num_input_tokens_seen": 75574875, - "step": 3555 - }, - { - "epoch": 0.42758371911260745, - "flos": 15222849731400.0, - "grad_norm": 3.4537941124743243, - "learning_rate": 2.558775088549599e-06, - "loss": 1.0846, - "num_input_tokens_seen": 75594175, - "step": 3556 - }, - { - "epoch": 0.42770396200324656, - "flos": 10420510090800.0, - "grad_norm": 5.539337280516887, - "learning_rate": 2.5580270944193467e-06, - "loss": 0.8779, - "num_input_tokens_seen": 75610715, - "step": 3557 - }, - { - "epoch": 0.4278242048938857, - "flos": 50601841436400.0, - "grad_norm": 0.7355940420269371, - "learning_rate": 2.557279015636827e-06, - "loss": 0.804, - "num_input_tokens_seen": 75670845, - "step": 3558 - }, - { - "epoch": 0.42794444778452473, - "flos": 49674227048280.0, - "grad_norm": 0.8169753723821664, - "learning_rate": 2.5565308523155245e-06, - "loss": 0.8881, - "num_input_tokens_seen": 75730165, - "step": 3559 - }, - { - "epoch": 0.42806469067516384, - "flos": 12908456213400.0, - "grad_norm": 5.033071247544717, - "learning_rate": 2.5557826045689336e-06, - "loss": 1.0412, - "num_input_tokens_seen": 75746125, - "step": 3560 - }, - { - "epoch": 0.4281849335658029, - "flos": 39018548634600.0, - "grad_norm": 0.8692689265159201, - "learning_rate": 2.5550342725105643e-06, - "loss": 0.8378, - "num_input_tokens_seen": 75804010, - "step": 3561 - }, - { - "epoch": 0.428305176456442, - "flos": 
12233634997080.0, - "grad_norm": 3.644206892260124, - "learning_rate": 2.554285856253937e-06, - "loss": 1.0391, - "num_input_tokens_seen": 75822565, - "step": 3562 - }, - { - "epoch": 0.4284254193470811, - "flos": 18762586638960.0, - "grad_norm": 3.48560766474678, - "learning_rate": 2.5535373559125855e-06, - "loss": 0.9922, - "num_input_tokens_seen": 75842650, - "step": 3563 - }, - { - "epoch": 0.42854566223772017, - "flos": 21173310309240.0, - "grad_norm": 3.2246451604048563, - "learning_rate": 2.552788771600057e-06, - "loss": 1.0504, - "num_input_tokens_seen": 75862680, - "step": 3564 - }, - { - "epoch": 0.4286659051283593, - "flos": 15642603272400.0, - "grad_norm": 3.7794128242468488, - "learning_rate": 2.5520401034299118e-06, - "loss": 1.0484, - "num_input_tokens_seen": 75880160, - "step": 3565 - }, - { - "epoch": 0.4287861480189984, - "flos": 9401747354040.0, - "grad_norm": 3.5649503345427336, - "learning_rate": 2.551291351515722e-06, - "loss": 1.0965, - "num_input_tokens_seen": 75896895, - "step": 3566 - }, - { - "epoch": 0.42890639090963745, - "flos": 18972478740240.0, - "grad_norm": 4.993835654735625, - "learning_rate": 2.5505425159710726e-06, - "loss": 1.0849, - "num_input_tokens_seen": 75916425, - "step": 3567 - }, - { - "epoch": 0.42902663380027656, - "flos": 17106236915880.0, - "grad_norm": 4.185298056066645, - "learning_rate": 2.549793596909561e-06, - "loss": 1.0583, - "num_input_tokens_seen": 75934765, - "step": 3568 - }, - { - "epoch": 0.42914687669091567, - "flos": 11053313873760.0, - "grad_norm": 10.711327832967886, - "learning_rate": 2.5490445944447976e-06, - "loss": 0.8876, - "num_input_tokens_seen": 75952980, - "step": 3569 - }, - { - "epoch": 0.4292671195815547, - "flos": 22433858717760.0, - "grad_norm": 20.712609855622976, - "learning_rate": 2.548295508690406e-06, - "loss": 0.8796, - "num_input_tokens_seen": 75973995, - "step": 3570 - }, - { - "epoch": 0.42938736247219383, - "flos": 21563408573640.0, - "grad_norm": 5.865405425411219, - "learning_rate": 2.5475463397600217e-06, - "loss": 0.9906, - "num_input_tokens_seen": 75993795, - "step": 3571 - }, - { - "epoch": 0.42950760536283294, - "flos": 20911343782920.0, - "grad_norm": 3.9333293564555434, - "learning_rate": 2.546797087767293e-06, - "loss": 1.0055, - "num_input_tokens_seen": 76013640, - "step": 3572 - }, - { - "epoch": 0.429627848253472, - "flos": 19129376569680.0, - "grad_norm": 3.6438657555751583, - "learning_rate": 2.546047752825881e-06, - "loss": 1.0872, - "num_input_tokens_seen": 76033965, - "step": 3573 - }, - { - "epoch": 0.4297480911441111, - "flos": 9794697143520.0, - "grad_norm": 19.727015000998236, - "learning_rate": 2.5452983350494595e-06, - "loss": 1.1539, - "num_input_tokens_seen": 76049240, - "step": 3574 - }, - { - "epoch": 0.4298683340347502, - "flos": 14724831245040.0, - "grad_norm": 4.193763716969464, - "learning_rate": 2.544548834551713e-06, - "loss": 0.8808, - "num_input_tokens_seen": 76067965, - "step": 3575 - }, - { - "epoch": 0.4299885769253893, - "flos": 14825484677280.0, - "grad_norm": 5.112526204757873, - "learning_rate": 2.5437992514463424e-06, - "loss": 1.1457, - "num_input_tokens_seen": 76081010, - "step": 3576 - }, - { - "epoch": 0.4301088198160284, - "flos": 18133922166600.0, - "grad_norm": 8.256834955739889, - "learning_rate": 2.5430495858470565e-06, - "loss": 1.1056, - "num_input_tokens_seen": 76100200, - "step": 3577 - }, - { - "epoch": 0.43022906270666744, - "flos": 12941177646000.0, - "grad_norm": 5.312115285070523, - "learning_rate": 2.54229983786758e-06, - "loss": 0.9926, 
- "num_input_tokens_seen": 76117865, - "step": 3578 - }, - { - "epoch": 0.43034930559730655, - "flos": 16634684903880.0, - "grad_norm": 4.334808807556717, - "learning_rate": 2.541550007621651e-06, - "loss": 1.0666, - "num_input_tokens_seen": 76136075, - "step": 3579 - }, - { - "epoch": 0.43046954848794566, - "flos": 20074166979480.0, - "grad_norm": 6.834090710358061, - "learning_rate": 2.5408000952230156e-06, - "loss": 1.0282, - "num_input_tokens_seen": 76154585, - "step": 3580 - }, - { - "epoch": 0.4305897913785847, - "flos": 20358123392400.0, - "grad_norm": 3.6122998033641625, - "learning_rate": 2.5400501007854357e-06, - "loss": 1.1195, - "num_input_tokens_seen": 76173750, - "step": 3581 - }, - { - "epoch": 0.43071003426922383, - "flos": 14514601866600.0, - "grad_norm": 3.305835569544591, - "learning_rate": 2.539300024422685e-06, - "loss": 0.9781, - "num_input_tokens_seen": 76191415, - "step": 3582 - }, - { - "epoch": 0.43083027715986294, - "flos": 37191901980360.0, - "grad_norm": 0.8092973752889773, - "learning_rate": 2.538549866248549e-06, - "loss": 0.8691, - "num_input_tokens_seen": 76246115, - "step": 3583 - }, - { - "epoch": 0.430950520050502, - "flos": 11813942779200.0, - "grad_norm": 3.646538773362319, - "learning_rate": 2.5377996263768274e-06, - "loss": 1.0327, - "num_input_tokens_seen": 76263915, - "step": 3584 - }, - { - "epoch": 0.4310707629411411, - "flos": 17504153878080.0, - "grad_norm": 4.088277172434308, - "learning_rate": 2.5370493049213293e-06, - "loss": 0.8991, - "num_input_tokens_seen": 76283280, - "step": 3585 - }, - { - "epoch": 0.4311910058317802, - "flos": 18815458264560.0, - "grad_norm": 3.364240945848503, - "learning_rate": 2.536298901995878e-06, - "loss": 1.0195, - "num_input_tokens_seen": 76302210, - "step": 3586 - }, - { - "epoch": 0.43131124872241927, - "flos": 17897624914080.0, - "grad_norm": 5.154132570088669, - "learning_rate": 2.535548417714311e-06, - "loss": 1.0278, - "num_input_tokens_seen": 76321230, - "step": 3587 - }, - { - "epoch": 0.4314314916130584, - "flos": 15352453224360.0, - "grad_norm": 2.977309566272538, - "learning_rate": 2.534797852190474e-06, - "loss": 1.0946, - "num_input_tokens_seen": 76341130, - "step": 3588 - }, - { - "epoch": 0.4315517345036975, - "flos": 13670250258120.0, - "grad_norm": 3.5295064216897964, - "learning_rate": 2.5340472055382283e-06, - "loss": 1.0311, - "num_input_tokens_seen": 76356880, - "step": 3589 - }, - { - "epoch": 0.43167197739433655, - "flos": 17262858791280.0, - "grad_norm": 3.314480603294685, - "learning_rate": 2.5332964778714468e-06, - "loss": 1.0382, - "num_input_tokens_seen": 76373785, - "step": 3590 - }, - { - "epoch": 0.43179222028497566, - "flos": 11940357469920.0, - "grad_norm": 2.7974563932045666, - "learning_rate": 2.5325456693040123e-06, - "loss": 0.8929, - "num_input_tokens_seen": 76390700, - "step": 3591 - }, - { - "epoch": 0.43191246317561477, - "flos": 12495448215600.0, - "grad_norm": 7.085868052822584, - "learning_rate": 2.531794779949824e-06, - "loss": 0.9851, - "num_input_tokens_seen": 76408320, - "step": 3592 - }, - { - "epoch": 0.4320327060662538, - "flos": 16979791563600.0, - "grad_norm": 4.7831564698878175, - "learning_rate": 2.5310438099227903e-06, - "loss": 1.1073, - "num_input_tokens_seen": 76425305, - "step": 3593 - }, - { - "epoch": 0.43215294895689293, - "flos": 47541628546440.0, - "grad_norm": 1.3636585139874227, - "learning_rate": 2.530292759336833e-06, - "loss": 0.7912, - "num_input_tokens_seen": 76485760, - "step": 3594 - }, - { - "epoch": 0.432273191847532, - "flos": 
14619149316960.0, - "grad_norm": 3.663044151529529, - "learning_rate": 2.5295416283058855e-06, - "loss": 0.93, - "num_input_tokens_seen": 76504345, - "step": 3595 - }, - { - "epoch": 0.4323934347381711, - "flos": 13676321247000.0, - "grad_norm": 2.8711905742070707, - "learning_rate": 2.5287904169438943e-06, - "loss": 0.8877, - "num_input_tokens_seen": 76523270, - "step": 3596 - }, - { - "epoch": 0.4325136776288102, - "flos": 15432864478920.0, - "grad_norm": 6.0101172660774225, - "learning_rate": 2.528039125364817e-06, - "loss": 0.8568, - "num_input_tokens_seen": 76541795, - "step": 3597 - }, - { - "epoch": 0.43263392051944927, - "flos": 15873657398160.0, - "grad_norm": 8.293109966267037, - "learning_rate": 2.5272877536826246e-06, - "loss": 0.972, - "num_input_tokens_seen": 76560310, - "step": 3598 - }, - { - "epoch": 0.4327541634100884, - "flos": 20781219043440.0, - "grad_norm": 5.308338105794936, - "learning_rate": 2.5265363020112986e-06, - "loss": 0.9112, - "num_input_tokens_seen": 76580350, - "step": 3599 - }, - { - "epoch": 0.4328744063007275, - "flos": 18552663876120.0, - "grad_norm": 3.50626221788967, - "learning_rate": 2.5257847704648344e-06, - "loss": 1.0668, - "num_input_tokens_seen": 76601300, - "step": 3600 - }, - { - "epoch": 0.43299464919136654, - "flos": 11735217910440.0, - "grad_norm": 8.340387131009997, - "learning_rate": 2.525033159157239e-06, - "loss": 0.9936, - "num_input_tokens_seen": 76617335, - "step": 3601 - }, - { - "epoch": 0.43311489208200565, - "flos": 11394526515360.0, - "grad_norm": 2.9170649302007035, - "learning_rate": 2.52428146820253e-06, - "loss": 0.9984, - "num_input_tokens_seen": 76635310, - "step": 3602 - }, - { - "epoch": 0.43323513497264476, - "flos": 16298071496280.0, - "grad_norm": 3.8599919741057924, - "learning_rate": 2.52352969771474e-06, - "loss": 1.0466, - "num_input_tokens_seen": 76654255, - "step": 3603 - }, - { - "epoch": 0.4333553778632838, - "flos": 18000118039920.0, - "grad_norm": 2.6112514304836916, - "learning_rate": 2.5227778478079106e-06, - "loss": 1.1163, - "num_input_tokens_seen": 76673385, - "step": 3604 - }, - { - "epoch": 0.43347562075392293, - "flos": 13751213420760.0, - "grad_norm": 6.625544399710057, - "learning_rate": 2.522025918596098e-06, - "loss": 0.997, - "num_input_tokens_seen": 76691405, - "step": 3605 - }, - { - "epoch": 0.43359586364456204, - "flos": 18738879705000.0, - "grad_norm": 4.423476721004591, - "learning_rate": 2.521273910193368e-06, - "loss": 0.8816, - "num_input_tokens_seen": 76714305, - "step": 3606 - }, - { - "epoch": 0.4337161065352011, - "flos": 11308565518440.0, - "grad_norm": 3.383336403254978, - "learning_rate": 2.5205218227138006e-06, - "loss": 1.0898, - "num_input_tokens_seen": 76726980, - "step": 3607 - }, - { - "epoch": 0.4338363494258402, - "flos": 14352920833800.0, - "grad_norm": 4.674916785820439, - "learning_rate": 2.519769656271486e-06, - "loss": 1.0144, - "num_input_tokens_seen": 76744120, - "step": 3608 - }, - { - "epoch": 0.43395659231647926, - "flos": 14252420709360.0, - "grad_norm": 6.84508734748343, - "learning_rate": 2.5190174109805285e-06, - "loss": 0.891, - "num_input_tokens_seen": 76763665, - "step": 3609 - }, - { - "epoch": 0.43407683520711837, - "flos": 14121590754000.0, - "grad_norm": 4.9234479056018055, - "learning_rate": 2.518265086955042e-06, - "loss": 0.8594, - "num_input_tokens_seen": 76781105, - "step": 3610 - }, - { - "epoch": 0.4341970780977575, - "flos": 16425835295640.0, - "grad_norm": 3.5378601861372747, - "learning_rate": 2.5175126843091534e-06, - "loss": 1.0487, - 
"num_input_tokens_seen": 76800195, - "step": 3611 - }, - { - "epoch": 0.43431732098839654, - "flos": 26702269637160.0, - "grad_norm": 2.99899306882825, - "learning_rate": 2.5167602031570034e-06, - "loss": 0.9712, - "num_input_tokens_seen": 76820100, - "step": 3612 - }, - { - "epoch": 0.43443756387903565, - "flos": 22721494517880.0, - "grad_norm": 2.942176991432292, - "learning_rate": 2.51600764361274e-06, - "loss": 0.9606, - "num_input_tokens_seen": 76841345, - "step": 3613 - }, - { - "epoch": 0.43455780676967476, - "flos": 16691082608880.0, - "grad_norm": 8.705090614123305, - "learning_rate": 2.5152550057905283e-06, - "loss": 1.0156, - "num_input_tokens_seen": 76860955, - "step": 3614 - }, - { - "epoch": 0.4346780496603138, - "flos": 17215690215840.0, - "grad_norm": 3.921934697278163, - "learning_rate": 2.5145022898045415e-06, - "loss": 0.9744, - "num_input_tokens_seen": 76879860, - "step": 3615 - }, - { - "epoch": 0.4347982925509529, - "flos": 12102498426120.0, - "grad_norm": 3.9849188938287146, - "learning_rate": 2.5137494957689664e-06, - "loss": 1.1212, - "num_input_tokens_seen": 76895190, - "step": 3616 - }, - { - "epoch": 0.43491853544159204, - "flos": 43623511638480.0, - "grad_norm": 0.7546161215062246, - "learning_rate": 2.5129966237980016e-06, - "loss": 0.8287, - "num_input_tokens_seen": 76957905, - "step": 3617 - }, - { - "epoch": 0.4350387783322311, - "flos": 15589700985240.0, - "grad_norm": 3.8337230622471115, - "learning_rate": 2.512243674005857e-06, - "loss": 1.0028, - "num_input_tokens_seen": 76976990, - "step": 3618 - }, - { - "epoch": 0.4351590212228702, - "flos": 17844998580960.0, - "grad_norm": 5.226950775002274, - "learning_rate": 2.5114906465067537e-06, - "loss": 1.0956, - "num_input_tokens_seen": 76997695, - "step": 3619 - }, - { - "epoch": 0.4352792641135093, - "flos": 15274801510200.0, - "grad_norm": 3.043279316127864, - "learning_rate": 2.5107375414149264e-06, - "loss": 0.9624, - "num_input_tokens_seen": 77016660, - "step": 3620 - }, - { - "epoch": 0.43539950700414837, - "flos": 11499625873800.0, - "grad_norm": 3.8228137481445432, - "learning_rate": 2.5099843588446197e-06, - "loss": 0.9234, - "num_input_tokens_seen": 77034700, - "step": 3621 - }, - { - "epoch": 0.4355197498947875, - "flos": 11814739979760.0, - "grad_norm": 5.850573141599469, - "learning_rate": 2.509231098910091e-06, - "loss": 0.8406, - "num_input_tokens_seen": 77054290, - "step": 3622 - }, - { - "epoch": 0.4356399927854266, - "flos": 11578136111640.0, - "grad_norm": 2.722429526041303, - "learning_rate": 2.508477761725611e-06, - "loss": 0.9578, - "num_input_tokens_seen": 77072285, - "step": 3623 - }, - { - "epoch": 0.43576023567606564, - "flos": 12181345941120.0, - "grad_norm": 3.574078666568398, - "learning_rate": 2.507724347405458e-06, - "loss": 1.0252, - "num_input_tokens_seen": 77089955, - "step": 3624 - }, - { - "epoch": 0.43588047856670475, - "flos": 11257288293960.0, - "grad_norm": 4.5897182496140045, - "learning_rate": 2.5069708560639243e-06, - "loss": 1.0441, - "num_input_tokens_seen": 77107585, - "step": 3625 - }, - { - "epoch": 0.4360007214573438, - "flos": 16822188518280.0, - "grad_norm": 6.199022449829981, - "learning_rate": 2.5062172878153158e-06, - "loss": 0.8344, - "num_input_tokens_seen": 77126580, - "step": 3626 - }, - { - "epoch": 0.4361209643479829, - "flos": 15613898504160.0, - "grad_norm": 18.629418585033893, - "learning_rate": 2.505463642773947e-06, - "loss": 1.0981, - "num_input_tokens_seen": 77146265, - "step": 3627 - }, - { - "epoch": 0.43624120723862203, - "flos": 
-    [deleted trainer-state log: "log_history" entries for training steps 3628 through 3973, truncated mid-entry at both ends of this span; each entry records "epoch", "flos", "grad_norm", "learning_rate", "loss", "num_input_tokens_seen", and "step". Across these steps the learning rate decays from ~2.5047e-06 to ~2.2401e-06, the loss fluctuates between roughly 0.73 and 1.15, and "num_input_tokens_seen" grows from ~77.16M to ~84.35M.]
0.8646, - "num_input_tokens_seen": 84370690, - "step": 3974 - }, - { - "epoch": 0.4779654902903866, - "flos": 22510621246680.0, - "grad_norm": 3.4066839379198153, - "learning_rate": 2.239348233584531e-06, - "loss": 0.9303, - "num_input_tokens_seen": 84391265, - "step": 3975 - }, - { - "epoch": 0.47808573318102565, - "flos": 13833127091760.0, - "grad_norm": 3.423373238375219, - "learning_rate": 2.2385748420939013e-06, - "loss": 1.0256, - "num_input_tokens_seen": 84410180, - "step": 3976 - }, - { - "epoch": 0.47820597607166476, - "flos": 16061467628160.0, - "grad_norm": 2.611208517092951, - "learning_rate": 2.2378014144116583e-06, - "loss": 0.9506, - "num_input_tokens_seen": 84428710, - "step": 3977 - }, - { - "epoch": 0.4783262189623039, - "flos": 16350360552240.0, - "grad_norm": 3.900500651771854, - "learning_rate": 2.23702795065513e-06, - "loss": 1.0239, - "num_input_tokens_seen": 84448010, - "step": 3978 - }, - { - "epoch": 0.47844646185294293, - "flos": 35592777824400.0, - "grad_norm": 0.986837938952435, - "learning_rate": 2.2362544509416493e-06, - "loss": 0.9327, - "num_input_tokens_seen": 84499845, - "step": 3979 - }, - { - "epoch": 0.47856670474358204, - "flos": 14357060144400.0, - "grad_norm": 4.408459481427684, - "learning_rate": 2.2354809153885572e-06, - "loss": 1.0474, - "num_input_tokens_seen": 84516635, - "step": 3980 - }, - { - "epoch": 0.47868694763422115, - "flos": 14903596314840.0, - "grad_norm": 2.498095072742861, - "learning_rate": 2.234707344113197e-06, - "loss": 1.0603, - "num_input_tokens_seen": 84534450, - "step": 3981 - }, - { - "epoch": 0.4788071905248602, - "flos": 13492619666040.0, - "grad_norm": 2.4198549767854054, - "learning_rate": 2.233933737232919e-06, - "loss": 0.9975, - "num_input_tokens_seen": 84551950, - "step": 3982 - }, - { - "epoch": 0.4789274334154993, - "flos": 16349685997920.0, - "grad_norm": 2.6500087342028364, - "learning_rate": 2.2331600948650793e-06, - "loss": 0.9989, - "num_input_tokens_seen": 84571815, - "step": 3983 - }, - { - "epoch": 0.4790476763061384, - "flos": 16402465638840.0, - "grad_norm": 2.0170458140354595, - "learning_rate": 2.2323864171270386e-06, - "loss": 1.0262, - "num_input_tokens_seen": 84592805, - "step": 3984 - }, - { - "epoch": 0.4791679191967775, - "flos": 15039761381640.0, - "grad_norm": 3.0092202595414266, - "learning_rate": 2.231612704136164e-06, - "loss": 0.9523, - "num_input_tokens_seen": 84612895, - "step": 3985 - }, - { - "epoch": 0.4792881620874166, - "flos": 15845075276160.0, - "grad_norm": 6.9359079544665265, - "learning_rate": 2.2308389560098253e-06, - "loss": 0.9609, - "num_input_tokens_seen": 84628990, - "step": 3986 - }, - { - "epoch": 0.47940840497805565, - "flos": 12338611709280.0, - "grad_norm": 13.051530676626772, - "learning_rate": 2.2300651728654008e-06, - "loss": 0.9886, - "num_input_tokens_seen": 84643970, - "step": 3987 - }, - { - "epoch": 0.47952864786869476, - "flos": 46794496275480.0, - "grad_norm": 0.7432048450563549, - "learning_rate": 2.229291354820272e-06, - "loss": 0.8635, - "num_input_tokens_seen": 84704700, - "step": 3988 - }, - { - "epoch": 0.47964889075933387, - "flos": 11892146401440.0, - "grad_norm": 3.7793414130531335, - "learning_rate": 2.228517501991828e-06, - "loss": 0.9901, - "num_input_tokens_seen": 84723220, - "step": 3989 - }, - { - "epoch": 0.4797691336499729, - "flos": 50189385346680.0, - "grad_norm": 0.8398164966508309, - "learning_rate": 2.22774361449746e-06, - "loss": 0.8706, - "num_input_tokens_seen": 84779420, - "step": 3990 - }, - { - "epoch": 0.47988937654061203, - 
"flos": 13439962671360.0, - "grad_norm": 3.250076146334146, - "learning_rate": 2.2269696924545668e-06, - "loss": 0.9196, - "num_input_tokens_seen": 84796970, - "step": 3991 - }, - { - "epoch": 0.48000961943125114, - "flos": 10211629821000.0, - "grad_norm": 4.176408340571748, - "learning_rate": 2.2261957359805523e-06, - "loss": 0.9988, - "num_input_tokens_seen": 84813925, - "step": 3992 - }, - { - "epoch": 0.4801298623218902, - "flos": 19259439986040.0, - "grad_norm": 3.025464386407146, - "learning_rate": 2.225421745192823e-06, - "loss": 0.9762, - "num_input_tokens_seen": 84833800, - "step": 3993 - }, - { - "epoch": 0.4802501052125293, - "flos": 18758937913320.0, - "grad_norm": 5.698185200148843, - "learning_rate": 2.2246477202087955e-06, - "loss": 0.9986, - "num_input_tokens_seen": 84854200, - "step": 3994 - }, - { - "epoch": 0.4803703481031684, - "flos": 14906141224320.0, - "grad_norm": 2.5624904167182176, - "learning_rate": 2.223873661145887e-06, - "loss": 1.0646, - "num_input_tokens_seen": 84873975, - "step": 3995 - }, - { - "epoch": 0.4804905909938075, - "flos": 14697812862600.0, - "grad_norm": 2.961845319172179, - "learning_rate": 2.2230995681215226e-06, - "loss": 0.9358, - "num_input_tokens_seen": 84893220, - "step": 3996 - }, - { - "epoch": 0.4806108338844466, - "flos": 11788089536040.0, - "grad_norm": 4.129909714642516, - "learning_rate": 2.2223254412531305e-06, - "loss": 1.006, - "num_input_tokens_seen": 84910310, - "step": 3997 - }, - { - "epoch": 0.4807310767750857, - "flos": 14200315622760.0, - "grad_norm": 2.323079010859993, - "learning_rate": 2.221551280658146e-06, - "loss": 1.0426, - "num_input_tokens_seen": 84929090, - "step": 3998 - }, - { - "epoch": 0.48085131966572475, - "flos": 16481313153840.0, - "grad_norm": 3.7193935061907664, - "learning_rate": 2.2207770864540085e-06, - "loss": 0.9743, - "num_input_tokens_seen": 84947785, - "step": 3999 - }, - { - "epoch": 0.48097156255636386, - "flos": 14593664012520.0, - "grad_norm": 3.780612355843462, - "learning_rate": 2.220002858758162e-06, - "loss": 0.9489, - "num_input_tokens_seen": 84965495, - "step": 4000 - }, - { - "epoch": 0.481091805447003, - "flos": 50499378972120.0, - "grad_norm": 0.8365342692819069, - "learning_rate": 2.2192285976880573e-06, - "loss": 0.8583, - "num_input_tokens_seen": 85029470, - "step": 4001 - }, - { - "epoch": 0.48121204833764203, - "flos": 25998590344800.0, - "grad_norm": 3.3440602951819662, - "learning_rate": 2.2184543033611485e-06, - "loss": 1.0352, - "num_input_tokens_seen": 85050270, - "step": 4002 - }, - { - "epoch": 0.48133229122828114, - "flos": 19575197984760.0, - "grad_norm": 4.327584022839007, - "learning_rate": 2.2176799758948957e-06, - "loss": 1.0417, - "num_input_tokens_seen": 85070150, - "step": 4003 - }, - { - "epoch": 0.4814525341189202, - "flos": 30774494172600.0, - "grad_norm": 4.42645436325514, - "learning_rate": 2.2169056154067635e-06, - "loss": 0.9544, - "num_input_tokens_seen": 85093790, - "step": 4004 - }, - { - "epoch": 0.4815727770095593, - "flos": 17236239009120.0, - "grad_norm": 4.033634397341436, - "learning_rate": 2.216131222014222e-06, - "loss": 1.0553, - "num_input_tokens_seen": 85111585, - "step": 4005 - }, - { - "epoch": 0.4816930199001984, - "flos": 12836415564720.0, - "grad_norm": 3.3887494081097755, - "learning_rate": 2.2153567958347455e-06, - "loss": 1.0386, - "num_input_tokens_seen": 85127515, - "step": 4006 - }, - { - "epoch": 0.48181326279083747, - "flos": 12233543012400.0, - "grad_norm": 3.776983076231887, - "learning_rate": 2.214582336985815e-06, - "loss": 
1.0215, - "num_input_tokens_seen": 85145135, - "step": 4007 - }, - { - "epoch": 0.4819335056814766, - "flos": 10529227513320.0, - "grad_norm": 4.216778103608636, - "learning_rate": 2.2138078455849142e-06, - "loss": 0.8776, - "num_input_tokens_seen": 85162850, - "step": 4008 - }, - { - "epoch": 0.4820537485721157, - "flos": 13649456172360.0, - "grad_norm": 6.963952846746522, - "learning_rate": 2.2130333217495334e-06, - "loss": 1.0093, - "num_input_tokens_seen": 85181740, - "step": 4009 - }, - { - "epoch": 0.48217399146275475, - "flos": 11341654889760.0, - "grad_norm": 4.390029378110564, - "learning_rate": 2.2122587655971665e-06, - "loss": 0.8847, - "num_input_tokens_seen": 85196405, - "step": 4010 - }, - { - "epoch": 0.48229423435339386, - "flos": 17163339836760.0, - "grad_norm": 2.661119731905925, - "learning_rate": 2.211484177245314e-06, - "loss": 0.8612, - "num_input_tokens_seen": 85215715, - "step": 4011 - }, - { - "epoch": 0.48241447724403297, - "flos": 16927011922680.0, - "grad_norm": 7.428055095544284, - "learning_rate": 2.21070955681148e-06, - "loss": 0.9495, - "num_input_tokens_seen": 85234540, - "step": 4012 - }, - { - "epoch": 0.482534720134672, - "flos": 16427184404280.0, - "grad_norm": 2.120853192649248, - "learning_rate": 2.209934904413174e-06, - "loss": 1.0018, - "num_input_tokens_seen": 85255865, - "step": 4013 - }, - { - "epoch": 0.48265496302531113, - "flos": 14856151785360.0, - "grad_norm": 4.466335898341225, - "learning_rate": 2.2091602201679095e-06, - "loss": 0.9378, - "num_input_tokens_seen": 85275195, - "step": 4014 - }, - { - "epoch": 0.48277520591595025, - "flos": 10764819549960.0, - "grad_norm": 3.3901605947258937, - "learning_rate": 2.208385504193206e-06, - "loss": 1.0441, - "num_input_tokens_seen": 85292415, - "step": 4015 - }, - { - "epoch": 0.4828954488065893, - "flos": 12653235230280.0, - "grad_norm": 4.437428218637079, - "learning_rate": 2.2076107566065873e-06, - "loss": 1.0392, - "num_input_tokens_seen": 85309920, - "step": 4016 - }, - { - "epoch": 0.4830156916972284, - "flos": 22879066901640.0, - "grad_norm": 3.9171599354818336, - "learning_rate": 2.2068359775255816e-06, - "loss": 0.9749, - "num_input_tokens_seen": 85327950, - "step": 4017 - }, - { - "epoch": 0.48313593458786747, - "flos": 15537289283040.0, - "grad_norm": 3.42407796658672, - "learning_rate": 2.206061167067723e-06, - "loss": 0.9885, - "num_input_tokens_seen": 85345780, - "step": 4018 - }, - { - "epoch": 0.4832561774785066, - "flos": 16061528951280.0, - "grad_norm": 4.472122520351621, - "learning_rate": 2.205286325350549e-06, - "loss": 1.0104, - "num_input_tokens_seen": 85364565, - "step": 4019 - }, - { - "epoch": 0.4833764203691457, - "flos": 9474830495760.0, - "grad_norm": 4.481395835046386, - "learning_rate": 2.204511452491603e-06, - "loss": 0.9611, - "num_input_tokens_seen": 85380910, - "step": 4020 - }, - { - "epoch": 0.48349666325978474, - "flos": 31532486183880.0, - "grad_norm": 2.8419218789624545, - "learning_rate": 2.2037365486084316e-06, - "loss": 0.9706, - "num_input_tokens_seen": 85403870, - "step": 4021 - }, - { - "epoch": 0.48361690615042385, - "flos": 18524235061920.0, - "grad_norm": 2.850888190820976, - "learning_rate": 2.2029616138185886e-06, - "loss": 1.006, - "num_input_tokens_seen": 85422590, - "step": 4022 - }, - { - "epoch": 0.48373714904106296, - "flos": 15825936914640.0, - "grad_norm": 3.383451969591084, - "learning_rate": 2.202186648239629e-06, - "loss": 1.0514, - "num_input_tokens_seen": 85442245, - "step": 4023 - }, - { - "epoch": 0.483857391931702, - "flos": 
20151512078040.0, - "grad_norm": 28.564865448412977, - "learning_rate": 2.201411651989117e-06, - "loss": 0.9458, - "num_input_tokens_seen": 85463945, - "step": 4024 - }, - { - "epoch": 0.48397763482234113, - "flos": 19523399513760.0, - "grad_norm": 3.8073808665682964, - "learning_rate": 2.2006366251846167e-06, - "loss": 1.0084, - "num_input_tokens_seen": 85484305, - "step": 4025 - }, - { - "epoch": 0.48409787771298024, - "flos": 11889969430680.0, - "grad_norm": 8.763896696957433, - "learning_rate": 2.1998615679436997e-06, - "loss": 0.9794, - "num_input_tokens_seen": 85501565, - "step": 4026 - }, - { - "epoch": 0.4842181206036193, - "flos": 17845673135280.0, - "grad_norm": 3.3081212883039646, - "learning_rate": 2.199086480383942e-06, - "loss": 0.99, - "num_input_tokens_seen": 85520660, - "step": 4027 - }, - { - "epoch": 0.4843383634942584, - "flos": 21646426060800.0, - "grad_norm": 4.57108888470996, - "learning_rate": 2.1983113626229234e-06, - "loss": 0.8861, - "num_input_tokens_seen": 85539630, - "step": 4028 - }, - { - "epoch": 0.4844586063848975, - "flos": 14489515162440.0, - "grad_norm": 4.135784039966804, - "learning_rate": 2.1975362147782293e-06, - "loss": 1.0089, - "num_input_tokens_seen": 85558545, - "step": 4029 - }, - { - "epoch": 0.48457884927553657, - "flos": 50351464979760.0, - "grad_norm": 0.8465219106480436, - "learning_rate": 2.196761036967448e-06, - "loss": 0.7989, - "num_input_tokens_seen": 85626230, - "step": 4030 - }, - { - "epoch": 0.4846990921661757, - "flos": 14144592472080.0, - "grad_norm": 2.518986322187881, - "learning_rate": 2.1959858293081743e-06, - "loss": 1.0065, - "num_input_tokens_seen": 85645085, - "step": 4031 - }, - { - "epoch": 0.4848193350568148, - "flos": 16402772254440.0, - "grad_norm": 3.985380636724688, - "learning_rate": 2.1952105919180056e-06, - "loss": 0.9876, - "num_input_tokens_seen": 85664060, - "step": 4032 - }, - { - "epoch": 0.48493957794745385, - "flos": 15956828193120.0, - "grad_norm": 4.717995286422516, - "learning_rate": 2.1944353249145456e-06, - "loss": 0.8977, - "num_input_tokens_seen": 85682890, - "step": 4033 - }, - { - "epoch": 0.48505982083809296, - "flos": 17818685414400.0, - "grad_norm": 8.611863899824565, - "learning_rate": 2.193660028415401e-06, - "loss": 0.9748, - "num_input_tokens_seen": 85703390, - "step": 4034 - }, - { - "epoch": 0.485180063728732, - "flos": 19051479563040.0, - "grad_norm": 3.592798787157444, - "learning_rate": 2.1928847025381852e-06, - "loss": 1.0489, - "num_input_tokens_seen": 85723715, - "step": 4035 - }, - { - "epoch": 0.4853003066193711, - "flos": 17109119102520.0, - "grad_norm": 2.7385232902791548, - "learning_rate": 2.192109347400512e-06, - "loss": 1.0667, - "num_input_tokens_seen": 85743650, - "step": 4036 - }, - { - "epoch": 0.48542054951001024, - "flos": 16403140193160.0, - "grad_norm": 3.717735985741884, - "learning_rate": 2.191333963120004e-06, - "loss": 1.0181, - "num_input_tokens_seen": 85762350, - "step": 4037 - }, - { - "epoch": 0.4855407924006493, - "flos": 18263188382400.0, - "grad_norm": 4.812493262348165, - "learning_rate": 2.190558549814286e-06, - "loss": 0.9186, - "num_input_tokens_seen": 85782230, - "step": 4038 - }, - { - "epoch": 0.4856610352912884, - "flos": 17058179155200.0, - "grad_norm": 11.638135482531798, - "learning_rate": 2.1897831076009872e-06, - "loss": 1.0128, - "num_input_tokens_seen": 85801590, - "step": 4039 - }, - { - "epoch": 0.4857812781819275, - "flos": 17136352115880.0, - "grad_norm": 3.0717324426583423, - "learning_rate": 2.1890076365977426e-06, - "loss": 1.0255, 
- "num_input_tokens_seen": 85821135, - "step": 4040 - }, - { - "epoch": 0.48590152107256657, - "flos": 40259688275640.0, - "grad_norm": 1.0495552937779247, - "learning_rate": 2.188232136922189e-06, - "loss": 0.7954, - "num_input_tokens_seen": 85878975, - "step": 4041 - }, - { - "epoch": 0.4860217639632057, - "flos": 14225831588760.0, - "grad_norm": 2.767069121660652, - "learning_rate": 2.187456608691971e-06, - "loss": 0.9816, - "num_input_tokens_seen": 85897570, - "step": 4042 - }, - { - "epoch": 0.4861420068538448, - "flos": 12626983386840.0, - "grad_norm": 2.5567674481531455, - "learning_rate": 2.1866810520247334e-06, - "loss": 1.1063, - "num_input_tokens_seen": 85916160, - "step": 4043 - }, - { - "epoch": 0.48626224974448384, - "flos": 18683984416440.0, - "grad_norm": 4.62266316531826, - "learning_rate": 2.185905467038129e-06, - "loss": 0.8733, - "num_input_tokens_seen": 85934785, - "step": 4044 - }, - { - "epoch": 0.48638249263512295, - "flos": 15668701808040.0, - "grad_norm": 2.375762214822728, - "learning_rate": 2.1851298538498127e-06, - "loss": 1.0003, - "num_input_tokens_seen": 85954220, - "step": 4045 - }, - { - "epoch": 0.48650273552576206, - "flos": 17871863655600.0, - "grad_norm": 5.6217061889477575, - "learning_rate": 2.184354212577446e-06, - "loss": 1.0248, - "num_input_tokens_seen": 85974245, - "step": 4046 - }, - { - "epoch": 0.4866229784164011, - "flos": 12364464952440.0, - "grad_norm": 7.923388575435006, - "learning_rate": 2.1835785433386907e-06, - "loss": 0.8324, - "num_input_tokens_seen": 85992780, - "step": 4047 - }, - { - "epoch": 0.48674322130704023, - "flos": 16586136558240.0, - "grad_norm": 2.281487857489509, - "learning_rate": 2.182802846251216e-06, - "loss": 0.8767, - "num_input_tokens_seen": 86012770, - "step": 4048 - }, - { - "epoch": 0.4868634641976793, - "flos": 20519681778960.0, - "grad_norm": 2.345405502559322, - "learning_rate": 2.182027121432696e-06, - "loss": 0.9436, - "num_input_tokens_seen": 86033115, - "step": 4049 - }, - { - "epoch": 0.4869837070883184, - "flos": 13492129081080.0, - "grad_norm": 2.317762109939824, - "learning_rate": 2.1812513690008054e-06, - "loss": 1.0481, - "num_input_tokens_seen": 86051955, - "step": 4050 - }, - { - "epoch": 0.4871039499789575, - "flos": 10685910711840.0, - "grad_norm": 4.426885024472339, - "learning_rate": 2.180475589073227e-06, - "loss": 1.0208, - "num_input_tokens_seen": 86069375, - "step": 4051 - }, - { - "epoch": 0.48722419286959656, - "flos": 18628905158520.0, - "grad_norm": 5.213205098642551, - "learning_rate": 2.1796997817676456e-06, - "loss": 0.9696, - "num_input_tokens_seen": 86090105, - "step": 4052 - }, - { - "epoch": 0.4873444357602357, - "flos": 17085872091960.0, - "grad_norm": 3.016384839404556, - "learning_rate": 2.1789239472017494e-06, - "loss": 0.9033, - "num_input_tokens_seen": 86111475, - "step": 4053 - }, - { - "epoch": 0.4874646786508748, - "flos": 16219131996600.0, - "grad_norm": 6.045350663810785, - "learning_rate": 2.1781480854932326e-06, - "loss": 0.9505, - "num_input_tokens_seen": 86130960, - "step": 4054 - }, - { - "epoch": 0.48758492154151384, - "flos": 15117443757360.0, - "grad_norm": 3.4660790960286465, - "learning_rate": 2.1773721967597933e-06, - "loss": 1.0247, - "num_input_tokens_seen": 86149130, - "step": 4055 - }, - { - "epoch": 0.48770516443215295, - "flos": 44554749638760.0, - "grad_norm": 0.9015670009348626, - "learning_rate": 2.1765962811191322e-06, - "loss": 0.8595, - "num_input_tokens_seen": 86203315, - "step": 4056 - }, - { - "epoch": 0.48782540732279206, - "flos": 
47352898469640.0, - "grad_norm": 0.9106733956760921, - "learning_rate": 2.1758203386889566e-06, - "loss": 0.9114, - "num_input_tokens_seen": 86265805, - "step": 4057 - }, - { - "epoch": 0.4879456502134311, - "flos": 10316514548520.0, - "grad_norm": 3.6143952832880606, - "learning_rate": 2.1750443695869746e-06, - "loss": 1.0622, - "num_input_tokens_seen": 86281095, - "step": 4058 - }, - { - "epoch": 0.4880658931040702, - "flos": 13833157753320.0, - "grad_norm": 2.893605113676371, - "learning_rate": 2.174268373930901e-06, - "loss": 1.0774, - "num_input_tokens_seen": 86298330, - "step": 4059 - }, - { - "epoch": 0.48818613599470934, - "flos": 11836484573880.0, - "grad_norm": 3.572669522085423, - "learning_rate": 2.1734923518384537e-06, - "loss": 1.0211, - "num_input_tokens_seen": 86314655, - "step": 4060 - }, - { - "epoch": 0.4883063788853484, - "flos": 19047677529600.0, - "grad_norm": 4.259938008748356, - "learning_rate": 2.1727163034273547e-06, - "loss": 1.0491, - "num_input_tokens_seen": 86332540, - "step": 4061 - }, - { - "epoch": 0.4884266217759875, - "flos": 11865986542680.0, - "grad_norm": 16.95216641155112, - "learning_rate": 2.17194022881533e-06, - "loss": 1.0094, - "num_input_tokens_seen": 86348350, - "step": 4062 - }, - { - "epoch": 0.4885468646666266, - "flos": 17215751538960.0, - "grad_norm": 2.5915134392812518, - "learning_rate": 2.1711641281201092e-06, - "loss": 0.8947, - "num_input_tokens_seen": 86368000, - "step": 4063 - }, - { - "epoch": 0.48866710755726567, - "flos": 10318967473320.0, - "grad_norm": 7.117345613467353, - "learning_rate": 2.1703880014594264e-06, - "loss": 1.0063, - "num_input_tokens_seen": 86385310, - "step": 4064 - }, - { - "epoch": 0.4887873504479048, - "flos": 20519773763640.0, - "grad_norm": 3.773870913175426, - "learning_rate": 2.1696118489510182e-06, - "loss": 0.9491, - "num_input_tokens_seen": 86405960, - "step": 4065 - }, - { - "epoch": 0.48890759333854383, - "flos": 16193278753440.0, - "grad_norm": 2.3488955154533993, - "learning_rate": 2.1688356707126286e-06, - "loss": 0.9415, - "num_input_tokens_seen": 86425300, - "step": 4066 - }, - { - "epoch": 0.48902783622918294, - "flos": 12601130143680.0, - "grad_norm": 5.267967721734476, - "learning_rate": 2.168059466862001e-06, - "loss": 0.9314, - "num_input_tokens_seen": 86443170, - "step": 4067 - }, - { - "epoch": 0.48914807911982205, - "flos": 15852587358360.0, - "grad_norm": 3.714550821044723, - "learning_rate": 2.167283237516887e-06, - "loss": 1.0404, - "num_input_tokens_seen": 86461165, - "step": 4068 - }, - { - "epoch": 0.4892683220104611, - "flos": 11578381404120.0, - "grad_norm": 3.0654914414674135, - "learning_rate": 2.1665069827950383e-06, - "loss": 0.9805, - "num_input_tokens_seen": 86478170, - "step": 4069 - }, - { - "epoch": 0.4893885649011002, - "flos": 11129524494600.0, - "grad_norm": 3.6251869153470713, - "learning_rate": 2.1657307028142126e-06, - "loss": 1.0929, - "num_input_tokens_seen": 86495430, - "step": 4070 - }, - { - "epoch": 0.48950880779173933, - "flos": 20359411177920.0, - "grad_norm": 3.086544237970157, - "learning_rate": 2.164954397692171e-06, - "loss": 0.8954, - "num_input_tokens_seen": 86514575, - "step": 4071 - }, - { - "epoch": 0.4896290506823784, - "flos": 47388170359800.0, - "grad_norm": 1.1106339416830904, - "learning_rate": 2.164178067546678e-06, - "loss": 1.0579, - "num_input_tokens_seen": 86573460, - "step": 4072 - }, - { - "epoch": 0.4897492935730175, - "flos": 8824298783040.0, - "grad_norm": 2.9945503646276355, - "learning_rate": 2.163401712495504e-06, - "loss": 
1.1326, - "num_input_tokens_seen": 86590875, - "step": 4073 - }, - { - "epoch": 0.4898695364636566, - "flos": 16690806654840.0, - "grad_norm": 2.1160053176164064, - "learning_rate": 2.1626253326564194e-06, - "loss": 1.0193, - "num_input_tokens_seen": 86609545, - "step": 4074 - }, - { - "epoch": 0.48998977935429566, - "flos": 19337766254520.0, - "grad_norm": 3.11063179418532, - "learning_rate": 2.161848928147201e-06, - "loss": 0.9902, - "num_input_tokens_seen": 86629535, - "step": 4075 - }, - { - "epoch": 0.4901100222449348, - "flos": 14436214275000.0, - "grad_norm": 3.0567122283446344, - "learning_rate": 2.161072499085629e-06, - "loss": 1.0113, - "num_input_tokens_seen": 86648250, - "step": 4076 - }, - { - "epoch": 0.4902302651355739, - "flos": 21699788271360.0, - "grad_norm": 2.6568227711391765, - "learning_rate": 2.160296045589487e-06, - "loss": 1.0599, - "num_input_tokens_seen": 86671430, - "step": 4077 - }, - { - "epoch": 0.49035050802621294, - "flos": 13598945486880.0, - "grad_norm": 2.5962594084185815, - "learning_rate": 2.159519567776562e-06, - "loss": 0.9132, - "num_input_tokens_seen": 86690800, - "step": 4078 - }, - { - "epoch": 0.49047075091685205, - "flos": 15792602250840.0, - "grad_norm": 4.164777225613545, - "learning_rate": 2.1587430657646463e-06, - "loss": 0.9272, - "num_input_tokens_seen": 86703955, - "step": 4079 - }, - { - "epoch": 0.4905909938074911, - "flos": 14304556457520.0, - "grad_norm": 2.8800386145440218, - "learning_rate": 2.157966539671533e-06, - "loss": 1.0015, - "num_input_tokens_seen": 86723315, - "step": 4080 - }, - { - "epoch": 0.4907112366981302, - "flos": 12181499248920.0, - "grad_norm": 3.452601052144391, - "learning_rate": 2.157189989615021e-06, - "loss": 0.884, - "num_input_tokens_seen": 86741625, - "step": 4081 - }, - { - "epoch": 0.4908314795887693, - "flos": 15405907419600.0, - "grad_norm": 3.4443748391888063, - "learning_rate": 2.156413415712913e-06, - "loss": 0.9724, - "num_input_tokens_seen": 86763395, - "step": 4082 - }, - { - "epoch": 0.4909517224794084, - "flos": 18657793896120.0, - "grad_norm": 2.744325157830138, - "learning_rate": 2.155636818083014e-06, - "loss": 1.0041, - "num_input_tokens_seen": 86784485, - "step": 4083 - }, - { - "epoch": 0.4910719653700475, - "flos": 16454754694800.0, - "grad_norm": 2.7045351440606176, - "learning_rate": 2.154860196843134e-06, - "loss": 1.0615, - "num_input_tokens_seen": 86803400, - "step": 4084 - }, - { - "epoch": 0.4911922082606866, - "flos": 16586749789440.0, - "grad_norm": 3.0084693884969873, - "learning_rate": 2.154083552111085e-06, - "loss": 0.9937, - "num_input_tokens_seen": 86822290, - "step": 4085 - }, - { - "epoch": 0.49131245115132566, - "flos": 20806029793560.0, - "grad_norm": 3.6378027983281234, - "learning_rate": 2.1533068840046834e-06, - "loss": 1.042, - "num_input_tokens_seen": 86842275, - "step": 4086 - }, - { - "epoch": 0.49143269404196477, - "flos": 14296829744400.0, - "grad_norm": 6.649263108806623, - "learning_rate": 2.152530192641749e-06, - "loss": 0.8309, - "num_input_tokens_seen": 86856905, - "step": 4087 - }, - { - "epoch": 0.4915529369326039, - "flos": 17347072079280.0, - "grad_norm": 3.223920354583493, - "learning_rate": 2.1517534781401068e-06, - "loss": 0.9527, - "num_input_tokens_seen": 86874505, - "step": 4088 - }, - { - "epoch": 0.49167317982324293, - "flos": 7382072456520.0, - "grad_norm": 3.249996625458359, - "learning_rate": 2.150976740617581e-06, - "loss": 0.9226, - "num_input_tokens_seen": 86890785, - "step": 4089 - }, - { - "epoch": 0.49179342271388204, - "flos": 
18212033804160.0, - "grad_norm": 2.972884020306436, - "learning_rate": 2.150199980192006e-06, - "loss": 0.953, - "num_input_tokens_seen": 86909625, - "step": 4090 - }, - { - "epoch": 0.49191366560452116, - "flos": 14983670292240.0, - "grad_norm": 2.35672737769991, - "learning_rate": 2.1494231969812114e-06, - "loss": 1.0401, - "num_input_tokens_seen": 86928335, - "step": 4091 - }, - { - "epoch": 0.4920339084951602, - "flos": 18552725199240.0, - "grad_norm": 4.9784269328726865, - "learning_rate": 2.1486463911030372e-06, - "loss": 1.0231, - "num_input_tokens_seen": 86948705, - "step": 4092 - }, - { - "epoch": 0.4921541513857993, - "flos": 17843772118560.0, - "grad_norm": 2.404532079402149, - "learning_rate": 2.147869562675324e-06, - "loss": 0.9794, - "num_input_tokens_seen": 86967395, - "step": 4093 - }, - { - "epoch": 0.49227439427643843, - "flos": 17347072079280.0, - "grad_norm": 2.9028947391907427, - "learning_rate": 2.147092711815915e-06, - "loss": 0.9417, - "num_input_tokens_seen": 86986465, - "step": 4094 - }, - { - "epoch": 0.4923946371670775, - "flos": 7985404932240.0, - "grad_norm": 3.633059782410463, - "learning_rate": 2.1463158386426593e-06, - "loss": 1.0883, - "num_input_tokens_seen": 87003995, - "step": 4095 - }, - { - "epoch": 0.4925148800577166, - "flos": 21699450994200.0, - "grad_norm": 4.565557839946021, - "learning_rate": 2.145538943273407e-06, - "loss": 0.9984, - "num_input_tokens_seen": 87023990, - "step": 4096 - }, - { - "epoch": 0.49263512294835565, - "flos": 14803372144440.0, - "grad_norm": 3.9565365511274777, - "learning_rate": 2.144762025826013e-06, - "loss": 0.9417, - "num_input_tokens_seen": 87042800, - "step": 4097 - }, - { - "epoch": 0.49275536583899476, - "flos": 16899533616840.0, - "grad_norm": 3.8790310227839395, - "learning_rate": 2.143985086418334e-06, - "loss": 1.0892, - "num_input_tokens_seen": 87057700, - "step": 4098 - }, - { - "epoch": 0.4928756087296339, - "flos": 15826120884000.0, - "grad_norm": 2.8925916425452214, - "learning_rate": 2.1432081251682324e-06, - "loss": 0.9991, - "num_input_tokens_seen": 87077790, - "step": 4099 - }, - { - "epoch": 0.49299585162027293, - "flos": 13937245280280.0, - "grad_norm": 7.865815634950221, - "learning_rate": 2.142431142193572e-06, - "loss": 1.0929, - "num_input_tokens_seen": 87095290, - "step": 4100 - }, - { - "epoch": 0.49311609451091204, - "flos": 27729954887880.0, - "grad_norm": 6.813008700635252, - "learning_rate": 2.1416541376122207e-06, - "loss": 0.9506, - "num_input_tokens_seen": 87115190, - "step": 4101 - }, - { - "epoch": 0.49323633740155115, - "flos": 20178009213960.0, - "grad_norm": 2.7304884837560324, - "learning_rate": 2.1408771115420496e-06, - "loss": 0.9592, - "num_input_tokens_seen": 87134770, - "step": 4102 - }, - { - "epoch": 0.4933565802921902, - "flos": 15007377226200.0, - "grad_norm": 3.1301655610389303, - "learning_rate": 2.140100064100932e-06, - "loss": 0.8848, - "num_input_tokens_seen": 87150465, - "step": 4103 - }, - { - "epoch": 0.4934768231828293, - "flos": 12782562769200.0, - "grad_norm": 3.320242991000374, - "learning_rate": 2.139322995406746e-06, - "loss": 0.9886, - "num_input_tokens_seen": 87167820, - "step": 4104 - }, - { - "epoch": 0.4935970660734684, - "flos": 16684950296880.0, - "grad_norm": 3.683161106369586, - "learning_rate": 2.1385459055773727e-06, - "loss": 1.0252, - "num_input_tokens_seen": 87185730, - "step": 4105 - }, - { - "epoch": 0.4937173089641075, - "flos": 46157466745320.0, - "grad_norm": 6.118723241729986, - "learning_rate": 2.137768794730696e-06, - "loss": 0.9664, 
- "num_input_tokens_seen": 87208900, - "step": 4106 - }, - { - "epoch": 0.4938375518547466, - "flos": 15878164647480.0, - "grad_norm": 3.0037611590244433, - "learning_rate": 2.1369916629846026e-06, - "loss": 1.0278, - "num_input_tokens_seen": 87228370, - "step": 4107 - }, - { - "epoch": 0.4939577947453857, - "flos": 12653265891840.0, - "grad_norm": 18.186525089313207, - "learning_rate": 2.136214510456983e-06, - "loss": 0.9758, - "num_input_tokens_seen": 87246545, - "step": 4108 - }, - { - "epoch": 0.49407803763602476, - "flos": 50178715123800.0, - "grad_norm": 0.9748139864203466, - "learning_rate": 2.1354373372657296e-06, - "loss": 0.935, - "num_input_tokens_seen": 87304705, - "step": 4109 - }, - { - "epoch": 0.49419828052666387, - "flos": 17295181623600.0, - "grad_norm": 1.9615410107816122, - "learning_rate": 2.1346601435287404e-06, - "loss": 0.937, - "num_input_tokens_seen": 87326695, - "step": 4110 - }, - { - "epoch": 0.494318523417303, - "flos": 20937472980120.0, - "grad_norm": 3.4988289645143853, - "learning_rate": 2.1338829293639144e-06, - "loss": 1.033, - "num_input_tokens_seen": 87346775, - "step": 4111 - }, - { - "epoch": 0.49443876630794203, - "flos": 10791592639920.0, - "grad_norm": 3.6093569007849102, - "learning_rate": 2.1331056948891547e-06, - "loss": 1.0604, - "num_input_tokens_seen": 87363595, - "step": 4112 - }, - { - "epoch": 0.49455900919858115, - "flos": 8692181042160.0, - "grad_norm": 3.1794970406141934, - "learning_rate": 2.1323284402223666e-06, - "loss": 0.9851, - "num_input_tokens_seen": 87379305, - "step": 4113 - }, - { - "epoch": 0.4946792520892202, - "flos": 16189200765960.0, - "grad_norm": 5.333993493389771, - "learning_rate": 2.1315511654814597e-06, - "loss": 1.1089, - "num_input_tokens_seen": 87397435, - "step": 4114 - }, - { - "epoch": 0.4947994949798593, - "flos": 16454540063880.0, - "grad_norm": 3.180687899290861, - "learning_rate": 2.1307738707843456e-06, - "loss": 0.9971, - "num_input_tokens_seen": 87416820, - "step": 4115 - }, - { - "epoch": 0.4949197378704984, - "flos": 16822556457000.0, - "grad_norm": 4.001864952207049, - "learning_rate": 2.1299965562489385e-06, - "loss": 0.9057, - "num_input_tokens_seen": 87436345, - "step": 4116 - }, - { - "epoch": 0.4950399807611375, - "flos": 19159921031520.0, - "grad_norm": 2.5972028982040847, - "learning_rate": 2.129219221993158e-06, - "loss": 1.02, - "num_input_tokens_seen": 87460850, - "step": 4117 - }, - { - "epoch": 0.4951602236517766, - "flos": 48199248627600.0, - "grad_norm": 0.8165969990837945, - "learning_rate": 2.128441868134924e-06, - "loss": 0.8728, - "num_input_tokens_seen": 87522505, - "step": 4118 - }, - { - "epoch": 0.4952804665424157, - "flos": 14121284138400.0, - "grad_norm": 3.768609727871256, - "learning_rate": 2.1276644947921606e-06, - "loss": 1.0558, - "num_input_tokens_seen": 87541140, - "step": 4119 - }, - { - "epoch": 0.49540070943305475, - "flos": 13334587358880.0, - "grad_norm": 3.388814557064587, - "learning_rate": 2.126887102082795e-06, - "loss": 1.0477, - "num_input_tokens_seen": 87560885, - "step": 4120 - }, - { - "epoch": 0.49552095232369386, - "flos": 17737906221120.0, - "grad_norm": 2.6260927741345728, - "learning_rate": 2.126109690124757e-06, - "loss": 0.9236, - "num_input_tokens_seen": 87581420, - "step": 4121 - }, - { - "epoch": 0.495641195214333, - "flos": 16245629132520.0, - "grad_norm": 3.0166695322546513, - "learning_rate": 2.1253322590359786e-06, - "loss": 0.9364, - "num_input_tokens_seen": 87600475, - "step": 4122 - }, - { - "epoch": 0.49576143810497203, - "flos": 
18337927248360.0, - "grad_norm": 4.060121045116879, - "learning_rate": 2.124554808934397e-06, - "loss": 0.9552, - "num_input_tokens_seen": 87620775, - "step": 4123 - }, - { - "epoch": 0.49588168099561114, - "flos": 15722309311080.0, - "grad_norm": 3.5420648837142856, - "learning_rate": 2.1237773399379496e-06, - "loss": 0.9561, - "num_input_tokens_seen": 87641460, - "step": 4124 - }, - { - "epoch": 0.49600192388625025, - "flos": 17344956431640.0, - "grad_norm": 2.179549674321949, - "learning_rate": 2.122999852164578e-06, - "loss": 1.0943, - "num_input_tokens_seen": 87661800, - "step": 4125 - }, - { - "epoch": 0.4961221667768893, - "flos": 16245782440320.0, - "grad_norm": 3.5710800981813544, - "learning_rate": 2.122222345732227e-06, - "loss": 0.799, - "num_input_tokens_seen": 87681435, - "step": 4126 - }, - { - "epoch": 0.4962424096675284, - "flos": 12652744645320.0, - "grad_norm": 2.8003767884006145, - "learning_rate": 2.121444820758843e-06, - "loss": 1.0578, - "num_input_tokens_seen": 87699795, - "step": 4127 - }, - { - "epoch": 0.49636265255816747, - "flos": 15480462316200.0, - "grad_norm": 2.673079548186896, - "learning_rate": 2.120667277362376e-06, - "loss": 0.9991, - "num_input_tokens_seen": 87718230, - "step": 4128 - }, - { - "epoch": 0.4964828954488066, - "flos": 11574610032240.0, - "grad_norm": 3.0764611099143493, - "learning_rate": 2.1198897156607796e-06, - "loss": 1.0707, - "num_input_tokens_seen": 87735305, - "step": 4129 - }, - { - "epoch": 0.4966031383394457, - "flos": 17577022388880.0, - "grad_norm": 6.076762931815934, - "learning_rate": 2.1191121357720085e-06, - "loss": 0.9739, - "num_input_tokens_seen": 87753085, - "step": 4130 - }, - { - "epoch": 0.49672338123008475, - "flos": 16298408773440.0, - "grad_norm": 2.46525558199017, - "learning_rate": 2.1183345378140206e-06, - "loss": 0.9716, - "num_input_tokens_seen": 87772550, - "step": 4131 - }, - { - "epoch": 0.49684362412072386, - "flos": 47238815274120.0, - "grad_norm": 1.0367914491991075, - "learning_rate": 2.1175569219047783e-06, - "loss": 0.8924, - "num_input_tokens_seen": 87833710, - "step": 4132 - }, - { - "epoch": 0.49696386701136297, - "flos": 14171058946440.0, - "grad_norm": 7.180959628271233, - "learning_rate": 2.1167792881622437e-06, - "loss": 0.9469, - "num_input_tokens_seen": 87852450, - "step": 4133 - }, - { - "epoch": 0.497084109902002, - "flos": 17606003111160.0, - "grad_norm": 3.245449401583285, - "learning_rate": 2.116001636704384e-06, - "loss": 1.0366, - "num_input_tokens_seen": 87872555, - "step": 4134 - }, - { - "epoch": 0.49720435279264114, - "flos": 15537381267720.0, - "grad_norm": 4.330252334607311, - "learning_rate": 2.1152239676491685e-06, - "loss": 1.0227, - "num_input_tokens_seen": 87890380, - "step": 4135 - }, - { - "epoch": 0.49732459568328025, - "flos": 16927318538280.0, - "grad_norm": 3.6296384666044075, - "learning_rate": 2.114446281114569e-06, - "loss": 0.9639, - "num_input_tokens_seen": 87909120, - "step": 4136 - }, - { - "epoch": 0.4974448385739193, - "flos": 14226475481520.0, - "grad_norm": 3.3751999124729952, - "learning_rate": 2.1136685772185587e-06, - "loss": 0.9821, - "num_input_tokens_seen": 87927865, - "step": 4137 - }, - { - "epoch": 0.4975650814645584, - "flos": 17656360488840.0, - "grad_norm": 2.329685277189426, - "learning_rate": 2.1128908560791163e-06, - "loss": 1.0035, - "num_input_tokens_seen": 87947415, - "step": 4138 - }, - { - "epoch": 0.4976853243551975, - "flos": 14069700298320.0, - "grad_norm": 4.3179843693176885, - "learning_rate": 2.1121131178142203e-06, - "loss": 
1.0064, - "num_input_tokens_seen": 87966500, - "step": 4139 - }, - { - "epoch": 0.4978055672458366, - "flos": 16450155460800.0, - "grad_norm": 2.8757704656098726, - "learning_rate": 2.1113353625418544e-06, - "loss": 1.049, - "num_input_tokens_seen": 87984770, - "step": 4140 - }, - { - "epoch": 0.4979258101364757, - "flos": 11001147464040.0, - "grad_norm": 2.2157432707518994, - "learning_rate": 2.1105575903800017e-06, - "loss": 1.0159, - "num_input_tokens_seen": 88003210, - "step": 4141 - }, - { - "epoch": 0.4980460530271148, - "flos": 18759735113880.0, - "grad_norm": 2.8862804859611506, - "learning_rate": 2.1097798014466502e-06, - "loss": 1.0742, - "num_input_tokens_seen": 88022530, - "step": 4142 - }, - { - "epoch": 0.49816629591775385, - "flos": 12233389704600.0, - "grad_norm": 8.688987549690232, - "learning_rate": 2.109001995859791e-06, - "loss": 0.7883, - "num_input_tokens_seen": 88041150, - "step": 4143 - }, - { - "epoch": 0.49828653880839296, - "flos": 46488887253120.0, - "grad_norm": 0.7602895778551284, - "learning_rate": 2.108224173737415e-06, - "loss": 0.8637, - "num_input_tokens_seen": 88104170, - "step": 4144 - }, - { - "epoch": 0.498406781699032, - "flos": 19570537427640.0, - "grad_norm": 2.401856312496675, - "learning_rate": 2.1074463351975183e-06, - "loss": 0.9795, - "num_input_tokens_seen": 88122775, - "step": 4145 - }, - { - "epoch": 0.49852702458967113, - "flos": 22456308527760.0, - "grad_norm": 2.7057313206174256, - "learning_rate": 2.106668480358098e-06, - "loss": 0.9353, - "num_input_tokens_seen": 88142720, - "step": 4146 - }, - { - "epoch": 0.49864726748031024, - "flos": 16241888422200.0, - "grad_norm": 3.7030678273869784, - "learning_rate": 2.105890609337154e-06, - "loss": 0.9317, - "num_input_tokens_seen": 88160955, - "step": 4147 - }, - { - "epoch": 0.4987675103709493, - "flos": 50423750920920.0, - "grad_norm": 0.6850309250805034, - "learning_rate": 2.1051127222526883e-06, - "loss": 0.8759, - "num_input_tokens_seen": 88232790, - "step": 4148 - }, - { - "epoch": 0.4988877532615884, - "flos": 20493215304600.0, - "grad_norm": 4.56650294453601, - "learning_rate": 2.1043348192227067e-06, - "loss": 1.035, - "num_input_tokens_seen": 88252880, - "step": 4149 - }, - { - "epoch": 0.4990079961522275, - "flos": 11944527442080.0, - "grad_norm": 3.733749528801208, - "learning_rate": 2.1035569003652156e-06, - "loss": 0.8538, - "num_input_tokens_seen": 88271230, - "step": 4150 - }, - { - "epoch": 0.4991282390428666, - "flos": 9369639152640.0, - "grad_norm": 6.4975296608107325, - "learning_rate": 2.1027789657982255e-06, - "loss": 1.0387, - "num_input_tokens_seen": 88285165, - "step": 4151 - }, - { - "epoch": 0.4992484819335057, - "flos": 15296300811840.0, - "grad_norm": 3.028795329914634, - "learning_rate": 2.1020010156397482e-06, - "loss": 1.0029, - "num_input_tokens_seen": 88302105, - "step": 4152 - }, - { - "epoch": 0.4993687248241448, - "flos": 17684513349000.0, - "grad_norm": 2.678536393072697, - "learning_rate": 2.101223050007797e-06, - "loss": 1.0066, - "num_input_tokens_seen": 88320375, - "step": 4153 - }, - { - "epoch": 0.49948896771478385, - "flos": 45056600820120.0, - "grad_norm": 0.8700229238361988, - "learning_rate": 2.1004450690203904e-06, - "loss": 0.8112, - "num_input_tokens_seen": 88376175, - "step": 4154 - }, - { - "epoch": 0.49960921060542296, - "flos": 48898353799440.0, - "grad_norm": 0.9852020180437218, - "learning_rate": 2.099667072795546e-06, - "loss": 0.9426, - "num_input_tokens_seen": 88438015, - "step": 4155 - }, - { - "epoch": 0.49972945349606207, - 
"flos": 16822004548920.0, - "grad_norm": 3.672757955675348, - "learning_rate": 2.0988890614512864e-06, - "loss": 1.0135, - "num_input_tokens_seen": 88457625, - "step": 4156 - }, - { - "epoch": 0.4998496963867011, - "flos": 14016675364920.0, - "grad_norm": 2.7592016077051875, - "learning_rate": 2.098111035105635e-06, - "loss": 1.0653, - "num_input_tokens_seen": 88475770, - "step": 4157 - }, - { - "epoch": 0.49996993927734024, - "flos": 15819467325480.0, - "grad_norm": 3.7450895584459993, - "learning_rate": 2.0973329938766176e-06, - "loss": 0.9687, - "num_input_tokens_seen": 88492920, - "step": 4158 - }, - { - "epoch": 0.5000901821679793, - "flos": 16582794448200.0, - "grad_norm": 3.4395408814134703, - "learning_rate": 2.0965549378822618e-06, - "loss": 1.0176, - "num_input_tokens_seen": 88513930, - "step": 4159 - }, - { - "epoch": 0.5002104250586185, - "flos": 14435723690040.0, - "grad_norm": 3.6217744880796876, - "learning_rate": 2.095776867240599e-06, - "loss": 1.0648, - "num_input_tokens_seen": 88530640, - "step": 4160 - }, - { - "epoch": 0.5003306679492575, - "flos": 9873912597240.0, - "grad_norm": 3.8054599486977363, - "learning_rate": 2.094998782069661e-06, - "loss": 1.0312, - "num_input_tokens_seen": 88548065, - "step": 4161 - }, - { - "epoch": 0.5004509108398966, - "flos": 19706365217280.0, - "grad_norm": 3.39025018967211, - "learning_rate": 2.0942206824874845e-06, - "loss": 0.9816, - "num_input_tokens_seen": 88570560, - "step": 4162 - }, - { - "epoch": 0.5005711537305357, - "flos": 10581639215520.0, - "grad_norm": 3.428829424665174, - "learning_rate": 2.093442568612105e-06, - "loss": 1.0173, - "num_input_tokens_seen": 88588085, - "step": 4163 - }, - { - "epoch": 0.5006913966211748, - "flos": 18865601011320.0, - "grad_norm": 5.639520510589129, - "learning_rate": 2.0926644405615613e-06, - "loss": 1.0678, - "num_input_tokens_seen": 88608705, - "step": 4164 - }, - { - "epoch": 0.5008116395118138, - "flos": 14515092451560.0, - "grad_norm": 4.245510422715936, - "learning_rate": 2.091886298453897e-06, - "loss": 1.0397, - "num_input_tokens_seen": 88626610, - "step": 4165 - }, - { - "epoch": 0.500931882402453, - "flos": 15327550489560.0, - "grad_norm": 21.059275213306027, - "learning_rate": 2.091108142407153e-06, - "loss": 0.9437, - "num_input_tokens_seen": 88645070, - "step": 4166 - }, - { - "epoch": 0.5010521252930921, - "flos": 41353950869280.0, - "grad_norm": 0.8861041186510892, - "learning_rate": 2.090329972539377e-06, - "loss": 0.9187, - "num_input_tokens_seen": 88703355, - "step": 4167 - }, - { - "epoch": 0.5011723681837311, - "flos": 13204431957840.0, - "grad_norm": 2.9076877442193307, - "learning_rate": 2.089551788968616e-06, - "loss": 0.904, - "num_input_tokens_seen": 88721040, - "step": 4168 - }, - { - "epoch": 0.5012926110743702, - "flos": 39986494070280.0, - "grad_norm": 0.8743748475211653, - "learning_rate": 2.08877359181292e-06, - "loss": 0.8683, - "num_input_tokens_seen": 88777325, - "step": 4169 - }, - { - "epoch": 0.5014128539650093, - "flos": 17237404148400.0, - "grad_norm": 2.6367056387116143, - "learning_rate": 2.0879953811903396e-06, - "loss": 1.0667, - "num_input_tokens_seen": 88791930, - "step": 4170 - }, - { - "epoch": 0.5015330968556484, - "flos": 19602032397840.0, - "grad_norm": 3.0031251524291065, - "learning_rate": 2.08721715721893e-06, - "loss": 1.008, - "num_input_tokens_seen": 88810975, - "step": 4171 - }, - { - "epoch": 0.5016533397462875, - "flos": 16923056581440.0, - "grad_norm": 4.181505381906274, - "learning_rate": 2.0864389200167477e-06, - "loss": 
0.9991, - "num_input_tokens_seen": 88828950, - "step": 4172 - }, - { - "epoch": 0.5017735826369266, - "flos": 17996683945200.0, - "grad_norm": 3.121586728461682, - "learning_rate": 2.0856606697018504e-06, - "loss": 1.0213, - "num_input_tokens_seen": 88846680, - "step": 4173 - }, - { - "epoch": 0.5018938255275657, - "flos": 11944956703920.0, - "grad_norm": 3.2903644223119435, - "learning_rate": 2.084882406392297e-06, - "loss": 0.9509, - "num_input_tokens_seen": 88864360, - "step": 4174 - }, - { - "epoch": 0.5020140684182047, - "flos": 18155298822000.0, - "grad_norm": 3.361265280544493, - "learning_rate": 2.0841041302061496e-06, - "loss": 0.9208, - "num_input_tokens_seen": 88883540, - "step": 4175 - }, - { - "epoch": 0.5021343113088439, - "flos": 16821943225800.0, - "grad_norm": 16.326393261935085, - "learning_rate": 2.083325841261473e-06, - "loss": 0.9773, - "num_input_tokens_seen": 88902320, - "step": 4176 - }, - { - "epoch": 0.502254554199483, - "flos": 17451036960000.0, - "grad_norm": 5.824932625180366, - "learning_rate": 2.0825475396763322e-06, - "loss": 0.8891, - "num_input_tokens_seen": 88922690, - "step": 4177 - }, - { - "epoch": 0.502374797090122, - "flos": 24426331263480.0, - "grad_norm": 1.8878144741190108, - "learning_rate": 2.081769225568796e-06, - "loss": 0.8735, - "num_input_tokens_seen": 88944860, - "step": 4178 - }, - { - "epoch": 0.5024950399807612, - "flos": 18526565340480.0, - "grad_norm": 5.663667483531644, - "learning_rate": 2.0809908990569327e-06, - "loss": 0.9821, - "num_input_tokens_seen": 88966360, - "step": 4179 - }, - { - "epoch": 0.5026152828714002, - "flos": 15092357053200.0, - "grad_norm": 2.847022217546282, - "learning_rate": 2.0802125602588146e-06, - "loss": 1.0231, - "num_input_tokens_seen": 88985345, - "step": 4180 - }, - { - "epoch": 0.5027355257620393, - "flos": 22065995632440.0, - "grad_norm": 4.315150897414371, - "learning_rate": 2.0794342092925146e-06, - "loss": 0.8891, - "num_input_tokens_seen": 89006200, - "step": 4181 - }, - { - "epoch": 0.5028557686526784, - "flos": 17635505079960.0, - "grad_norm": 3.1624162323604366, - "learning_rate": 2.078655846276108e-06, - "loss": 0.9087, - "num_input_tokens_seen": 89026250, - "step": 4182 - }, - { - "epoch": 0.5029760115433175, - "flos": 16324537970640.0, - "grad_norm": 3.7459858072300207, - "learning_rate": 2.0778774713276727e-06, - "loss": 0.9094, - "num_input_tokens_seen": 89045445, - "step": 4183 - }, - { - "epoch": 0.5030962544339566, - "flos": 10818151098960.0, - "grad_norm": 3.7087588691155378, - "learning_rate": 2.077099084565287e-06, - "loss": 0.8886, - "num_input_tokens_seen": 89062570, - "step": 4184 - }, - { - "epoch": 0.5032164973245957, - "flos": 17421964253040.0, - "grad_norm": 3.822539072679075, - "learning_rate": 2.0763206861070313e-06, - "loss": 0.864, - "num_input_tokens_seen": 89081350, - "step": 4185 - }, - { - "epoch": 0.5033367402152348, - "flos": 11470399859040.0, - "grad_norm": 2.805821436588757, - "learning_rate": 2.0755422760709876e-06, - "loss": 0.9818, - "num_input_tokens_seen": 89098470, - "step": 4186 - }, - { - "epoch": 0.5034569831058738, - "flos": 15191262776520.0, - "grad_norm": 3.110410584948233, - "learning_rate": 2.0747638545752417e-06, - "loss": 0.9949, - "num_input_tokens_seen": 89116750, - "step": 4187 - }, - { - "epoch": 0.503577225996513, - "flos": 14593755997200.0, - "grad_norm": 3.662590853765263, - "learning_rate": 2.073985421737878e-06, - "loss": 1.0613, - "num_input_tokens_seen": 89133780, - "step": 4188 - }, - { - "epoch": 0.5036974688871521, - "flos": 
19386774523560.0, - "grad_norm": 9.277158715260024, - "learning_rate": 2.0732069776769844e-06, - "loss": 0.9753, - "num_input_tokens_seen": 89150910, - "step": 4189 - }, - { - "epoch": 0.5038177117777911, - "flos": 14488319361600.0, - "grad_norm": 3.479659511965771, - "learning_rate": 2.072428522510651e-06, - "loss": 0.9528, - "num_input_tokens_seen": 89167195, - "step": 4190 - }, - { - "epoch": 0.5039379546684303, - "flos": 15563326495560.0, - "grad_norm": 4.832274002971574, - "learning_rate": 2.071650056356968e-06, - "loss": 0.9776, - "num_input_tokens_seen": 89184455, - "step": 4191 - }, - { - "epoch": 0.5040581975590693, - "flos": 14199794376240.0, - "grad_norm": 5.2241197930660705, - "learning_rate": 2.070871579334028e-06, - "loss": 1.0167, - "num_input_tokens_seen": 89203285, - "step": 4192 - }, - { - "epoch": 0.5041784404497084, - "flos": 14881974366960.0, - "grad_norm": 3.2777858795094628, - "learning_rate": 2.0700930915599264e-06, - "loss": 0.9454, - "num_input_tokens_seen": 89222735, - "step": 4193 - }, - { - "epoch": 0.5042986833403476, - "flos": 8798506863000.0, - "grad_norm": 3.9169461711956717, - "learning_rate": 2.0693145931527583e-06, - "loss": 1.0097, - "num_input_tokens_seen": 89238935, - "step": 4194 - }, - { - "epoch": 0.5044189262309866, - "flos": 20805232593000.0, - "grad_norm": 3.244001575041552, - "learning_rate": 2.068536084230622e-06, - "loss": 1.007, - "num_input_tokens_seen": 89260520, - "step": 4195 - }, - { - "epoch": 0.5045391691216257, - "flos": 16975008360240.0, - "grad_norm": 3.9272743536162302, - "learning_rate": 2.067757564911616e-06, - "loss": 1.1033, - "num_input_tokens_seen": 89278815, - "step": 4196 - }, - { - "epoch": 0.5046594120122648, - "flos": 17530436383080.0, - "grad_norm": 2.8287511311594966, - "learning_rate": 2.0669790353138407e-06, - "loss": 1.1399, - "num_input_tokens_seen": 89297500, - "step": 4197 - }, - { - "epoch": 0.5047796549029039, - "flos": 16608279752640.0, - "grad_norm": 4.660784874826778, - "learning_rate": 2.0662004955553995e-06, - "loss": 0.9534, - "num_input_tokens_seen": 89316920, - "step": 4198 - }, - { - "epoch": 0.5048998977935429, - "flos": 12253693205400.0, - "grad_norm": 5.038921123561822, - "learning_rate": 2.065421945754395e-06, - "loss": 0.9903, - "num_input_tokens_seen": 89334370, - "step": 4199 - }, - { - "epoch": 0.505020140684182, - "flos": 24869055861000.0, - "grad_norm": 3.370963897502701, - "learning_rate": 2.0646433860289344e-06, - "loss": 1.0122, - "num_input_tokens_seen": 89353015, - "step": 4200 - }, - { - "epoch": 0.5051403835748212, - "flos": 17687824797480.0, - "grad_norm": 3.268151287148953, - "learning_rate": 2.0638648164971233e-06, - "loss": 1.0458, - "num_input_tokens_seen": 89371200, - "step": 4201 - }, - { - "epoch": 0.5052606264654602, - "flos": 14881851720720.0, - "grad_norm": 3.6363222293230217, - "learning_rate": 2.06308623727707e-06, - "loss": 1.1126, - "num_input_tokens_seen": 89391020, - "step": 4202 - }, - { - "epoch": 0.5053808693560993, - "flos": 13934608386120.0, - "grad_norm": 5.382903033244015, - "learning_rate": 2.0623076484868846e-06, - "loss": 0.9737, - "num_input_tokens_seen": 89408195, - "step": 4203 - }, - { - "epoch": 0.5055011122467384, - "flos": 48336517510560.0, - "grad_norm": 0.9169874749758269, - "learning_rate": 2.061529050244679e-06, - "loss": 0.9166, - "num_input_tokens_seen": 89467660, - "step": 4204 - }, - { - "epoch": 0.5056213551373775, - "flos": 11944742073000.0, - "grad_norm": 3.599085421418406, - "learning_rate": 2.060750442668565e-06, - "loss": 0.9752, - 
"num_input_tokens_seen": 89485135, - "step": 4205 - }, - { - "epoch": 0.5057415980280165, - "flos": 10837994676360.0, - "grad_norm": 12.832830906400412, - "learning_rate": 2.059971825876657e-06, - "loss": 0.8682, - "num_input_tokens_seen": 89499365, - "step": 4206 - }, - { - "epoch": 0.5058618409186557, - "flos": 13492129081080.0, - "grad_norm": 2.7178147559909376, - "learning_rate": 2.0591931999870713e-06, - "loss": 0.9944, - "num_input_tokens_seen": 89518010, - "step": 4207 - }, - { - "epoch": 0.5059820838092948, - "flos": 45424310597640.0, - "grad_norm": 0.8879494214173589, - "learning_rate": 2.0584145651179234e-06, - "loss": 0.8626, - "num_input_tokens_seen": 89573440, - "step": 4208 - }, - { - "epoch": 0.5061023266999338, - "flos": 10915094482440.0, - "grad_norm": 6.886872461326983, - "learning_rate": 2.0576359213873327e-06, - "loss": 1.0164, - "num_input_tokens_seen": 89588310, - "step": 4209 - }, - { - "epoch": 0.506222569590573, - "flos": 15953056821240.0, - "grad_norm": 3.293043033943055, - "learning_rate": 2.056857268913419e-06, - "loss": 0.9102, - "num_input_tokens_seen": 89608080, - "step": 4210 - }, - { - "epoch": 0.506342812481212, - "flos": 12436536262680.0, - "grad_norm": 4.972036750123643, - "learning_rate": 2.056078607814303e-06, - "loss": 1.0697, - "num_input_tokens_seen": 89623585, - "step": 4211 - }, - { - "epoch": 0.5064630553718511, - "flos": 16636831213080.0, - "grad_norm": 2.835865534770439, - "learning_rate": 2.055299938208106e-06, - "loss": 1.0119, - "num_input_tokens_seen": 89644295, - "step": 4212 - }, - { - "epoch": 0.5065832982624903, - "flos": 17057412616200.0, - "grad_norm": 3.208803679103069, - "learning_rate": 2.0545212602129526e-06, - "loss": 1.0878, - "num_input_tokens_seen": 89663870, - "step": 4213 - }, - { - "epoch": 0.5067035411531293, - "flos": 15271827338880.0, - "grad_norm": 3.099441379544768, - "learning_rate": 2.0537425739469673e-06, - "loss": 0.8627, - "num_input_tokens_seen": 89682525, - "step": 4214 - }, - { - "epoch": 0.5068237840437684, - "flos": 47207596257960.0, - "grad_norm": 0.8997203127411638, - "learning_rate": 2.052963879528276e-06, - "loss": 0.868, - "num_input_tokens_seen": 89742115, - "step": 4215 - }, - { - "epoch": 0.5069440269344075, - "flos": 19411769243040.0, - "grad_norm": 2.8781705871351515, - "learning_rate": 2.052185177075007e-06, - "loss": 0.9906, - "num_input_tokens_seen": 89761405, - "step": 4216 - }, - { - "epoch": 0.5070642698250466, - "flos": 16611253923960.0, - "grad_norm": 3.448437715711012, - "learning_rate": 2.051406466705288e-06, - "loss": 1.0495, - "num_input_tokens_seen": 89780665, - "step": 4217 - }, - { - "epoch": 0.5071845127156857, - "flos": 14436674198400.0, - "grad_norm": 2.5522321281394023, - "learning_rate": 2.0506277485372486e-06, - "loss": 1.0328, - "num_input_tokens_seen": 89799210, - "step": 4218 - }, - { - "epoch": 0.5073047556063248, - "flos": 8509399308000.0, - "grad_norm": 3.5630072516989326, - "learning_rate": 2.04984902268902e-06, - "loss": 0.8941, - "num_input_tokens_seen": 89816130, - "step": 4219 - }, - { - "epoch": 0.5074249984969639, - "flos": 13964233001160.0, - "grad_norm": 2.916792585288797, - "learning_rate": 2.0490702892787345e-06, - "loss": 0.961, - "num_input_tokens_seen": 89834910, - "step": 4220 - }, - { - "epoch": 0.5075452413876029, - "flos": 20491620903480.0, - "grad_norm": 2.372059658335995, - "learning_rate": 2.0482915484245246e-06, - "loss": 0.8542, - "num_input_tokens_seen": 89856250, - "step": 4221 - }, - { - "epoch": 0.5076654842782421, - "flos": 14436091628760.0, - 
"grad_norm": 4.556415419852703, - "learning_rate": 2.047512800244526e-06, - "loss": 1.0549, - "num_input_tokens_seen": 89871235, - "step": 4222 - }, - { - "epoch": 0.5077857271688812, - "flos": 18762709285200.0, - "grad_norm": 5.457235191865113, - "learning_rate": 2.046734044856873e-06, - "loss": 1.0063, - "num_input_tokens_seen": 89890365, - "step": 4223 - }, - { - "epoch": 0.5079059700595202, - "flos": 15485030888640.0, - "grad_norm": 3.4879414050053903, - "learning_rate": 2.045955282379702e-06, - "loss": 1.0298, - "num_input_tokens_seen": 89908745, - "step": 4224 - }, - { - "epoch": 0.5080262129501594, - "flos": 9296402703120.0, - "grad_norm": 5.12626464871423, - "learning_rate": 2.045176512931152e-06, - "loss": 0.9724, - "num_input_tokens_seen": 89923095, - "step": 4225 - }, - { - "epoch": 0.5081464558407984, - "flos": 18002203026000.0, - "grad_norm": 2.6335258083581743, - "learning_rate": 2.0443977366293604e-06, - "loss": 0.9799, - "num_input_tokens_seen": 89940855, - "step": 4226 - }, - { - "epoch": 0.5082666987314375, - "flos": 22061825660280.0, - "grad_norm": 4.486999806869363, - "learning_rate": 2.043618953592468e-06, - "loss": 0.999, - "num_input_tokens_seen": 89963030, - "step": 4227 - }, - { - "epoch": 0.5083869416220766, - "flos": 13963926385560.0, - "grad_norm": 2.8625427376167885, - "learning_rate": 2.0428401639386144e-06, - "loss": 1.0366, - "num_input_tokens_seen": 89983315, - "step": 4228 - }, - { - "epoch": 0.5085071845127157, - "flos": 51436902592200.0, - "grad_norm": 0.9035665310555173, - "learning_rate": 2.042061367785943e-06, - "loss": 0.8777, - "num_input_tokens_seen": 90036175, - "step": 4229 - }, - { - "epoch": 0.5086274274033548, - "flos": 25655752640520.0, - "grad_norm": 4.856461709610616, - "learning_rate": 2.041282565252594e-06, - "loss": 0.9719, - "num_input_tokens_seen": 90056060, - "step": 4230 - }, - { - "epoch": 0.5087476702939938, - "flos": 16717395775440.0, - "grad_norm": 2.8737289272629205, - "learning_rate": 2.040503756456714e-06, - "loss": 0.9868, - "num_input_tokens_seen": 90074990, - "step": 4231 - }, - { - "epoch": 0.508867913184633, - "flos": 10686493281480.0, - "grad_norm": 4.459805373320175, - "learning_rate": 2.0397249415164456e-06, - "loss": 1.0222, - "num_input_tokens_seen": 90092065, - "step": 4232 - }, - { - "epoch": 0.508988156075272, - "flos": 18421803259200.0, - "grad_norm": 2.68904610450973, - "learning_rate": 2.0389461205499354e-06, - "loss": 1.0256, - "num_input_tokens_seen": 90110920, - "step": 4233 - }, - { - "epoch": 0.5091083989659111, - "flos": 9768537284760.0, - "grad_norm": 3.9363273206465634, - "learning_rate": 2.03816729367533e-06, - "loss": 0.9556, - "num_input_tokens_seen": 90128795, - "step": 4234 - }, - { - "epoch": 0.5092286418565503, - "flos": 14986092555480.0, - "grad_norm": 6.334412566470221, - "learning_rate": 2.0373884610107765e-06, - "loss": 0.9254, - "num_input_tokens_seen": 90148830, - "step": 4235 - }, - { - "epoch": 0.5093488847471893, - "flos": 13200844555320.0, - "grad_norm": 12.102730662025783, - "learning_rate": 2.0366096226744225e-06, - "loss": 0.9063, - "num_input_tokens_seen": 90163745, - "step": 4236 - }, - { - "epoch": 0.5094691276378284, - "flos": 16925601490920.0, - "grad_norm": 3.287466210021215, - "learning_rate": 2.035830778784418e-06, - "loss": 0.9938, - "num_input_tokens_seen": 90184140, - "step": 4237 - }, - { - "epoch": 0.5095893705284675, - "flos": 12338213109000.0, - "grad_norm": 13.21352043145137, - "learning_rate": 2.0350519294589134e-06, - "loss": 1.0265, - "num_input_tokens_seen": 
90201900, - "step": 4238 - }, - { - "epoch": 0.5097096134191066, - "flos": 18396042000720.0, - "grad_norm": 2.3464304537266867, - "learning_rate": 2.0342730748160588e-06, - "loss": 1.0534, - "num_input_tokens_seen": 90222085, - "step": 4239 - }, - { - "epoch": 0.5098298563097456, - "flos": 19758286334520.0, - "grad_norm": 2.6789014013585275, - "learning_rate": 2.033494214974006e-06, - "loss": 0.9117, - "num_input_tokens_seen": 90242950, - "step": 4240 - }, - { - "epoch": 0.5099500992003848, - "flos": 15168475689360.0, - "grad_norm": 3.3434841727716043, - "learning_rate": 2.0327153500509067e-06, - "loss": 1.0604, - "num_input_tokens_seen": 90261695, - "step": 4241 - }, - { - "epoch": 0.5100703420910239, - "flos": 14095952141760.0, - "grad_norm": 2.71614932495556, - "learning_rate": 2.031936480164916e-06, - "loss": 1.0847, - "num_input_tokens_seen": 90279155, - "step": 4242 - }, - { - "epoch": 0.5101905849816629, - "flos": 17533686508440.0, - "grad_norm": 3.570288726927828, - "learning_rate": 2.0311576054341857e-06, - "loss": 1.0303, - "num_input_tokens_seen": 90299490, - "step": 4243 - }, - { - "epoch": 0.5103108278723021, - "flos": 16297948850040.0, - "grad_norm": 4.933986137775792, - "learning_rate": 2.0303787259768715e-06, - "loss": 0.8573, - "num_input_tokens_seen": 90320110, - "step": 4244 - }, - { - "epoch": 0.5104310707629411, - "flos": 15274862833320.0, - "grad_norm": 4.323665593854417, - "learning_rate": 2.0295998419111294e-06, - "loss": 0.906, - "num_input_tokens_seen": 90337120, - "step": 4245 - }, - { - "epoch": 0.5105513136535802, - "flos": 10529166190200.0, - "grad_norm": 3.9190439895765814, - "learning_rate": 2.028820953355115e-06, - "loss": 0.95, - "num_input_tokens_seen": 90354940, - "step": 4246 - }, - { - "epoch": 0.5106715565442194, - "flos": 16062387474960.0, - "grad_norm": 4.47218521503014, - "learning_rate": 2.0280420604269834e-06, - "loss": 1.0074, - "num_input_tokens_seen": 90374200, - "step": 4247 - }, - { - "epoch": 0.5107917994348584, - "flos": 50870952106200.0, - "grad_norm": 0.8164123826960152, - "learning_rate": 2.027263163244895e-06, - "loss": 0.8741, - "num_input_tokens_seen": 90443045, - "step": 4248 - }, - { - "epoch": 0.5109120423254975, - "flos": 17661542292480.0, - "grad_norm": 2.46584448472063, - "learning_rate": 2.026484261927005e-06, - "loss": 0.9693, - "num_input_tokens_seen": 90462260, - "step": 4249 - }, - { - "epoch": 0.5110322852161366, - "flos": 15088279065720.0, - "grad_norm": 11.38137672181992, - "learning_rate": 2.025705356591475e-06, - "loss": 0.9572, - "num_input_tokens_seen": 90479670, - "step": 4250 - }, - { - "epoch": 0.5111525281067757, - "flos": 47584872442200.0, - "grad_norm": 0.8407097188809574, - "learning_rate": 2.024926447356462e-06, - "loss": 0.8518, - "num_input_tokens_seen": 90541675, - "step": 4251 - }, - { - "epoch": 0.5112727709974147, - "flos": 10502975669880.0, - "grad_norm": 2.5026937277517556, - "learning_rate": 2.024147534340127e-06, - "loss": 1.0075, - "num_input_tokens_seen": 90559255, - "step": 4252 - }, - { - "epoch": 0.5113930138880539, - "flos": 15038320288320.0, - "grad_norm": 4.926758296425893, - "learning_rate": 2.02336861766063e-06, - "loss": 1.0174, - "num_input_tokens_seen": 90578035, - "step": 4253 - }, - { - "epoch": 0.511513256778693, - "flos": 14485713129000.0, - "grad_norm": 3.479931105885429, - "learning_rate": 2.0225896974361327e-06, - "loss": 1.0081, - "num_input_tokens_seen": 90597860, - "step": 4254 - }, - { - "epoch": 0.511633499669332, - "flos": 50043899165640.0, - "grad_norm": 
0.9402908111738314, - "learning_rate": 2.0218107737847962e-06, - "loss": 0.8848, - "num_input_tokens_seen": 90659625, - "step": 4255 - }, - { - "epoch": 0.5117537425599712, - "flos": 17137241301120.0, - "grad_norm": 3.1836464719560866, - "learning_rate": 2.0210318468247826e-06, - "loss": 0.9889, - "num_input_tokens_seen": 90678315, - "step": 4256 - }, - { - "epoch": 0.5118739854506102, - "flos": 14881729074480.0, - "grad_norm": 2.9465123977957006, - "learning_rate": 2.020252916674255e-06, - "loss": 1.0451, - "num_input_tokens_seen": 90697150, - "step": 4257 - }, - { - "epoch": 0.5119942283412493, - "flos": 12364802229600.0, - "grad_norm": 5.243593394108592, - "learning_rate": 2.019473983451375e-06, - "loss": 1.0287, - "num_input_tokens_seen": 90715290, - "step": 4258 - }, - { - "epoch": 0.5121144712318885, - "flos": 14958460941840.0, - "grad_norm": 3.1365226408768994, - "learning_rate": 2.0186950472743076e-06, - "loss": 0.9402, - "num_input_tokens_seen": 90734915, - "step": 4259 - }, - { - "epoch": 0.5122347141225275, - "flos": 14091506215560.0, - "grad_norm": 3.629274938802639, - "learning_rate": 2.0179161082612162e-06, - "loss": 0.9728, - "num_input_tokens_seen": 90754025, - "step": 4260 - }, - { - "epoch": 0.5123549570131666, - "flos": 16009301218440.0, - "grad_norm": 4.01652166545414, - "learning_rate": 2.017137166530266e-06, - "loss": 0.9418, - "num_input_tokens_seen": 90773280, - "step": 4261 - }, - { - "epoch": 0.5124751999038056, - "flos": 14430909825120.0, - "grad_norm": 3.2344220291625416, - "learning_rate": 2.0163582221996213e-06, - "loss": 1.0328, - "num_input_tokens_seen": 90791375, - "step": 4262 - }, - { - "epoch": 0.5125954427944448, - "flos": 28411889586120.0, - "grad_norm": 2.8331823495765587, - "learning_rate": 2.015579275387446e-06, - "loss": 0.9078, - "num_input_tokens_seen": 90816415, - "step": 4263 - }, - { - "epoch": 0.5127156856850839, - "flos": 20807869487160.0, - "grad_norm": 4.240574651319451, - "learning_rate": 2.0148003262119085e-06, - "loss": 0.9082, - "num_input_tokens_seen": 90837105, - "step": 4264 - }, - { - "epoch": 0.5128359285757229, - "flos": 9559381060920.0, - "grad_norm": 3.3739632320967172, - "learning_rate": 2.0140213747911728e-06, - "loss": 0.9829, - "num_input_tokens_seen": 90855360, - "step": 4265 - }, - { - "epoch": 0.5129561714663621, - "flos": 17923600803480.0, - "grad_norm": 5.361317985816915, - "learning_rate": 2.013242421243406e-06, - "loss": 1.0287, - "num_input_tokens_seen": 90873985, - "step": 4266 - }, - { - "epoch": 0.5130764143570011, - "flos": 12862728731280.0, - "grad_norm": 3.3738018310907343, - "learning_rate": 2.012463465686774e-06, - "loss": 1.0118, - "num_input_tokens_seen": 90893455, - "step": 4267 - }, - { - "epoch": 0.5131966572476402, - "flos": 42798844751520.0, - "grad_norm": 0.8562744387737937, - "learning_rate": 2.0116845082394446e-06, - "loss": 0.8291, - "num_input_tokens_seen": 90958010, - "step": 4268 - }, - { - "epoch": 0.5133169001382794, - "flos": 13125063196320.0, - "grad_norm": 4.326512666458674, - "learning_rate": 2.0109055490195836e-06, - "loss": 1.0134, - "num_input_tokens_seen": 90976185, - "step": 4269 - }, - { - "epoch": 0.5134371430289184, - "flos": 10975018266840.0, - "grad_norm": 5.777614130653293, - "learning_rate": 2.0101265881453605e-06, - "loss": 0.8671, - "num_input_tokens_seen": 90994380, - "step": 4270 - }, - { - "epoch": 0.5135573859195575, - "flos": 15222543115800.0, - "grad_norm": 3.1433571844992834, - "learning_rate": 2.009347625734941e-06, - "loss": 1.0151, - "num_input_tokens_seen": 
91014010, - "step": 4271 - }, - { - "epoch": 0.5136776288101966, - "flos": 12548289179640.0, - "grad_norm": 3.7039442484060547, - "learning_rate": 2.0085686619064954e-06, - "loss": 0.9796, - "num_input_tokens_seen": 91030170, - "step": 4272 - }, - { - "epoch": 0.5137978717008357, - "flos": 11735493864480.0, - "grad_norm": 4.041859232192092, - "learning_rate": 2.00778969677819e-06, - "loss": 1.0609, - "num_input_tokens_seen": 91046925, - "step": 4273 - }, - { - "epoch": 0.5139181145914747, - "flos": 14672450204400.0, - "grad_norm": 5.544462985150245, - "learning_rate": 2.0070107304681934e-06, - "loss": 0.873, - "num_input_tokens_seen": 91065600, - "step": 4274 - }, - { - "epoch": 0.5140383574821139, - "flos": 23482522023600.0, - "grad_norm": 3.645287667266333, - "learning_rate": 2.006231763094675e-06, - "loss": 1.02, - "num_input_tokens_seen": 91086340, - "step": 4275 - }, - { - "epoch": 0.514158600372753, - "flos": 13859501581440.0, - "grad_norm": 3.138233241995335, - "learning_rate": 2.0054527947758027e-06, - "loss": 1.1067, - "num_input_tokens_seen": 91104860, - "step": 4276 - }, - { - "epoch": 0.514278843263392, - "flos": 44759086449600.0, - "grad_norm": 0.7658222100201044, - "learning_rate": 2.004673825629746e-06, - "loss": 0.8214, - "num_input_tokens_seen": 91165360, - "step": 4277 - }, - { - "epoch": 0.5143990861540312, - "flos": 18419258349720.0, - "grad_norm": 2.482087143510916, - "learning_rate": 2.0038948557746744e-06, - "loss": 0.9525, - "num_input_tokens_seen": 91186935, - "step": 4278 - }, - { - "epoch": 0.5145193290446702, - "flos": 16585737957960.0, - "grad_norm": 2.4935777188786177, - "learning_rate": 2.0031158853287558e-06, - "loss": 0.9688, - "num_input_tokens_seen": 91207090, - "step": 4279 - }, - { - "epoch": 0.5146395719353093, - "flos": 16245077224440.0, - "grad_norm": 2.78028578599734, - "learning_rate": 2.0023369144101593e-06, - "loss": 0.9372, - "num_input_tokens_seen": 91224980, - "step": 4280 - }, - { - "epoch": 0.5147598148259485, - "flos": 18785036448960.0, - "grad_norm": 3.3316502772569465, - "learning_rate": 2.0015579431370555e-06, - "loss": 0.9901, - "num_input_tokens_seen": 91246380, - "step": 4281 - }, - { - "epoch": 0.5148800577165875, - "flos": 21353485810800.0, - "grad_norm": 3.923171964992962, - "learning_rate": 2.000778971627612e-06, - "loss": 0.9291, - "num_input_tokens_seen": 91265565, - "step": 4282 - }, - { - "epoch": 0.5150003006072266, - "flos": 12705462963120.0, - "grad_norm": 9.265211639704493, - "learning_rate": 2e-06, - "loss": 1.1271, - "num_input_tokens_seen": 91282880, - "step": 4283 - }, - { - "epoch": 0.5151205434978657, - "flos": 13098842014440.0, - "grad_norm": 2.663572204756915, - "learning_rate": 1.9992210283723878e-06, - "loss": 1.0838, - "num_input_tokens_seen": 91299840, - "step": 4284 - }, - { - "epoch": 0.5152407863885048, - "flos": 18031245071400.0, - "grad_norm": 5.002003649747265, - "learning_rate": 1.9984420568629448e-06, - "loss": 1.0108, - "num_input_tokens_seen": 91322325, - "step": 4285 - }, - { - "epoch": 0.5153610292791438, - "flos": 12991351054320.0, - "grad_norm": 3.6132481259039135, - "learning_rate": 1.9976630855898405e-06, - "loss": 1.0077, - "num_input_tokens_seen": 91339800, - "step": 4286 - }, - { - "epoch": 0.515481272169783, - "flos": 21673628412600.0, - "grad_norm": 10.57755592426978, - "learning_rate": 1.9968841146712445e-06, - "loss": 0.9733, - "num_input_tokens_seen": 91359135, - "step": 4287 - }, - { - "epoch": 0.5156015150604221, - "flos": 16374711378960.0, - "grad_norm": 3.029864594357979, - 
"learning_rate": 1.996105144225326e-06, - "loss": 0.94, - "num_input_tokens_seen": 91379090, - "step": 4288 - }, - { - "epoch": 0.5157217579510611, - "flos": 12653143245600.0, - "grad_norm": 5.322112060571285, - "learning_rate": 1.995326174370254e-06, - "loss": 1.0145, - "num_input_tokens_seen": 91397910, - "step": 4289 - }, - { - "epoch": 0.5158420008417003, - "flos": 13596645869880.0, - "grad_norm": 3.0809757567148033, - "learning_rate": 1.994547205224197e-06, - "loss": 0.9555, - "num_input_tokens_seen": 91416535, - "step": 4290 - }, - { - "epoch": 0.5159622437323393, - "flos": 15931066934640.0, - "grad_norm": 3.3624853594720046, - "learning_rate": 1.993768236905325e-06, - "loss": 0.9019, - "num_input_tokens_seen": 91436925, - "step": 4291 - }, - { - "epoch": 0.5160824866229784, - "flos": 17499523982520.0, - "grad_norm": 3.1845486010548605, - "learning_rate": 1.992989269531807e-06, - "loss": 0.8809, - "num_input_tokens_seen": 91455455, - "step": 4292 - }, - { - "epoch": 0.5162027295136175, - "flos": 12756525556680.0, - "grad_norm": 4.40928621047265, - "learning_rate": 1.99221030322181e-06, - "loss": 0.8843, - "num_input_tokens_seen": 91471980, - "step": 4293 - }, - { - "epoch": 0.5163229724042566, - "flos": 19470558549720.0, - "grad_norm": 2.945871315315128, - "learning_rate": 1.991431338093505e-06, - "loss": 1.033, - "num_input_tokens_seen": 91494055, - "step": 4294 - }, - { - "epoch": 0.5164432152948957, - "flos": 15459054999240.0, - "grad_norm": 3.5005203950111006, - "learning_rate": 1.9906523742650587e-06, - "loss": 1.0091, - "num_input_tokens_seen": 91512635, - "step": 4295 - }, - { - "epoch": 0.5165634581855347, - "flos": 18180170895240.0, - "grad_norm": 10.314194418680872, - "learning_rate": 1.9898734118546397e-06, - "loss": 0.9812, - "num_input_tokens_seen": 91532305, - "step": 4296 - }, - { - "epoch": 0.5166837010761739, - "flos": 14121069507480.0, - "grad_norm": 2.7028411539803634, - "learning_rate": 1.989094450980416e-06, - "loss": 1.0373, - "num_input_tokens_seen": 91552125, - "step": 4297 - }, - { - "epoch": 0.516803943966813, - "flos": 18967818183120.0, - "grad_norm": 4.019882854064331, - "learning_rate": 1.9883154917605556e-06, - "loss": 0.9813, - "num_input_tokens_seen": 91571055, - "step": 4298 - }, - { - "epoch": 0.516924186857452, - "flos": 13964233001160.0, - "grad_norm": 2.093049231018128, - "learning_rate": 1.9875365343132262e-06, - "loss": 1.0507, - "num_input_tokens_seen": 91587895, - "step": 4299 - }, - { - "epoch": 0.5170444297480912, - "flos": 11285594461920.0, - "grad_norm": 3.251620108934115, - "learning_rate": 1.9867575787565946e-06, - "loss": 1.0763, - "num_input_tokens_seen": 91602275, - "step": 4300 - }, - { - "epoch": 0.5171646726387302, - "flos": 10005325122240.0, - "grad_norm": 3.75743484978657, - "learning_rate": 1.9859786252088275e-06, - "loss": 1.0878, - "num_input_tokens_seen": 91619175, - "step": 4301 - }, - { - "epoch": 0.5172849155293693, - "flos": 16762877965080.0, - "grad_norm": 4.137569581337004, - "learning_rate": 1.9851996737880914e-06, - "loss": 0.8971, - "num_input_tokens_seen": 91634080, - "step": 4302 - }, - { - "epoch": 0.5174051584200084, - "flos": 10083927344760.0, - "grad_norm": 2.950359523007737, - "learning_rate": 1.9844207246125537e-06, - "loss": 0.9716, - "num_input_tokens_seen": 91650380, - "step": 4303 - }, - { - "epoch": 0.5175254013106475, - "flos": 27048940036440.0, - "grad_norm": 7.128974207724035, - "learning_rate": 1.983641777800379e-06, - "loss": 0.9105, - "num_input_tokens_seen": 91672745, - "step": 4304 - }, - { - 
"epoch": 0.5176456442012866, - "flos": 49090548632520.0, - "grad_norm": 0.8941787624933442, - "learning_rate": 1.9828628334697343e-06, - "loss": 0.8563, - "num_input_tokens_seen": 91739675, - "step": 4305 - }, - { - "epoch": 0.5177658870919257, - "flos": 45879269157600.0, - "grad_norm": 0.8034150516396309, - "learning_rate": 1.982083891738784e-06, - "loss": 0.8211, - "num_input_tokens_seen": 91800265, - "step": 4306 - }, - { - "epoch": 0.5178861299825648, - "flos": 18972233447760.0, - "grad_norm": 2.1594617944876466, - "learning_rate": 1.9813049527256923e-06, - "loss": 1.057, - "num_input_tokens_seen": 91820380, - "step": 4307 - }, - { - "epoch": 0.5180063728732038, - "flos": 12705892224960.0, - "grad_norm": 5.214614051589173, - "learning_rate": 1.9805260165486252e-06, - "loss": 1.0379, - "num_input_tokens_seen": 91839470, - "step": 4308 - }, - { - "epoch": 0.518126615763843, - "flos": 13833525692040.0, - "grad_norm": 4.141501049719078, - "learning_rate": 1.9797470833257457e-06, - "loss": 1.09, - "num_input_tokens_seen": 91858890, - "step": 4309 - }, - { - "epoch": 0.5182468586544821, - "flos": 14698119478200.0, - "grad_norm": 6.545487213534897, - "learning_rate": 1.9789681531752177e-06, - "loss": 1.0019, - "num_input_tokens_seen": 91878830, - "step": 4310 - }, - { - "epoch": 0.5183671015451211, - "flos": 16428717482280.0, - "grad_norm": 2.8621384238217233, - "learning_rate": 1.978189226215204e-06, - "loss": 0.957, - "num_input_tokens_seen": 91899095, - "step": 4311 - }, - { - "epoch": 0.5184873444357603, - "flos": 12464413168800.0, - "grad_norm": 2.759861603597978, - "learning_rate": 1.9774103025638675e-06, - "loss": 0.9884, - "num_input_tokens_seen": 91916940, - "step": 4312 - }, - { - "epoch": 0.5186075873263993, - "flos": 17740144515000.0, - "grad_norm": 10.995408226855213, - "learning_rate": 1.9766313823393696e-06, - "loss": 0.9913, - "num_input_tokens_seen": 91937525, - "step": 4313 - }, - { - "epoch": 0.5187278302170384, - "flos": 10734704349960.0, - "grad_norm": 5.022082583606377, - "learning_rate": 1.975852465659873e-06, - "loss": 0.9099, - "num_input_tokens_seen": 91953225, - "step": 4314 - }, - { - "epoch": 0.5188480731076776, - "flos": 17792280263160.0, - "grad_norm": 3.1513290568463215, - "learning_rate": 1.9750735526435377e-06, - "loss": 0.9196, - "num_input_tokens_seen": 91969890, - "step": 4315 - }, - { - "epoch": 0.5189683159983166, - "flos": 17634646556280.0, - "grad_norm": 18.760837525499845, - "learning_rate": 1.974294643408525e-06, - "loss": 1.0038, - "num_input_tokens_seen": 91987405, - "step": 4316 - }, - { - "epoch": 0.5190885588889557, - "flos": 17608946620920.0, - "grad_norm": 3.1529845918181514, - "learning_rate": 1.9735157380729947e-06, - "loss": 0.8985, - "num_input_tokens_seen": 92007535, - "step": 4317 - }, - { - "epoch": 0.5192088017795948, - "flos": 17578340835960.0, - "grad_norm": 2.4829771319783123, - "learning_rate": 1.9727368367551053e-06, - "loss": 1.0589, - "num_input_tokens_seen": 92025805, - "step": 4318 - }, - { - "epoch": 0.5193290446702339, - "flos": 19387142462280.0, - "grad_norm": 4.351494341715434, - "learning_rate": 1.9719579395730164e-06, - "loss": 0.9213, - "num_input_tokens_seen": 92044900, - "step": 4319 - }, - { - "epoch": 0.5194492875608729, - "flos": 8064191124120.0, - "grad_norm": 7.985393302465969, - "learning_rate": 1.9711790466448854e-06, - "loss": 1.1552, - "num_input_tokens_seen": 92058640, - "step": 4320 - }, - { - "epoch": 0.5195695304515121, - "flos": 14435631705360.0, - "grad_norm": 4.231444298299267, - "learning_rate": 
1.9704001580888704e-06, - "loss": 0.9282, - "num_input_tokens_seen": 92077100, - "step": 4321 - }, - { - "epoch": 0.5196897733421512, - "flos": 14226966066480.0, - "grad_norm": 3.9345580721220004, - "learning_rate": 1.9696212740231283e-06, - "loss": 1.1025, - "num_input_tokens_seen": 92095470, - "step": 4322 - }, - { - "epoch": 0.5198100162327902, - "flos": 16927073245800.0, - "grad_norm": 4.926769910633064, - "learning_rate": 1.9688423945658146e-06, - "loss": 1.0386, - "num_input_tokens_seen": 92116055, - "step": 4323 - }, - { - "epoch": 0.5199302591234293, - "flos": 17084001736800.0, - "grad_norm": 3.632006221429815, - "learning_rate": 1.9680635198350845e-06, - "loss": 0.9484, - "num_input_tokens_seen": 92135485, - "step": 4324 - }, - { - "epoch": 0.5200505020140684, - "flos": 18762463992720.0, - "grad_norm": 4.2246730339343825, - "learning_rate": 1.967284649949093e-06, - "loss": 0.9507, - "num_input_tokens_seen": 92154415, - "step": 4325 - }, - { - "epoch": 0.5201707449047075, - "flos": 28281396907920.0, - "grad_norm": 4.107235659859926, - "learning_rate": 1.966505785025994e-06, - "loss": 0.953, - "num_input_tokens_seen": 92176040, - "step": 4326 - }, - { - "epoch": 0.5202909877953465, - "flos": 38398695126120.0, - "grad_norm": 3.6721479598510305, - "learning_rate": 1.965726925183941e-06, - "loss": 0.9943, - "num_input_tokens_seen": 92198865, - "step": 4327 - }, - { - "epoch": 0.5204112306859857, - "flos": 13859440258320.0, - "grad_norm": 2.6370016666793332, - "learning_rate": 1.964948070541087e-06, - "loss": 1.0636, - "num_input_tokens_seen": 92217245, - "step": 4328 - }, - { - "epoch": 0.5205314735766248, - "flos": 10817752498680.0, - "grad_norm": 4.1534184727481565, - "learning_rate": 1.9641692212155816e-06, - "loss": 0.9091, - "num_input_tokens_seen": 92234730, - "step": 4329 - }, - { - "epoch": 0.5206517164672638, - "flos": 42409401476040.0, - "grad_norm": 2.7119450964025984, - "learning_rate": 1.9633903773255777e-06, - "loss": 0.9532, - "num_input_tokens_seen": 92256765, - "step": 4330 - }, - { - "epoch": 0.520771959357903, - "flos": 19130388401160.0, - "grad_norm": 2.469353131366621, - "learning_rate": 1.9626115389892237e-06, - "loss": 0.97, - "num_input_tokens_seen": 92277275, - "step": 4331 - }, - { - "epoch": 0.520892202248542, - "flos": 19156149659640.0, - "grad_norm": 2.886661926453584, - "learning_rate": 1.96183270632467e-06, - "loss": 1.0679, - "num_input_tokens_seen": 92296845, - "step": 4332 - }, - { - "epoch": 0.5210124451391811, - "flos": 18396103323840.0, - "grad_norm": 5.708674993755221, - "learning_rate": 1.9610538794500644e-06, - "loss": 1.0098, - "num_input_tokens_seen": 92316115, - "step": 4333 - }, - { - "epoch": 0.5211326880298203, - "flos": 50530475342040.0, - "grad_norm": 0.8027627016438944, - "learning_rate": 1.9602750584835542e-06, - "loss": 0.8624, - "num_input_tokens_seen": 92381770, - "step": 4334 - }, - { - "epoch": 0.5212529309204593, - "flos": 11050186394640.0, - "grad_norm": 3.25869468252789, - "learning_rate": 1.959496243543286e-06, - "loss": 1.0531, - "num_input_tokens_seen": 92399370, - "step": 4335 - }, - { - "epoch": 0.5213731738110984, - "flos": 18677606811960.0, - "grad_norm": 4.6457277750701875, - "learning_rate": 1.9587174347474057e-06, - "loss": 1.0164, - "num_input_tokens_seen": 92415600, - "step": 4336 - }, - { - "epoch": 0.5214934167017375, - "flos": 13772958014880.0, - "grad_norm": 5.914560435610103, - "learning_rate": 1.9579386322140574e-06, - "loss": 1.0348, - "num_input_tokens_seen": 92431000, - "step": 4337 - }, - { - "epoch": 
0.5216136595923766, - "flos": 22065719678400.0, - "grad_norm": 2.7260324569583503, - "learning_rate": 1.9571598360613854e-06, - "loss": 1.0257, - "num_input_tokens_seen": 92453595, - "step": 4338 - }, - { - "epoch": 0.5217339024830157, - "flos": 15590038262400.0, - "grad_norm": 4.420269462346269, - "learning_rate": 1.956381046407532e-06, - "loss": 0.9211, - "num_input_tokens_seen": 92473610, - "step": 4339 - }, - { - "epoch": 0.5218541453736548, - "flos": 14855630538840.0, - "grad_norm": 4.277380538155154, - "learning_rate": 1.9556022633706394e-06, - "loss": 1.0821, - "num_input_tokens_seen": 92492120, - "step": 4340 - }, - { - "epoch": 0.5219743882642939, - "flos": 17032141942680.0, - "grad_norm": 5.753787576211688, - "learning_rate": 1.954823487068848e-06, - "loss": 1.0197, - "num_input_tokens_seen": 92512050, - "step": 4341 - }, - { - "epoch": 0.5220946311549329, - "flos": 20515358499000.0, - "grad_norm": 3.3365500823494867, - "learning_rate": 1.9540447176202976e-06, - "loss": 1.0281, - "num_input_tokens_seen": 92533015, - "step": 4342 - }, - { - "epoch": 0.5222148740455721, - "flos": 48111473050560.0, - "grad_norm": 0.9165122957003146, - "learning_rate": 1.9532659551431272e-06, - "loss": 0.8823, - "num_input_tokens_seen": 92599765, - "step": 4343 - }, - { - "epoch": 0.5223351169362112, - "flos": 44272822436880.0, - "grad_norm": 4.802400966022324, - "learning_rate": 1.9524871997554744e-06, - "loss": 0.8965, - "num_input_tokens_seen": 92627245, - "step": 4344 - }, - { - "epoch": 0.5224553598268502, - "flos": 10345280639880.0, - "grad_norm": 7.053740884632019, - "learning_rate": 1.951708451575475e-06, - "loss": 1.0043, - "num_input_tokens_seen": 92644030, - "step": 4345 - }, - { - "epoch": 0.5225756027174894, - "flos": 10473167085480.0, - "grad_norm": 3.152381414592601, - "learning_rate": 1.9509297107212657e-06, - "loss": 1.032, - "num_input_tokens_seen": 92660520, - "step": 4346 - }, - { - "epoch": 0.5226958456081284, - "flos": 16715924020560.0, - "grad_norm": 3.1347233374289476, - "learning_rate": 1.95015097731098e-06, - "loss": 1.0171, - "num_input_tokens_seen": 92679730, - "step": 4347 - }, - { - "epoch": 0.5228160884987675, - "flos": 13518748863240.0, - "grad_norm": 4.457568498171796, - "learning_rate": 1.949372251462751e-06, - "loss": 1.0469, - "num_input_tokens_seen": 92696865, - "step": 4348 - }, - { - "epoch": 0.5229363313894067, - "flos": 14955456108960.0, - "grad_norm": 3.3744801721435467, - "learning_rate": 1.9485935332947124e-06, - "loss": 1.0533, - "num_input_tokens_seen": 92714495, - "step": 4349 - }, - { - "epoch": 0.5230565742800457, - "flos": 10476877134240.0, - "grad_norm": 5.733294226638729, - "learning_rate": 1.947814822924993e-06, - "loss": 1.0563, - "num_input_tokens_seen": 92731725, - "step": 4350 - }, - { - "epoch": 0.5231768171706848, - "flos": 18369943465080.0, - "grad_norm": 3.1135531011230855, - "learning_rate": 1.9470361204717236e-06, - "loss": 1.0436, - "num_input_tokens_seen": 92750585, - "step": 4351 - }, - { - "epoch": 0.5232970600613239, - "flos": 16163562153720.0, - "grad_norm": 3.048357167791254, - "learning_rate": 1.9462574260530326e-06, - "loss": 1.0359, - "num_input_tokens_seen": 92770585, - "step": 4352 - }, - { - "epoch": 0.523417302951963, - "flos": 12259334932440.0, - "grad_norm": 4.624560773458612, - "learning_rate": 1.9454787397870472e-06, - "loss": 1.0334, - "num_input_tokens_seen": 92787625, - "step": 4353 - }, - { - "epoch": 0.523537545842602, - "flos": 13151315039760.0, - "grad_norm": 4.653007248617659, - "learning_rate": 
1.944700061791894e-06, - "loss": 0.9489, - "num_input_tokens_seen": 92805740, - "step": 4354 - }, - { - "epoch": 0.5236577887332411, - "flos": 13990392859920.0, - "grad_norm": 4.454054564780279, - "learning_rate": 1.943921392185698e-06, - "loss": 0.8863, - "num_input_tokens_seen": 92824085, - "step": 4355 - }, - { - "epoch": 0.5237780316238803, - "flos": 16743616957320.0, - "grad_norm": 4.609509689173564, - "learning_rate": 1.9431427310865814e-06, - "loss": 0.9999, - "num_input_tokens_seen": 92843410, - "step": 4356 - }, - { - "epoch": 0.5238982745145193, - "flos": 15982988051880.0, - "grad_norm": 15.335521099638608, - "learning_rate": 1.942364078612667e-06, - "loss": 1.0212, - "num_input_tokens_seen": 92861860, - "step": 4357 - }, - { - "epoch": 0.5240185174051584, - "flos": 19286764984080.0, - "grad_norm": 3.5357163802191627, - "learning_rate": 1.9415854348820765e-06, - "loss": 0.9714, - "num_input_tokens_seen": 92881430, - "step": 4358 - }, - { - "epoch": 0.5241387602957975, - "flos": 16114400576880.0, - "grad_norm": 4.896096967808999, - "learning_rate": 1.940806800012929e-06, - "loss": 0.8968, - "num_input_tokens_seen": 92901220, - "step": 4359 - }, - { - "epoch": 0.5242590031864366, - "flos": 28963638221760.0, - "grad_norm": 15.615882105272863, - "learning_rate": 1.9400281741233432e-06, - "loss": 0.8618, - "num_input_tokens_seen": 92925830, - "step": 4360 - }, - { - "epoch": 0.5243792460770756, - "flos": 47742444825960.0, - "grad_norm": 0.6946104984962989, - "learning_rate": 1.939249557331435e-06, - "loss": 0.7875, - "num_input_tokens_seen": 92991365, - "step": 4361 - }, - { - "epoch": 0.5244994889677148, - "flos": 20073553748280.0, - "grad_norm": 4.516132260246706, - "learning_rate": 1.938470949755321e-06, - "loss": 0.9492, - "num_input_tokens_seen": 93010965, - "step": 4362 - }, - { - "epoch": 0.5246197318583539, - "flos": 47221240652160.0, - "grad_norm": 0.8865674263646187, - "learning_rate": 1.937692351513115e-06, - "loss": 0.8526, - "num_input_tokens_seen": 93069680, - "step": 4363 - }, - { - "epoch": 0.5247399747489929, - "flos": 14934999300360.0, - "grad_norm": 3.4826441002339075, - "learning_rate": 1.9369137627229297e-06, - "loss": 1.0257, - "num_input_tokens_seen": 93087800, - "step": 4364 - }, - { - "epoch": 0.5248602176396321, - "flos": 13492098419520.0, - "grad_norm": 3.0786085959148526, - "learning_rate": 1.936135183502877e-06, - "loss": 1.1043, - "num_input_tokens_seen": 93104820, - "step": 4365 - }, - { - "epoch": 0.5249804605302711, - "flos": 15773279919960.0, - "grad_norm": 5.174442633500038, - "learning_rate": 1.935356613971066e-06, - "loss": 1.0213, - "num_input_tokens_seen": 93125200, - "step": 4366 - }, - { - "epoch": 0.5251007034209102, - "flos": 16927226553600.0, - "grad_norm": 4.493404429257193, - "learning_rate": 1.9345780542456047e-06, - "loss": 0.9994, - "num_input_tokens_seen": 93144295, - "step": 4367 - }, - { - "epoch": 0.5252209463115494, - "flos": 16559884714800.0, - "grad_norm": 4.261305190176846, - "learning_rate": 1.9337995044446007e-06, - "loss": 0.9396, - "num_input_tokens_seen": 93162855, - "step": 4368 - }, - { - "epoch": 0.5253411892021884, - "flos": 14069025744000.0, - "grad_norm": 3.9950859163711376, - "learning_rate": 1.9330209646861596e-06, - "loss": 1.0292, - "num_input_tokens_seen": 93181725, - "step": 4369 - }, - { - "epoch": 0.5254614320928275, - "flos": 17162941236480.0, - "grad_norm": 4.005513444580097, - "learning_rate": 1.9322424350883843e-06, - "loss": 1.0091, - "num_input_tokens_seen": 93203280, - "step": 4370 - }, - { - "epoch": 
0.5255816749834666, - "flos": 17530405721520.0, - "grad_norm": 3.321475985257471, - "learning_rate": 1.931463915769379e-06, - "loss": 1.0118, - "num_input_tokens_seen": 93223115, - "step": 4371 - }, - { - "epoch": 0.5257019178741057, - "flos": 9978184093560.0, - "grad_norm": 14.233077324047132, - "learning_rate": 1.930685406847242e-06, - "loss": 0.9691, - "num_input_tokens_seen": 93237410, - "step": 4372 - }, - { - "epoch": 0.5258221607647448, - "flos": 16742543802720.0, - "grad_norm": 3.430733836110846, - "learning_rate": 1.9299069084400734e-06, - "loss": 1.0524, - "num_input_tokens_seen": 93257990, - "step": 4373 - }, - { - "epoch": 0.5259424036553839, - "flos": 17760233384880.0, - "grad_norm": 3.3569497513815696, - "learning_rate": 1.9291284206659717e-06, - "loss": 0.9116, - "num_input_tokens_seen": 93275895, - "step": 4374 - }, - { - "epoch": 0.526062646546023, - "flos": 20490087825480.0, - "grad_norm": 4.150291588891935, - "learning_rate": 1.928349943643032e-06, - "loss": 0.9355, - "num_input_tokens_seen": 93294715, - "step": 4375 - }, - { - "epoch": 0.526182889436662, - "flos": 16219929197160.0, - "grad_norm": 7.5982444241322895, - "learning_rate": 1.9275714774893493e-06, - "loss": 1.0572, - "num_input_tokens_seen": 93313890, - "step": 4376 - }, - { - "epoch": 0.5263031323273012, - "flos": 16297427603520.0, - "grad_norm": 3.9086883258986145, - "learning_rate": 1.9267930223230154e-06, - "loss": 0.945, - "num_input_tokens_seen": 93332085, - "step": 4377 - }, - { - "epoch": 0.5264233752179402, - "flos": 12574081099680.0, - "grad_norm": 55.46381139986685, - "learning_rate": 1.9260145782621224e-06, - "loss": 1.0064, - "num_input_tokens_seen": 93349585, - "step": 4378 - }, - { - "epoch": 0.5265436181085793, - "flos": 17369307258360.0, - "grad_norm": 3.6070488373887617, - "learning_rate": 1.925236145424758e-06, - "loss": 1.1011, - "num_input_tokens_seen": 93368125, - "step": 4379 - }, - { - "epoch": 0.5266638609992185, - "flos": 49563235122240.0, - "grad_norm": 0.6936843325491662, - "learning_rate": 1.924457723929012e-06, - "loss": 0.8306, - "num_input_tokens_seen": 93438655, - "step": 4380 - }, - { - "epoch": 0.5267841038898575, - "flos": 14722746258960.0, - "grad_norm": 7.040153867600636, - "learning_rate": 1.9236793138929685e-06, - "loss": 1.0499, - "num_input_tokens_seen": 93457645, - "step": 4381 - }, - { - "epoch": 0.5269043467804966, - "flos": 12204041043600.0, - "grad_norm": 7.269753216486326, - "learning_rate": 1.9229009154347133e-06, - "loss": 1.035, - "num_input_tokens_seen": 93474955, - "step": 4382 - }, - { - "epoch": 0.5270245896711357, - "flos": 12915140433480.0, - "grad_norm": 5.221608732364601, - "learning_rate": 1.922122528672327e-06, - "loss": 1.0259, - "num_input_tokens_seen": 93493340, - "step": 4383 - }, - { - "epoch": 0.5271448325617748, - "flos": 15117413095800.0, - "grad_norm": 4.458176488286215, - "learning_rate": 1.9213441537238914e-06, - "loss": 1.0026, - "num_input_tokens_seen": 93509935, - "step": 4384 - }, - { - "epoch": 0.5272650754524139, - "flos": 46892022228600.0, - "grad_norm": 1.0255244285839498, - "learning_rate": 1.920565790707485e-06, - "loss": 0.8845, - "num_input_tokens_seen": 93575045, - "step": 4385 - }, - { - "epoch": 0.527385318343053, - "flos": 13958744581920.0, - "grad_norm": 8.689405932033567, - "learning_rate": 1.9197874397411853e-06, - "loss": 0.8812, - "num_input_tokens_seen": 93591395, - "step": 4386 - }, - { - "epoch": 0.5275055612336921, - "flos": 8954147568480.0, - "grad_norm": 10.359101396862968, - "learning_rate": 
1.919009100943067e-06, - "loss": 0.8534, - "num_input_tokens_seen": 93606805, - "step": 4387 - }, - { - "epoch": 0.5276258041243311, - "flos": 12574418376840.0, - "grad_norm": 4.401240538067169, - "learning_rate": 1.9182307744312043e-06, - "loss": 0.8839, - "num_input_tokens_seen": 93623630, - "step": 4388 - }, - { - "epoch": 0.5277460470149702, - "flos": 16140008527560.0, - "grad_norm": 3.9255769439219206, - "learning_rate": 1.9174524603236676e-06, - "loss": 0.9823, - "num_input_tokens_seen": 93642300, - "step": 4389 - }, - { - "epoch": 0.5278662899056094, - "flos": 14122357293000.0, - "grad_norm": 69.7917101989914, - "learning_rate": 1.916674158738527e-06, - "loss": 0.9901, - "num_input_tokens_seen": 93660925, - "step": 4390 - }, - { - "epoch": 0.5279865327962484, - "flos": 12758641204320.0, - "grad_norm": 3.65896583830177, - "learning_rate": 1.9158958697938506e-06, - "loss": 0.8218, - "num_input_tokens_seen": 93679025, - "step": 4391 - }, - { - "epoch": 0.5281067756868875, - "flos": 11263083328800.0, - "grad_norm": 4.678387545733698, - "learning_rate": 1.9151175936077032e-06, - "loss": 1.0698, - "num_input_tokens_seen": 93693715, - "step": 4392 - }, - { - "epoch": 0.5282270185775266, - "flos": 13779856865880.0, - "grad_norm": 4.197185645177956, - "learning_rate": 1.9143393302981507e-06, - "loss": 1.0215, - "num_input_tokens_seen": 93711120, - "step": 4393 - }, - { - "epoch": 0.5283472614681657, - "flos": 11604694570680.0, - "grad_norm": 4.508713868464117, - "learning_rate": 1.913561079983252e-06, - "loss": 1.0497, - "num_input_tokens_seen": 93729665, - "step": 4394 - }, - { - "epoch": 0.5284675043588047, - "flos": 19050743685600.0, - "grad_norm": 16.423304008352925, - "learning_rate": 1.9127828427810693e-06, - "loss": 0.9669, - "num_input_tokens_seen": 93749950, - "step": 4395 - }, - { - "epoch": 0.5285877472494439, - "flos": 14119965691320.0, - "grad_norm": 3.6988682485167343, - "learning_rate": 1.9120046188096607e-06, - "loss": 1.0278, - "num_input_tokens_seen": 93767715, - "step": 4396 - }, - { - "epoch": 0.528707990140083, - "flos": 14200254299640.0, - "grad_norm": 3.864877461690639, - "learning_rate": 1.9112264081870804e-06, - "loss": 0.9763, - "num_input_tokens_seen": 93785825, - "step": 4397 - }, - { - "epoch": 0.528828233030722, - "flos": 14672204911920.0, - "grad_norm": 16.375755700298516, - "learning_rate": 1.9104482110313843e-06, - "loss": 0.9778, - "num_input_tokens_seen": 93805135, - "step": 4398 - }, - { - "epoch": 0.5289484759213612, - "flos": 17923907419080.0, - "grad_norm": 5.512266871221885, - "learning_rate": 1.909670027460623e-06, - "loss": 0.97, - "num_input_tokens_seen": 93822155, - "step": 4399 - }, - { - "epoch": 0.5290687188120002, - "flos": 22197714773040.0, - "grad_norm": 7.296365266758308, - "learning_rate": 1.908891857592847e-06, - "loss": 0.9392, - "num_input_tokens_seen": 93842945, - "step": 4400 - }, - { - "epoch": 0.5291889617026393, - "flos": 14278181967840.0, - "grad_norm": 5.426370916875721, - "learning_rate": 1.9081137015461034e-06, - "loss": 1.1164, - "num_input_tokens_seen": 93858740, - "step": 4401 - }, - { - "epoch": 0.5293092045932785, - "flos": 13935773525400.0, - "grad_norm": 3.9098952717914264, - "learning_rate": 1.9073355594384383e-06, - "loss": 1.1258, - "num_input_tokens_seen": 93876700, - "step": 4402 - }, - { - "epoch": 0.5294294474839175, - "flos": 17295396254520.0, - "grad_norm": 3.803525398635961, - "learning_rate": 1.906557431387895e-06, - "loss": 1.0363, - "num_input_tokens_seen": 93895410, - "step": 4403 - }, - { - "epoch": 
0.5295496903745566, - "flos": 13383748935720.0, - "grad_norm": 7.8155965542952455, - "learning_rate": 1.905779317512516e-06, - "loss": 1.0193, - "num_input_tokens_seen": 93912675, - "step": 4404 - }, - { - "epoch": 0.5296699332651957, - "flos": 14855538554160.0, - "grad_norm": 5.427562533826188, - "learning_rate": 1.9050012179303385e-06, - "loss": 1.0218, - "num_input_tokens_seen": 93930905, - "step": 4405 - }, - { - "epoch": 0.5297901761558348, - "flos": 15662078911080.0, - "grad_norm": 5.081897074095864, - "learning_rate": 1.904223132759401e-06, - "loss": 0.9039, - "num_input_tokens_seen": 93949225, - "step": 4406 - }, - { - "epoch": 0.5299104190464738, - "flos": 15484632288360.0, - "grad_norm": 5.120522380644732, - "learning_rate": 1.9034450621177383e-06, - "loss": 0.8988, - "num_input_tokens_seen": 93967265, - "step": 4407 - }, - { - "epoch": 0.530030661937113, - "flos": 10398090942360.0, - "grad_norm": 5.559283237225311, - "learning_rate": 1.9026670061233824e-06, - "loss": 0.9245, - "num_input_tokens_seen": 93984420, - "step": 4408 - }, - { - "epoch": 0.5301509048277521, - "flos": 15091559852640.0, - "grad_norm": 12.100715149482127, - "learning_rate": 1.901888964894365e-06, - "loss": 1.0271, - "num_input_tokens_seen": 94003180, - "step": 4409 - }, - { - "epoch": 0.5302711477183911, - "flos": 18473203129920.0, - "grad_norm": 5.391943551942471, - "learning_rate": 1.9011109385487134e-06, - "loss": 0.8983, - "num_input_tokens_seen": 94024150, - "step": 4410 - }, - { - "epoch": 0.5303913906090303, - "flos": 15825783606840.0, - "grad_norm": 4.459723045063659, - "learning_rate": 1.900332927204454e-06, - "loss": 0.8728, - "num_input_tokens_seen": 94042320, - "step": 4411 - }, - { - "epoch": 0.5305116334996693, - "flos": 17739469960680.0, - "grad_norm": 3.7494798935380222, - "learning_rate": 1.8995549309796097e-06, - "loss": 0.987, - "num_input_tokens_seen": 94061345, - "step": 4412 - }, - { - "epoch": 0.5306318763903084, - "flos": 14327650160280.0, - "grad_norm": 2.762557661740681, - "learning_rate": 1.8987769499922028e-06, - "loss": 0.9858, - "num_input_tokens_seen": 94080035, - "step": 4413 - }, - { - "epoch": 0.5307521192809476, - "flos": 14383557280320.0, - "grad_norm": 3.975704797604484, - "learning_rate": 1.897998984360252e-06, - "loss": 0.9282, - "num_input_tokens_seen": 94098725, - "step": 4414 - }, - { - "epoch": 0.5308723621715866, - "flos": 20548846470600.0, - "grad_norm": 3.4696966030932863, - "learning_rate": 1.897221034201775e-06, - "loss": 1.0071, - "num_input_tokens_seen": 94122185, - "step": 4415 - }, - { - "epoch": 0.5309926050622257, - "flos": 19552380236040.0, - "grad_norm": 3.281936428386556, - "learning_rate": 1.8964430996347842e-06, - "loss": 0.8902, - "num_input_tokens_seen": 94143455, - "step": 4416 - }, - { - "epoch": 0.5311128479528648, - "flos": 14560942579920.0, - "grad_norm": 3.7781240609974795, - "learning_rate": 1.8956651807772931e-06, - "loss": 1.0537, - "num_input_tokens_seen": 94161210, - "step": 4417 - }, - { - "epoch": 0.5312330908435039, - "flos": 15196628549520.0, - "grad_norm": 5.800199886692349, - "learning_rate": 1.8948872777473115e-06, - "loss": 1.054, - "num_input_tokens_seen": 94178885, - "step": 4418 - }, - { - "epoch": 0.531353333734143, - "flos": 17582357500320.0, - "grad_norm": 3.5730659001420353, - "learning_rate": 1.8941093906628458e-06, - "loss": 0.8631, - "num_input_tokens_seen": 94196390, - "step": 4419 - }, - { - "epoch": 0.531473576624782, - "flos": 21724169759640.0, - "grad_norm": 2.9694420258912944, - "learning_rate": 
1.893331519641902e-06, - "loss": 0.943, - "num_input_tokens_seen": 94218255, - "step": 4420 - }, - { - "epoch": 0.5315938195154212, - "flos": 16350329890680.0, - "grad_norm": 42.12886115696474, - "learning_rate": 1.8925536648024815e-06, - "loss": 0.9731, - "num_input_tokens_seen": 94235395, - "step": 4421 - }, - { - "epoch": 0.5317140624060602, - "flos": 16167640141200.0, - "grad_norm": 4.122451688669624, - "learning_rate": 1.8917758262625849e-06, - "loss": 0.9932, - "num_input_tokens_seen": 94255355, - "step": 4422 - }, - { - "epoch": 0.5318343052966993, - "flos": 16219683904680.0, - "grad_norm": 3.187714363818598, - "learning_rate": 1.8909980041402089e-06, - "loss": 1.037, - "num_input_tokens_seen": 94273670, - "step": 4423 - }, - { - "epoch": 0.5319545481873384, - "flos": 9611332839720.0, - "grad_norm": 2.9515378208855254, - "learning_rate": 1.8902201985533494e-06, - "loss": 0.8851, - "num_input_tokens_seen": 94290655, - "step": 4424 - }, - { - "epoch": 0.5320747910779775, - "flos": 15746138891280.0, - "grad_norm": 2.8240666223183335, - "learning_rate": 1.8894424096199983e-06, - "loss": 0.979, - "num_input_tokens_seen": 94309580, - "step": 4425 - }, - { - "epoch": 0.5321950339686166, - "flos": 13177413575400.0, - "grad_norm": 8.252427639833238, - "learning_rate": 1.8886646374581463e-06, - "loss": 1.0844, - "num_input_tokens_seen": 94328525, - "step": 4426 - }, - { - "epoch": 0.5323152768592557, - "flos": 16245261193800.0, - "grad_norm": 3.8300511088335174, - "learning_rate": 1.8878868821857795e-06, - "loss": 0.928, - "num_input_tokens_seen": 94347895, - "step": 4427 - }, - { - "epoch": 0.5324355197498948, - "flos": 24216807100920.0, - "grad_norm": 4.759737574291575, - "learning_rate": 1.8871091439208838e-06, - "loss": 0.966, - "num_input_tokens_seen": 94369225, - "step": 4428 - }, - { - "epoch": 0.5325557626405338, - "flos": 16532467732080.0, - "grad_norm": 5.645914318822364, - "learning_rate": 1.8863314227814414e-06, - "loss": 0.9843, - "num_input_tokens_seen": 94387255, - "step": 4429 - }, - { - "epoch": 0.532676005531173, - "flos": 19019953931280.0, - "grad_norm": 6.129262569808721, - "learning_rate": 1.8855537188854313e-06, - "loss": 0.69, - "num_input_tokens_seen": 94405950, - "step": 4430 - }, - { - "epoch": 0.5327962484218121, - "flos": 12678996488760.0, - "grad_norm": 4.151773436919311, - "learning_rate": 1.8847760323508315e-06, - "loss": 1.0055, - "num_input_tokens_seen": 94424575, - "step": 4431 - }, - { - "epoch": 0.5329164913124511, - "flos": 12700342482600.0, - "grad_norm": 2.946932966647207, - "learning_rate": 1.883998363295616e-06, - "loss": 0.9802, - "num_input_tokens_seen": 94441775, - "step": 4432 - }, - { - "epoch": 0.5330367342030903, - "flos": 40969745172480.0, - "grad_norm": 0.9556778349157851, - "learning_rate": 1.8832207118377565e-06, - "loss": 0.9177, - "num_input_tokens_seen": 94496865, - "step": 4433 - }, - { - "epoch": 0.5331569770937293, - "flos": 12338305093680.0, - "grad_norm": 2.8517405833198124, - "learning_rate": 1.882443078095222e-06, - "loss": 0.9102, - "num_input_tokens_seen": 94515465, - "step": 4434 - }, - { - "epoch": 0.5332772199843684, - "flos": 40608964907520.0, - "grad_norm": 0.866272211326771, - "learning_rate": 1.8816654621859794e-06, - "loss": 0.9187, - "num_input_tokens_seen": 94574850, - "step": 4435 - }, - { - "epoch": 0.5333974628750076, - "flos": 13256261090400.0, - "grad_norm": 3.596176787616672, - "learning_rate": 1.8808878642279915e-06, - "loss": 0.9371, - "num_input_tokens_seen": 94589975, - "step": 4436 - }, - { - "epoch": 
0.5335177057656466, - "flos": 16927073245800.0, - "grad_norm": 12.132016383652106, - "learning_rate": 1.8801102843392209e-06, - "loss": 0.8799, - "num_input_tokens_seen": 94609100, - "step": 4437 - }, - { - "epoch": 0.5336379486562857, - "flos": 17841564486240.0, - "grad_norm": 3.416200016687564, - "learning_rate": 1.8793327226376238e-06, - "loss": 1.0821, - "num_input_tokens_seen": 94628140, - "step": 4438 - }, - { - "epoch": 0.5337581915469248, - "flos": 15195800687400.0, - "grad_norm": 3.1869647006667488, - "learning_rate": 1.8785551792411569e-06, - "loss": 1.0244, - "num_input_tokens_seen": 94646870, - "step": 4439 - }, - { - "epoch": 0.5338784344375639, - "flos": 10502270454000.0, - "grad_norm": 4.045539311503826, - "learning_rate": 1.8777776542677733e-06, - "loss": 1.0489, - "num_input_tokens_seen": 94664640, - "step": 4440 - }, - { - "epoch": 0.5339986773282029, - "flos": 14776752362280.0, - "grad_norm": 3.143590637172119, - "learning_rate": 1.8770001478354216e-06, - "loss": 0.9387, - "num_input_tokens_seen": 94684035, - "step": 4441 - }, - { - "epoch": 0.5341189202188421, - "flos": 12732726638040.0, - "grad_norm": 4.906145228342401, - "learning_rate": 1.8762226600620504e-06, - "loss": 1.0516, - "num_input_tokens_seen": 94702370, - "step": 4442 - }, - { - "epoch": 0.5342391631094812, - "flos": 7746409462440.0, - "grad_norm": 6.125023978050164, - "learning_rate": 1.8754451910656031e-06, - "loss": 0.8241, - "num_input_tokens_seen": 94715990, - "step": 4443 - }, - { - "epoch": 0.5343594060001202, - "flos": 10842225971640.0, - "grad_norm": 4.038046851193845, - "learning_rate": 1.8746677409640212e-06, - "loss": 1.0412, - "num_input_tokens_seen": 94732810, - "step": 4444 - }, - { - "epoch": 0.5344796488907594, - "flos": 19150875871320.0, - "grad_norm": 15.258179519075274, - "learning_rate": 1.8738903098752432e-06, - "loss": 1.0658, - "num_input_tokens_seen": 94751660, - "step": 4445 - }, - { - "epoch": 0.5345998917813984, - "flos": 18081111864120.0, - "grad_norm": 4.309744355304739, - "learning_rate": 1.8731128979172052e-06, - "loss": 0.9572, - "num_input_tokens_seen": 94770580, - "step": 4446 - }, - { - "epoch": 0.5347201346720375, - "flos": 23429527751760.0, - "grad_norm": 4.670061761092351, - "learning_rate": 1.8723355052078394e-06, - "loss": 0.8852, - "num_input_tokens_seen": 94790335, - "step": 4447 - }, - { - "epoch": 0.5348403775626767, - "flos": 12731745468120.0, - "grad_norm": 3.347539163008549, - "learning_rate": 1.8715581318650765e-06, - "loss": 0.9905, - "num_input_tokens_seen": 94809110, - "step": 4448 - }, - { - "epoch": 0.5349606204533157, - "flos": 12469748280240.0, - "grad_norm": 3.6579840045076586, - "learning_rate": 1.8707807780068422e-06, - "loss": 1.0339, - "num_input_tokens_seen": 94826645, - "step": 4449 - }, - { - "epoch": 0.5350808633439548, - "flos": 20780728458480.0, - "grad_norm": 12.177825687558617, - "learning_rate": 1.8700034437510611e-06, - "loss": 0.8926, - "num_input_tokens_seen": 94846460, - "step": 4450 - }, - { - "epoch": 0.5352011062345938, - "flos": 13832759153040.0, - "grad_norm": 4.06773344545839, - "learning_rate": 1.8692261292156549e-06, - "loss": 1.0365, - "num_input_tokens_seen": 94865415, - "step": 4451 - }, - { - "epoch": 0.535321349125233, - "flos": 16691511870720.0, - "grad_norm": 3.2463434630560815, - "learning_rate": 1.8684488345185401e-06, - "loss": 1.0414, - "num_input_tokens_seen": 94885310, - "step": 4452 - }, - { - "epoch": 0.535441592015872, - "flos": 14534721398040.0, - "grad_norm": 5.667164088837948, - "learning_rate": 
1.8676715597776332e-06, - "loss": 1.0061, - "num_input_tokens_seen": 94903375, - "step": 4453 - }, - { - "epoch": 0.5355618349065111, - "flos": 13885354824600.0, - "grad_norm": 4.086454690618331, - "learning_rate": 1.8668943051108455e-06, - "loss": 0.9818, - "num_input_tokens_seen": 94920400, - "step": 4454 - }, - { - "epoch": 0.5356820777971503, - "flos": 17084584306440.0, - "grad_norm": 3.2541036374235626, - "learning_rate": 1.8661170706360856e-06, - "loss": 0.9856, - "num_input_tokens_seen": 94939285, - "step": 4455 - }, - { - "epoch": 0.5358023206877893, - "flos": 14827661648040.0, - "grad_norm": 3.110284260211804, - "learning_rate": 1.8653398564712594e-06, - "loss": 1.0485, - "num_input_tokens_seen": 94957950, - "step": 4456 - }, - { - "epoch": 0.5359225635784284, - "flos": 15930607011240.0, - "grad_norm": 2.773963126220077, - "learning_rate": 1.8645626627342704e-06, - "loss": 1.0476, - "num_input_tokens_seen": 94978435, - "step": 4457 - }, - { - "epoch": 0.5360428064690675, - "flos": 17137179978000.0, - "grad_norm": 3.5415349748848337, - "learning_rate": 1.8637854895430172e-06, - "loss": 1.0164, - "num_input_tokens_seen": 94997420, - "step": 4458 - }, - { - "epoch": 0.5361630493597066, - "flos": 15223156347000.0, - "grad_norm": 3.325256257471963, - "learning_rate": 1.8630083370153978e-06, - "loss": 0.9081, - "num_input_tokens_seen": 95016780, - "step": 4459 - }, - { - "epoch": 0.5362832922503457, - "flos": 49330653466560.0, - "grad_norm": 0.7864934437642809, - "learning_rate": 1.8622312052693041e-06, - "loss": 0.8177, - "num_input_tokens_seen": 95077680, - "step": 4460 - }, - { - "epoch": 0.5364035351409848, - "flos": 6856882279920.0, - "grad_norm": 13.39446289899446, - "learning_rate": 1.8614540944226267e-06, - "loss": 0.9467, - "num_input_tokens_seen": 95094070, - "step": 4461 - }, - { - "epoch": 0.5365237780316239, - "flos": 16556021358240.0, - "grad_norm": 4.1509758319045496, - "learning_rate": 1.8606770045932537e-06, - "loss": 0.9138, - "num_input_tokens_seen": 95112905, - "step": 4462 - }, - { - "epoch": 0.5366440209222629, - "flos": 18919668437760.0, - "grad_norm": 6.582924005201028, - "learning_rate": 1.859899935899068e-06, - "loss": 1.0448, - "num_input_tokens_seen": 95132480, - "step": 4463 - }, - { - "epoch": 0.5367642638129021, - "flos": 13910226897840.0, - "grad_norm": 13.074800531386582, - "learning_rate": 1.8591228884579506e-06, - "loss": 1.023, - "num_input_tokens_seen": 95150695, - "step": 4464 - }, - { - "epoch": 0.5368845067035412, - "flos": 17005951422360.0, - "grad_norm": 3.794193278375127, - "learning_rate": 1.8583458623877795e-06, - "loss": 1.0513, - "num_input_tokens_seen": 95169515, - "step": 4465 - }, - { - "epoch": 0.5370047495941802, - "flos": 11944895380800.0, - "grad_norm": 6.942911323008537, - "learning_rate": 1.8575688578064281e-06, - "loss": 0.9597, - "num_input_tokens_seen": 95187360, - "step": 4466 - }, - { - "epoch": 0.5371249924848194, - "flos": 14724647275680.0, - "grad_norm": 4.914756446927679, - "learning_rate": 1.8567918748317674e-06, - "loss": 0.9862, - "num_input_tokens_seen": 95206430, - "step": 4467 - }, - { - "epoch": 0.5372452353754584, - "flos": 12731898775920.0, - "grad_norm": 3.1783538945984415, - "learning_rate": 1.8560149135816659e-06, - "loss": 1.0476, - "num_input_tokens_seen": 95222985, - "step": 4468 - }, - { - "epoch": 0.5373654782660975, - "flos": 10869642954360.0, - "grad_norm": 4.595008314455167, - "learning_rate": 1.8552379741739873e-06, - "loss": 1.0546, - "num_input_tokens_seen": 95240050, - "step": 4469 - }, - { - 
"epoch": 0.5374857211567367, - "flos": 49410972736440.0, - "grad_norm": 0.8864712810952908, - "learning_rate": 1.8544610567265935e-06, - "loss": 0.8114, - "num_input_tokens_seen": 95293710, - "step": 4470 - }, - { - "epoch": 0.5376059640473757, - "flos": 10660057468680.0, - "grad_norm": 3.830123691005725, - "learning_rate": 1.853684161357341e-06, - "loss": 1.0603, - "num_input_tokens_seen": 95311090, - "step": 4471 - }, - { - "epoch": 0.5377262069380148, - "flos": 14043141839280.0, - "grad_norm": 3.5207292590287116, - "learning_rate": 1.852907288184085e-06, - "loss": 1.0008, - "num_input_tokens_seen": 95329695, - "step": 4472 - }, - { - "epoch": 0.5378464498286539, - "flos": 21380473531680.0, - "grad_norm": 3.340339704130294, - "learning_rate": 1.8521304373246762e-06, - "loss": 0.9292, - "num_input_tokens_seen": 95350460, - "step": 4473 - }, - { - "epoch": 0.537966692719293, - "flos": 15091314560160.0, - "grad_norm": 7.055901069224372, - "learning_rate": 1.8513536088969626e-06, - "loss": 1.1146, - "num_input_tokens_seen": 95367845, - "step": 4474 - }, - { - "epoch": 0.538086935609932, - "flos": 15301145338320.0, - "grad_norm": 3.5790412096292785, - "learning_rate": 1.8505768030187884e-06, - "loss": 1.0242, - "num_input_tokens_seen": 95387695, - "step": 4475 - }, - { - "epoch": 0.5382071785005712, - "flos": 16166475001920.0, - "grad_norm": 4.470305698873606, - "learning_rate": 1.849800019807995e-06, - "loss": 1.0322, - "num_input_tokens_seen": 95408640, - "step": 4476 - }, - { - "epoch": 0.5383274213912103, - "flos": 17738243498280.0, - "grad_norm": 5.155523260516015, - "learning_rate": 1.8490232593824186e-06, - "loss": 0.9422, - "num_input_tokens_seen": 95424815, - "step": 4477 - }, - { - "epoch": 0.5384476642818493, - "flos": 15852495373680.0, - "grad_norm": 3.7584319367482157, - "learning_rate": 1.8482465218598935e-06, - "loss": 1.0695, - "num_input_tokens_seen": 95444480, - "step": 4478 - }, - { - "epoch": 0.5385679071724885, - "flos": 16140499112520.0, - "grad_norm": 3.092082335161765, - "learning_rate": 1.8474698073582508e-06, - "loss": 1.0633, - "num_input_tokens_seen": 95465570, - "step": 4479 - }, - { - "epoch": 0.5386881500631275, - "flos": 11283172198680.0, - "grad_norm": 8.189673681220931, - "learning_rate": 1.8466931159953166e-06, - "loss": 1.0834, - "num_input_tokens_seen": 95481925, - "step": 4480 - }, - { - "epoch": 0.5388083929537666, - "flos": 17109885641520.0, - "grad_norm": 6.263271127459233, - "learning_rate": 1.8459164478889158e-06, - "loss": 1.0655, - "num_input_tokens_seen": 95503040, - "step": 4481 - }, - { - "epoch": 0.5389286358444056, - "flos": 15799777055880.0, - "grad_norm": 6.773230244810724, - "learning_rate": 1.8451398031568663e-06, - "loss": 0.9769, - "num_input_tokens_seen": 95522385, - "step": 4482 - }, - { - "epoch": 0.5390488787350448, - "flos": 17269573672920.0, - "grad_norm": 19.890348062437766, - "learning_rate": 1.844363181916986e-06, - "loss": 0.9758, - "num_input_tokens_seen": 95542830, - "step": 4483 - }, - { - "epoch": 0.5391691216256839, - "flos": 11735248572000.0, - "grad_norm": 3.966012085404212, - "learning_rate": 1.8435865842870868e-06, - "loss": 1.0556, - "num_input_tokens_seen": 95560490, - "step": 4484 - }, - { - "epoch": 0.5392893645163229, - "flos": 16585216711440.0, - "grad_norm": 4.8838368066240045, - "learning_rate": 1.8428100103849787e-06, - "loss": 0.94, - "num_input_tokens_seen": 95580005, - "step": 4485 - }, - { - "epoch": 0.5394096074069621, - "flos": 11080178948400.0, - "grad_norm": 7.042950334487848, - "learning_rate": 
1.842033460328467e-06, - "loss": 0.9389, - "num_input_tokens_seen": 95598445, - "step": 4486 - }, - { - "epoch": 0.5395298502976011, - "flos": 16271605021920.0, - "grad_norm": 4.175988615933975, - "learning_rate": 1.8412569342353541e-06, - "loss": 0.9723, - "num_input_tokens_seen": 95618320, - "step": 4487 - }, - { - "epoch": 0.5396500931882402, - "flos": 16953417073920.0, - "grad_norm": 3.4116019104696287, - "learning_rate": 1.840480432223438e-06, - "loss": 1.0652, - "num_input_tokens_seen": 95637045, - "step": 4488 - }, - { - "epoch": 0.5397703360788794, - "flos": 18736334795520.0, - "grad_norm": 2.9110794139265614, - "learning_rate": 1.8397039544105131e-06, - "loss": 0.9816, - "num_input_tokens_seen": 95655850, - "step": 4489 - }, - { - "epoch": 0.5398905789695184, - "flos": 15064970732040.0, - "grad_norm": 2.8435784592084525, - "learning_rate": 1.8389275009143711e-06, - "loss": 0.9231, - "num_input_tokens_seen": 95675310, - "step": 4490 - }, - { - "epoch": 0.5400108218601575, - "flos": 17842974918000.0, - "grad_norm": 2.8996860663082287, - "learning_rate": 1.8381510718527988e-06, - "loss": 0.952, - "num_input_tokens_seen": 95694640, - "step": 4491 - }, - { - "epoch": 0.5401310647507966, - "flos": 18760869591600.0, - "grad_norm": 4.678307025555956, - "learning_rate": 1.8373746673435812e-06, - "loss": 0.8659, - "num_input_tokens_seen": 95715385, - "step": 4492 - }, - { - "epoch": 0.5402513076414357, - "flos": 19837593772920.0, - "grad_norm": 4.718456941308506, - "learning_rate": 1.8365982875044964e-06, - "loss": 1.017, - "num_input_tokens_seen": 95735415, - "step": 4493 - }, - { - "epoch": 0.5403715505320748, - "flos": 16271880975960.0, - "grad_norm": 5.49865966417426, - "learning_rate": 1.8358219324533217e-06, - "loss": 0.9815, - "num_input_tokens_seen": 95755400, - "step": 4494 - }, - { - "epoch": 0.5404917934227139, - "flos": 21539640316560.0, - "grad_norm": 2.8645220015924857, - "learning_rate": 1.8350456023078292e-06, - "loss": 0.9289, - "num_input_tokens_seen": 95777495, - "step": 4495 - }, - { - "epoch": 0.540612036313353, - "flos": 14147995905240.0, - "grad_norm": 11.136904401512904, - "learning_rate": 1.8342692971857874e-06, - "loss": 0.9811, - "num_input_tokens_seen": 95796415, - "step": 4496 - }, - { - "epoch": 0.540732279203992, - "flos": 17267764640880.0, - "grad_norm": 4.927705345113658, - "learning_rate": 1.833493017204962e-06, - "loss": 0.9357, - "num_input_tokens_seen": 95816240, - "step": 4497 - }, - { - "epoch": 0.5408525220946312, - "flos": 14330961608760.0, - "grad_norm": 7.020280071007896, - "learning_rate": 1.8327167624831134e-06, - "loss": 0.9933, - "num_input_tokens_seen": 95833690, - "step": 4498 - }, - { - "epoch": 0.5409727649852702, - "flos": 17163554467680.0, - "grad_norm": 5.888264716956357, - "learning_rate": 1.831940533137999e-06, - "loss": 0.9313, - "num_input_tokens_seen": 95852315, - "step": 4499 - }, - { - "epoch": 0.5410930078759093, - "flos": 17003805113160.0, - "grad_norm": 3.0601613983274603, - "learning_rate": 1.8311643292873718e-06, - "loss": 0.9424, - "num_input_tokens_seen": 95870855, - "step": 4500 - }, - { - "epoch": 0.5412132507665485, - "flos": 14986307186400.0, - "grad_norm": 3.079046585845628, - "learning_rate": 1.8303881510489818e-06, - "loss": 1.1153, - "num_input_tokens_seen": 95888965, - "step": 4501 - }, - { - "epoch": 0.5413334936571875, - "flos": 21542369195400.0, - "grad_norm": 6.060568905051807, - "learning_rate": 1.829611998540574e-06, - "loss": 0.9053, - "num_input_tokens_seen": 95909890, - "step": 4502 - }, - { - "epoch": 
0.5414537365478266, - "flos": 17267795302440.0, - "grad_norm": 3.1827656610906763, - "learning_rate": 1.8288358718798914e-06, - "loss": 1.0308, - "num_input_tokens_seen": 95928800, - "step": 4503 - }, - { - "epoch": 0.5415739794384657, - "flos": 11787844243560.0, - "grad_norm": 6.389925370474555, - "learning_rate": 1.8280597711846703e-06, - "loss": 0.9503, - "num_input_tokens_seen": 95946760, - "step": 4504 - }, - { - "epoch": 0.5416942223291048, - "flos": 16481711754120.0, - "grad_norm": 3.450740491485153, - "learning_rate": 1.8272836965726455e-06, - "loss": 1.062, - "num_input_tokens_seen": 95965415, - "step": 4505 - }, - { - "epoch": 0.5418144652197439, - "flos": 14410115739360.0, - "grad_norm": 5.340580585935676, - "learning_rate": 1.8265076481615461e-06, - "loss": 0.9997, - "num_input_tokens_seen": 95985050, - "step": 4506 - }, - { - "epoch": 0.541934708110383, - "flos": 8772316342680.0, - "grad_norm": 5.5616893391903925, - "learning_rate": 1.8257316260690987e-06, - "loss": 1.0929, - "num_input_tokens_seen": 96002555, - "step": 4507 - }, - { - "epoch": 0.5420549510010221, - "flos": 15093522192480.0, - "grad_norm": 3.3063372018199644, - "learning_rate": 1.8249556304130254e-06, - "loss": 0.9885, - "num_input_tokens_seen": 96023555, - "step": 4508 - }, - { - "epoch": 0.5421751938916611, - "flos": 21011874568920.0, - "grad_norm": 3.916177692056283, - "learning_rate": 1.824179661311044e-06, - "loss": 0.9077, - "num_input_tokens_seen": 96042025, - "step": 4509 - }, - { - "epoch": 0.5422954367823003, - "flos": 13282482272280.0, - "grad_norm": 3.982696299243983, - "learning_rate": 1.823403718880868e-06, - "loss": 1.0135, - "num_input_tokens_seen": 96060505, - "step": 4510 - }, - { - "epoch": 0.5424156796729394, - "flos": 28543516742040.0, - "grad_norm": 3.341803438680725, - "learning_rate": 1.822627803240207e-06, - "loss": 0.8995, - "num_input_tokens_seen": 96082555, - "step": 4511 - }, - { - "epoch": 0.5425359225635784, - "flos": 8116817457240.0, - "grad_norm": 4.056823285563236, - "learning_rate": 1.8218519145067675e-06, - "loss": 1.0791, - "num_input_tokens_seen": 96097895, - "step": 4512 - }, - { - "epoch": 0.5426561654542175, - "flos": 14357182790640.0, - "grad_norm": 16.745141079007347, - "learning_rate": 1.8210760527982508e-06, - "loss": 1.1128, - "num_input_tokens_seen": 96117900, - "step": 4513 - }, - { - "epoch": 0.5427764083448566, - "flos": 15537258621480.0, - "grad_norm": 5.794940965169095, - "learning_rate": 1.8203002182323552e-06, - "loss": 0.9689, - "num_input_tokens_seen": 96135175, - "step": 4514 - }, - { - "epoch": 0.5428966512354957, - "flos": 13933381923720.0, - "grad_norm": 3.5620901307017996, - "learning_rate": 1.819524410926773e-06, - "loss": 0.985, - "num_input_tokens_seen": 96152575, - "step": 4515 - }, - { - "epoch": 0.5430168941261347, - "flos": 15904109875320.0, - "grad_norm": 3.9124187131974932, - "learning_rate": 1.8187486309991944e-06, - "loss": 0.9973, - "num_input_tokens_seen": 96173175, - "step": 4516 - }, - { - "epoch": 0.5431371370167739, - "flos": 13303123050240.0, - "grad_norm": 3.264999840869286, - "learning_rate": 1.817972878567304e-06, - "loss": 0.9957, - "num_input_tokens_seen": 96191550, - "step": 4517 - }, - { - "epoch": 0.543257379907413, - "flos": 13333912804560.0, - "grad_norm": 2.745586494924325, - "learning_rate": 1.8171971537487834e-06, - "loss": 0.9879, - "num_input_tokens_seen": 96209920, - "step": 4518 - }, - { - "epoch": 0.543377622798052, - "flos": 12390655472760.0, - "grad_norm": 6.900037361817864, - "learning_rate": 
1.8164214566613093e-06, - "loss": 1.0251, - "num_input_tokens_seen": 96228265, - "step": 4519 - }, - { - "epoch": 0.5434978656886912, - "flos": 13465754591400.0, - "grad_norm": 5.155999934750624, - "learning_rate": 1.8156457874225547e-06, - "loss": 0.8641, - "num_input_tokens_seen": 96246445, - "step": 4520 - }, - { - "epoch": 0.5436181085793302, - "flos": 12233726981760.0, - "grad_norm": 3.2529132697161494, - "learning_rate": 1.814870146150187e-06, - "loss": 1.03, - "num_input_tokens_seen": 96264275, - "step": 4521 - }, - { - "epoch": 0.5437383514699693, - "flos": 13544264829240.0, - "grad_norm": 3.624512869891118, - "learning_rate": 1.814094532961871e-06, - "loss": 1.0167, - "num_input_tokens_seen": 96282570, - "step": 4522 - }, - { - "epoch": 0.5438585943606085, - "flos": 16062816736800.0, - "grad_norm": 3.823265884345464, - "learning_rate": 1.8133189479752666e-06, - "loss": 1.0499, - "num_input_tokens_seen": 96301220, - "step": 4523 - }, - { - "epoch": 0.5439788372512475, - "flos": 15484632288360.0, - "grad_norm": 6.332154448697395, - "learning_rate": 1.8125433913080292e-06, - "loss": 1.0455, - "num_input_tokens_seen": 96318640, - "step": 4524 - }, - { - "epoch": 0.5440990801418866, - "flos": 11604479939760.0, - "grad_norm": 4.50658655272233, - "learning_rate": 1.811767863077811e-06, - "loss": 1.0403, - "num_input_tokens_seen": 96337310, - "step": 4525 - }, - { - "epoch": 0.5442193230325257, - "flos": 15353158440240.0, - "grad_norm": 2.860918139356255, - "learning_rate": 1.8109923634022577e-06, - "loss": 1.0065, - "num_input_tokens_seen": 96357055, - "step": 4526 - }, - { - "epoch": 0.5443395659231648, - "flos": 10943799250680.0, - "grad_norm": 7.232359900068656, - "learning_rate": 1.8102168923990128e-06, - "loss": 1.0906, - "num_input_tokens_seen": 96370320, - "step": 4527 - }, - { - "epoch": 0.5444598088138038, - "flos": 13308458161680.0, - "grad_norm": 4.630071179561672, - "learning_rate": 1.809441450185714e-06, - "loss": 1.0174, - "num_input_tokens_seen": 96388525, - "step": 4528 - }, - { - "epoch": 0.544580051704443, - "flos": 15013141599480.0, - "grad_norm": 21.755366923824436, - "learning_rate": 1.8086660368799958e-06, - "loss": 0.9605, - "num_input_tokens_seen": 96406295, - "step": 4529 - }, - { - "epoch": 0.5447002945950821, - "flos": 23169860842440.0, - "grad_norm": 3.872278303642993, - "learning_rate": 1.807890652599488e-06, - "loss": 0.9919, - "num_input_tokens_seen": 96430400, - "step": 4530 - }, - { - "epoch": 0.5448205374857211, - "flos": 8297268912840.0, - "grad_norm": 5.812669977143343, - "learning_rate": 1.8071152974618156e-06, - "loss": 1.0486, - "num_input_tokens_seen": 96447920, - "step": 4531 - }, - { - "epoch": 0.5449407803763603, - "flos": 17162971898040.0, - "grad_norm": 3.203933985826591, - "learning_rate": 1.806339971584599e-06, - "loss": 1.0105, - "num_input_tokens_seen": 96464300, - "step": 4532 - }, - { - "epoch": 0.5450610232669993, - "flos": 16534031471640.0, - "grad_norm": 2.724178288309446, - "learning_rate": 1.8055646750854546e-06, - "loss": 1.08, - "num_input_tokens_seen": 96483530, - "step": 4533 - }, - { - "epoch": 0.5451812661576384, - "flos": 12600271620000.0, - "grad_norm": 4.386795491662331, - "learning_rate": 1.8047894080819945e-06, - "loss": 1.0364, - "num_input_tokens_seen": 96500500, - "step": 4534 - }, - { - "epoch": 0.5453015090482776, - "flos": 50895854841000.0, - "grad_norm": 0.7499417838708575, - "learning_rate": 1.8040141706918258e-06, - "loss": 0.8947, - "num_input_tokens_seen": 96561460, - "step": 4535 - }, - { - "epoch": 
0.5454217519389166, - "flos": 18181765296360.0, - "grad_norm": 3.7264742091054286, - "learning_rate": 1.8032389630325525e-06, - "loss": 0.9919, - "num_input_tokens_seen": 96579930, - "step": 4536 - }, - { - "epoch": 0.5455419948295557, - "flos": 16821146025240.0, - "grad_norm": 4.613899105600928, - "learning_rate": 1.8024637852217707e-06, - "loss": 0.9879, - "num_input_tokens_seen": 96599375, - "step": 4537 - }, - { - "epoch": 0.5456622377201948, - "flos": 16638486937320.0, - "grad_norm": 4.66209607867553, - "learning_rate": 1.8016886373770766e-06, - "loss": 1.0708, - "num_input_tokens_seen": 96617610, - "step": 4538 - }, - { - "epoch": 0.5457824806108339, - "flos": 17058271139880.0, - "grad_norm": 2.7418873505828305, - "learning_rate": 1.8009135196160579e-06, - "loss": 0.9973, - "num_input_tokens_seen": 96636205, - "step": 4539 - }, - { - "epoch": 0.545902723501473, - "flos": 16036288939320.0, - "grad_norm": 3.2665733894992126, - "learning_rate": 1.8001384320563e-06, - "loss": 1.0653, - "num_input_tokens_seen": 96656180, - "step": 4540 - }, - { - "epoch": 0.5460229663921121, - "flos": 39490596779640.0, - "grad_norm": 0.8072494906001227, - "learning_rate": 1.7993633748153833e-06, - "loss": 0.836, - "num_input_tokens_seen": 96710505, - "step": 4541 - }, - { - "epoch": 0.5461432092827512, - "flos": 10896170751840.0, - "grad_norm": 3.427241606763073, - "learning_rate": 1.7985883480108834e-06, - "loss": 0.9549, - "num_input_tokens_seen": 96727860, - "step": 4542 - }, - { - "epoch": 0.5462634521733902, - "flos": 17084829598920.0, - "grad_norm": 4.277703591456217, - "learning_rate": 1.797813351760371e-06, - "loss": 0.949, - "num_input_tokens_seen": 96749285, - "step": 4543 - }, - { - "epoch": 0.5463836950640293, - "flos": 16219561258440.0, - "grad_norm": 5.184208756552783, - "learning_rate": 1.7970383861814116e-06, - "loss": 0.9979, - "num_input_tokens_seen": 96768775, - "step": 4544 - }, - { - "epoch": 0.5465039379546685, - "flos": 14514233927880.0, - "grad_norm": 3.9142015017499125, - "learning_rate": 1.7962634513915684e-06, - "loss": 0.9672, - "num_input_tokens_seen": 96785845, - "step": 4545 - }, - { - "epoch": 0.5466241808453075, - "flos": 12282244665840.0, - "grad_norm": 2.7592008300282287, - "learning_rate": 1.7954885475083969e-06, - "loss": 1.0201, - "num_input_tokens_seen": 96803235, - "step": 4546 - }, - { - "epoch": 0.5467444237359466, - "flos": 15354354241080.0, - "grad_norm": 4.705532535004756, - "learning_rate": 1.7947136746494513e-06, - "loss": 0.9586, - "num_input_tokens_seen": 96823870, - "step": 4547 - }, - { - "epoch": 0.5468646666265857, - "flos": 17189377049280.0, - "grad_norm": 3.146856549994015, - "learning_rate": 1.793938832932277e-06, - "loss": 1.0989, - "num_input_tokens_seen": 96841700, - "step": 4548 - }, - { - "epoch": 0.5469849095172248, - "flos": 19601633797560.0, - "grad_norm": 4.009755635794912, - "learning_rate": 1.7931640224744185e-06, - "loss": 0.9223, - "num_input_tokens_seen": 96861970, - "step": 4549 - }, - { - "epoch": 0.5471051524078638, - "flos": 19916165333880.0, - "grad_norm": 2.514190833667107, - "learning_rate": 1.7923892433934127e-06, - "loss": 0.9579, - "num_input_tokens_seen": 96882765, - "step": 4550 - }, - { - "epoch": 0.547225395298503, - "flos": 12862974023760.0, - "grad_norm": 3.706523896722191, - "learning_rate": 1.7916144958067939e-06, - "loss": 1.0055, - "num_input_tokens_seen": 96900345, - "step": 4551 - }, - { - "epoch": 0.5473456381891421, - "flos": 15170683321680.0, - "grad_norm": 3.2759749380976566, - "learning_rate": 
1.7908397798320905e-06, - "loss": 1.0104, - "num_input_tokens_seen": 96919800, - "step": 4552 - }, - { - "epoch": 0.5474658810797811, - "flos": 14142568809120.0, - "grad_norm": 2.955571205970929, - "learning_rate": 1.7900650955868265e-06, - "loss": 0.9792, - "num_input_tokens_seen": 96939165, - "step": 4553 - }, - { - "epoch": 0.5475861239704203, - "flos": 36094757200080.0, - "grad_norm": 2.874414467272089, - "learning_rate": 1.7892904431885202e-06, - "loss": 0.9979, - "num_input_tokens_seen": 96966060, - "step": 4554 - }, - { - "epoch": 0.5477063668610593, - "flos": 14699008663440.0, - "grad_norm": 4.690052812356214, - "learning_rate": 1.788515822754686e-06, - "loss": 0.9799, - "num_input_tokens_seen": 96986200, - "step": 4555 - }, - { - "epoch": 0.5478266097516984, - "flos": 13911667991160.0, - "grad_norm": 5.0281866948223275, - "learning_rate": 1.7877412344028335e-06, - "loss": 1.0023, - "num_input_tokens_seen": 97005725, - "step": 4556 - }, - { - "epoch": 0.5479468526423376, - "flos": 9087093171480.0, - "grad_norm": 4.7775743106031285, - "learning_rate": 1.7869666782504668e-06, - "loss": 0.9992, - "num_input_tokens_seen": 97022025, - "step": 4557 - }, - { - "epoch": 0.5480670955329766, - "flos": 13178026806600.0, - "grad_norm": 8.671345352631816, - "learning_rate": 1.7861921544150867e-06, - "loss": 0.9185, - "num_input_tokens_seen": 97040595, - "step": 4558 - }, - { - "epoch": 0.5481873384236157, - "flos": 11283908076120.0, - "grad_norm": 11.788408204382984, - "learning_rate": 1.7854176630141856e-06, - "loss": 0.9834, - "num_input_tokens_seen": 97057450, - "step": 4559 - }, - { - "epoch": 0.5483075813142548, - "flos": 16192788168480.0, - "grad_norm": 3.6882328662959165, - "learning_rate": 1.784643204165255e-06, - "loss": 1.0502, - "num_input_tokens_seen": 97076490, - "step": 4560 - }, - { - "epoch": 0.5484278242048939, - "flos": 13701346628040.0, - "grad_norm": 4.039225650642895, - "learning_rate": 1.7838687779857783e-06, - "loss": 0.995, - "num_input_tokens_seen": 97094085, - "step": 4561 - }, - { - "epoch": 0.5485480670955329, - "flos": 16215360624720.0, - "grad_norm": 4.531159077751588, - "learning_rate": 1.7830943845932366e-06, - "loss": 0.8654, - "num_input_tokens_seen": 97113130, - "step": 4562 - }, - { - "epoch": 0.5486683099861721, - "flos": 16111518390240.0, - "grad_norm": 5.419261115409305, - "learning_rate": 1.7823200241051044e-06, - "loss": 0.9811, - "num_input_tokens_seen": 97131765, - "step": 4563 - }, - { - "epoch": 0.5487885528768112, - "flos": 16455613218480.0, - "grad_norm": 3.117221459225265, - "learning_rate": 1.7815456966388513e-06, - "loss": 1.0251, - "num_input_tokens_seen": 97150580, - "step": 4564 - }, - { - "epoch": 0.5489087957674502, - "flos": 15668211223080.0, - "grad_norm": 5.613445706968059, - "learning_rate": 1.780771402311943e-06, - "loss": 1.0487, - "num_input_tokens_seen": 97169135, - "step": 4565 - }, - { - "epoch": 0.5490290386580894, - "flos": 17293893838080.0, - "grad_norm": 2.924532574205302, - "learning_rate": 1.7799971412418374e-06, - "loss": 1.0069, - "num_input_tokens_seen": 97190250, - "step": 4566 - }, - { - "epoch": 0.5491492815487284, - "flos": 12966570965760.0, - "grad_norm": 4.646388113428575, - "learning_rate": 1.7792229135459918e-06, - "loss": 0.9617, - "num_input_tokens_seen": 97206620, - "step": 4567 - }, - { - "epoch": 0.5492695244393675, - "flos": 46213153686360.0, - "grad_norm": 0.7972928615853837, - "learning_rate": 1.7784487193418538e-06, - "loss": 0.8625, - "num_input_tokens_seen": 97264190, - "step": 4568 - }, - { - "epoch": 
0.5493897673300067, - "flos": 12308281878360.0, - "grad_norm": 3.621467447596212, - "learning_rate": 1.7776745587468698e-06, - "loss": 0.8273, - "num_input_tokens_seen": 97281335, - "step": 4569 - }, - { - "epoch": 0.5495100102206457, - "flos": 14121560092440.0, - "grad_norm": 5.5848286842019395, - "learning_rate": 1.7769004318784776e-06, - "loss": 1.0406, - "num_input_tokens_seen": 97298700, - "step": 4570 - }, - { - "epoch": 0.5496302531112848, - "flos": 11840010653280.0, - "grad_norm": 6.8387583465608905, - "learning_rate": 1.776126338854113e-06, - "loss": 1.0264, - "num_input_tokens_seen": 97316210, - "step": 4571 - }, - { - "epoch": 0.5497504960019239, - "flos": 17478269973360.0, - "grad_norm": 3.768174191038518, - "learning_rate": 1.7753522797912044e-06, - "loss": 1.0733, - "num_input_tokens_seen": 97336015, - "step": 4572 - }, - { - "epoch": 0.549870738892563, - "flos": 10922514579960.0, - "grad_norm": 5.454095180306248, - "learning_rate": 1.7745782548071765e-06, - "loss": 0.9231, - "num_input_tokens_seen": 97352630, - "step": 4573 - }, - { - "epoch": 0.549990981783202, - "flos": 14960484604800.0, - "grad_norm": 2.585092550572539, - "learning_rate": 1.7738042640194482e-06, - "loss": 0.9642, - "num_input_tokens_seen": 97372015, - "step": 4574 - }, - { - "epoch": 0.5501112246738411, - "flos": 15195126133080.0, - "grad_norm": 3.9385093727324763, - "learning_rate": 1.7730303075454335e-06, - "loss": 0.9242, - "num_input_tokens_seen": 97390625, - "step": 4575 - }, - { - "epoch": 0.5502314675644803, - "flos": 12364464952440.0, - "grad_norm": 3.303571075085949, - "learning_rate": 1.7722563855025402e-06, - "loss": 1.0789, - "num_input_tokens_seen": 97408375, - "step": 4576 - }, - { - "epoch": 0.5503517104551193, - "flos": 17289141296280.0, - "grad_norm": 2.749792611365006, - "learning_rate": 1.7714824980081721e-06, - "loss": 0.9332, - "num_input_tokens_seen": 97427390, - "step": 4577 - }, - { - "epoch": 0.5504719533457584, - "flos": 15930974949960.0, - "grad_norm": 4.59118975443251, - "learning_rate": 1.7707086451797276e-06, - "loss": 0.9687, - "num_input_tokens_seen": 97447985, - "step": 4578 - }, - { - "epoch": 0.5505921962363975, - "flos": 48186150593400.0, - "grad_norm": 0.7264014034720865, - "learning_rate": 1.7699348271345993e-06, - "loss": 0.7704, - "num_input_tokens_seen": 97510330, - "step": 4579 - }, - { - "epoch": 0.5507124391270366, - "flos": 32654870976120.0, - "grad_norm": 0.7376057193436101, - "learning_rate": 1.7691610439901753e-06, - "loss": 0.7935, - "num_input_tokens_seen": 97572985, - "step": 4580 - }, - { - "epoch": 0.5508326820176757, - "flos": 15826396838040.0, - "grad_norm": 8.923170433262758, - "learning_rate": 1.7683872958638367e-06, - "loss": 0.9808, - "num_input_tokens_seen": 97591585, - "step": 4581 - }, - { - "epoch": 0.5509529249083148, - "flos": 14201266131120.0, - "grad_norm": 2.5535307432243295, - "learning_rate": 1.7676135828729614e-06, - "loss": 1.0596, - "num_input_tokens_seen": 97611015, - "step": 4582 - }, - { - "epoch": 0.5510731677989539, - "flos": 15510700162440.0, - "grad_norm": 6.303657910916591, - "learning_rate": 1.7668399051349205e-06, - "loss": 1.0574, - "num_input_tokens_seen": 97630415, - "step": 4583 - }, - { - "epoch": 0.5511934106895929, - "flos": 15246740634720.0, - "grad_norm": 3.922920239744553, - "learning_rate": 1.766066262767081e-06, - "loss": 1.0447, - "num_input_tokens_seen": 97647975, - "step": 4584 - }, - { - "epoch": 0.5513136535802321, - "flos": 14960331297000.0, - "grad_norm": 4.483862658218754, - "learning_rate": 
1.765292655886803e-06, - "loss": 0.9992, - "num_input_tokens_seen": 97666340, - "step": 4585 - }, - { - "epoch": 0.5514338964708712, - "flos": 19808091804120.0, - "grad_norm": 3.189469719941136, - "learning_rate": 1.764519084611443e-06, - "loss": 0.936, - "num_input_tokens_seen": 97686515, - "step": 4586 - }, - { - "epoch": 0.5515541393615102, - "flos": 15563786418960.0, - "grad_norm": 5.542033228785259, - "learning_rate": 1.7637455490583505e-06, - "loss": 0.9907, - "num_input_tokens_seen": 97705560, - "step": 4587 - }, - { - "epoch": 0.5516743822521494, - "flos": 14541007017840.0, - "grad_norm": 3.906886910966711, - "learning_rate": 1.7629720493448701e-06, - "loss": 1.0025, - "num_input_tokens_seen": 97722575, - "step": 4588 - }, - { - "epoch": 0.5517946251427884, - "flos": 10556000603280.0, - "grad_norm": 2.996125739572133, - "learning_rate": 1.7621985855883418e-06, - "loss": 1.0755, - "num_input_tokens_seen": 97738995, - "step": 4589 - }, - { - "epoch": 0.5519148680334275, - "flos": 13045633111680.0, - "grad_norm": 2.618401635790861, - "learning_rate": 1.7614251579060983e-06, - "loss": 0.9426, - "num_input_tokens_seen": 97757310, - "step": 4590 - }, - { - "epoch": 0.5520351109240667, - "flos": 17866221928560.0, - "grad_norm": 4.5021365709642, - "learning_rate": 1.76065176641547e-06, - "loss": 1.0767, - "num_input_tokens_seen": 97779740, - "step": 4591 - }, - { - "epoch": 0.5521553538147057, - "flos": 14960852543520.0, - "grad_norm": 3.02999458665018, - "learning_rate": 1.759878411233777e-06, - "loss": 1.0019, - "num_input_tokens_seen": 97797920, - "step": 4592 - }, - { - "epoch": 0.5522755967053448, - "flos": 13387213692000.0, - "grad_norm": 3.0305121398793395, - "learning_rate": 1.7591050924783388e-06, - "loss": 0.9851, - "num_input_tokens_seen": 97814830, - "step": 4593 - }, - { - "epoch": 0.5523958395959839, - "flos": 46305400303080.0, - "grad_norm": 0.8430362084012389, - "learning_rate": 1.7583318102664661e-06, - "loss": 0.8646, - "num_input_tokens_seen": 97882115, - "step": 4594 - }, - { - "epoch": 0.552516082486623, - "flos": 7644774860280.0, - "grad_norm": 3.5293669954901232, - "learning_rate": 1.757558564715466e-06, - "loss": 1.0115, - "num_input_tokens_seen": 97899910, - "step": 4595 - }, - { - "epoch": 0.552636325377262, - "flos": 15773034627480.0, - "grad_norm": 8.874584779294343, - "learning_rate": 1.7567853559426386e-06, - "loss": 0.9447, - "num_input_tokens_seen": 97916680, - "step": 4596 - }, - { - "epoch": 0.5527565682679012, - "flos": 17058393786120.0, - "grad_norm": 6.299416760286256, - "learning_rate": 1.7560121840652797e-06, - "loss": 0.977, - "num_input_tokens_seen": 97935785, - "step": 4597 - }, - { - "epoch": 0.5528768111585403, - "flos": 13990392859920.0, - "grad_norm": 3.799540271054158, - "learning_rate": 1.7552390492006782e-06, - "loss": 0.9155, - "num_input_tokens_seen": 97953825, - "step": 4598 - }, - { - "epoch": 0.5529970540491793, - "flos": 18659756235960.0, - "grad_norm": 2.6788097311917722, - "learning_rate": 1.7544659514661184e-06, - "loss": 0.8766, - "num_input_tokens_seen": 97976635, - "step": 4599 - }, - { - "epoch": 0.5531172969398185, - "flos": 17373017307120.0, - "grad_norm": 3.697271686595335, - "learning_rate": 1.7536928909788786e-06, - "loss": 1.0226, - "num_input_tokens_seen": 97995660, - "step": 4600 - }, - { - "epoch": 0.5532375398304575, - "flos": 46468711946640.0, - "grad_norm": 0.8819481323111684, - "learning_rate": 1.752919867856231e-06, - "loss": 0.8707, - "num_input_tokens_seen": 98047025, - "step": 4601 - }, - { - "epoch": 
0.5533577827210966, - "flos": 13964447632080.0, - "grad_norm": 2.669635749001843, - "learning_rate": 1.7521468822154436e-06, - "loss": 1.0079, - "num_input_tokens_seen": 98065660, - "step": 4602 - }, - { - "epoch": 0.5534780256117358, - "flos": 23036547300720.0, - "grad_norm": 2.3365417515464966, - "learning_rate": 1.751373934173777e-06, - "loss": 0.9596, - "num_input_tokens_seen": 98088125, - "step": 4603 - }, - { - "epoch": 0.5535982685023748, - "flos": 16508116905360.0, - "grad_norm": 10.93893370904669, - "learning_rate": 1.750601023848487e-06, - "loss": 0.9593, - "num_input_tokens_seen": 98108570, - "step": 4604 - }, - { - "epoch": 0.5537185113930139, - "flos": 17320176343080.0, - "grad_norm": 6.172545268687314, - "learning_rate": 1.749828151356823e-06, - "loss": 0.9563, - "num_input_tokens_seen": 98128485, - "step": 4605 - }, - { - "epoch": 0.553838754283653, - "flos": 16742635787400.0, - "grad_norm": 3.3190353989237718, - "learning_rate": 1.7490553168160297e-06, - "loss": 0.9868, - "num_input_tokens_seen": 98149275, - "step": 4606 - }, - { - "epoch": 0.5539589971742921, - "flos": 12233818966440.0, - "grad_norm": 4.91199036107065, - "learning_rate": 1.748282520343345e-06, - "loss": 0.9857, - "num_input_tokens_seen": 98168025, - "step": 4607 - }, - { - "epoch": 0.5540792400649311, - "flos": 19628437549080.0, - "grad_norm": 3.5715199104618183, - "learning_rate": 1.7475097620560023e-06, - "loss": 1.0049, - "num_input_tokens_seen": 98187810, - "step": 4608 - }, - { - "epoch": 0.5541994829555702, - "flos": 16979668917360.0, - "grad_norm": 3.848059494653071, - "learning_rate": 1.746737042071228e-06, - "loss": 0.9357, - "num_input_tokens_seen": 98206035, - "step": 4609 - }, - { - "epoch": 0.5543197258462094, - "flos": 14274655888440.0, - "grad_norm": 3.02484082520424, - "learning_rate": 1.7459643605062424e-06, - "loss": 1.026, - "num_input_tokens_seen": 98223015, - "step": 4610 - }, - { - "epoch": 0.5544399687368484, - "flos": 14849958150240.0, - "grad_norm": 2.3217908471774775, - "learning_rate": 1.745191717478262e-06, - "loss": 1.035, - "num_input_tokens_seen": 98241315, - "step": 4611 - }, - { - "epoch": 0.5545602116274875, - "flos": 18158058362400.0, - "grad_norm": 4.658208447979139, - "learning_rate": 1.7444191131044948e-06, - "loss": 1.0323, - "num_input_tokens_seen": 98261310, - "step": 4612 - }, - { - "epoch": 0.5546804545181266, - "flos": 14907551656080.0, - "grad_norm": 17.020422841432932, - "learning_rate": 1.7436465475021456e-06, - "loss": 0.9495, - "num_input_tokens_seen": 98281080, - "step": 4613 - }, - { - "epoch": 0.5548006974087657, - "flos": 19103308695600.0, - "grad_norm": 3.309310961455846, - "learning_rate": 1.7428740207884111e-06, - "loss": 0.9314, - "num_input_tokens_seen": 98301680, - "step": 4614 - }, - { - "epoch": 0.5549209402994048, - "flos": 24006700368720.0, - "grad_norm": 3.0070592159914318, - "learning_rate": 1.7421015330804833e-06, - "loss": 0.8412, - "num_input_tokens_seen": 98321320, - "step": 4615 - }, - { - "epoch": 0.5550411831900439, - "flos": 16901281325760.0, - "grad_norm": 26.55270729414305, - "learning_rate": 1.7413290844955475e-06, - "loss": 0.9465, - "num_input_tokens_seen": 98341070, - "step": 4616 - }, - { - "epoch": 0.555161426080683, - "flos": 15378582421560.0, - "grad_norm": 4.103634846628995, - "learning_rate": 1.7405566751507843e-06, - "loss": 1.0109, - "num_input_tokens_seen": 98358835, - "step": 4617 - }, - { - "epoch": 0.555281668971322, - "flos": 35438399790960.0, - "grad_norm": 2.30663194259056, - "learning_rate": 
1.7397843051633668e-06, - "loss": 0.8988, - "num_input_tokens_seen": 98381250, - "step": 4618 - }, - { - "epoch": 0.5554019118619612, - "flos": 14725229845320.0, - "grad_norm": 3.261550186278719, - "learning_rate": 1.739011974650464e-06, - "loss": 0.9446, - "num_input_tokens_seen": 98400300, - "step": 4619 - }, - { - "epoch": 0.5555221547526003, - "flos": 18132726365760.0, - "grad_norm": 4.120721852665638, - "learning_rate": 1.7382396837292365e-06, - "loss": 0.9856, - "num_input_tokens_seen": 98420480, - "step": 4620 - }, - { - "epoch": 0.5556423976432393, - "flos": 15458840368320.0, - "grad_norm": 5.143237345886165, - "learning_rate": 1.737467432516841e-06, - "loss": 0.9557, - "num_input_tokens_seen": 98440300, - "step": 4621 - }, - { - "epoch": 0.5557626405338785, - "flos": 17713800686880.0, - "grad_norm": 4.164611894675946, - "learning_rate": 1.7366952211304274e-06, - "loss": 0.9514, - "num_input_tokens_seen": 98457865, - "step": 4622 - }, - { - "epoch": 0.5558828834245175, - "flos": 13256046459480.0, - "grad_norm": 3.0185210399794844, - "learning_rate": 1.735923049687139e-06, - "loss": 1.0587, - "num_input_tokens_seen": 98474160, - "step": 4623 - }, - { - "epoch": 0.5560031263151566, - "flos": 19418576109360.0, - "grad_norm": 3.3836890091629255, - "learning_rate": 1.7351509183041144e-06, - "loss": 0.9702, - "num_input_tokens_seen": 98494210, - "step": 4624 - }, - { - "epoch": 0.5561233692057957, - "flos": 16638272306400.0, - "grad_norm": 2.7675274700948305, - "learning_rate": 1.7343788270984852e-06, - "loss": 0.952, - "num_input_tokens_seen": 98513070, - "step": 4625 - }, - { - "epoch": 0.5562436120964348, - "flos": 26891950222320.0, - "grad_norm": 4.278274058719099, - "learning_rate": 1.7336067761873764e-06, - "loss": 0.9706, - "num_input_tokens_seen": 98535215, - "step": 4626 - }, - { - "epoch": 0.5563638549870739, - "flos": 17897073006000.0, - "grad_norm": 5.498624282790733, - "learning_rate": 1.7328347656879076e-06, - "loss": 0.982, - "num_input_tokens_seen": 98554795, - "step": 4627 - }, - { - "epoch": 0.556484097877713, - "flos": 9578458099320.0, - "grad_norm": 4.063435373461938, - "learning_rate": 1.7320627957171927e-06, - "loss": 0.9188, - "num_input_tokens_seen": 98569175, - "step": 4628 - }, - { - "epoch": 0.5566043407683521, - "flos": 17556565580280.0, - "grad_norm": 2.924530128494686, - "learning_rate": 1.7312908663923382e-06, - "loss": 1.0257, - "num_input_tokens_seen": 98585070, - "step": 4629 - }, - { - "epoch": 0.5567245836589911, - "flos": 14613936851760.0, - "grad_norm": 3.9253595078189973, - "learning_rate": 1.7305189778304463e-06, - "loss": 0.8921, - "num_input_tokens_seen": 98602965, - "step": 4630 - }, - { - "epoch": 0.5568448265496303, - "flos": 14698211462880.0, - "grad_norm": 2.7300447017377443, - "learning_rate": 1.729747130148611e-06, - "loss": 1.0229, - "num_input_tokens_seen": 98621880, - "step": 4631 - }, - { - "epoch": 0.5569650694402694, - "flos": 18003429488400.0, - "grad_norm": 3.70892549280209, - "learning_rate": 1.7289753234639208e-06, - "loss": 0.991, - "num_input_tokens_seen": 98640575, - "step": 4632 - }, - { - "epoch": 0.5570853123309084, - "flos": 13984720471320.0, - "grad_norm": 3.423439260575483, - "learning_rate": 1.7282035578934592e-06, - "loss": 0.9846, - "num_input_tokens_seen": 98658460, - "step": 4633 - }, - { - "epoch": 0.5572055552215476, - "flos": 11395231731240.0, - "grad_norm": 3.267445909888617, - "learning_rate": 1.727431833554301e-06, - "loss": 1.0096, - "num_input_tokens_seen": 98676655, - "step": 4634 - }, - { - "epoch": 
0.5573257981121866, - "flos": 12128688946440.0, - "grad_norm": 3.1793014704437548, - "learning_rate": 1.7266601505635175e-06, - "loss": 0.995, - "num_input_tokens_seen": 98693715, - "step": 4635 - }, - { - "epoch": 0.5574460410028257, - "flos": 13334771328240.0, - "grad_norm": 3.456199002708855, - "learning_rate": 1.7258885090381717e-06, - "loss": 0.9801, - "num_input_tokens_seen": 98711475, - "step": 4636 - }, - { - "epoch": 0.5575662838934649, - "flos": 21122063746320.0, - "grad_norm": 4.392140625792098, - "learning_rate": 1.7251169090953213e-06, - "loss": 1.0075, - "num_input_tokens_seen": 98731670, - "step": 4637 - }, - { - "epoch": 0.5576865267841039, - "flos": 15668671146480.0, - "grad_norm": 5.31634543872504, - "learning_rate": 1.7243453508520168e-06, - "loss": 0.9822, - "num_input_tokens_seen": 98748375, - "step": 4638 - }, - { - "epoch": 0.557806769674743, - "flos": 12176225460600.0, - "grad_norm": 4.8602802377916605, - "learning_rate": 1.7235738344253038e-06, - "loss": 1.0744, - "num_input_tokens_seen": 98761725, - "step": 4639 - }, - { - "epoch": 0.557927012565382, - "flos": 17578402159080.0, - "grad_norm": 2.798434371563107, - "learning_rate": 1.72280235993222e-06, - "loss": 1.0405, - "num_input_tokens_seen": 98779750, - "step": 4640 - }, - { - "epoch": 0.5580472554560212, - "flos": 12023988188280.0, - "grad_norm": 3.79654393891694, - "learning_rate": 1.722030927489798e-06, - "loss": 0.9093, - "num_input_tokens_seen": 98796750, - "step": 4641 - }, - { - "epoch": 0.5581674983466602, - "flos": 16713440434200.0, - "grad_norm": 5.853633254469572, - "learning_rate": 1.7212595372150634e-06, - "loss": 0.97, - "num_input_tokens_seen": 98816450, - "step": 4642 - }, - { - "epoch": 0.5582877412372993, - "flos": 9506570758440.0, - "grad_norm": 4.097827307782647, - "learning_rate": 1.720488189225035e-06, - "loss": 0.9571, - "num_input_tokens_seen": 98833870, - "step": 4643 - }, - { - "epoch": 0.5584079841279385, - "flos": 15559585785240.0, - "grad_norm": 4.5679449747248775, - "learning_rate": 1.7197168836367265e-06, - "loss": 1.0179, - "num_input_tokens_seen": 98850400, - "step": 4644 - }, - { - "epoch": 0.5585282270185775, - "flos": 13361023171680.0, - "grad_norm": 3.0294471964133876, - "learning_rate": 1.7189456205671433e-06, - "loss": 1.0398, - "num_input_tokens_seen": 98868965, - "step": 4645 - }, - { - "epoch": 0.5586484699092166, - "flos": 15534499081080.0, - "grad_norm": 2.332462443451097, - "learning_rate": 1.7181744001332866e-06, - "loss": 1.0448, - "num_input_tokens_seen": 98887295, - "step": 4646 - }, - { - "epoch": 0.5587687127998557, - "flos": 16271421052560.0, - "grad_norm": 2.5724659163253296, - "learning_rate": 1.7174032224521493e-06, - "loss": 0.8666, - "num_input_tokens_seen": 98905725, - "step": 4647 - }, - { - "epoch": 0.5588889556904948, - "flos": 14409962431560.0, - "grad_norm": 2.580353761860268, - "learning_rate": 1.7166320876407184e-06, - "loss": 0.9267, - "num_input_tokens_seen": 98924865, - "step": 4648 - }, - { - "epoch": 0.5590091985811338, - "flos": 11656768995720.0, - "grad_norm": 3.025377070357789, - "learning_rate": 1.7158609958159742e-06, - "loss": 0.9066, - "num_input_tokens_seen": 98941990, - "step": 4649 - }, - { - "epoch": 0.559129441471773, - "flos": 10261711244640.0, - "grad_norm": 2.909812302713686, - "learning_rate": 1.7150899470948911e-06, - "loss": 1.0119, - "num_input_tokens_seen": 98956975, - "step": 4650 - }, - { - "epoch": 0.5592496843624121, - "flos": 43320202233120.0, - "grad_norm": 0.8105065025478217, - "learning_rate": 
1.7143189415944365e-06, - "loss": 0.831, - "num_input_tokens_seen": 99021155, - "step": 4651 - }, - { - "epoch": 0.5593699272530511, - "flos": 14829010756680.0, - "grad_norm": 2.2113800145166174, - "learning_rate": 1.7135479794315714e-06, - "loss": 0.985, - "num_input_tokens_seen": 99037830, - "step": 4652 - }, - { - "epoch": 0.5594901701436903, - "flos": 9087215817720.0, - "grad_norm": 3.684717971589056, - "learning_rate": 1.7127770607232502e-06, - "loss": 1.0136, - "num_input_tokens_seen": 99056095, - "step": 4653 - }, - { - "epoch": 0.5596104130343293, - "flos": 16428717482280.0, - "grad_norm": 2.8113450115969325, - "learning_rate": 1.7120061855864204e-06, - "loss": 1.0312, - "num_input_tokens_seen": 99075825, - "step": 4654 - }, - { - "epoch": 0.5597306559249684, - "flos": 18473847022680.0, - "grad_norm": 4.286849543663711, - "learning_rate": 1.7112353541380233e-06, - "loss": 0.9356, - "num_input_tokens_seen": 99095405, - "step": 4655 - }, - { - "epoch": 0.5598508988156076, - "flos": 15983325329040.0, - "grad_norm": 1.8954744802823411, - "learning_rate": 1.7104645664949931e-06, - "loss": 0.9579, - "num_input_tokens_seen": 99117595, - "step": 4656 - }, - { - "epoch": 0.5599711417062466, - "flos": 16429085421000.0, - "grad_norm": 2.696524261970945, - "learning_rate": 1.7096938227742584e-06, - "loss": 0.9473, - "num_input_tokens_seen": 99138445, - "step": 4657 - }, - { - "epoch": 0.5600913845968857, - "flos": 15872461597320.0, - "grad_norm": 3.1592883602831208, - "learning_rate": 1.70892312309274e-06, - "loss": 1.0673, - "num_input_tokens_seen": 99156055, - "step": 4658 - }, - { - "epoch": 0.5602116274875248, - "flos": 12490818320040.0, - "grad_norm": 10.399659415316918, - "learning_rate": 1.7081524675673523e-06, - "loss": 0.9004, - "num_input_tokens_seen": 99171265, - "step": 4659 - }, - { - "epoch": 0.5603318703781639, - "flos": 50215790497920.0, - "grad_norm": 0.8311906606962755, - "learning_rate": 1.7073818563150026e-06, - "loss": 0.8744, - "num_input_tokens_seen": 99233065, - "step": 4660 - }, - { - "epoch": 0.560452113268803, - "flos": 13146286543920.0, - "grad_norm": 5.012291486783897, - "learning_rate": 1.7066112894525935e-06, - "loss": 1.0808, - "num_input_tokens_seen": 99250865, - "step": 4661 - }, - { - "epoch": 0.5605723561594421, - "flos": 17975705890080.0, - "grad_norm": 2.945544812984568, - "learning_rate": 1.7058407670970177e-06, - "loss": 0.9554, - "num_input_tokens_seen": 99272060, - "step": 4662 - }, - { - "epoch": 0.5606925990500812, - "flos": 14619731886600.0, - "grad_norm": 4.732762228848956, - "learning_rate": 1.7050702893651643e-06, - "loss": 0.8339, - "num_input_tokens_seen": 99291360, - "step": 4663 - }, - { - "epoch": 0.5608128419407202, - "flos": 24976608144240.0, - "grad_norm": 2.9353244923147943, - "learning_rate": 1.7042998563739134e-06, - "loss": 0.9798, - "num_input_tokens_seen": 99309430, - "step": 4664 - }, - { - "epoch": 0.5609330848313594, - "flos": 17526021118440.0, - "grad_norm": 5.43113160174414, - "learning_rate": 1.703529468240139e-06, - "loss": 0.9368, - "num_input_tokens_seen": 99328020, - "step": 4665 - }, - { - "epoch": 0.5610533277219985, - "flos": 13302019234080.0, - "grad_norm": 4.76452784181391, - "learning_rate": 1.7027591250807088e-06, - "loss": 0.9546, - "num_input_tokens_seen": 99344915, - "step": 4666 - }, - { - "epoch": 0.5611735706126375, - "flos": 10607615104920.0, - "grad_norm": 3.1335635533538673, - "learning_rate": 1.7019888270124825e-06, - "loss": 1.0497, - "num_input_tokens_seen": 99361800, - "step": 4667 - }, - { - "epoch": 
0.5612938135032767, - "flos": 11653365562560.0, - "grad_norm": 7.515246851680869, - "learning_rate": 1.7012185741523147e-06, - "loss": 1.0471, - "num_input_tokens_seen": 99377845, - "step": 4668 - }, - { - "epoch": 0.5614140563939157, - "flos": 18264506829480.0, - "grad_norm": 2.1579040596311296, - "learning_rate": 1.7004483666170514e-06, - "loss": 0.8365, - "num_input_tokens_seen": 99398060, - "step": 4669 - }, - { - "epoch": 0.5615342992845548, - "flos": 17582541469680.0, - "grad_norm": 2.800273854623628, - "learning_rate": 1.699678204523533e-06, - "loss": 1.0125, - "num_input_tokens_seen": 99417645, - "step": 4670 - }, - { - "epoch": 0.5616545421751938, - "flos": 15640702255680.0, - "grad_norm": 3.628393623599948, - "learning_rate": 1.6989080879885918e-06, - "loss": 0.899, - "num_input_tokens_seen": 99435225, - "step": 4671 - }, - { - "epoch": 0.561774785065833, - "flos": 38459993132640.0, - "grad_norm": 0.908403107970064, - "learning_rate": 1.6981380171290544e-06, - "loss": 0.8676, - "num_input_tokens_seen": 99495970, - "step": 4672 - }, - { - "epoch": 0.5618950279564721, - "flos": 14011708192200.0, - "grad_norm": 3.688967396642482, - "learning_rate": 1.6973679920617396e-06, - "loss": 0.9657, - "num_input_tokens_seen": 99513225, - "step": 4673 - }, - { - "epoch": 0.5620152708471111, - "flos": 11892820955760.0, - "grad_norm": 4.456271589101148, - "learning_rate": 1.6965980129034603e-06, - "loss": 1.079, - "num_input_tokens_seen": 99530330, - "step": 4674 - }, - { - "epoch": 0.5621355137377503, - "flos": 19077639421800.0, - "grad_norm": 3.6022322356801135, - "learning_rate": 1.6958280797710209e-06, - "loss": 0.9922, - "num_input_tokens_seen": 99551975, - "step": 4675 - }, - { - "epoch": 0.5622557566283893, - "flos": 43812272376840.0, - "grad_norm": 0.7224188749841605, - "learning_rate": 1.6950581927812198e-06, - "loss": 0.7878, - "num_input_tokens_seen": 99611265, - "step": 4676 - }, - { - "epoch": 0.5623759995190284, - "flos": 18840882245880.0, - "grad_norm": 4.7393891123566965, - "learning_rate": 1.6942883520508486e-06, - "loss": 1.0057, - "num_input_tokens_seen": 99629720, - "step": 4677 - }, - { - "epoch": 0.5624962424096676, - "flos": 13754402223000.0, - "grad_norm": 3.2839496950578293, - "learning_rate": 1.693518557696691e-06, - "loss": 1.0012, - "num_input_tokens_seen": 99648580, - "step": 4678 - }, - { - "epoch": 0.5626164853003066, - "flos": 14671683665400.0, - "grad_norm": 14.761479452777273, - "learning_rate": 1.6927488098355252e-06, - "loss": 1.1173, - "num_input_tokens_seen": 99665930, - "step": 4679 - }, - { - "epoch": 0.5627367281909457, - "flos": 44931381930240.0, - "grad_norm": 0.8717483660981007, - "learning_rate": 1.6919791085841201e-06, - "loss": 0.903, - "num_input_tokens_seen": 99723060, - "step": 4680 - }, - { - "epoch": 0.5628569710815848, - "flos": 9008429625840.0, - "grad_norm": 9.392576703511537, - "learning_rate": 1.6912094540592396e-06, - "loss": 1.0123, - "num_input_tokens_seen": 99738300, - "step": 4681 - }, - { - "epoch": 0.5629772139722239, - "flos": 9707724315120.0, - "grad_norm": 4.669222994415883, - "learning_rate": 1.6904398463776393e-06, - "loss": 1.0271, - "num_input_tokens_seen": 99751820, - "step": 4682 - }, - { - "epoch": 0.5630974568628629, - "flos": 15246679311600.0, - "grad_norm": 2.5608894124849075, - "learning_rate": 1.6896702856560683e-06, - "loss": 0.9593, - "num_input_tokens_seen": 99770635, - "step": 4683 - }, - { - "epoch": 0.5632176997535021, - "flos": 10056724992960.0, - "grad_norm": 4.412968700900444, - "learning_rate": 
1.6889007720112677e-06, - "loss": 0.8952, - "num_input_tokens_seen": 99788100, - "step": 4684 - }, - { - "epoch": 0.5633379426441412, - "flos": 14775679207680.0, - "grad_norm": 2.2971606433779015, - "learning_rate": 1.6881313055599734e-06, - "loss": 1.0013, - "num_input_tokens_seen": 99807750, - "step": 4685 - }, - { - "epoch": 0.5634581855347802, - "flos": 16061252997240.0, - "grad_norm": 4.603213779813092, - "learning_rate": 1.6873618864189117e-06, - "loss": 1.0414, - "num_input_tokens_seen": 99823240, - "step": 4686 - }, - { - "epoch": 0.5635784284254194, - "flos": 15275169448920.0, - "grad_norm": 3.278225185728883, - "learning_rate": 1.686592514704803e-06, - "loss": 1.0012, - "num_input_tokens_seen": 99840355, - "step": 4687 - }, - { - "epoch": 0.5636986713160584, - "flos": 14067983250960.0, - "grad_norm": 17.484667355408764, - "learning_rate": 1.685823190534361e-06, - "loss": 0.9275, - "num_input_tokens_seen": 99858315, - "step": 4688 - }, - { - "epoch": 0.5638189142066975, - "flos": 14043019193040.0, - "grad_norm": 5.1526532199516515, - "learning_rate": 1.6850539140242907e-06, - "loss": 1.0446, - "num_input_tokens_seen": 99877295, - "step": 4689 - }, - { - "epoch": 0.5639391570973367, - "flos": 16271880975960.0, - "grad_norm": 15.705174675777398, - "learning_rate": 1.684284685291292e-06, - "loss": 1.0376, - "num_input_tokens_seen": 99898660, - "step": 4690 - }, - { - "epoch": 0.5640593999879757, - "flos": 16869970324920.0, - "grad_norm": 2.9792429952024544, - "learning_rate": 1.683515504452055e-06, - "loss": 1.0264, - "num_input_tokens_seen": 99915755, - "step": 4691 - }, - { - "epoch": 0.5641796428786148, - "flos": 16139732573520.0, - "grad_norm": 6.514237583192926, - "learning_rate": 1.6827463716232648e-06, - "loss": 0.8872, - "num_input_tokens_seen": 99936135, - "step": 4692 - }, - { - "epoch": 0.5642998857692539, - "flos": 14042620592760.0, - "grad_norm": 4.065234159174708, - "learning_rate": 1.6819772869215972e-06, - "loss": 0.9796, - "num_input_tokens_seen": 99954935, - "step": 4693 - }, - { - "epoch": 0.564420128659893, - "flos": 16481865061920.0, - "grad_norm": 3.601571900196179, - "learning_rate": 1.6812082504637228e-06, - "loss": 1.0418, - "num_input_tokens_seen": 99975975, - "step": 4694 - }, - { - "epoch": 0.564540371550532, - "flos": 16533786179160.0, - "grad_norm": 2.1840171335267895, - "learning_rate": 1.6804392623663025e-06, - "loss": 0.9735, - "num_input_tokens_seen": 99996900, - "step": 4695 - }, - { - "epoch": 0.5646606144411712, - "flos": 17792832171240.0, - "grad_norm": 9.300059771602067, - "learning_rate": 1.6796703227459935e-06, - "loss": 0.9985, - "num_input_tokens_seen": 100014575, - "step": 4696 - }, - { - "epoch": 0.5647808573318103, - "flos": 26078940276240.0, - "grad_norm": 2.1939050907620703, - "learning_rate": 1.6789014317194407e-06, - "loss": 0.9854, - "num_input_tokens_seen": 100035775, - "step": 4697 - }, - { - "epoch": 0.5649011002224493, - "flos": 16009270556880.0, - "grad_norm": 8.366423813333652, - "learning_rate": 1.6781325894032853e-06, - "loss": 0.9423, - "num_input_tokens_seen": 100054455, - "step": 4698 - }, - { - "epoch": 0.5650213431130885, - "flos": 13125001873200.0, - "grad_norm": 6.367995958547412, - "learning_rate": 1.6773637959141608e-06, - "loss": 1.1475, - "num_input_tokens_seen": 100071150, - "step": 4699 - }, - { - "epoch": 0.5651415860037275, - "flos": 12413841160200.0, - "grad_norm": 4.191356192909383, - "learning_rate": 1.6765950513686915e-06, - "loss": 0.8654, - "num_input_tokens_seen": 100088980, - "step": 4700 - }, - { - 
"epoch": 0.5652618288943666, - "flos": 18159223501680.0, - "grad_norm": 3.025941112815168, - "learning_rate": 1.675826355883496e-06, - "loss": 0.9862, - "num_input_tokens_seen": 100107915, - "step": 4701 - }, - { - "epoch": 0.5653820717850057, - "flos": 13964447632080.0, - "grad_norm": 3.84265108604458, - "learning_rate": 1.6750577095751848e-06, - "loss": 1.0228, - "num_input_tokens_seen": 100126745, - "step": 4702 - }, - { - "epoch": 0.5655023146756448, - "flos": 19208316069360.0, - "grad_norm": 7.930829466724993, - "learning_rate": 1.6742891125603605e-06, - "loss": 0.9613, - "num_input_tokens_seen": 100147370, - "step": 4703 - }, - { - "epoch": 0.5656225575662839, - "flos": 19703452369080.0, - "grad_norm": 7.905261313397898, - "learning_rate": 1.6735205649556185e-06, - "loss": 0.9449, - "num_input_tokens_seen": 100166960, - "step": 4704 - }, - { - "epoch": 0.5657428004569229, - "flos": 17317784741400.0, - "grad_norm": 2.7349967031790268, - "learning_rate": 1.6727520668775476e-06, - "loss": 1.0657, - "num_input_tokens_seen": 100186965, - "step": 4705 - }, - { - "epoch": 0.5658630433475621, - "flos": 15589179738720.0, - "grad_norm": 6.403863527481928, - "learning_rate": 1.6719836184427275e-06, - "loss": 0.9801, - "num_input_tokens_seen": 100206990, - "step": 4706 - }, - { - "epoch": 0.5659832862382012, - "flos": 21672831212040.0, - "grad_norm": 3.454640879978575, - "learning_rate": 1.671215219767733e-06, - "loss": 0.8759, - "num_input_tokens_seen": 100226170, - "step": 4707 - }, - { - "epoch": 0.5661035291288402, - "flos": 9296617334040.0, - "grad_norm": 8.961196749328506, - "learning_rate": 1.670446870969127e-06, - "loss": 0.9849, - "num_input_tokens_seen": 100243555, - "step": 4708 - }, - { - "epoch": 0.5662237720194794, - "flos": 11420134466040.0, - "grad_norm": 16.673163609993907, - "learning_rate": 1.6696785721634685e-06, - "loss": 1.0364, - "num_input_tokens_seen": 100257760, - "step": 4709 - }, - { - "epoch": 0.5663440149101184, - "flos": 12522650567400.0, - "grad_norm": 3.2641067991268233, - "learning_rate": 1.6689103234673086e-06, - "loss": 0.9568, - "num_input_tokens_seen": 100275800, - "step": 4710 - }, - { - "epoch": 0.5664642578007575, - "flos": 16612970971320.0, - "grad_norm": 3.2371213006598234, - "learning_rate": 1.668142124997189e-06, - "loss": 0.9783, - "num_input_tokens_seen": 100295180, - "step": 4711 - }, - { - "epoch": 0.5665845006913967, - "flos": 46909928579640.0, - "grad_norm": 0.7453641669640706, - "learning_rate": 1.6673739768696453e-06, - "loss": 0.8553, - "num_input_tokens_seen": 100361470, - "step": 4712 - }, - { - "epoch": 0.5667047435820357, - "flos": 18605504840160.0, - "grad_norm": 3.4517156564381706, - "learning_rate": 1.6666058792012052e-06, - "loss": 0.9935, - "num_input_tokens_seen": 100382075, - "step": 4713 - }, - { - "epoch": 0.5668249864726748, - "flos": 51474161935680.0, - "grad_norm": 0.8412995960642259, - "learning_rate": 1.6658378321083878e-06, - "loss": 0.9286, - "num_input_tokens_seen": 100446125, - "step": 4714 - }, - { - "epoch": 0.5669452293633139, - "flos": 15769508548080.0, - "grad_norm": 6.221545947123429, - "learning_rate": 1.6650698357077055e-06, - "loss": 1.0508, - "num_input_tokens_seen": 100462890, - "step": 4715 - }, - { - "epoch": 0.567065472253953, - "flos": 12915171095040.0, - "grad_norm": 4.423637766668768, - "learning_rate": 1.6643018901156632e-06, - "loss": 1.0344, - "num_input_tokens_seen": 100481705, - "step": 4716 - }, - { - "epoch": 0.567185715144592, - "flos": 14458572100320.0, - "grad_norm": 4.1252478322804285, - 
"learning_rate": 1.6635339954487566e-06, - "loss": 1.0047, - "num_input_tokens_seen": 100497300, - "step": 4717 - }, - { - "epoch": 0.5673059580352312, - "flos": 16507197058560.0, - "grad_norm": 4.119021852172765, - "learning_rate": 1.6627661518234765e-06, - "loss": 1.0496, - "num_input_tokens_seen": 100516275, - "step": 4718 - }, - { - "epoch": 0.5674262009258703, - "flos": 15427130767200.0, - "grad_norm": 2.86196731875012, - "learning_rate": 1.661998359356302e-06, - "loss": 1.0725, - "num_input_tokens_seen": 100535025, - "step": 4719 - }, - { - "epoch": 0.5675464438165093, - "flos": 48311952052920.0, - "grad_norm": 0.8224916487272711, - "learning_rate": 1.6612306181637077e-06, - "loss": 0.8125, - "num_input_tokens_seen": 100594070, - "step": 4720 - }, - { - "epoch": 0.5676666867071485, - "flos": 13386508476120.0, - "grad_norm": 4.84735032683218, - "learning_rate": 1.6604629283621598e-06, - "loss": 0.8735, - "num_input_tokens_seen": 100611720, - "step": 4721 - }, - { - "epoch": 0.5677869295977875, - "flos": 23927883515280.0, - "grad_norm": 3.137593642581333, - "learning_rate": 1.6596952900681152e-06, - "loss": 0.9636, - "num_input_tokens_seen": 100632200, - "step": 4722 - }, - { - "epoch": 0.5679071724884266, - "flos": 19968270420480.0, - "grad_norm": 5.542302871935778, - "learning_rate": 1.658927703398025e-06, - "loss": 1.0469, - "num_input_tokens_seen": 100651985, - "step": 4723 - }, - { - "epoch": 0.5680274153790658, - "flos": 16743524972640.0, - "grad_norm": 5.07049042987588, - "learning_rate": 1.6581601684683309e-06, - "loss": 0.9885, - "num_input_tokens_seen": 100672130, - "step": 4724 - }, - { - "epoch": 0.5681476582697048, - "flos": 15956552239080.0, - "grad_norm": 5.146130799049684, - "learning_rate": 1.6573926853954674e-06, - "loss": 0.9134, - "num_input_tokens_seen": 100689435, - "step": 4725 - }, - { - "epoch": 0.5682679011603439, - "flos": 13859348273640.0, - "grad_norm": 3.9249129364659656, - "learning_rate": 1.6566252542958608e-06, - "loss": 1.05, - "num_input_tokens_seen": 100708655, - "step": 4726 - }, - { - "epoch": 0.568388144050983, - "flos": 20491436934120.0, - "grad_norm": 2.700762118403366, - "learning_rate": 1.6558578752859305e-06, - "loss": 0.9945, - "num_input_tokens_seen": 100727335, - "step": 4727 - }, - { - "epoch": 0.5685083869416221, - "flos": 15060586128960.0, - "grad_norm": 4.664126295103906, - "learning_rate": 1.6550905484820865e-06, - "loss": 1.0134, - "num_input_tokens_seen": 100745515, - "step": 4728 - }, - { - "epoch": 0.5686286298322611, - "flos": 17661205015320.0, - "grad_norm": 4.4089042298714896, - "learning_rate": 1.6543232740007328e-06, - "loss": 1.0217, - "num_input_tokens_seen": 100762350, - "step": 4729 - }, - { - "epoch": 0.5687488727229003, - "flos": 18946318881480.0, - "grad_norm": 4.392095244967844, - "learning_rate": 1.653556051958263e-06, - "loss": 0.866, - "num_input_tokens_seen": 100781750, - "step": 4730 - }, - { - "epoch": 0.5688691156135394, - "flos": 14772674374800.0, - "grad_norm": 4.773469865505047, - "learning_rate": 1.6527888824710642e-06, - "loss": 0.9649, - "num_input_tokens_seen": 100801070, - "step": 4731 - }, - { - "epoch": 0.5689893585041784, - "flos": 18418093210440.0, - "grad_norm": 6.600176386932628, - "learning_rate": 1.6520217656555166e-06, - "loss": 0.9738, - "num_input_tokens_seen": 100820080, - "step": 4732 - }, - { - "epoch": 0.5691096013948175, - "flos": 16691358562920.0, - "grad_norm": 2.820982539082673, - "learning_rate": 1.65125470162799e-06, - "loss": 0.9282, - "num_input_tokens_seen": 100840155, - "step": 
4733 - }, - { - "epoch": 0.5692298442854566, - "flos": 12808875935760.0, - "grad_norm": 4.068869893406848, - "learning_rate": 1.6504876905048485e-06, - "loss": 0.9229, - "num_input_tokens_seen": 100856835, - "step": 4734 - }, - { - "epoch": 0.5693500871760957, - "flos": 16376428426320.0, - "grad_norm": 5.8188461607169035, - "learning_rate": 1.6497207324024464e-06, - "loss": 0.9577, - "num_input_tokens_seen": 100875455, - "step": 4735 - }, - { - "epoch": 0.5694703300667348, - "flos": 13466061207000.0, - "grad_norm": 3.326902352994061, - "learning_rate": 1.6489538274371305e-06, - "loss": 1.0483, - "num_input_tokens_seen": 100893780, - "step": 4736 - }, - { - "epoch": 0.5695905729573739, - "flos": 15563939726760.0, - "grad_norm": 3.4046945082516644, - "learning_rate": 1.6481869757252396e-06, - "loss": 1.0549, - "num_input_tokens_seen": 100911835, - "step": 4737 - }, - { - "epoch": 0.569710815848013, - "flos": 20284273711680.0, - "grad_norm": 2.172738541653308, - "learning_rate": 1.647420177383105e-06, - "loss": 0.9416, - "num_input_tokens_seen": 100934425, - "step": 4738 - }, - { - "epoch": 0.569831058738652, - "flos": 20204720980800.0, - "grad_norm": 3.3851645664347996, - "learning_rate": 1.646653432527049e-06, - "loss": 0.9545, - "num_input_tokens_seen": 100954785, - "step": 4739 - }, - { - "epoch": 0.5699513016292912, - "flos": 18396563247240.0, - "grad_norm": 2.2183248623008645, - "learning_rate": 1.645886741273387e-06, - "loss": 0.97, - "num_input_tokens_seen": 100976320, - "step": 4740 - }, - { - "epoch": 0.5700715445199303, - "flos": 12781581599280.0, - "grad_norm": 5.3588560870778865, - "learning_rate": 1.645120103738424e-06, - "loss": 0.9601, - "num_input_tokens_seen": 100993550, - "step": 4741 - }, - { - "epoch": 0.5701917874105693, - "flos": 8064191124120.0, - "grad_norm": 3.6416574741366805, - "learning_rate": 1.6443535200384591e-06, - "loss": 1.056, - "num_input_tokens_seen": 101011445, - "step": 4742 - }, - { - "epoch": 0.5703120303012085, - "flos": 15458196475560.0, - "grad_norm": 2.985904800818878, - "learning_rate": 1.6435869902897827e-06, - "loss": 0.9339, - "num_input_tokens_seen": 101029745, - "step": 4743 - }, - { - "epoch": 0.5704322731918475, - "flos": 40605101550960.0, - "grad_norm": 0.7887454387333407, - "learning_rate": 1.6428205146086764e-06, - "loss": 0.8673, - "num_input_tokens_seen": 101091445, - "step": 4744 - }, - { - "epoch": 0.5705525160824866, - "flos": 14724831245040.0, - "grad_norm": 3.045688965624952, - "learning_rate": 1.6420540931114142e-06, - "loss": 0.9356, - "num_input_tokens_seen": 101111755, - "step": 4745 - }, - { - "epoch": 0.5706727589731257, - "flos": 13309163377560.0, - "grad_norm": 2.481854966062844, - "learning_rate": 1.6412877259142616e-06, - "loss": 1.0108, - "num_input_tokens_seen": 101131395, - "step": 4746 - }, - { - "epoch": 0.5707930018637648, - "flos": 19678825588320.0, - "grad_norm": 4.3254536120729945, - "learning_rate": 1.6405214131334757e-06, - "loss": 0.957, - "num_input_tokens_seen": 101149640, - "step": 4747 - }, - { - "epoch": 0.5709132447544039, - "flos": 19652113821480.0, - "grad_norm": 7.05513953770472, - "learning_rate": 1.6397551548853052e-06, - "loss": 1.0282, - "num_input_tokens_seen": 101167525, - "step": 4748 - }, - { - "epoch": 0.571033487645043, - "flos": 15403454494800.0, - "grad_norm": 2.2688035438461496, - "learning_rate": 1.6389889512859917e-06, - "loss": 0.9285, - "num_input_tokens_seen": 101186905, - "step": 4749 - }, - { - "epoch": 0.5711537305356821, - "flos": 50261855257200.0, - "grad_norm": 
0.864302811825067, - "learning_rate": 1.638222802451767e-06, - "loss": 0.8879, - "num_input_tokens_seen": 101248105, - "step": 4750 - }, - { - "epoch": 0.5712739734263211, - "flos": 17582480146560.0, - "grad_norm": 2.7278907841385416, - "learning_rate": 1.6374567084988561e-06, - "loss": 0.9709, - "num_input_tokens_seen": 101269010, - "step": 4751 - }, - { - "epoch": 0.5713942163169603, - "flos": 18919699099320.0, - "grad_norm": 5.9437621171669335, - "learning_rate": 1.6366906695434738e-06, - "loss": 0.9944, - "num_input_tokens_seen": 101291250, - "step": 4752 - }, - { - "epoch": 0.5715144592075994, - "flos": 15013233584160.0, - "grad_norm": 3.267893901918974, - "learning_rate": 1.6359246857018275e-06, - "loss": 1.0807, - "num_input_tokens_seen": 101308500, - "step": 4753 - }, - { - "epoch": 0.5716347020982384, - "flos": 16585400680800.0, - "grad_norm": 5.6053867453714155, - "learning_rate": 1.6351587570901178e-06, - "loss": 1.0117, - "num_input_tokens_seen": 101328345, - "step": 4754 - }, - { - "epoch": 0.5717549449888776, - "flos": 12041654794920.0, - "grad_norm": 5.02413133543095, - "learning_rate": 1.634392883824534e-06, - "loss": 0.9879, - "num_input_tokens_seen": 101340065, - "step": 4755 - }, - { - "epoch": 0.5718751878795166, - "flos": 25344808506720.0, - "grad_norm": 3.2817507407166584, - "learning_rate": 1.6336270660212595e-06, - "loss": 0.898, - "num_input_tokens_seen": 101361380, - "step": 4756 - }, - { - "epoch": 0.5719954307701557, - "flos": 27569009732520.0, - "grad_norm": 4.418372826348767, - "learning_rate": 1.6328613037964676e-06, - "loss": 0.8802, - "num_input_tokens_seen": 101384165, - "step": 4757 - }, - { - "epoch": 0.5721156736607949, - "flos": 14645677114440.0, - "grad_norm": 3.7749302282896022, - "learning_rate": 1.6320955972663241e-06, - "loss": 0.9065, - "num_input_tokens_seen": 101403480, - "step": 4758 - }, - { - "epoch": 0.5722359165514339, - "flos": 26708064672000.0, - "grad_norm": 3.42166443616742, - "learning_rate": 1.6313299465469857e-06, - "loss": 0.8736, - "num_input_tokens_seen": 101425930, - "step": 4759 - }, - { - "epoch": 0.572356159442073, - "flos": 15608870008320.0, - "grad_norm": 6.040965107491653, - "learning_rate": 1.6305643517546014e-06, - "loss": 1.0069, - "num_input_tokens_seen": 101441030, - "step": 4760 - }, - { - "epoch": 0.5724764023327121, - "flos": 13570884611400.0, - "grad_norm": 3.4637949150639744, - "learning_rate": 1.629798813005311e-06, - "loss": 1.0693, - "num_input_tokens_seen": 101460470, - "step": 4761 - }, - { - "epoch": 0.5725966452233512, - "flos": 16218549426960.0, - "grad_norm": 4.701015062494253, - "learning_rate": 1.6290333304152473e-06, - "loss": 0.9305, - "num_input_tokens_seen": 101480065, - "step": 4762 - }, - { - "epoch": 0.5727168881139902, - "flos": 29640728393520.0, - "grad_norm": 2.9916405240105877, - "learning_rate": 1.6282679041005314e-06, - "loss": 0.7988, - "num_input_tokens_seen": 101505375, - "step": 4763 - }, - { - "epoch": 0.5728371310046293, - "flos": 10345525932360.0, - "grad_norm": 3.7804819383133, - "learning_rate": 1.6275025341772789e-06, - "loss": 1.099, - "num_input_tokens_seen": 101521400, - "step": 4764 - }, - { - "epoch": 0.5729573738952685, - "flos": 15274862833320.0, - "grad_norm": 6.868435309076639, - "learning_rate": 1.626737220761596e-06, - "loss": 1.049, - "num_input_tokens_seen": 101538585, - "step": 4765 - }, - { - "epoch": 0.5730776167859075, - "flos": 16794771535560.0, - "grad_norm": 5.099024002332916, - "learning_rate": 1.62597196396958e-06, - "loss": 1.0135, - 
"num_input_tokens_seen": 101556475, - "step": 4766 - }, - { - "epoch": 0.5731978596765466, - "flos": 18316795885440.0, - "grad_norm": 4.821506741813449, - "learning_rate": 1.6252067639173197e-06, - "loss": 1.077, - "num_input_tokens_seen": 101578105, - "step": 4767 - }, - { - "epoch": 0.5733181025671857, - "flos": 18762586638960.0, - "grad_norm": 3.562998418909879, - "learning_rate": 1.6244416207208956e-06, - "loss": 0.9183, - "num_input_tokens_seen": 101598760, - "step": 4768 - }, - { - "epoch": 0.5734383454578248, - "flos": 20964828639720.0, - "grad_norm": 5.094405804876634, - "learning_rate": 1.6236765344963787e-06, - "loss": 0.965, - "num_input_tokens_seen": 101619740, - "step": 4769 - }, - { - "epoch": 0.5735585883484638, - "flos": 24950724239520.0, - "grad_norm": 7.457445855502012, - "learning_rate": 1.6229115053598322e-06, - "loss": 0.9193, - "num_input_tokens_seen": 101641215, - "step": 4770 - }, - { - "epoch": 0.573678831239103, - "flos": 13308734115720.0, - "grad_norm": 5.82130896723761, - "learning_rate": 1.6221465334273108e-06, - "loss": 0.9342, - "num_input_tokens_seen": 101660145, - "step": 4771 - }, - { - "epoch": 0.5737990741297421, - "flos": 18290697349800.0, - "grad_norm": 7.460278930358435, - "learning_rate": 1.6213816188148593e-06, - "loss": 0.8348, - "num_input_tokens_seen": 101678570, - "step": 4772 - }, - { - "epoch": 0.5739193170203811, - "flos": 19416061861440.0, - "grad_norm": 3.5695742588871116, - "learning_rate": 1.6206167616385162e-06, - "loss": 0.9981, - "num_input_tokens_seen": 101699355, - "step": 4773 - }, - { - "epoch": 0.5740395599110203, - "flos": 8614989251400.0, - "grad_norm": 3.6500717521826256, - "learning_rate": 1.6198519620143078e-06, - "loss": 0.972, - "num_input_tokens_seen": 101716230, - "step": 4774 - }, - { - "epoch": 0.5741598028016593, - "flos": 18447687163920.0, - "grad_norm": 2.7761433900633414, - "learning_rate": 1.6190872200582546e-06, - "loss": 1.0051, - "num_input_tokens_seen": 101737690, - "step": 4775 - }, - { - "epoch": 0.5742800456922984, - "flos": 13648812279600.0, - "grad_norm": 18.890671849094097, - "learning_rate": 1.6183225358863676e-06, - "loss": 1.0044, - "num_input_tokens_seen": 101754305, - "step": 4776 - }, - { - "epoch": 0.5744002885829376, - "flos": 22040510328000.0, - "grad_norm": 5.842532107485757, - "learning_rate": 1.617557909614648e-06, - "loss": 0.941, - "num_input_tokens_seen": 101773460, - "step": 4777 - }, - { - "epoch": 0.5745205314735766, - "flos": 16951608041880.0, - "grad_norm": 3.509475008184108, - "learning_rate": 1.6167933413590899e-06, - "loss": 1.0762, - "num_input_tokens_seen": 101792085, - "step": 4778 - }, - { - "epoch": 0.5746407743642157, - "flos": 8666757060840.0, - "grad_norm": 18.714030439826008, - "learning_rate": 1.6160288312356773e-06, - "loss": 1.1315, - "num_input_tokens_seen": 101808935, - "step": 4779 - }, - { - "epoch": 0.5747610172548548, - "flos": 17162879913360.0, - "grad_norm": 4.979912652777845, - "learning_rate": 1.6152643793603857e-06, - "loss": 1.0523, - "num_input_tokens_seen": 101829005, - "step": 4780 - }, - { - "epoch": 0.5748812601454939, - "flos": 18078750924000.0, - "grad_norm": 4.083219072468977, - "learning_rate": 1.6144999858491815e-06, - "loss": 1.1076, - "num_input_tokens_seen": 101847355, - "step": 4781 - }, - { - "epoch": 0.575001503036133, - "flos": 21827613393840.0, - "grad_norm": 5.793428368061469, - "learning_rate": 1.6137356508180232e-06, - "loss": 1.0836, - "num_input_tokens_seen": 101868785, - "step": 4782 - }, - { - "epoch": 0.5751217459267721, - "flos": 
15433079109840.0, - "grad_norm": 3.528638971563708, - "learning_rate": 1.6129713743828593e-06, - "loss": 1.0427, - "num_input_tokens_seen": 101887515, - "step": 4783 - }, - { - "epoch": 0.5752419888174112, - "flos": 15378950360280.0, - "grad_norm": 3.0522332442163735, - "learning_rate": 1.6122071566596306e-06, - "loss": 0.9836, - "num_input_tokens_seen": 101907510, - "step": 4784 - }, - { - "epoch": 0.5753622317080502, - "flos": 12592514245320.0, - "grad_norm": 12.844096306728762, - "learning_rate": 1.6114429977642674e-06, - "loss": 1.0569, - "num_input_tokens_seen": 101921735, - "step": 4785 - }, - { - "epoch": 0.5754824745986894, - "flos": 14040596929800.0, - "grad_norm": 4.057917901198975, - "learning_rate": 1.6106788978126926e-06, - "loss": 0.9536, - "num_input_tokens_seen": 101940430, - "step": 4786 - }, - { - "epoch": 0.5756027174893285, - "flos": 22086299133240.0, - "grad_norm": 5.676737904903378, - "learning_rate": 1.6099148569208196e-06, - "loss": 1.0022, - "num_input_tokens_seen": 101957370, - "step": 4787 - }, - { - "epoch": 0.5757229603799675, - "flos": 20334355135320.0, - "grad_norm": 3.8242349595066325, - "learning_rate": 1.6091508752045523e-06, - "loss": 0.8556, - "num_input_tokens_seen": 101977970, - "step": 4788 - }, - { - "epoch": 0.5758432032706067, - "flos": 16346957119080.0, - "grad_norm": 6.387184914657881, - "learning_rate": 1.608386952779787e-06, - "loss": 1.0924, - "num_input_tokens_seen": 101997060, - "step": 4789 - }, - { - "epoch": 0.5759634461612457, - "flos": 18317225147280.0, - "grad_norm": 2.779782025690425, - "learning_rate": 1.6076230897624098e-06, - "loss": 0.9726, - "num_input_tokens_seen": 102018985, - "step": 4790 - }, - { - "epoch": 0.5760836890518848, - "flos": 21804213075480.0, - "grad_norm": 9.28615453326214, - "learning_rate": 1.6068592862682974e-06, - "loss": 1.0085, - "num_input_tokens_seen": 102036860, - "step": 4791 - }, - { - "epoch": 0.576203931942524, - "flos": 26078357706600.0, - "grad_norm": 14.304200543834744, - "learning_rate": 1.6060955424133187e-06, - "loss": 0.9553, - "num_input_tokens_seen": 102057505, - "step": 4792 - }, - { - "epoch": 0.576324174833163, - "flos": 18156586607520.0, - "grad_norm": 5.905740382899916, - "learning_rate": 1.6053318583133332e-06, - "loss": 1.1189, - "num_input_tokens_seen": 102078095, - "step": 4793 - }, - { - "epoch": 0.5764444177238021, - "flos": 17871281085960.0, - "grad_norm": 5.869247217210101, - "learning_rate": 1.6045682340841907e-06, - "loss": 0.9797, - "num_input_tokens_seen": 102096740, - "step": 4794 - }, - { - "epoch": 0.5765646606144411, - "flos": 44532851736840.0, - "grad_norm": 0.7872924970299597, - "learning_rate": 1.6038046698417336e-06, - "loss": 0.8359, - "num_input_tokens_seen": 102157355, - "step": 4795 - }, - { - "epoch": 0.5766849035050803, - "flos": 17870851824120.0, - "grad_norm": 5.472999186257334, - "learning_rate": 1.6030411657017919e-06, - "loss": 0.9056, - "num_input_tokens_seen": 102176730, - "step": 4796 - }, - { - "epoch": 0.5768051463957193, - "flos": 11310129258000.0, - "grad_norm": 5.450804470778796, - "learning_rate": 1.6022777217801903e-06, - "loss": 1.0725, - "num_input_tokens_seen": 102193405, - "step": 4797 - }, - { - "epoch": 0.5769253892863584, - "flos": 15747089399640.0, - "grad_norm": 4.314749531819763, - "learning_rate": 1.601514338192742e-06, - "loss": 0.9485, - "num_input_tokens_seen": 102213055, - "step": 4798 - }, - { - "epoch": 0.5770456321769976, - "flos": 16244893255080.0, - "grad_norm": 3.8847866144087946, - "learning_rate": 1.6007510150552514e-06, - 
"loss": 0.941, - "num_input_tokens_seen": 102230835, - "step": 4799 - }, - { - "epoch": 0.5771658750676366, - "flos": 33132008940120.0, - "grad_norm": 4.358868969496251, - "learning_rate": 1.599987752483515e-06, - "loss": 0.8429, - "num_input_tokens_seen": 102255000, - "step": 4800 - }, - { - "epoch": 0.5772861179582757, - "flos": 15743961920520.0, - "grad_norm": 4.615773250649994, - "learning_rate": 1.5992245505933184e-06, - "loss": 0.9077, - "num_input_tokens_seen": 102274420, - "step": 4801 - }, - { - "epoch": 0.5774063608489148, - "flos": 22276562288040.0, - "grad_norm": 16.06612560157872, - "learning_rate": 1.5984614095004388e-06, - "loss": 0.9388, - "num_input_tokens_seen": 102295275, - "step": 4802 - }, - { - "epoch": 0.5775266037395539, - "flos": 16008136079160.0, - "grad_norm": 2.730220581477429, - "learning_rate": 1.5976983293206438e-06, - "loss": 1.0253, - "num_input_tokens_seen": 102310800, - "step": 4803 - }, - { - "epoch": 0.577646846630193, - "flos": 14960208650760.0, - "grad_norm": 3.760830307856523, - "learning_rate": 1.5969353101696928e-06, - "loss": 0.9379, - "num_input_tokens_seen": 102328960, - "step": 4804 - }, - { - "epoch": 0.5777670895208321, - "flos": 21173340970800.0, - "grad_norm": 5.116145416614954, - "learning_rate": 1.5961723521633341e-06, - "loss": 1.027, - "num_input_tokens_seen": 102349920, - "step": 4805 - }, - { - "epoch": 0.5778873324114712, - "flos": 13833127091760.0, - "grad_norm": 4.599071923217949, - "learning_rate": 1.5954094554173097e-06, - "loss": 1.13, - "num_input_tokens_seen": 102367630, - "step": 4806 - }, - { - "epoch": 0.5780075753021102, - "flos": 9978398724480.0, - "grad_norm": 4.39186898524919, - "learning_rate": 1.5946466200473482e-06, - "loss": 1.0256, - "num_input_tokens_seen": 102385260, - "step": 4807 - }, - { - "epoch": 0.5781278181927494, - "flos": 10786380174720.0, - "grad_norm": 6.47934875559503, - "learning_rate": 1.5938838461691723e-06, - "loss": 1.0609, - "num_input_tokens_seen": 102401890, - "step": 4808 - }, - { - "epoch": 0.5782480610833884, - "flos": 11839704037680.0, - "grad_norm": 4.133966353354723, - "learning_rate": 1.593121133898494e-06, - "loss": 1.041, - "num_input_tokens_seen": 102418815, - "step": 4809 - }, - { - "epoch": 0.5783683039740275, - "flos": 18131806518960.0, - "grad_norm": 5.853357261371293, - "learning_rate": 1.592358483351016e-06, - "loss": 1.0075, - "num_input_tokens_seen": 102438710, - "step": 4810 - }, - { - "epoch": 0.5784885468646667, - "flos": 13046460973800.0, - "grad_norm": 3.3921243894549558, - "learning_rate": 1.5915958946424326e-06, - "loss": 0.9564, - "num_input_tokens_seen": 102457115, - "step": 4811 - }, - { - "epoch": 0.5786087897553057, - "flos": 32950545653040.0, - "grad_norm": 2.8611551382222506, - "learning_rate": 1.5908333678884271e-06, - "loss": 0.9735, - "num_input_tokens_seen": 102483255, - "step": 4812 - }, - { - "epoch": 0.5787290326459448, - "flos": 8720119271400.0, - "grad_norm": 3.640382292028778, - "learning_rate": 1.5900709032046743e-06, - "loss": 0.9566, - "num_input_tokens_seen": 102501050, - "step": 4813 - }, - { - "epoch": 0.5788492755365839, - "flos": 16556235989160.0, - "grad_norm": 5.591574133814674, - "learning_rate": 1.5893085007068391e-06, - "loss": 1.0165, - "num_input_tokens_seen": 102518330, - "step": 4814 - }, - { - "epoch": 0.578969518427223, - "flos": 17110744165200.0, - "grad_norm": 3.1974961856583666, - "learning_rate": 1.5885461605105786e-06, - "loss": 0.9419, - "num_input_tokens_seen": 102539650, - "step": 4815 - }, - { - "epoch": 0.579089761317862, - 
"flos": 15537381267720.0, - "grad_norm": 5.316082453344459, - "learning_rate": 1.5877838827315375e-06, - "loss": 0.9941, - "num_input_tokens_seen": 102557915, - "step": 4816 - }, - { - "epoch": 0.5792100042085012, - "flos": 16297611572880.0, - "grad_norm": 3.0416866632788393, - "learning_rate": 1.587021667485355e-06, - "loss": 0.9192, - "num_input_tokens_seen": 102577005, - "step": 4817 - }, - { - "epoch": 0.5793302470991403, - "flos": 15248580328320.0, - "grad_norm": 4.378600900231244, - "learning_rate": 1.5862595148876559e-06, - "loss": 1.0006, - "num_input_tokens_seen": 102596830, - "step": 4818 - }, - { - "epoch": 0.5794504899897793, - "flos": 8953289044800.0, - "grad_norm": 5.240059842940037, - "learning_rate": 1.58549742505406e-06, - "loss": 1.0007, - "num_input_tokens_seen": 102611295, - "step": 4819 - }, - { - "epoch": 0.5795707328804185, - "flos": 10503251623920.0, - "grad_norm": 4.562376464843515, - "learning_rate": 1.5847353981001747e-06, - "loss": 0.9771, - "num_input_tokens_seen": 102628195, - "step": 4820 - }, - { - "epoch": 0.5796909757710575, - "flos": 26000154084360.0, - "grad_norm": 2.752193096627758, - "learning_rate": 1.5839734341415993e-06, - "loss": 0.9201, - "num_input_tokens_seen": 102650115, - "step": 4821 - }, - { - "epoch": 0.5798112186616966, - "flos": 16376919011280.0, - "grad_norm": 2.8260505613324614, - "learning_rate": 1.5832115332939238e-06, - "loss": 0.9938, - "num_input_tokens_seen": 102668275, - "step": 4822 - }, - { - "epoch": 0.5799314615523358, - "flos": 11786188519320.0, - "grad_norm": 4.032014759475699, - "learning_rate": 1.5824496956727272e-06, - "loss": 0.9697, - "num_input_tokens_seen": 102685200, - "step": 4823 - }, - { - "epoch": 0.5800517044429748, - "flos": 14541221648760.0, - "grad_norm": 4.145605927689031, - "learning_rate": 1.5816879213935797e-06, - "loss": 0.9485, - "num_input_tokens_seen": 102703730, - "step": 4824 - }, - { - "epoch": 0.5801719473336139, - "flos": 22484154772320.0, - "grad_norm": 4.4269175539579475, - "learning_rate": 1.5809262105720416e-06, - "loss": 1.0247, - "num_input_tokens_seen": 102724490, - "step": 4825 - }, - { - "epoch": 0.580292190224253, - "flos": 14462343472200.0, - "grad_norm": 3.8988134780026193, - "learning_rate": 1.5801645633236644e-06, - "loss": 1.0199, - "num_input_tokens_seen": 102745195, - "step": 4826 - }, - { - "epoch": 0.5804124331148921, - "flos": 18946533512400.0, - "grad_norm": 4.121084030646122, - "learning_rate": 1.579402979763989e-06, - "loss": 1.0046, - "num_input_tokens_seen": 102765250, - "step": 4827 - }, - { - "epoch": 0.5805326760055312, - "flos": 9505037680440.0, - "grad_norm": 10.550736778529934, - "learning_rate": 1.578641460008548e-06, - "loss": 1.0172, - "num_input_tokens_seen": 102782705, - "step": 4828 - }, - { - "epoch": 0.5806529188961702, - "flos": 8508050199360.0, - "grad_norm": 7.837609064295473, - "learning_rate": 1.5778800041728613e-06, - "loss": 0.8864, - "num_input_tokens_seen": 102798715, - "step": 4829 - }, - { - "epoch": 0.5807731617868094, - "flos": 18658161834840.0, - "grad_norm": 4.255441211816533, - "learning_rate": 1.577118612372443e-06, - "loss": 0.8893, - "num_input_tokens_seen": 102820275, - "step": 4830 - }, - { - "epoch": 0.5808934046774484, - "flos": 27101198430840.0, - "grad_norm": 4.36536283368035, - "learning_rate": 1.5763572847227943e-06, - "loss": 0.9224, - "num_input_tokens_seen": 102840880, - "step": 4831 - }, - { - "epoch": 0.5810136475680875, - "flos": 14541374956560.0, - "grad_norm": 3.51679315445002, - "learning_rate": 1.5755960213394091e-06, - 
"loss": 1.0208, - "num_input_tokens_seen": 102857700, - "step": 4832 - }, - { - "epoch": 0.5811338904587267, - "flos": 12417121947120.0, - "grad_norm": 5.508668484016521, - "learning_rate": 1.5748348223377703e-06, - "loss": 1.0076, - "num_input_tokens_seen": 102874975, - "step": 4833 - }, - { - "epoch": 0.5812541333493657, - "flos": 13801846752480.0, - "grad_norm": 3.010499224911197, - "learning_rate": 1.5740736878333507e-06, - "loss": 1.0067, - "num_input_tokens_seen": 102892535, - "step": 4834 - }, - { - "epoch": 0.5813743762400048, - "flos": 14619731886600.0, - "grad_norm": 3.6980299524855096, - "learning_rate": 1.5733126179416143e-06, - "loss": 1.0015, - "num_input_tokens_seen": 102906740, - "step": 4835 - }, - { - "epoch": 0.5814946191306439, - "flos": 23662513555800.0, - "grad_norm": 4.476732713482442, - "learning_rate": 1.5725516127780137e-06, - "loss": 0.9385, - "num_input_tokens_seen": 102928595, - "step": 4836 - }, - { - "epoch": 0.581614862021283, - "flos": 11420349096960.0, - "grad_norm": 7.344236447070486, - "learning_rate": 1.5717906724579943e-06, - "loss": 1.1, - "num_input_tokens_seen": 102945375, - "step": 4837 - }, - { - "epoch": 0.581735104911922, - "flos": 24111830388720.0, - "grad_norm": 3.986784800902358, - "learning_rate": 1.571029797096989e-06, - "loss": 0.9026, - "num_input_tokens_seen": 102966200, - "step": 4838 - }, - { - "epoch": 0.5818553478025612, - "flos": 16586167219800.0, - "grad_norm": 7.171604259425404, - "learning_rate": 1.570268986810423e-06, - "loss": 1.0071, - "num_input_tokens_seen": 102985815, - "step": 4839 - }, - { - "epoch": 0.5819755906932003, - "flos": 14908042241040.0, - "grad_norm": 4.190219963691192, - "learning_rate": 1.5695082417137096e-06, - "loss": 0.9806, - "num_input_tokens_seen": 103003410, - "step": 4840 - }, - { - "epoch": 0.5820958335838393, - "flos": 15220734083760.0, - "grad_norm": 8.522588895656941, - "learning_rate": 1.5687475619222539e-06, - "loss": 0.9788, - "num_input_tokens_seen": 103023085, - "step": 4841 - }, - { - "epoch": 0.5822160764744785, - "flos": 12731346867840.0, - "grad_norm": 6.369481409542984, - "learning_rate": 1.5679869475514496e-06, - "loss": 0.956, - "num_input_tokens_seen": 103039740, - "step": 4842 - }, - { - "epoch": 0.5823363193651175, - "flos": 16371767869200.0, - "grad_norm": 6.735947569165085, - "learning_rate": 1.567226398716682e-06, - "loss": 1.025, - "num_input_tokens_seen": 103059375, - "step": 4843 - }, - { - "epoch": 0.5824565622557566, - "flos": 23457435319440.0, - "grad_norm": 4.787724916438734, - "learning_rate": 1.566465915533326e-06, - "loss": 0.8576, - "num_input_tokens_seen": 103081125, - "step": 4844 - }, - { - "epoch": 0.5825768051463958, - "flos": 15793614082320.0, - "grad_norm": 4.446707401079526, - "learning_rate": 1.5657054981167458e-06, - "loss": 1.0978, - "num_input_tokens_seen": 103099740, - "step": 4845 - }, - { - "epoch": 0.5826970480370348, - "flos": 19942723792920.0, - "grad_norm": 5.9040600788812325, - "learning_rate": 1.5649451465822965e-06, - "loss": 0.9022, - "num_input_tokens_seen": 103120850, - "step": 4846 - }, - { - "epoch": 0.5828172909276739, - "flos": 12652713983760.0, - "grad_norm": 3.2142014447019376, - "learning_rate": 1.5641848610453218e-06, - "loss": 1.0583, - "num_input_tokens_seen": 103139230, - "step": 4847 - }, - { - "epoch": 0.582937533818313, - "flos": 14095308249000.0, - "grad_norm": 4.206262941609804, - "learning_rate": 1.563424641621158e-06, - "loss": 1.0794, - "num_input_tokens_seen": 103158130, - "step": 4848 - }, - { - "epoch": 0.5830577767089521, 
- "flos": 19130051124000.0, - "grad_norm": 4.738093662558369, - "learning_rate": 1.5626644884251282e-06, - "loss": 0.9203, - "num_input_tokens_seen": 103177370, - "step": 4849 - }, - { - "epoch": 0.5831780195995911, - "flos": 17999412824040.0, - "grad_norm": 3.5922654776055567, - "learning_rate": 1.5619044015725488e-06, - "loss": 1.1073, - "num_input_tokens_seen": 103196780, - "step": 4850 - }, - { - "epoch": 0.5832982624902303, - "flos": 10424066831760.0, - "grad_norm": 5.621102572308921, - "learning_rate": 1.5611443811787224e-06, - "loss": 1.0884, - "num_input_tokens_seen": 103210625, - "step": 4851 - }, - { - "epoch": 0.5834185053808694, - "flos": 14511045125640.0, - "grad_norm": 4.415228171581726, - "learning_rate": 1.560384427358945e-06, - "loss": 0.9263, - "num_input_tokens_seen": 103229890, - "step": 4852 - }, - { - "epoch": 0.5835387482715084, - "flos": 19367145577080.0, - "grad_norm": 3.729530160475379, - "learning_rate": 1.5596245402284998e-06, - "loss": 0.9607, - "num_input_tokens_seen": 103253135, - "step": 4853 - }, - { - "epoch": 0.5836589911621476, - "flos": 11787905566680.0, - "grad_norm": 3.7896845041576017, - "learning_rate": 1.5588647199026619e-06, - "loss": 1.0391, - "num_input_tokens_seen": 103270590, - "step": 4854 - }, - { - "epoch": 0.5837792340527866, - "flos": 14512976803920.0, - "grad_norm": 3.1491394017830956, - "learning_rate": 1.5581049664966956e-06, - "loss": 1.0983, - "num_input_tokens_seen": 103288070, - "step": 4855 - }, - { - "epoch": 0.5838994769434257, - "flos": 47249889645360.0, - "grad_norm": 1.0414606399240964, - "learning_rate": 1.5573452801258545e-06, - "loss": 0.92, - "num_input_tokens_seen": 103334960, - "step": 4856 - }, - { - "epoch": 0.5840197198340649, - "flos": 15248978928600.0, - "grad_norm": 4.57047504885884, - "learning_rate": 1.5565856609053824e-06, - "loss": 0.8559, - "num_input_tokens_seen": 103353475, - "step": 4857 - }, - { - "epoch": 0.5841399627247039, - "flos": 13570547334240.0, - "grad_norm": 6.2480142108985195, - "learning_rate": 1.5558261089505127e-06, - "loss": 1.0253, - "num_input_tokens_seen": 103371925, - "step": 4858 - }, - { - "epoch": 0.584260205615343, - "flos": 18809233967880.0, - "grad_norm": 5.549703215490833, - "learning_rate": 1.5550666243764697e-06, - "loss": 1.0217, - "num_input_tokens_seen": 103389805, - "step": 4859 - }, - { - "epoch": 0.584380448505982, - "flos": 9795157066920.0, - "grad_norm": 5.3672248139396155, - "learning_rate": 1.554307207298465e-06, - "loss": 0.9885, - "num_input_tokens_seen": 103407785, - "step": 4860 - }, - { - "epoch": 0.5845006913966212, - "flos": 15301605261720.0, - "grad_norm": 3.4648965962711733, - "learning_rate": 1.553547857831704e-06, - "loss": 1.0101, - "num_input_tokens_seen": 103424015, - "step": 4861 - }, - { - "epoch": 0.5846209342872603, - "flos": 41774261866440.0, - "grad_norm": 0.9892197325777974, - "learning_rate": 1.5527885760913771e-06, - "loss": 0.9494, - "num_input_tokens_seen": 103473625, - "step": 4862 - }, - { - "epoch": 0.5847411771778993, - "flos": 13177382913840.0, - "grad_norm": 3.3893738297184877, - "learning_rate": 1.552029362192668e-06, - "loss": 0.9903, - "num_input_tokens_seen": 103492605, - "step": 4863 - }, - { - "epoch": 0.5848614200685385, - "flos": 17239550457600.0, - "grad_norm": 2.998981302872772, - "learning_rate": 1.5512702162507478e-06, - "loss": 0.9351, - "num_input_tokens_seen": 103512640, - "step": 4864 - }, - { - "epoch": 0.5849816629591775, - "flos": 51322328811720.0, - "grad_norm": 0.9466141180835754, - "learning_rate": 
1.5505111383807792e-06, - "loss": 0.7988, - "num_input_tokens_seen": 103575030, - "step": 4865 - }, - { - "epoch": 0.5851019058498166, - "flos": 16924160397600.0, - "grad_norm": 3.1227874552687207, - "learning_rate": 1.5497521286979138e-06, - "loss": 1.0299, - "num_input_tokens_seen": 103594990, - "step": 4866 - }, - { - "epoch": 0.5852221487404557, - "flos": 17345968263120.0, - "grad_norm": 3.5693014704218204, - "learning_rate": 1.5489931873172927e-06, - "loss": 0.9725, - "num_input_tokens_seen": 103616030, - "step": 4867 - }, - { - "epoch": 0.5853423916310948, - "flos": 19417932216600.0, - "grad_norm": 3.947516160174505, - "learning_rate": 1.5482343143540467e-06, - "loss": 1.0219, - "num_input_tokens_seen": 103637015, - "step": 4868 - }, - { - "epoch": 0.5854626345217339, - "flos": 8430981054840.0, - "grad_norm": 7.355648709130209, - "learning_rate": 1.547475509923295e-06, - "loss": 1.0551, - "num_input_tokens_seen": 103653775, - "step": 4869 - }, - { - "epoch": 0.585582877412373, - "flos": 46065209032440.0, - "grad_norm": 0.7921576650445019, - "learning_rate": 1.5467167741401495e-06, - "loss": 0.8232, - "num_input_tokens_seen": 103714975, - "step": 4870 - }, - { - "epoch": 0.5857031203030121, - "flos": 12043678457880.0, - "grad_norm": 7.472213644269378, - "learning_rate": 1.5459581071197083e-06, - "loss": 0.941, - "num_input_tokens_seen": 103730355, - "step": 4871 - }, - { - "epoch": 0.5858233631936511, - "flos": 14828520171720.0, - "grad_norm": 7.867454183352353, - "learning_rate": 1.5451995089770624e-06, - "loss": 1.0442, - "num_input_tokens_seen": 103749860, - "step": 4872 - }, - { - "epoch": 0.5859436060842903, - "flos": 16486985542440.0, - "grad_norm": 2.529666829392897, - "learning_rate": 1.5444409798272885e-06, - "loss": 0.9471, - "num_input_tokens_seen": 103773670, - "step": 4873 - }, - { - "epoch": 0.5860638489749294, - "flos": 15983631944640.0, - "grad_norm": 3.312047387636418, - "learning_rate": 1.543682519785456e-06, - "loss": 1.0254, - "num_input_tokens_seen": 103791870, - "step": 4874 - }, - { - "epoch": 0.5861840918655684, - "flos": 12443005851840.0, - "grad_norm": 8.050681745200217, - "learning_rate": 1.5429241289666219e-06, - "loss": 1.0124, - "num_input_tokens_seen": 103809090, - "step": 4875 - }, - { - "epoch": 0.5863043347562076, - "flos": 18185138067960.0, - "grad_norm": 5.303301578205272, - "learning_rate": 1.5421658074858342e-06, - "loss": 0.9198, - "num_input_tokens_seen": 103826915, - "step": 4876 - }, - { - "epoch": 0.5864245776468466, - "flos": 14671836973200.0, - "grad_norm": 4.744053581887265, - "learning_rate": 1.5414075554581298e-06, - "loss": 0.8778, - "num_input_tokens_seen": 103844680, - "step": 4877 - }, - { - "epoch": 0.5865448205374857, - "flos": 20598437309280.0, - "grad_norm": 3.4226175138418737, - "learning_rate": 1.5406493729985348e-06, - "loss": 0.9963, - "num_input_tokens_seen": 103863595, - "step": 4878 - }, - { - "epoch": 0.5866650634281249, - "flos": 18390246965880.0, - "grad_norm": 3.3777539355568695, - "learning_rate": 1.5398912602220644e-06, - "loss": 0.9413, - "num_input_tokens_seen": 103882590, - "step": 4879 - }, - { - "epoch": 0.5867853063187639, - "flos": 12073241749800.0, - "grad_norm": 4.394140824670521, - "learning_rate": 1.539133217243724e-06, - "loss": 1.0016, - "num_input_tokens_seen": 103899330, - "step": 4880 - }, - { - "epoch": 0.586905549209403, - "flos": 17530467044640.0, - "grad_norm": 5.435032251678202, - "learning_rate": 1.5383752441785081e-06, - "loss": 0.9709, - "num_input_tokens_seen": 103918275, - "step": 4881 - }, 
- { - "epoch": 0.5870257921000421, - "flos": 10397876311440.0, - "grad_norm": 5.500717116335407, - "learning_rate": 1.5376173411414003e-06, - "loss": 1.0846, - "num_input_tokens_seen": 103936035, - "step": 4882 - }, - { - "epoch": 0.5871460349906812, - "flos": 17005767453000.0, - "grad_norm": 4.013186178033676, - "learning_rate": 1.5368595082473753e-06, - "loss": 1.0049, - "num_input_tokens_seen": 103954055, - "step": 4883 - }, - { - "epoch": 0.5872662778813202, - "flos": 15747549323040.0, - "grad_norm": 3.1655595416788582, - "learning_rate": 1.5361017456113935e-06, - "loss": 1.009, - "num_input_tokens_seen": 103974125, - "step": 4884 - }, - { - "epoch": 0.5873865207719594, - "flos": 13072344878520.0, - "grad_norm": 7.861629943740322, - "learning_rate": 1.5353440533484085e-06, - "loss": 1.0814, - "num_input_tokens_seen": 103992700, - "step": 4885 - }, - { - "epoch": 0.5875067636625985, - "flos": 38638242504000.0, - "grad_norm": 5.008912916282861, - "learning_rate": 1.534586431573361e-06, - "loss": 0.8908, - "num_input_tokens_seen": 104017360, - "step": 4886 - }, - { - "epoch": 0.5876270065532375, - "flos": 19937725958640.0, - "grad_norm": 4.873404021544094, - "learning_rate": 1.5338288804011817e-06, - "loss": 1.0012, - "num_input_tokens_seen": 104036580, - "step": 4887 - }, - { - "epoch": 0.5877472494438767, - "flos": 15170652660120.0, - "grad_norm": 6.959470035501901, - "learning_rate": 1.533071399946791e-06, - "loss": 0.9172, - "num_input_tokens_seen": 104055045, - "step": 4888 - }, - { - "epoch": 0.5878674923345157, - "flos": 15904815091200.0, - "grad_norm": 6.31903221639639, - "learning_rate": 1.5323139903250977e-06, - "loss": 0.7973, - "num_input_tokens_seen": 104075370, - "step": 4889 - }, - { - "epoch": 0.5879877352251548, - "flos": 15534683050440.0, - "grad_norm": 3.800071846132202, - "learning_rate": 1.5315566516510002e-06, - "loss": 0.9987, - "num_input_tokens_seen": 104093260, - "step": 4890 - }, - { - "epoch": 0.5881079781157939, - "flos": 12391023411480.0, - "grad_norm": 8.435254511676444, - "learning_rate": 1.5307993840393857e-06, - "loss": 0.9019, - "num_input_tokens_seen": 104111060, - "step": 4891 - }, - { - "epoch": 0.588228221006433, - "flos": 16062080859360.0, - "grad_norm": 5.331995299501057, - "learning_rate": 1.530042187605132e-06, - "loss": 1.0325, - "num_input_tokens_seen": 104130035, - "step": 4892 - }, - { - "epoch": 0.5883484638970721, - "flos": 18631358083320.0, - "grad_norm": 5.071352906305473, - "learning_rate": 1.5292850624631044e-06, - "loss": 1.063, - "num_input_tokens_seen": 104151950, - "step": 4893 - }, - { - "epoch": 0.5884687067877111, - "flos": 21697549977480.0, - "grad_norm": 9.803131879829033, - "learning_rate": 1.5285280087281593e-06, - "loss": 1.0125, - "num_input_tokens_seen": 104172400, - "step": 4894 - }, - { - "epoch": 0.5885889496783503, - "flos": 50495914215840.0, - "grad_norm": 0.6342857542354001, - "learning_rate": 1.5277710265151398e-06, - "loss": 0.7997, - "num_input_tokens_seen": 104241600, - "step": 4895 - }, - { - "epoch": 0.5887091925689893, - "flos": 13544418137040.0, - "grad_norm": 4.23174336429386, - "learning_rate": 1.5270141159388803e-06, - "loss": 0.9801, - "num_input_tokens_seen": 104258340, - "step": 4896 - }, - { - "epoch": 0.5888294354596284, - "flos": 16559608760760.0, - "grad_norm": 3.0526895452662446, - "learning_rate": 1.526257277114203e-06, - "loss": 1.0195, - "num_input_tokens_seen": 104279135, - "step": 4897 - }, - { - "epoch": 0.5889496783502676, - "flos": 15616259444280.0, - "grad_norm": 9.611508512519274, - 
"learning_rate": 1.5255005101559201e-06, - "loss": 1.0219, - "num_input_tokens_seen": 104296465, - "step": 4898 - }, - { - "epoch": 0.5890699212409066, - "flos": 15402718617360.0, - "grad_norm": 4.896476779075181, - "learning_rate": 1.524743815178833e-06, - "loss": 0.9853, - "num_input_tokens_seen": 104314145, - "step": 4899 - }, - { - "epoch": 0.5891901641315457, - "flos": 13807427156400.0, - "grad_norm": 4.049517030528701, - "learning_rate": 1.5239871922977315e-06, - "loss": 1.0316, - "num_input_tokens_seen": 104333780, - "step": 4900 - }, - { - "epoch": 0.5893104070221848, - "flos": 13911790637400.0, - "grad_norm": 4.577044779436167, - "learning_rate": 1.523230641627394e-06, - "loss": 1.1148, - "num_input_tokens_seen": 104352485, - "step": 4901 - }, - { - "epoch": 0.5894306499128239, - "flos": 20703506006160.0, - "grad_norm": 4.304685949197203, - "learning_rate": 1.5224741632825888e-06, - "loss": 0.9398, - "num_input_tokens_seen": 104372395, - "step": 4902 - }, - { - "epoch": 0.589550892803463, - "flos": 30217257117720.0, - "grad_norm": 2.7984399093711274, - "learning_rate": 1.521717757378074e-06, - "loss": 0.9158, - "num_input_tokens_seen": 104392660, - "step": 4903 - }, - { - "epoch": 0.5896711356941021, - "flos": 9978582693840.0, - "grad_norm": 5.4835889638816555, - "learning_rate": 1.5209614240285943e-06, - "loss": 0.9102, - "num_input_tokens_seen": 104410035, - "step": 4904 - }, - { - "epoch": 0.5897913785847412, - "flos": 12180763371480.0, - "grad_norm": 4.487439746505308, - "learning_rate": 1.520205163348887e-06, - "loss": 1.0658, - "num_input_tokens_seen": 104427690, - "step": 4905 - }, - { - "epoch": 0.5899116214753802, - "flos": 34890324994440.0, - "grad_norm": 0.7425604435372338, - "learning_rate": 1.519448975453674e-06, - "loss": 0.8096, - "num_input_tokens_seen": 104482510, - "step": 4906 - }, - { - "epoch": 0.5900318643660194, - "flos": 14985050062440.0, - "grad_norm": 9.281331790859097, - "learning_rate": 1.5186928604576696e-06, - "loss": 0.9889, - "num_input_tokens_seen": 104499425, - "step": 4907 - }, - { - "epoch": 0.5901521072566585, - "flos": 15038994842640.0, - "grad_norm": 4.998372194436526, - "learning_rate": 1.5179368184755752e-06, - "loss": 1.0023, - "num_input_tokens_seen": 104517230, - "step": 4908 - }, - { - "epoch": 0.5902723501472975, - "flos": 14353993988400.0, - "grad_norm": 4.166075143151966, - "learning_rate": 1.5171808496220821e-06, - "loss": 1.0524, - "num_input_tokens_seen": 104535705, - "step": 4909 - }, - { - "epoch": 0.5903925930379367, - "flos": 16322330338320.0, - "grad_norm": 2.8911327019303177, - "learning_rate": 1.5164249540118708e-06, - "loss": 1.0367, - "num_input_tokens_seen": 104554550, - "step": 4910 - }, - { - "epoch": 0.5905128359285757, - "flos": 16612449724800.0, - "grad_norm": 5.510304940815116, - "learning_rate": 1.5156691317596093e-06, - "loss": 1.0578, - "num_input_tokens_seen": 104575695, - "step": 4911 - }, - { - "epoch": 0.5906330788192148, - "flos": 19964069786760.0, - "grad_norm": 7.926634312535063, - "learning_rate": 1.5149133829799556e-06, - "loss": 0.9072, - "num_input_tokens_seen": 104593410, - "step": 4912 - }, - { - "epoch": 0.590753321709854, - "flos": 13098167460120.0, - "grad_norm": 3.439473227377725, - "learning_rate": 1.5141577077875556e-06, - "loss": 1.0238, - "num_input_tokens_seen": 104610455, - "step": 4913 - }, - { - "epoch": 0.590873564600493, - "flos": 11945385965760.0, - "grad_norm": 5.2685068094731395, - "learning_rate": 1.5134021062970451e-06, - "loss": 0.9379, - "num_input_tokens_seen": 104628555, - 
"step": 4914 - }, - { - "epoch": 0.5909938074911321, - "flos": 9532270693800.0, - "grad_norm": 5.250774144816462, - "learning_rate": 1.5126465786230483e-06, - "loss": 1.0342, - "num_input_tokens_seen": 104645050, - "step": 4915 - }, - { - "epoch": 0.5911140503817712, - "flos": 18520555674720.0, - "grad_norm": 104.78298816780804, - "learning_rate": 1.5118911248801787e-06, - "loss": 1.0345, - "num_input_tokens_seen": 104662780, - "step": 4916 - }, - { - "epoch": 0.5912342932724103, - "flos": 16534184779440.0, - "grad_norm": 4.638525079914109, - "learning_rate": 1.5111357451830364e-06, - "loss": 1.0124, - "num_input_tokens_seen": 104681195, - "step": 4917 - }, - { - "epoch": 0.5913545361630493, - "flos": 13885262839920.0, - "grad_norm": 2.585050401915664, - "learning_rate": 1.5103804396462131e-06, - "loss": 0.9336, - "num_input_tokens_seen": 104700850, - "step": 4918 - }, - { - "epoch": 0.5914747790536885, - "flos": 18657671249880.0, - "grad_norm": 5.279408511930086, - "learning_rate": 1.5096252083842877e-06, - "loss": 1.0274, - "num_input_tokens_seen": 104719780, - "step": 4919 - }, - { - "epoch": 0.5915950219443276, - "flos": 19522970251920.0, - "grad_norm": 4.65242963914998, - "learning_rate": 1.5088700515118285e-06, - "loss": 1.0754, - "num_input_tokens_seen": 104738820, - "step": 4920 - }, - { - "epoch": 0.5917152648349666, - "flos": 15563234510880.0, - "grad_norm": 3.400382491643777, - "learning_rate": 1.508114969143392e-06, - "loss": 0.8881, - "num_input_tokens_seen": 104758525, - "step": 4921 - }, - { - "epoch": 0.5918355077256057, - "flos": 20019823599000.0, - "grad_norm": 4.051172515307056, - "learning_rate": 1.5073599613935238e-06, - "loss": 1.0093, - "num_input_tokens_seen": 104780365, - "step": 4922 - }, - { - "epoch": 0.5919557506162448, - "flos": 20073676394520.0, - "grad_norm": 14.541569546408905, - "learning_rate": 1.5066050283767574e-06, - "loss": 0.8065, - "num_input_tokens_seen": 104800765, - "step": 4923 - }, - { - "epoch": 0.5920759935068839, - "flos": 8510595108840.0, - "grad_norm": 4.207525849379105, - "learning_rate": 1.505850170207616e-06, - "loss": 1.0486, - "num_input_tokens_seen": 104817350, - "step": 4924 - }, - { - "epoch": 0.592196236397523, - "flos": 20965595178720.0, - "grad_norm": 25.59753911588109, - "learning_rate": 1.505095387000611e-06, - "loss": 0.9887, - "num_input_tokens_seen": 104839370, - "step": 4925 - }, - { - "epoch": 0.5923164792881621, - "flos": 17342718137760.0, - "grad_norm": 3.9988396869032816, - "learning_rate": 1.504340678870242e-06, - "loss": 0.9776, - "num_input_tokens_seen": 104857305, - "step": 4926 - }, - { - "epoch": 0.5924367221788012, - "flos": 17084247029280.0, - "grad_norm": 6.023204121107094, - "learning_rate": 1.5035860459309989e-06, - "loss": 1.1238, - "num_input_tokens_seen": 104874740, - "step": 4927 - }, - { - "epoch": 0.5925569650694402, - "flos": 19129928477760.0, - "grad_norm": 3.378030193738228, - "learning_rate": 1.5028314882973568e-06, - "loss": 0.8582, - "num_input_tokens_seen": 104894865, - "step": 4928 - }, - { - "epoch": 0.5926772079600794, - "flos": 15846179092320.0, - "grad_norm": 7.184586705775949, - "learning_rate": 1.502077006083783e-06, - "loss": 1.0667, - "num_input_tokens_seen": 104913245, - "step": 4929 - }, - { - "epoch": 0.5927974508507184, - "flos": 14095308249000.0, - "grad_norm": 4.204051316301071, - "learning_rate": 1.5013225994047315e-06, - "loss": 1.0033, - "num_input_tokens_seen": 104930595, - "step": 4930 - }, - { - "epoch": 0.5929176937413575, - "flos": 11156696184840.0, - "grad_norm": 
4.678176151829252, - "learning_rate": 1.5005682683746452e-06, - "loss": 1.0297, - "num_input_tokens_seen": 104948830, - "step": 4931 - }, - { - "epoch": 0.5930379366319967, - "flos": 12468153879120.0, - "grad_norm": 6.298455960420106, - "learning_rate": 1.4998140131079553e-06, - "loss": 0.9534, - "num_input_tokens_seen": 104964640, - "step": 4932 - }, - { - "epoch": 0.5931581795226357, - "flos": 12541145036160.0, - "grad_norm": 3.5883714315326456, - "learning_rate": 1.4990598337190821e-06, - "loss": 0.9725, - "num_input_tokens_seen": 104980715, - "step": 4933 - }, - { - "epoch": 0.5932784224132748, - "flos": 17268193902720.0, - "grad_norm": 4.34715938556556, - "learning_rate": 1.4983057303224338e-06, - "loss": 0.9019, - "num_input_tokens_seen": 105000250, - "step": 4934 - }, - { - "epoch": 0.5933986653039139, - "flos": 16295066663400.0, - "grad_norm": 3.13538253275561, - "learning_rate": 1.4975517030324072e-06, - "loss": 1.0911, - "num_input_tokens_seen": 105017980, - "step": 4935 - }, - { - "epoch": 0.593518908194553, - "flos": 50936492504160.0, - "grad_norm": 0.8211623376425475, - "learning_rate": 1.4967977519633882e-06, - "loss": 0.8798, - "num_input_tokens_seen": 105075160, - "step": 4936 - }, - { - "epoch": 0.593639151085192, - "flos": 14514540543480.0, - "grad_norm": 5.4304135502652375, - "learning_rate": 1.4960438772297494e-06, - "loss": 1.0045, - "num_input_tokens_seen": 105091925, - "step": 4937 - }, - { - "epoch": 0.5937593939758312, - "flos": 22013798561160.0, - "grad_norm": 4.452395516593471, - "learning_rate": 1.495290078945855e-06, - "loss": 0.9622, - "num_input_tokens_seen": 105111410, - "step": 4938 - }, - { - "epoch": 0.5938796368664703, - "flos": 26335663675800.0, - "grad_norm": 4.836537165276272, - "learning_rate": 1.4945363572260529e-06, - "loss": 0.9724, - "num_input_tokens_seen": 105132125, - "step": 4939 - }, - { - "epoch": 0.5939998797571093, - "flos": 16954091628240.0, - "grad_norm": 4.77403522751096, - "learning_rate": 1.4937827121846845e-06, - "loss": 0.8854, - "num_input_tokens_seen": 105152100, - "step": 4940 - }, - { - "epoch": 0.5941201226477485, - "flos": 17923018233840.0, - "grad_norm": 4.430107729185319, - "learning_rate": 1.4930291439360755e-06, - "loss": 0.964, - "num_input_tokens_seen": 105174385, - "step": 4941 - }, - { - "epoch": 0.5942403655383875, - "flos": 15930607011240.0, - "grad_norm": 4.142517348387265, - "learning_rate": 1.4922756525945427e-06, - "loss": 1.0164, - "num_input_tokens_seen": 105193415, - "step": 4942 - }, - { - "epoch": 0.5943606084290266, - "flos": 48427936265160.0, - "grad_norm": 0.7720248876053555, - "learning_rate": 1.4915222382743894e-06, - "loss": 0.8461, - "num_input_tokens_seen": 105251970, - "step": 4943 - }, - { - "epoch": 0.5944808513196658, - "flos": 12915140433480.0, - "grad_norm": 5.245171642713271, - "learning_rate": 1.4907689010899085e-06, - "loss": 0.9491, - "num_input_tokens_seen": 105269270, - "step": 4944 - }, - { - "epoch": 0.5946010942103048, - "flos": 17635229125920.0, - "grad_norm": 8.136210058960815, - "learning_rate": 1.4900156411553804e-06, - "loss": 0.8449, - "num_input_tokens_seen": 105288820, - "step": 4945 - }, - { - "epoch": 0.5947213371009439, - "flos": 10765218150240.0, - "grad_norm": 3.9057351955691666, - "learning_rate": 1.4892624585850739e-06, - "loss": 1.0745, - "num_input_tokens_seen": 105306895, - "step": 4946 - }, - { - "epoch": 0.594841579991583, - "flos": 18394938184560.0, - "grad_norm": 4.072668407838414, - "learning_rate": 1.4885093534932465e-06, - "loss": 1.0047, - 
"num_input_tokens_seen": 105324580, - "step": 4947 - }, - { - "epoch": 0.5949618228822221, - "flos": 17052353458800.0, - "grad_norm": 3.5620174415756805, - "learning_rate": 1.4877563259941433e-06, - "loss": 0.9458, - "num_input_tokens_seen": 105342155, - "step": 4948 - }, - { - "epoch": 0.5950820657728612, - "flos": 28958548402800.0, - "grad_norm": 3.636433689569727, - "learning_rate": 1.4870033762019988e-06, - "loss": 0.8982, - "num_input_tokens_seen": 105362040, - "step": 4949 - }, - { - "epoch": 0.5952023086635003, - "flos": 16480638599520.0, - "grad_norm": 3.471086144980978, - "learning_rate": 1.4862505042310334e-06, - "loss": 0.9561, - "num_input_tokens_seen": 105381045, - "step": 4950 - }, - { - "epoch": 0.5953225515541394, - "flos": 24004032813000.0, - "grad_norm": 4.63502837994394, - "learning_rate": 1.4854977101954587e-06, - "loss": 0.9281, - "num_input_tokens_seen": 105402985, - "step": 4951 - }, - { - "epoch": 0.5954427944447784, - "flos": 17396816225760.0, - "grad_norm": 4.8670217323546945, - "learning_rate": 1.4847449942094716e-06, - "loss": 1.0691, - "num_input_tokens_seen": 105421585, - "step": 4952 - }, - { - "epoch": 0.5955630373354175, - "flos": 13151407024440.0, - "grad_norm": 5.063966515139597, - "learning_rate": 1.4839923563872598e-06, - "loss": 1.0814, - "num_input_tokens_seen": 105439845, - "step": 4953 - }, - { - "epoch": 0.5956832802260567, - "flos": 14042375300280.0, - "grad_norm": 3.8831804128070333, - "learning_rate": 1.483239796842997e-06, - "loss": 0.9886, - "num_input_tokens_seen": 105457595, - "step": 4954 - }, - { - "epoch": 0.5958035231166957, - "flos": 14012444069640.0, - "grad_norm": 2.7659392097561817, - "learning_rate": 1.4824873156908462e-06, - "loss": 1.0589, - "num_input_tokens_seen": 105475240, - "step": 4955 - }, - { - "epoch": 0.5959237660073348, - "flos": 15379962191760.0, - "grad_norm": 4.548729167826584, - "learning_rate": 1.4817349130449584e-06, - "loss": 0.9828, - "num_input_tokens_seen": 105494680, - "step": 4956 - }, - { - "epoch": 0.5960440088979739, - "flos": 15032862530640.0, - "grad_norm": 3.873484899824064, - "learning_rate": 1.4809825890194717e-06, - "loss": 1.0496, - "num_input_tokens_seen": 105513070, - "step": 4957 - }, - { - "epoch": 0.596164251788613, - "flos": 10424526755160.0, - "grad_norm": 2.517660987502476, - "learning_rate": 1.4802303437285139e-06, - "loss": 0.9977, - "num_input_tokens_seen": 105530060, - "step": 4958 - }, - { - "epoch": 0.596284494679252, - "flos": 14541436279680.0, - "grad_norm": 9.834387210902612, - "learning_rate": 1.4794781772861994e-06, - "loss": 1.011, - "num_input_tokens_seen": 105546275, - "step": 4959 - }, - { - "epoch": 0.5964047375698912, - "flos": 22250187798360.0, - "grad_norm": 4.694554221671101, - "learning_rate": 1.4787260898066324e-06, - "loss": 0.8924, - "num_input_tokens_seen": 105565995, - "step": 4960 - }, - { - "epoch": 0.5965249804605303, - "flos": 19569525596160.0, - "grad_norm": 4.640317347953874, - "learning_rate": 1.4779740814039023e-06, - "loss": 1.0779, - "num_input_tokens_seen": 105585800, - "step": 4961 - }, - { - "epoch": 0.5966452233511693, - "flos": 21935104353960.0, - "grad_norm": 9.420000639508462, - "learning_rate": 1.4772221521920894e-06, - "loss": 0.9031, - "num_input_tokens_seen": 105605545, - "step": 4962 - }, - { - "epoch": 0.5967654662418085, - "flos": 18130978656840.0, - "grad_norm": 4.6572354701175565, - "learning_rate": 1.4764703022852598e-06, - "loss": 0.9668, - "num_input_tokens_seen": 105625785, - "step": 4963 - }, - { - "epoch": 0.5968857091324475, - 
"flos": 13544724752640.0, - "grad_norm": 5.328890837728317, - "learning_rate": 1.4757185317974696e-06, - "loss": 0.9846, - "num_input_tokens_seen": 105643890, - "step": 4964 - }, - { - "epoch": 0.5970059520230866, - "flos": 16845558175080.0, - "grad_norm": 4.4662805950271, - "learning_rate": 1.474966840842761e-06, - "loss": 0.9387, - "num_input_tokens_seen": 105663190, - "step": 4965 - }, - { - "epoch": 0.5971261949137258, - "flos": 16481558446320.0, - "grad_norm": 4.384714295356574, - "learning_rate": 1.4742152295351655e-06, - "loss": 1.0961, - "num_input_tokens_seen": 105682065, - "step": 4966 - }, - { - "epoch": 0.5972464378043648, - "flos": 14592866811960.0, - "grad_norm": 5.089562502722234, - "learning_rate": 1.4734636979887016e-06, - "loss": 0.8569, - "num_input_tokens_seen": 105699245, - "step": 4967 - }, - { - "epoch": 0.5973666806950039, - "flos": 20938300842240.0, - "grad_norm": 12.018107421963716, - "learning_rate": 1.4727122463173755e-06, - "loss": 1.1307, - "num_input_tokens_seen": 105717495, - "step": 4968 - }, - { - "epoch": 0.597486923585643, - "flos": 15826120884000.0, - "grad_norm": 3.0950470334424955, - "learning_rate": 1.471960874635183e-06, - "loss": 0.8679, - "num_input_tokens_seen": 105736775, - "step": 4969 - }, - { - "epoch": 0.5976071664762821, - "flos": 9716585505960.0, - "grad_norm": 6.191111117768651, - "learning_rate": 1.4712095830561055e-06, - "loss": 0.9116, - "num_input_tokens_seen": 105754985, - "step": 4970 - }, - { - "epoch": 0.5977274093669211, - "flos": 13544295490800.0, - "grad_norm": 7.948901060912249, - "learning_rate": 1.4704583716941147e-06, - "loss": 1.0164, - "num_input_tokens_seen": 105773570, - "step": 4971 - }, - { - "epoch": 0.5978476522575603, - "flos": 14462374133760.0, - "grad_norm": 5.053717070497487, - "learning_rate": 1.4697072406631672e-06, - "loss": 0.9408, - "num_input_tokens_seen": 105793195, - "step": 4972 - }, - { - "epoch": 0.5979678951481994, - "flos": 20676916885560.0, - "grad_norm": 3.824230470727059, - "learning_rate": 1.4689561900772097e-06, - "loss": 0.965, - "num_input_tokens_seen": 105812975, - "step": 4973 - }, - { - "epoch": 0.5980881380388384, - "flos": 12731530837200.0, - "grad_norm": 8.83718207622429, - "learning_rate": 1.4682052200501758e-06, - "loss": 0.9517, - "num_input_tokens_seen": 105829900, - "step": 4974 - }, - { - "epoch": 0.5982083809294776, - "flos": 16320981229680.0, - "grad_norm": 3.740997571245398, - "learning_rate": 1.4674543306959876e-06, - "loss": 1.006, - "num_input_tokens_seen": 105849090, - "step": 4975 - }, - { - "epoch": 0.5983286238201166, - "flos": 14904454838520.0, - "grad_norm": 4.922922755541798, - "learning_rate": 1.4667035221285535e-06, - "loss": 1.0689, - "num_input_tokens_seen": 105866450, - "step": 4976 - }, - { - "epoch": 0.5984488667107557, - "flos": 20073369778920.0, - "grad_norm": 4.86608776324769, - "learning_rate": 1.4659527944617715e-06, - "loss": 0.9728, - "num_input_tokens_seen": 105886115, - "step": 4977 - }, - { - "epoch": 0.5985691096013949, - "flos": 11656462380120.0, - "grad_norm": 4.023579478305021, - "learning_rate": 1.465202147809526e-06, - "loss": 0.981, - "num_input_tokens_seen": 105904330, - "step": 4978 - }, - { - "epoch": 0.5986893524920339, - "flos": 19020260546880.0, - "grad_norm": 4.600136190969118, - "learning_rate": 1.4644515822856888e-06, - "loss": 0.9901, - "num_input_tokens_seen": 105922485, - "step": 4979 - }, - { - "epoch": 0.598809595382673, - "flos": 44100429423480.0, - "grad_norm": 0.7596901118156324, - "learning_rate": 1.4637010980041215e-06, - 
"loss": 0.8176, - "num_input_tokens_seen": 105984315, - "step": 4980 - }, - { - "epoch": 0.5989298382733121, - "flos": 8300181761040.0, - "grad_norm": 5.074439016822374, - "learning_rate": 1.4629506950786707e-06, - "loss": 1.1219, - "num_input_tokens_seen": 106000215, - "step": 4981 - }, - { - "epoch": 0.5990500811639512, - "flos": 42961481840760.0, - "grad_norm": 1.0471550296495262, - "learning_rate": 1.4622003736231733e-06, - "loss": 0.8142, - "num_input_tokens_seen": 106058925, - "step": 4982 - }, - { - "epoch": 0.5991703240545903, - "flos": 12914772494760.0, - "grad_norm": 4.744959314973821, - "learning_rate": 1.461450133751451e-06, - "loss": 1.0159, - "num_input_tokens_seen": 106076715, - "step": 4983 - }, - { - "epoch": 0.5992905669452293, - "flos": 19733138307240.0, - "grad_norm": 4.693225263377279, - "learning_rate": 1.4606999755773153e-06, - "loss": 0.986, - "num_input_tokens_seen": 106097640, - "step": 4984 - }, - { - "epoch": 0.5994108098358685, - "flos": 14514847159080.0, - "grad_norm": 4.747300635837841, - "learning_rate": 1.4599498992145643e-06, - "loss": 1.0322, - "num_input_tokens_seen": 106117385, - "step": 4985 - }, - { - "epoch": 0.5995310527265075, - "flos": 15823085389560.0, - "grad_norm": 5.215480671251898, - "learning_rate": 1.4591999047769846e-06, - "loss": 0.9403, - "num_input_tokens_seen": 106135960, - "step": 4986 - }, - { - "epoch": 0.5996512956171466, - "flos": 13413833474160.0, - "grad_norm": 4.0903725705751155, - "learning_rate": 1.4584499923783486e-06, - "loss": 0.9796, - "num_input_tokens_seen": 106154260, - "step": 4987 - }, - { - "epoch": 0.5997715385077858, - "flos": 10864430489160.0, - "grad_norm": 4.368236217930487, - "learning_rate": 1.457700162132419e-06, - "loss": 0.9888, - "num_input_tokens_seen": 106170970, - "step": 4988 - }, - { - "epoch": 0.5998917813984248, - "flos": 17975859197880.0, - "grad_norm": 9.376696216994597, - "learning_rate": 1.4569504141529433e-06, - "loss": 0.9431, - "num_input_tokens_seen": 106188525, - "step": 4989 - }, - { - "epoch": 0.6000120242890639, - "flos": 15668701808040.0, - "grad_norm": 4.737948337567949, - "learning_rate": 1.456200748553658e-06, - "loss": 0.9472, - "num_input_tokens_seen": 106206240, - "step": 4990 - }, - { - "epoch": 0.600132267179703, - "flos": 21280954577160.0, - "grad_norm": 6.007833770992091, - "learning_rate": 1.455451165448287e-06, - "loss": 1.0059, - "num_input_tokens_seen": 106228615, - "step": 4991 - }, - { - "epoch": 0.6002525100703421, - "flos": 17818041521640.0, - "grad_norm": 14.669964910775978, - "learning_rate": 1.4547016649505407e-06, - "loss": 0.9374, - "num_input_tokens_seen": 106246345, - "step": 4992 - }, - { - "epoch": 0.6003727529609811, - "flos": 14798495333880.0, - "grad_norm": 5.951224436714059, - "learning_rate": 1.4539522471741193e-06, - "loss": 1.0621, - "num_input_tokens_seen": 106263490, - "step": 4993 - }, - { - "epoch": 0.6004929958516203, - "flos": 11025835567920.0, - "grad_norm": 3.5139191282342437, - "learning_rate": 1.4532029122327067e-06, - "loss": 0.9229, - "num_input_tokens_seen": 106279995, - "step": 4994 - }, - { - "epoch": 0.6006132387422594, - "flos": 15459484261080.0, - "grad_norm": 5.599799949614987, - "learning_rate": 1.4524536602399783e-06, - "loss": 0.9819, - "num_input_tokens_seen": 106298805, - "step": 4995 - }, - { - "epoch": 0.6007334816328984, - "flos": 16246732948680.0, - "grad_norm": 15.324432332662747, - "learning_rate": 1.4517044913095938e-06, - "loss": 1.0012, - "num_input_tokens_seen": 106318945, - "step": 4996 - }, - { - "epoch": 
0.6008537245235376, - "flos": 20174145857400.0, - "grad_norm": 5.648143037794867, - "learning_rate": 1.4509554055552022e-06, - "loss": 1.0426, - "num_input_tokens_seen": 106338895, - "step": 4997 - }, - { - "epoch": 0.6009739674141766, - "flos": 14828888110440.0, - "grad_norm": 6.722318615322988, - "learning_rate": 1.450206403090439e-06, - "loss": 1.0593, - "num_input_tokens_seen": 106356810, - "step": 4998 - }, - { - "epoch": 0.6010942103048157, - "flos": 14537235645960.0, - "grad_norm": 3.9843163579477423, - "learning_rate": 1.4494574840289274e-06, - "loss": 1.0864, - "num_input_tokens_seen": 106373645, - "step": 4999 - }, - { - "epoch": 0.6012144531954549, - "flos": 16927471846080.0, - "grad_norm": 3.361345857421632, - "learning_rate": 1.4487086484842782e-06, - "loss": 0.973, - "num_input_tokens_seen": 106392010, - "step": 5000 - }, - { - "epoch": 0.6013346960860939, - "flos": 13464957390840.0, - "grad_norm": 7.430589815656959, - "learning_rate": 1.4479598965700878e-06, - "loss": 0.8217, - "num_input_tokens_seen": 106408995, - "step": 5001 - }, - { - "epoch": 0.601454938976733, - "flos": 17085166876080.0, - "grad_norm": 6.13325598018353, - "learning_rate": 1.4472112283999427e-06, - "loss": 0.9066, - "num_input_tokens_seen": 106427370, - "step": 5002 - }, - { - "epoch": 0.6015751818673721, - "flos": 18810460430280.0, - "grad_norm": 4.8253108344486835, - "learning_rate": 1.4464626440874143e-06, - "loss": 0.9229, - "num_input_tokens_seen": 106446205, - "step": 5003 - }, - { - "epoch": 0.6016954247580112, - "flos": 9244205631840.0, - "grad_norm": 4.828258524903237, - "learning_rate": 1.4457141437460636e-06, - "loss": 0.9559, - "num_input_tokens_seen": 106463150, - "step": 5004 - }, - { - "epoch": 0.6018156676486502, - "flos": 16900361478960.0, - "grad_norm": 3.0431237236391637, - "learning_rate": 1.444965727489436e-06, - "loss": 0.9588, - "num_input_tokens_seen": 106482315, - "step": 5005 - }, - { - "epoch": 0.6019359105392894, - "flos": 18841587461760.0, - "grad_norm": 10.094895052843773, - "learning_rate": 1.444217395431066e-06, - "loss": 0.8611, - "num_input_tokens_seen": 106504575, - "step": 5006 - }, - { - "epoch": 0.6020561534299285, - "flos": 49549192127760.0, - "grad_norm": 0.801656217097213, - "learning_rate": 1.4434691476844755e-06, - "loss": 0.8146, - "num_input_tokens_seen": 106565270, - "step": 5007 - }, - { - "epoch": 0.6021763963205675, - "flos": 15511129424280.0, - "grad_norm": 12.150689979082399, - "learning_rate": 1.4427209843631729e-06, - "loss": 0.8993, - "num_input_tokens_seen": 106582040, - "step": 5008 - }, - { - "epoch": 0.6022966392112067, - "flos": 18920097699600.0, - "grad_norm": 3.7837354830011924, - "learning_rate": 1.4419729055806534e-06, - "loss": 1.0393, - "num_input_tokens_seen": 106601195, - "step": 5009 - }, - { - "epoch": 0.6024168821018457, - "flos": 14697812862600.0, - "grad_norm": 3.95103429459557, - "learning_rate": 1.441224911450401e-06, - "loss": 1.0618, - "num_input_tokens_seen": 106616870, - "step": 5010 - }, - { - "epoch": 0.6025371249924848, - "flos": 17556228303120.0, - "grad_norm": 6.312163995543892, - "learning_rate": 1.4404770020858851e-06, - "loss": 1.0587, - "num_input_tokens_seen": 106636075, - "step": 5011 - }, - { - "epoch": 0.602657367883124, - "flos": 18472129975320.0, - "grad_norm": 3.613180821028305, - "learning_rate": 1.439729177600563e-06, - "loss": 1.092, - "num_input_tokens_seen": 106656290, - "step": 5012 - }, - { - "epoch": 0.602777610773763, - "flos": 11813728148280.0, - "grad_norm": 4.660035303661558, - "learning_rate": 
1.4389814381078793e-06, - "loss": 0.946, - "num_input_tokens_seen": 106675250, - "step": 5013 - }, - { - "epoch": 0.6028978536644021, - "flos": 9401594046240.0, - "grad_norm": 4.121874694592838, - "learning_rate": 1.438233783721265e-06, - "loss": 1.0143, - "num_input_tokens_seen": 106691135, - "step": 5014 - }, - { - "epoch": 0.6030180965550412, - "flos": 13936693372200.0, - "grad_norm": 4.085973910867882, - "learning_rate": 1.43748621455414e-06, - "loss": 1.0043, - "num_input_tokens_seen": 106707290, - "step": 5015 - }, - { - "epoch": 0.6031383394456803, - "flos": 10208563665000.0, - "grad_norm": 6.02882391801856, - "learning_rate": 1.4367387307199082e-06, - "loss": 1.0281, - "num_input_tokens_seen": 106723860, - "step": 5016 - }, - { - "epoch": 0.6032585823363193, - "flos": 9820948986960.0, - "grad_norm": 3.3592885560734893, - "learning_rate": 1.4359913323319632e-06, - "loss": 1.0485, - "num_input_tokens_seen": 106740750, - "step": 5017 - }, - { - "epoch": 0.6033788252269584, - "flos": 17320789574280.0, - "grad_norm": 6.51180222886278, - "learning_rate": 1.4352440195036847e-06, - "loss": 1.0087, - "num_input_tokens_seen": 106760645, - "step": 5018 - }, - { - "epoch": 0.6034990681175976, - "flos": 18160265994720.0, - "grad_norm": 6.443538463239441, - "learning_rate": 1.4344967923484395e-06, - "loss": 1.0181, - "num_input_tokens_seen": 106782335, - "step": 5019 - }, - { - "epoch": 0.6036193110082366, - "flos": 18474337607640.0, - "grad_norm": 10.165146344018492, - "learning_rate": 1.433749650979581e-06, - "loss": 0.9438, - "num_input_tokens_seen": 106802040, - "step": 5020 - }, - { - "epoch": 0.6037395538988757, - "flos": 18212033804160.0, - "grad_norm": 5.079204362453474, - "learning_rate": 1.433002595510451e-06, - "loss": 0.9064, - "num_input_tokens_seen": 106820540, - "step": 5021 - }, - { - "epoch": 0.6038597967895148, - "flos": 12621954891000.0, - "grad_norm": 3.484559605929827, - "learning_rate": 1.4322556260543757e-06, - "loss": 0.9444, - "num_input_tokens_seen": 106836835, - "step": 5022 - }, - { - "epoch": 0.6039800396801539, - "flos": 46657809962160.0, - "grad_norm": 0.9194134645694692, - "learning_rate": 1.4315087427246703e-06, - "loss": 0.8857, - "num_input_tokens_seen": 106890380, - "step": 5023 - }, - { - "epoch": 0.604100282570793, - "flos": 48250127251800.0, - "grad_norm": 0.9134965106122835, - "learning_rate": 1.4307619456346372e-06, - "loss": 0.8468, - "num_input_tokens_seen": 106934405, - "step": 5024 - }, - { - "epoch": 0.6042205254614321, - "flos": 25186009660560.0, - "grad_norm": 4.27016891645673, - "learning_rate": 1.430015234897564e-06, - "loss": 0.9572, - "num_input_tokens_seen": 106957405, - "step": 5025 - }, - { - "epoch": 0.6043407683520712, - "flos": 32631936129240.0, - "grad_norm": 9.418203267896173, - "learning_rate": 1.4292686106267274e-06, - "loss": 0.884, - "num_input_tokens_seen": 106979975, - "step": 5026 - }, - { - "epoch": 0.6044610112427102, - "flos": 11446784909760.0, - "grad_norm": 3.6933904895651968, - "learning_rate": 1.4285220729353876e-06, - "loss": 0.9937, - "num_input_tokens_seen": 106998235, - "step": 5027 - }, - { - "epoch": 0.6045812541333494, - "flos": 9738636715680.0, - "grad_norm": 6.56239013579912, - "learning_rate": 1.4277756219367957e-06, - "loss": 1.0005, - "num_input_tokens_seen": 107014980, - "step": 5028 - }, - { - "epoch": 0.6047014970239885, - "flos": 14174278410240.0, - "grad_norm": 4.35767355909736, - "learning_rate": 1.4270292577441864e-06, - "loss": 1.0226, - "num_input_tokens_seen": 107034205, - "step": 5029 - }, - { - 
"epoch": 0.6048217399146275, - "flos": 18027688330440.0, - "grad_norm": 5.385225726738823, - "learning_rate": 1.4262829804707836e-06, - "loss": 0.9424, - "num_input_tokens_seen": 107055915, - "step": 5030 - }, - { - "epoch": 0.6049419828052667, - "flos": 18526320048000.0, - "grad_norm": 3.150951508212161, - "learning_rate": 1.4255367902297958e-06, - "loss": 0.9158, - "num_input_tokens_seen": 107076965, - "step": 5031 - }, - { - "epoch": 0.6050622256959057, - "flos": 10345893871080.0, - "grad_norm": 7.005083418137742, - "learning_rate": 1.4247906871344215e-06, - "loss": 1.0246, - "num_input_tokens_seen": 107092080, - "step": 5032 - }, - { - "epoch": 0.6051824685865448, - "flos": 16586412512280.0, - "grad_norm": 9.20574468299317, - "learning_rate": 1.4240446712978415e-06, - "loss": 0.9779, - "num_input_tokens_seen": 107110785, - "step": 5033 - }, - { - "epoch": 0.605302711477184, - "flos": 19627701671640.0, - "grad_norm": 4.241139094451094, - "learning_rate": 1.423298742833227e-06, - "loss": 0.9603, - "num_input_tokens_seen": 107129165, - "step": 5034 - }, - { - "epoch": 0.605422954367823, - "flos": 10709403014880.0, - "grad_norm": 3.5571698921476274, - "learning_rate": 1.4225529018537352e-06, - "loss": 0.9569, - "num_input_tokens_seen": 107144390, - "step": 5035 - }, - { - "epoch": 0.6055431972584621, - "flos": 19707346387200.0, - "grad_norm": 4.732095805548597, - "learning_rate": 1.4218071484725082e-06, - "loss": 1.0089, - "num_input_tokens_seen": 107166230, - "step": 5036 - }, - { - "epoch": 0.6056634401491012, - "flos": 13751489374800.0, - "grad_norm": 5.794988690316698, - "learning_rate": 1.4210614828026786e-06, - "loss": 0.9799, - "num_input_tokens_seen": 107183800, - "step": 5037 - }, - { - "epoch": 0.6057836830397403, - "flos": 17634063986640.0, - "grad_norm": 2.5953837385511402, - "learning_rate": 1.4203159049573605e-06, - "loss": 0.9691, - "num_input_tokens_seen": 107204755, - "step": 5038 - }, - { - "epoch": 0.6059039259303793, - "flos": 14593326735360.0, - "grad_norm": 3.8683301066119866, - "learning_rate": 1.4195704150496593e-06, - "loss": 1.0907, - "num_input_tokens_seen": 107222190, - "step": 5039 - }, - { - "epoch": 0.6060241688210185, - "flos": 14961005851320.0, - "grad_norm": 5.145070666837147, - "learning_rate": 1.4188250131926639e-06, - "loss": 0.957, - "num_input_tokens_seen": 107240710, - "step": 5040 - }, - { - "epoch": 0.6061444117116576, - "flos": 11576725679880.0, - "grad_norm": 4.015463977954721, - "learning_rate": 1.4180796994994525e-06, - "loss": 1.0353, - "num_input_tokens_seen": 107257845, - "step": 5041 - }, - { - "epoch": 0.6062646546022966, - "flos": 15275445402960.0, - "grad_norm": 5.93283283108595, - "learning_rate": 1.4173344740830877e-06, - "loss": 0.9416, - "num_input_tokens_seen": 107276695, - "step": 5042 - }, - { - "epoch": 0.6063848974929358, - "flos": 27834900938520.0, - "grad_norm": 4.1338657702766595, - "learning_rate": 1.4165893370566206e-06, - "loss": 0.9284, - "num_input_tokens_seen": 107300170, - "step": 5043 - }, - { - "epoch": 0.6065051403835748, - "flos": 13938195788640.0, - "grad_norm": 5.665882318106065, - "learning_rate": 1.4158442885330865e-06, - "loss": 0.9919, - "num_input_tokens_seen": 107318460, - "step": 5044 - }, - { - "epoch": 0.6066253832742139, - "flos": 16716843867360.0, - "grad_norm": 2.9086167737534723, - "learning_rate": 1.4150993286255094e-06, - "loss": 1.0175, - "num_input_tokens_seen": 107337430, - "step": 5045 - }, - { - "epoch": 0.6067456261648531, - "flos": 13569320871840.0, - "grad_norm": 4.1678351226236146, - 
"learning_rate": 1.4143544574468993e-06, - "loss": 1.0122, - "num_input_tokens_seen": 107355510, - "step": 5046 - }, - { - "epoch": 0.6068658690554921, - "flos": 14566124383560.0, - "grad_norm": 3.6163416093812524, - "learning_rate": 1.4136096751102523e-06, - "loss": 1.046, - "num_input_tokens_seen": 107373560, - "step": 5047 - }, - { - "epoch": 0.6069861119461312, - "flos": 19491199327680.0, - "grad_norm": 4.282239145327865, - "learning_rate": 1.4128649817285516e-06, - "loss": 1.0465, - "num_input_tokens_seen": 107391415, - "step": 5048 - }, - { - "epoch": 0.6071063548367702, - "flos": 18235342137840.0, - "grad_norm": 4.38070655314701, - "learning_rate": 1.412120377414766e-06, - "loss": 0.8565, - "num_input_tokens_seen": 107411325, - "step": 5049 - }, - { - "epoch": 0.6072265977274094, - "flos": 17397582764760.0, - "grad_norm": 5.7040402866775946, - "learning_rate": 1.4113758622818522e-06, - "loss": 0.9313, - "num_input_tokens_seen": 107431110, - "step": 5050 - }, - { - "epoch": 0.6073468406180484, - "flos": 12861900869160.0, - "grad_norm": 4.876424727958118, - "learning_rate": 1.410631436442751e-06, - "loss": 1.0495, - "num_input_tokens_seen": 107449625, - "step": 5051 - }, - { - "epoch": 0.6074670835086875, - "flos": 14692600397400.0, - "grad_norm": 2.9342784666650306, - "learning_rate": 1.4098871000103936e-06, - "loss": 1.0923, - "num_input_tokens_seen": 107467945, - "step": 5052 - }, - { - "epoch": 0.6075873263993267, - "flos": 16901649264480.0, - "grad_norm": 4.814127262995993, - "learning_rate": 1.409142853097693e-06, - "loss": 1.0524, - "num_input_tokens_seen": 107487905, - "step": 5053 - }, - { - "epoch": 0.6077075692899657, - "flos": 17392431622680.0, - "grad_norm": 2.896843731658968, - "learning_rate": 1.408398695817553e-06, - "loss": 1.0238, - "num_input_tokens_seen": 107504850, - "step": 5054 - }, - { - "epoch": 0.6078278121806048, - "flos": 19497638255280.0, - "grad_norm": 3.339643720024155, - "learning_rate": 1.4076546282828593e-06, - "loss": 0.9162, - "num_input_tokens_seen": 107527425, - "step": 5055 - }, - { - "epoch": 0.6079480550712439, - "flos": 27442073795280.0, - "grad_norm": 3.451796884725177, - "learning_rate": 1.4069106506064874e-06, - "loss": 0.881, - "num_input_tokens_seen": 107548570, - "step": 5056 - }, - { - "epoch": 0.608068297961883, - "flos": 18026983114560.0, - "grad_norm": 3.188592760851668, - "learning_rate": 1.4061667629012989e-06, - "loss": 0.9991, - "num_input_tokens_seen": 107568960, - "step": 5057 - }, - { - "epoch": 0.608188540852522, - "flos": 17211765536160.0, - "grad_norm": 3.144915027774421, - "learning_rate": 1.40542296528014e-06, - "loss": 1.0534, - "num_input_tokens_seen": 107588340, - "step": 5058 - }, - { - "epoch": 0.6083087837431612, - "flos": 15114500247600.0, - "grad_norm": 4.34624491882466, - "learning_rate": 1.4046792578558452e-06, - "loss": 0.9822, - "num_input_tokens_seen": 107605955, - "step": 5059 - }, - { - "epoch": 0.6084290266338003, - "flos": 11656155764520.0, - "grad_norm": 11.897290961666966, - "learning_rate": 1.4039356407412325e-06, - "loss": 0.9882, - "num_input_tokens_seen": 107618915, - "step": 5060 - }, - { - "epoch": 0.6085492695244393, - "flos": 47573313034080.0, - "grad_norm": 0.8448299631327615, - "learning_rate": 1.40319211404911e-06, - "loss": 0.8476, - "num_input_tokens_seen": 107673635, - "step": 5061 - }, - { - "epoch": 0.6086695124150785, - "flos": 16792471918560.0, - "grad_norm": 2.959035136476721, - "learning_rate": 1.4024486778922691e-06, - "loss": 1.1219, - "num_input_tokens_seen": 107691670, - "step": 
5062 - }, - { - "epoch": 0.6087897553057176, - "flos": 14305016380920.0, - "grad_norm": 3.447025902385462, - "learning_rate": 1.4017053323834884e-06, - "loss": 0.9953, - "num_input_tokens_seen": 107711220, - "step": 5063 - }, - { - "epoch": 0.6089099981963566, - "flos": 18132143796120.0, - "grad_norm": 3.5559122847046822, - "learning_rate": 1.4009620776355333e-06, - "loss": 0.9782, - "num_input_tokens_seen": 107732540, - "step": 5064 - }, - { - "epoch": 0.6090302410869958, - "flos": 18024100927920.0, - "grad_norm": 4.970886538855794, - "learning_rate": 1.4002189137611553e-06, - "loss": 1.0144, - "num_input_tokens_seen": 107751600, - "step": 5065 - }, - { - "epoch": 0.6091504839776348, - "flos": 17057688570240.0, - "grad_norm": 3.2176175393759445, - "learning_rate": 1.3994758408730901e-06, - "loss": 0.9163, - "num_input_tokens_seen": 107770505, - "step": 5066 - }, - { - "epoch": 0.6092707268682739, - "flos": 21122370361920.0, - "grad_norm": 4.573120089490902, - "learning_rate": 1.3987328590840629e-06, - "loss": 1.0067, - "num_input_tokens_seen": 107791170, - "step": 5067 - }, - { - "epoch": 0.609390969758913, - "flos": 17084860260480.0, - "grad_norm": 3.4777033273475806, - "learning_rate": 1.397989968506783e-06, - "loss": 1.0876, - "num_input_tokens_seen": 107809900, - "step": 5068 - }, - { - "epoch": 0.6095112126495521, - "flos": 7796552209200.0, - "grad_norm": 4.320956135656729, - "learning_rate": 1.3972471692539458e-06, - "loss": 0.9475, - "num_input_tokens_seen": 107824335, - "step": 5069 - }, - { - "epoch": 0.6096314555401912, - "flos": 12225662991480.0, - "grad_norm": 3.894901361355003, - "learning_rate": 1.3965044614382348e-06, - "loss": 0.9704, - "num_input_tokens_seen": 107839505, - "step": 5070 - }, - { - "epoch": 0.6097516984308303, - "flos": 15374075172240.0, - "grad_norm": 5.11225478649834, - "learning_rate": 1.3957618451723162e-06, - "loss": 0.9736, - "num_input_tokens_seen": 107855255, - "step": 5071 - }, - { - "epoch": 0.6098719413214694, - "flos": 19366133745600.0, - "grad_norm": 4.389875572022719, - "learning_rate": 1.3950193205688457e-06, - "loss": 0.9267, - "num_input_tokens_seen": 107874700, - "step": 5072 - }, - { - "epoch": 0.6099921842121084, - "flos": 14488993915920.0, - "grad_norm": 10.962400853960329, - "learning_rate": 1.3942768877404627e-06, - "loss": 1.063, - "num_input_tokens_seen": 107893385, - "step": 5073 - }, - { - "epoch": 0.6101124271027476, - "flos": 16611100616160.0, - "grad_norm": 2.440248944449863, - "learning_rate": 1.393534546799795e-06, - "loss": 0.9741, - "num_input_tokens_seen": 107912805, - "step": 5074 - }, - { - "epoch": 0.6102326699933867, - "flos": 18998546614320.0, - "grad_norm": 4.6141911577527, - "learning_rate": 1.3927922978594536e-06, - "loss": 0.8945, - "num_input_tokens_seen": 107933610, - "step": 5075 - }, - { - "epoch": 0.6103529128840257, - "flos": 43406015470320.0, - "grad_norm": 0.8279127172776819, - "learning_rate": 1.3920501410320387e-06, - "loss": 0.8532, - "num_input_tokens_seen": 107989445, - "step": 5076 - }, - { - "epoch": 0.6104731557746649, - "flos": 13488143078280.0, - "grad_norm": 5.703930607163615, - "learning_rate": 1.3913080764301333e-06, - "loss": 0.9851, - "num_input_tokens_seen": 108006125, - "step": 5077 - }, - { - "epoch": 0.6105933986653039, - "flos": 16611744508920.0, - "grad_norm": 5.651060980369741, - "learning_rate": 1.3905661041663085e-06, - "loss": 0.9389, - "num_input_tokens_seen": 108027030, - "step": 5078 - }, - { - "epoch": 0.610713641555943, - "flos": 24711299507880.0, - "grad_norm": 
4.124640478014536, - "learning_rate": 1.389824224353122e-06, - "loss": 0.8669, - "num_input_tokens_seen": 108048340, - "step": 5079 - }, - { - "epoch": 0.610833884446582, - "flos": 18968155460280.0, - "grad_norm": 2.947990353655205, - "learning_rate": 1.389082437103115e-06, - "loss": 0.9961, - "num_input_tokens_seen": 108067330, - "step": 5080 - }, - { - "epoch": 0.6109541273372212, - "flos": 15065430655440.0, - "grad_norm": 3.6441396357710496, - "learning_rate": 1.3883407425288172e-06, - "loss": 0.997, - "num_input_tokens_seen": 108087385, - "step": 5081 - }, - { - "epoch": 0.6110743702278603, - "flos": 14252574017160.0, - "grad_norm": 5.339206303107566, - "learning_rate": 1.3875991407427417e-06, - "loss": 1.002, - "num_input_tokens_seen": 108105330, - "step": 5082 - }, - { - "epoch": 0.6111946131184993, - "flos": 48909428170680.0, - "grad_norm": 0.7606183833991885, - "learning_rate": 1.38685763185739e-06, - "loss": 0.84, - "num_input_tokens_seen": 108158710, - "step": 5083 - }, - { - "epoch": 0.6113148560091385, - "flos": 14147290689360.0, - "grad_norm": 3.9023877390538693, - "learning_rate": 1.3861162159852476e-06, - "loss": 0.8923, - "num_input_tokens_seen": 108176565, - "step": 5084 - }, - { - "epoch": 0.6114350988997775, - "flos": 16874048312400.0, - "grad_norm": 4.993612978395796, - "learning_rate": 1.3853748932387875e-06, - "loss": 1.0289, - "num_input_tokens_seen": 108196925, - "step": 5085 - }, - { - "epoch": 0.6115553417904166, - "flos": 17083787105880.0, - "grad_norm": 4.538173884013049, - "learning_rate": 1.3846336637304671e-06, - "loss": 0.9767, - "num_input_tokens_seen": 108214915, - "step": 5086 - }, - { - "epoch": 0.6116755846810558, - "flos": 16875489405720.0, - "grad_norm": 3.5656090270477847, - "learning_rate": 1.3838925275727316e-06, - "loss": 1.041, - "num_input_tokens_seen": 108235375, - "step": 5087 - }, - { - "epoch": 0.6117958275716948, - "flos": 13230346524120.0, - "grad_norm": 4.639985217037987, - "learning_rate": 1.3831514848780089e-06, - "loss": 1.015, - "num_input_tokens_seen": 108254670, - "step": 5088 - }, - { - "epoch": 0.6119160704623339, - "flos": 11655941133600.0, - "grad_norm": 9.503909862437661, - "learning_rate": 1.3824105357587152e-06, - "loss": 1.1478, - "num_input_tokens_seen": 108271495, - "step": 5089 - }, - { - "epoch": 0.612036313352973, - "flos": 17006166053280.0, - "grad_norm": 4.80782279086941, - "learning_rate": 1.381669680327253e-06, - "loss": 1.0551, - "num_input_tokens_seen": 108292895, - "step": 5090 - }, - { - "epoch": 0.6121565562436121, - "flos": 19204084774080.0, - "grad_norm": 4.9132960560254535, - "learning_rate": 1.380928918696008e-06, - "loss": 0.9322, - "num_input_tokens_seen": 108311385, - "step": 5091 - }, - { - "epoch": 0.6122767991342511, - "flos": 11079381747840.0, - "grad_norm": 4.805184687499685, - "learning_rate": 1.3801882509773548e-06, - "loss": 0.9407, - "num_input_tokens_seen": 108328965, - "step": 5092 - }, - { - "epoch": 0.6123970420248903, - "flos": 19915858718280.0, - "grad_norm": 4.071746866313641, - "learning_rate": 1.3794476772836503e-06, - "loss": 1.0328, - "num_input_tokens_seen": 108349785, - "step": 5093 - }, - { - "epoch": 0.6125172849155294, - "flos": 15247445850600.0, - "grad_norm": 3.596203439230123, - "learning_rate": 1.3787071977272402e-06, - "loss": 1.0647, - "num_input_tokens_seen": 108368765, - "step": 5094 - }, - { - "epoch": 0.6126375278061684, - "flos": 11495241270720.0, - "grad_norm": 5.8265706052797945, - "learning_rate": 1.3779668124204535e-06, - "loss": 0.9287, - "num_input_tokens_seen": 
108384900, - "step": 5095 - }, - { - "epoch": 0.6127577706968076, - "flos": 14514724512840.0, - "grad_norm": 3.184712107056772, - "learning_rate": 1.3772265214756074e-06, - "loss": 1.0349, - "num_input_tokens_seen": 108404380, - "step": 5096 - }, - { - "epoch": 0.6128780135874466, - "flos": 12942005508120.0, - "grad_norm": 6.689088436683248, - "learning_rate": 1.3764863250050025e-06, - "loss": 0.9626, - "num_input_tokens_seen": 108422340, - "step": 5097 - }, - { - "epoch": 0.6129982564780857, - "flos": 17740389807480.0, - "grad_norm": 4.615013888948811, - "learning_rate": 1.3757462231209272e-06, - "loss": 1.0232, - "num_input_tokens_seen": 108442365, - "step": 5098 - }, - { - "epoch": 0.6131184993687249, - "flos": 15983110698120.0, - "grad_norm": 4.4608372344659095, - "learning_rate": 1.3750062159356525e-06, - "loss": 1.1071, - "num_input_tokens_seen": 108461435, - "step": 5099 - }, - { - "epoch": 0.6132387422593639, - "flos": 11235053114880.0, - "grad_norm": 3.3635782604682225, - "learning_rate": 1.3742663035614382e-06, - "loss": 1.0478, - "num_input_tokens_seen": 108478525, - "step": 5100 - }, - { - "epoch": 0.613358985150003, - "flos": 18081387818160.0, - "grad_norm": 5.792794572883027, - "learning_rate": 1.3735264861105283e-06, - "loss": 1.0304, - "num_input_tokens_seen": 108498885, - "step": 5101 - }, - { - "epoch": 0.6134792280406421, - "flos": 15170499352320.0, - "grad_norm": 4.095915243598097, - "learning_rate": 1.372786763695152e-06, - "loss": 1.0129, - "num_input_tokens_seen": 108517365, - "step": 5102 - }, - { - "epoch": 0.6135994709312812, - "flos": 15062119206960.0, - "grad_norm": 2.6318794204432012, - "learning_rate": 1.3720471364275257e-06, - "loss": 0.9941, - "num_input_tokens_seen": 108536730, - "step": 5103 - }, - { - "epoch": 0.6137197138219203, - "flos": 10450778598600.0, - "grad_norm": 6.020913234651303, - "learning_rate": 1.3713076044198486e-06, - "loss": 1.0026, - "num_input_tokens_seen": 108553260, - "step": 5104 - }, - { - "epoch": 0.6138399567125594, - "flos": 14252941955880.0, - "grad_norm": 3.4070586198399755, - "learning_rate": 1.3705681677843086e-06, - "loss": 1.0258, - "num_input_tokens_seen": 108571575, - "step": 5105 - }, - { - "epoch": 0.6139601996031985, - "flos": 43035325973400.0, - "grad_norm": 2.147885560317054, - "learning_rate": 1.3698288266330768e-06, - "loss": 0.8583, - "num_input_tokens_seen": 108631920, - "step": 5106 - }, - { - "epoch": 0.6140804424938375, - "flos": 16769838139200.0, - "grad_norm": 9.702414174849087, - "learning_rate": 1.3690895810783113e-06, - "loss": 0.9505, - "num_input_tokens_seen": 108650435, - "step": 5107 - }, - { - "epoch": 0.6142006853844767, - "flos": 15197088472920.0, - "grad_norm": 4.8700401310765296, - "learning_rate": 1.3683504312321543e-06, - "loss": 0.9319, - "num_input_tokens_seen": 108670490, - "step": 5108 - }, - { - "epoch": 0.6143209282751158, - "flos": 8483944665120.0, - "grad_norm": 5.535651917699546, - "learning_rate": 1.3676113772067355e-06, - "loss": 1.0143, - "num_input_tokens_seen": 108687265, - "step": 5109 - }, - { - "epoch": 0.6144411711657548, - "flos": 17844630642240.0, - "grad_norm": 3.9827811849387422, - "learning_rate": 1.3668724191141671e-06, - "loss": 0.9413, - "num_input_tokens_seen": 108706255, - "step": 5110 - }, - { - "epoch": 0.6145614140563939, - "flos": 14273245456680.0, - "grad_norm": 6.8671286171394765, - "learning_rate": 1.3661335570665493e-06, - "loss": 0.8941, - "num_input_tokens_seen": 108723885, - "step": 5111 - }, - { - "epoch": 0.614681656947033, - "flos": 11892636986400.0, - 
"grad_norm": 5.5661146442259, - "learning_rate": 1.3653947911759676e-06, - "loss": 0.9177, - "num_input_tokens_seen": 108741155, - "step": 5112 - }, - { - "epoch": 0.6148018998376721, - "flos": 27704530906560.0, - "grad_norm": 3.025372184367218, - "learning_rate": 1.3646561215544904e-06, - "loss": 0.9656, - "num_input_tokens_seen": 108765515, - "step": 5113 - }, - { - "epoch": 0.6149221427283111, - "flos": 16582641140400.0, - "grad_norm": 4.333101682095394, - "learning_rate": 1.363917548314176e-06, - "loss": 1.0165, - "num_input_tokens_seen": 108784500, - "step": 5114 - }, - { - "epoch": 0.6150423856189503, - "flos": 15901810258320.0, - "grad_norm": 8.954391203547669, - "learning_rate": 1.3631790715670626e-06, - "loss": 0.9536, - "num_input_tokens_seen": 108802625, - "step": 5115 - }, - { - "epoch": 0.6151626285095894, - "flos": 13251416563920.0, - "grad_norm": 3.1492213179876654, - "learning_rate": 1.3624406914251783e-06, - "loss": 1.0816, - "num_input_tokens_seen": 108819465, - "step": 5116 - }, - { - "epoch": 0.6152828714002284, - "flos": 11210824934400.0, - "grad_norm": 3.134785855902397, - "learning_rate": 1.3617024080005335e-06, - "loss": 1.0974, - "num_input_tokens_seen": 108836085, - "step": 5117 - }, - { - "epoch": 0.6154031142908676, - "flos": 17692791970200.0, - "grad_norm": 8.097277016415235, - "learning_rate": 1.3609642214051266e-06, - "loss": 0.9646, - "num_input_tokens_seen": 108860030, - "step": 5118 - }, - { - "epoch": 0.6155233571815066, - "flos": 13649333526120.0, - "grad_norm": 13.316886358314898, - "learning_rate": 1.3602261317509385e-06, - "loss": 0.8863, - "num_input_tokens_seen": 108876410, - "step": 5119 - }, - { - "epoch": 0.6156436000721457, - "flos": 13308151546080.0, - "grad_norm": 4.927814496598936, - "learning_rate": 1.3594881391499387e-06, - "loss": 1.0402, - "num_input_tokens_seen": 108895045, - "step": 5120 - }, - { - "epoch": 0.6157638429627849, - "flos": 12784433124360.0, - "grad_norm": 2.5661606112230726, - "learning_rate": 1.3587502437140778e-06, - "loss": 1.0188, - "num_input_tokens_seen": 108912930, - "step": 5121 - }, - { - "epoch": 0.6158840858534239, - "flos": 18185628652920.0, - "grad_norm": 5.675788307054284, - "learning_rate": 1.3580124455552952e-06, - "loss": 1.0738, - "num_input_tokens_seen": 108932015, - "step": 5122 - }, - { - "epoch": 0.616004328744063, - "flos": 17526848980560.0, - "grad_norm": 4.2718574350549865, - "learning_rate": 1.3572747447855148e-06, - "loss": 1.0913, - "num_input_tokens_seen": 108952145, - "step": 5123 - }, - { - "epoch": 0.6161245716347021, - "flos": 15406520650800.0, - "grad_norm": 4.8847291160295985, - "learning_rate": 1.356537141516644e-06, - "loss": 0.9101, - "num_input_tokens_seen": 108969285, - "step": 5124 - }, - { - "epoch": 0.6162448145253412, - "flos": 25108357946400.0, - "grad_norm": 5.100783658125676, - "learning_rate": 1.3557996358605775e-06, - "loss": 0.8428, - "num_input_tokens_seen": 108988925, - "step": 5125 - }, - { - "epoch": 0.6163650574159802, - "flos": 15352851824640.0, - "grad_norm": 5.376832161847688, - "learning_rate": 1.3550622279291941e-06, - "loss": 0.9262, - "num_input_tokens_seen": 109006790, - "step": 5126 - }, - { - "epoch": 0.6164853003066194, - "flos": 17478116665560.0, - "grad_norm": 2.4773233009247475, - "learning_rate": 1.354324917834358e-06, - "loss": 1.0583, - "num_input_tokens_seen": 109027755, - "step": 5127 - }, - { - "epoch": 0.6166055431972585, - "flos": 15511711993920.0, - "grad_norm": 4.382544633428079, - "learning_rate": 1.353587705687918e-06, - "loss": 0.9967, - 
"num_input_tokens_seen": 109045650, - "step": 5128 - }, - { - "epoch": 0.6167257860878975, - "flos": 12600424927800.0, - "grad_norm": 5.12021860520266, - "learning_rate": 1.3528505916017096e-06, - "loss": 0.927, - "num_input_tokens_seen": 109063070, - "step": 5129 - }, - { - "epoch": 0.6168460289785367, - "flos": 16501616654640.0, - "grad_norm": 4.45115709155893, - "learning_rate": 1.3521135756875514e-06, - "loss": 1.1009, - "num_input_tokens_seen": 109079105, - "step": 5130 - }, - { - "epoch": 0.6169662718691757, - "flos": 18657456618960.0, - "grad_norm": 4.342224072621148, - "learning_rate": 1.3513766580572496e-06, - "loss": 1.0836, - "num_input_tokens_seen": 109101645, - "step": 5131 - }, - { - "epoch": 0.6170865147598148, - "flos": 13492251727320.0, - "grad_norm": 4.539740664431501, - "learning_rate": 1.3506398388225924e-06, - "loss": 0.997, - "num_input_tokens_seen": 109118685, - "step": 5132 - }, - { - "epoch": 0.617206757650454, - "flos": 12941974846560.0, - "grad_norm": 4.848337671184643, - "learning_rate": 1.349903118095355e-06, - "loss": 0.9414, - "num_input_tokens_seen": 109137540, - "step": 5133 - }, - { - "epoch": 0.617327000541093, - "flos": 12889164544080.0, - "grad_norm": 6.207857186237983, - "learning_rate": 1.349166495987298e-06, - "loss": 0.9531, - "num_input_tokens_seen": 109155825, - "step": 5134 - }, - { - "epoch": 0.6174472434317321, - "flos": 44263582211160.0, - "grad_norm": 0.9361290125985626, - "learning_rate": 1.348429972610166e-06, - "loss": 0.9065, - "num_input_tokens_seen": 109219850, - "step": 5135 - }, - { - "epoch": 0.6175674863223712, - "flos": 51014972080440.0, - "grad_norm": 0.8688031859549351, - "learning_rate": 1.3476935480756897e-06, - "loss": 0.8375, - "num_input_tokens_seen": 109276320, - "step": 5136 - }, - { - "epoch": 0.6176877292130103, - "flos": 15511252070520.0, - "grad_norm": 14.013964636532531, - "learning_rate": 1.346957222495583e-06, - "loss": 0.9731, - "num_input_tokens_seen": 109293835, - "step": 5137 - }, - { - "epoch": 0.6178079721036493, - "flos": 12568194080160.0, - "grad_norm": 6.689019146506757, - "learning_rate": 1.3462209959815466e-06, - "loss": 0.9451, - "num_input_tokens_seen": 109308295, - "step": 5138 - }, - { - "epoch": 0.6179282149942885, - "flos": 16083856115040.0, - "grad_norm": 3.450970560192018, - "learning_rate": 1.345484868645265e-06, - "loss": 0.9553, - "num_input_tokens_seen": 109326825, - "step": 5139 - }, - { - "epoch": 0.6180484578849276, - "flos": 15852618019920.0, - "grad_norm": 6.035963993276053, - "learning_rate": 1.3447488405984088e-06, - "loss": 1.0029, - "num_input_tokens_seen": 109344805, - "step": 5140 - }, - { - "epoch": 0.6181687007755666, - "flos": 25134364497360.0, - "grad_norm": 5.098137025340765, - "learning_rate": 1.3440129119526322e-06, - "loss": 0.9131, - "num_input_tokens_seen": 109366950, - "step": 5141 - }, - { - "epoch": 0.6182889436662057, - "flos": 44053542350160.0, - "grad_norm": 0.9254704529132984, - "learning_rate": 1.3432770828195762e-06, - "loss": 0.7959, - "num_input_tokens_seen": 109427655, - "step": 5142 - }, - { - "epoch": 0.6184091865568448, - "flos": 13911882622080.0, - "grad_norm": 5.652182955718701, - "learning_rate": 1.3425413533108635e-06, - "loss": 0.9418, - "num_input_tokens_seen": 109445975, - "step": 5143 - }, - { - "epoch": 0.6185294294474839, - "flos": 16769991447000.0, - "grad_norm": 7.347726857050438, - "learning_rate": 1.341805723538105e-06, - "loss": 0.9249, - "num_input_tokens_seen": 109465800, - "step": 5144 - }, - { - "epoch": 0.618649672338123, - "flos": 
19052031471120.0, - "grad_norm": 3.146945495725016, - "learning_rate": 1.3410701936128948e-06, - "loss": 0.9998, - "num_input_tokens_seen": 109488300, - "step": 5145 - }, - { - "epoch": 0.6187699152287621, - "flos": 10207061248560.0, - "grad_norm": 5.310086588467873, - "learning_rate": 1.340334763646812e-06, - "loss": 1.0787, - "num_input_tokens_seen": 109502155, - "step": 5146 - }, - { - "epoch": 0.6188901581194012, - "flos": 14253156586800.0, - "grad_norm": 3.337580613437867, - "learning_rate": 1.3395994337514218e-06, - "loss": 0.971, - "num_input_tokens_seen": 109522045, - "step": 5147 - }, - { - "epoch": 0.6190104010100402, - "flos": 18288857656200.0, - "grad_norm": 19.120404657526947, - "learning_rate": 1.3388642040382725e-06, - "loss": 1.013, - "num_input_tokens_seen": 109542190, - "step": 5148 - }, - { - "epoch": 0.6191306439006794, - "flos": 21696384838200.0, - "grad_norm": 2.6908388024073036, - "learning_rate": 1.3381290746188975e-06, - "loss": 1.0596, - "num_input_tokens_seen": 109561280, - "step": 5149 - }, - { - "epoch": 0.6192508867913185, - "flos": 18996798905400.0, - "grad_norm": 4.195609140830694, - "learning_rate": 1.3373940456048152e-06, - "loss": 0.8915, - "num_input_tokens_seen": 109581025, - "step": 5150 - }, - { - "epoch": 0.6193711296819575, - "flos": 26210015524080.0, - "grad_norm": 2.7673168287851837, - "learning_rate": 1.3366591171075299e-06, - "loss": 0.8189, - "num_input_tokens_seen": 109604250, - "step": 5151 - }, - { - "epoch": 0.6194913725725967, - "flos": 17923999403760.0, - "grad_norm": 5.79510059601805, - "learning_rate": 1.335924289238529e-06, - "loss": 1.134, - "num_input_tokens_seen": 109623180, - "step": 5152 - }, - { - "epoch": 0.6196116154632357, - "flos": 15039362781360.0, - "grad_norm": 3.2997977512689753, - "learning_rate": 1.3351895621092859e-06, - "loss": 0.9871, - "num_input_tokens_seen": 109643245, - "step": 5153 - }, - { - "epoch": 0.6197318583538748, - "flos": 11499871166280.0, - "grad_norm": 5.455221652974428, - "learning_rate": 1.3344549358312567e-06, - "loss": 0.9911, - "num_input_tokens_seen": 109661365, - "step": 5154 - }, - { - "epoch": 0.619852101244514, - "flos": 17372710691520.0, - "grad_norm": 4.913594767957688, - "learning_rate": 1.3337204105158852e-06, - "loss": 1.0007, - "num_input_tokens_seen": 109679955, - "step": 5155 - }, - { - "epoch": 0.619972344135153, - "flos": 11840102637960.0, - "grad_norm": 4.326097585070378, - "learning_rate": 1.332985986274597e-06, - "loss": 0.935, - "num_input_tokens_seen": 109697305, - "step": 5156 - }, - { - "epoch": 0.6200925870257921, - "flos": 8798598847680.0, - "grad_norm": 7.240282057015232, - "learning_rate": 1.3322516632188047e-06, - "loss": 0.9836, - "num_input_tokens_seen": 109713920, - "step": 5157 - }, - { - "epoch": 0.6202128299164312, - "flos": 18891484916040.0, - "grad_norm": 3.2506374320900178, - "learning_rate": 1.3315174414599045e-06, - "loss": 0.903, - "num_input_tokens_seen": 109734960, - "step": 5158 - }, - { - "epoch": 0.6203330728070703, - "flos": 13308519484800.0, - "grad_norm": 3.0866613384170627, - "learning_rate": 1.3307833211092768e-06, - "loss": 0.98, - "num_input_tokens_seen": 109753345, - "step": 5159 - }, - { - "epoch": 0.6204533156977093, - "flos": 14644542636720.0, - "grad_norm": 2.1713934714484893, - "learning_rate": 1.3300493022782873e-06, - "loss": 0.9672, - "num_input_tokens_seen": 109773635, - "step": 5160 - }, - { - "epoch": 0.6205735585883485, - "flos": 12286291991760.0, - "grad_norm": 4.7979764885209075, - "learning_rate": 1.3293153850782855e-06, - "loss": 
0.9453, - "num_input_tokens_seen": 109791675, - "step": 5161 - }, - { - "epoch": 0.6206938014789876, - "flos": 16322851584840.0, - "grad_norm": 3.7715697157858177, - "learning_rate": 1.3285815696206069e-06, - "loss": 0.9302, - "num_input_tokens_seen": 109812940, - "step": 5162 - }, - { - "epoch": 0.6208140443696266, - "flos": 17054131829280.0, - "grad_norm": 12.797725141556532, - "learning_rate": 1.32784785601657e-06, - "loss": 0.9994, - "num_input_tokens_seen": 109832070, - "step": 5163 - }, - { - "epoch": 0.6209342872602658, - "flos": 25027578753120.0, - "grad_norm": 4.300923607572428, - "learning_rate": 1.3271142443774798e-06, - "loss": 0.9688, - "num_input_tokens_seen": 109854025, - "step": 5164 - }, - { - "epoch": 0.6210545301509048, - "flos": 19207978792200.0, - "grad_norm": 3.848462944860437, - "learning_rate": 1.3263807348146228e-06, - "loss": 1.0454, - "num_input_tokens_seen": 109873600, - "step": 5165 - }, - { - "epoch": 0.6211747730415439, - "flos": 23979038093520.0, - "grad_norm": 4.500564963585692, - "learning_rate": 1.3256473274392733e-06, - "loss": 0.96, - "num_input_tokens_seen": 109894665, - "step": 5166 - }, - { - "epoch": 0.6212950159321831, - "flos": 24373704930360.0, - "grad_norm": 2.981334955376518, - "learning_rate": 1.3249140223626873e-06, - "loss": 0.9276, - "num_input_tokens_seen": 109916005, - "step": 5167 - }, - { - "epoch": 0.6214152588228221, - "flos": 19917085180680.0, - "grad_norm": 2.821777808457848, - "learning_rate": 1.3241808196961077e-06, - "loss": 0.9857, - "num_input_tokens_seen": 109936850, - "step": 5168 - }, - { - "epoch": 0.6215355017134612, - "flos": 14357612052480.0, - "grad_norm": 3.587743499679796, - "learning_rate": 1.3234477195507608e-06, - "loss": 0.9254, - "num_input_tokens_seen": 109955400, - "step": 5169 - }, - { - "epoch": 0.6216557446041003, - "flos": 29592118724760.0, - "grad_norm": 4.03537651413593, - "learning_rate": 1.322714722037857e-06, - "loss": 0.8573, - "num_input_tokens_seen": 109976565, - "step": 5170 - }, - { - "epoch": 0.6217759874947394, - "flos": 19890434736960.0, - "grad_norm": 18.54394754960918, - "learning_rate": 1.321981827268591e-06, - "loss": 0.9892, - "num_input_tokens_seen": 109996940, - "step": 5171 - }, - { - "epoch": 0.6218962303853784, - "flos": 15461538585600.0, - "grad_norm": 3.0551410933408176, - "learning_rate": 1.3212490353541426e-06, - "loss": 1.0419, - "num_input_tokens_seen": 110018920, - "step": 5172 - }, - { - "epoch": 0.6220164732760175, - "flos": 15087083264880.0, - "grad_norm": 5.8991129709469154, - "learning_rate": 1.3205163464056762e-06, - "loss": 1.0289, - "num_input_tokens_seen": 110035245, - "step": 5173 - }, - { - "epoch": 0.6221367161666567, - "flos": 18601641483600.0, - "grad_norm": 4.072686906791028, - "learning_rate": 1.319783760534339e-06, - "loss": 0.9473, - "num_input_tokens_seen": 110054210, - "step": 5174 - }, - { - "epoch": 0.6222569590572957, - "flos": 11520941206080.0, - "grad_norm": 4.49445425146977, - "learning_rate": 1.319051277851266e-06, - "loss": 0.9843, - "num_input_tokens_seen": 110070215, - "step": 5175 - }, - { - "epoch": 0.6223772019479348, - "flos": 12915723003120.0, - "grad_norm": 4.483492987152397, - "learning_rate": 1.3183188984675716e-06, - "loss": 1.0668, - "num_input_tokens_seen": 110088300, - "step": 5176 - }, - { - "epoch": 0.6224974448385739, - "flos": 19574799384480.0, - "grad_norm": 4.668205734047143, - "learning_rate": 1.3175866224943586e-06, - "loss": 0.9514, - "num_input_tokens_seen": 110106740, - "step": 5177 - }, - { - "epoch": 0.622617687729213, - 
"flos": 14042589931200.0, - "grad_norm": 10.470946670770934, - "learning_rate": 1.316854450042712e-06, - "loss": 0.9497, - "num_input_tokens_seen": 110124400, - "step": 5178 - }, - { - "epoch": 0.622737930619852, - "flos": 16376060487600.0, - "grad_norm": 4.248027119206935, - "learning_rate": 1.3161223812237024e-06, - "loss": 0.9702, - "num_input_tokens_seen": 110143475, - "step": 5179 - }, - { - "epoch": 0.6228581735104912, - "flos": 8897627217240.0, - "grad_norm": 7.185421850553394, - "learning_rate": 1.3153904161483842e-06, - "loss": 1.0739, - "num_input_tokens_seen": 110158495, - "step": 5180 - }, - { - "epoch": 0.6229784164011303, - "flos": 16924773628800.0, - "grad_norm": 6.237438242879863, - "learning_rate": 1.3146585549277953e-06, - "loss": 1.076, - "num_input_tokens_seen": 110176855, - "step": 5181 - }, - { - "epoch": 0.6230986592917693, - "flos": 15926529023760.0, - "grad_norm": 6.317731747710974, - "learning_rate": 1.3139267976729591e-06, - "loss": 1.0033, - "num_input_tokens_seen": 110196765, - "step": 5182 - }, - { - "epoch": 0.6232189021824085, - "flos": 24632727946920.0, - "grad_norm": 14.327328925029137, - "learning_rate": 1.3131951444948815e-06, - "loss": 0.9451, - "num_input_tokens_seen": 110215885, - "step": 5183 - }, - { - "epoch": 0.6233391450730476, - "flos": 15799869040560.0, - "grad_norm": 3.496422983362978, - "learning_rate": 1.3124635955045546e-06, - "loss": 0.9639, - "num_input_tokens_seen": 110235420, - "step": 5184 - }, - { - "epoch": 0.6234593879636866, - "flos": 14430327255480.0, - "grad_norm": 3.226310221343903, - "learning_rate": 1.3117321508129537e-06, - "loss": 1.0669, - "num_input_tokens_seen": 110253220, - "step": 5185 - }, - { - "epoch": 0.6235796308543258, - "flos": 14669598679320.0, - "grad_norm": 3.389837076778837, - "learning_rate": 1.3110008105310388e-06, - "loss": 0.9955, - "num_input_tokens_seen": 110272760, - "step": 5186 - }, - { - "epoch": 0.6236998737449648, - "flos": 18949323714360.0, - "grad_norm": 5.402073829290423, - "learning_rate": 1.3102695747697526e-06, - "loss": 1.0074, - "num_input_tokens_seen": 110295350, - "step": 5187 - }, - { - "epoch": 0.6238201166356039, - "flos": 8927129186040.0, - "grad_norm": 12.82423421793158, - "learning_rate": 1.3095384436400237e-06, - "loss": 1.1175, - "num_input_tokens_seen": 110306600, - "step": 5188 - }, - { - "epoch": 0.623940359526243, - "flos": 7330335308640.0, - "grad_norm": 3.8939547709937417, - "learning_rate": 1.3088074172527633e-06, - "loss": 1.0417, - "num_input_tokens_seen": 110323450, - "step": 5189 - }, - { - "epoch": 0.6240606024168821, - "flos": 20703046082760.0, - "grad_norm": 6.544249233377688, - "learning_rate": 1.3080764957188684e-06, - "loss": 0.941, - "num_input_tokens_seen": 110343415, - "step": 5190 - }, - { - "epoch": 0.6241808453075212, - "flos": 15642541949280.0, - "grad_norm": 4.240027452056113, - "learning_rate": 1.3073456791492192e-06, - "loss": 0.9282, - "num_input_tokens_seen": 110362845, - "step": 5191 - }, - { - "epoch": 0.6243010881981603, - "flos": 15010044781920.0, - "grad_norm": 3.739557094357952, - "learning_rate": 1.3066149676546801e-06, - "loss": 1.0143, - "num_input_tokens_seen": 110380745, - "step": 5192 - }, - { - "epoch": 0.6244213310887994, - "flos": 15877244800680.0, - "grad_norm": 4.278912205297572, - "learning_rate": 1.3058843613460985e-06, - "loss": 0.8831, - "num_input_tokens_seen": 110398405, - "step": 5193 - }, - { - "epoch": 0.6245415739794384, - "flos": 10765524765840.0, - "grad_norm": 7.511149955714275, - "learning_rate": 1.3051538603343075e-06, 
- "loss": 0.9585, - "num_input_tokens_seen": 110416055, - "step": 5194 - }, - { - "epoch": 0.6246618168700776, - "flos": 13387121707320.0, - "grad_norm": 3.7902349486788447, - "learning_rate": 1.3044234647301235e-06, - "loss": 0.8998, - "num_input_tokens_seen": 110433800, - "step": 5195 - }, - { - "epoch": 0.6247820597607167, - "flos": 10109443310760.0, - "grad_norm": 4.157722384434222, - "learning_rate": 1.303693174644347e-06, - "loss": 0.9433, - "num_input_tokens_seen": 110450995, - "step": 5196 - }, - { - "epoch": 0.6249023026513557, - "flos": 16087995425640.0, - "grad_norm": 53.72737532557309, - "learning_rate": 1.3029629901877625e-06, - "loss": 1.0256, - "num_input_tokens_seen": 110470090, - "step": 5197 - }, - { - "epoch": 0.6250225455419949, - "flos": 14383833234360.0, - "grad_norm": 4.6878525664936035, - "learning_rate": 1.3022329114711376e-06, - "loss": 1.0056, - "num_input_tokens_seen": 110488520, - "step": 5198 - }, - { - "epoch": 0.6251427884326339, - "flos": 16662469825320.0, - "grad_norm": 3.4566021151816453, - "learning_rate": 1.3015029386052256e-06, - "loss": 0.9182, - "num_input_tokens_seen": 110508410, - "step": 5199 - }, - { - "epoch": 0.625263031323273, - "flos": 22617284344680.0, - "grad_norm": 4.501772425535688, - "learning_rate": 1.3007730717007622e-06, - "loss": 0.9504, - "num_input_tokens_seen": 110528945, - "step": 5200 - }, - { - "epoch": 0.6253832742139122, - "flos": 17163462483000.0, - "grad_norm": 4.3985769137957735, - "learning_rate": 1.3000433108684676e-06, - "loss": 0.9807, - "num_input_tokens_seen": 110549165, - "step": 5201 - }, - { - "epoch": 0.6255035171045512, - "flos": 19702992445680.0, - "grad_norm": 5.057844116753204, - "learning_rate": 1.2993136562190467e-06, - "loss": 1.0279, - "num_input_tokens_seen": 110568005, - "step": 5202 - }, - { - "epoch": 0.6256237599951903, - "flos": 14355557727960.0, - "grad_norm": 3.4773838401716897, - "learning_rate": 1.2985841078631871e-06, - "loss": 0.9272, - "num_input_tokens_seen": 110587045, - "step": 5203 - }, - { - "epoch": 0.6257440028858293, - "flos": 17189469033960.0, - "grad_norm": 4.062266416436809, - "learning_rate": 1.2978546659115608e-06, - "loss": 1.0087, - "num_input_tokens_seen": 110604845, - "step": 5204 - }, - { - "epoch": 0.6258642457764685, - "flos": 11211039565320.0, - "grad_norm": 2.827387508107475, - "learning_rate": 1.2971253304748228e-06, - "loss": 1.0831, - "num_input_tokens_seen": 110622280, - "step": 5205 - }, - { - "epoch": 0.6259844886671075, - "flos": 8378998614480.0, - "grad_norm": 6.713221915014028, - "learning_rate": 1.296396101663614e-06, - "loss": 0.9769, - "num_input_tokens_seen": 110638560, - "step": 5206 - }, - { - "epoch": 0.6261047315577466, - "flos": 11236862146920.0, - "grad_norm": 3.9705974454087296, - "learning_rate": 1.2956669795885565e-06, - "loss": 1.0659, - "num_input_tokens_seen": 110654910, - "step": 5207 - }, - { - "epoch": 0.6262249744483858, - "flos": 22276562288040.0, - "grad_norm": 18.703369750904848, - "learning_rate": 1.294937964360259e-06, - "loss": 0.9136, - "num_input_tokens_seen": 110674900, - "step": 5208 - }, - { - "epoch": 0.6263452173390248, - "flos": 19365612499080.0, - "grad_norm": 3.8738205868400617, - "learning_rate": 1.2942090560893108e-06, - "loss": 0.9122, - "num_input_tokens_seen": 110694025, - "step": 5209 - }, - { - "epoch": 0.6264654602296639, - "flos": 26655591646680.0, - "grad_norm": 7.333553946673436, - "learning_rate": 1.2934802548862882e-06, - "loss": 0.825, - "num_input_tokens_seen": 110716530, - "step": 5210 - }, - { - "epoch": 
0.626585703120303, - "flos": 10424036170200.0, - "grad_norm": 3.0256475212710474, - "learning_rate": 1.292751560861749e-06, - "loss": 1.0613, - "num_input_tokens_seen": 110731155, - "step": 5211 - }, - { - "epoch": 0.6267059460109421, - "flos": 15879268463640.0, - "grad_norm": 3.6472832693927657, - "learning_rate": 1.2920229741262354e-06, - "loss": 1.0222, - "num_input_tokens_seen": 110748880, - "step": 5212 - }, - { - "epoch": 0.6268261889015811, - "flos": 12574234407480.0, - "grad_norm": 3.7574104839532603, - "learning_rate": 1.2912944947902739e-06, - "loss": 0.9706, - "num_input_tokens_seen": 110765085, - "step": 5213 - }, - { - "epoch": 0.6269464317922203, - "flos": 23423640732240.0, - "grad_norm": 7.776041394127451, - "learning_rate": 1.2905661229643742e-06, - "loss": 0.9326, - "num_input_tokens_seen": 110784565, - "step": 5214 - }, - { - "epoch": 0.6270666746828594, - "flos": 12703592607960.0, - "grad_norm": 3.539067889413667, - "learning_rate": 1.2898378587590299e-06, - "loss": 1.0504, - "num_input_tokens_seen": 110800885, - "step": 5215 - }, - { - "epoch": 0.6271869175734984, - "flos": 12364802229600.0, - "grad_norm": 4.501990619759805, - "learning_rate": 1.2891097022847173e-06, - "loss": 1.0975, - "num_input_tokens_seen": 110817950, - "step": 5216 - }, - { - "epoch": 0.6273071604641376, - "flos": 19128242091960.0, - "grad_norm": 4.030925648970024, - "learning_rate": 1.2883816536518978e-06, - "loss": 0.8964, - "num_input_tokens_seen": 110838810, - "step": 5217 - }, - { - "epoch": 0.6274274033547766, - "flos": 18548340596160.0, - "grad_norm": 2.7606631666761756, - "learning_rate": 1.2876537129710155e-06, - "loss": 1.0448, - "num_input_tokens_seen": 110856260, - "step": 5218 - }, - { - "epoch": 0.6275476462454157, - "flos": 14383465295640.0, - "grad_norm": 5.617552510632793, - "learning_rate": 1.286925880352499e-06, - "loss": 0.972, - "num_input_tokens_seen": 110874840, - "step": 5219 - }, - { - "epoch": 0.6276678891360549, - "flos": 18734127163200.0, - "grad_norm": 3.649887156375175, - "learning_rate": 1.2861981559067592e-06, - "loss": 0.9267, - "num_input_tokens_seen": 110895165, - "step": 5220 - }, - { - "epoch": 0.6277881320266939, - "flos": 9816441737640.0, - "grad_norm": 5.166267236015832, - "learning_rate": 1.2854705397441917e-06, - "loss": 1.0324, - "num_input_tokens_seen": 110910425, - "step": 5221 - }, - { - "epoch": 0.627908374917333, - "flos": 19257538969320.0, - "grad_norm": 5.310183210642625, - "learning_rate": 1.2847430319751747e-06, - "loss": 0.9958, - "num_input_tokens_seen": 110928240, - "step": 5222 - }, - { - "epoch": 0.6280286178079721, - "flos": 16901005371720.0, - "grad_norm": 3.5841238051897335, - "learning_rate": 1.2840156327100712e-06, - "loss": 0.9045, - "num_input_tokens_seen": 110945085, - "step": 5223 - }, - { - "epoch": 0.6281488606986112, - "flos": 18762280023360.0, - "grad_norm": 5.679417159036429, - "learning_rate": 1.2832883420592272e-06, - "loss": 0.9479, - "num_input_tokens_seen": 110963700, - "step": 5224 - }, - { - "epoch": 0.6282691035892503, - "flos": 25789556767200.0, - "grad_norm": 8.313117671189255, - "learning_rate": 1.282561160132972e-06, - "loss": 0.8616, - "num_input_tokens_seen": 110983940, - "step": 5225 - }, - { - "epoch": 0.6283893464798894, - "flos": 18889675884000.0, - "grad_norm": 4.5656806623863515, - "learning_rate": 1.2818340870416186e-06, - "loss": 1.0434, - "num_input_tokens_seen": 111004795, - "step": 5226 - }, - { - "epoch": 0.6285095893705285, - "flos": 15800053009920.0, - "grad_norm": 3.747342503185643, - 
"learning_rate": 1.2811071228954626e-06, - "loss": 0.9825, - "num_input_tokens_seen": 111023150, - "step": 5227 - }, - { - "epoch": 0.6286298322611675, - "flos": 18893968502400.0, - "grad_norm": 3.7573331975302886, - "learning_rate": 1.2803802678047846e-06, - "loss": 1.0328, - "num_input_tokens_seen": 111043020, - "step": 5228 - }, - { - "epoch": 0.6287500751518067, - "flos": 15481842086400.0, - "grad_norm": 4.359698539951235, - "learning_rate": 1.279653521879848e-06, - "loss": 0.9563, - "num_input_tokens_seen": 111062805, - "step": 5229 - }, - { - "epoch": 0.6288703180424458, - "flos": 14198843867880.0, - "grad_norm": 20.819728386506167, - "learning_rate": 1.2789268852308997e-06, - "loss": 1.0718, - "num_input_tokens_seen": 111077735, - "step": 5230 - }, - { - "epoch": 0.6289905609330848, - "flos": 15718967201040.0, - "grad_norm": 4.0301594996539984, - "learning_rate": 1.2782003579681688e-06, - "loss": 0.9238, - "num_input_tokens_seen": 111096985, - "step": 5231 - }, - { - "epoch": 0.629110803823724, - "flos": 18157905054600.0, - "grad_norm": 5.153576336880884, - "learning_rate": 1.2774739402018701e-06, - "loss": 0.9717, - "num_input_tokens_seen": 111117540, - "step": 5232 - }, - { - "epoch": 0.629231046714363, - "flos": 14829317372280.0, - "grad_norm": 6.679927924218487, - "learning_rate": 1.2767476320422002e-06, - "loss": 0.9681, - "num_input_tokens_seen": 111137185, - "step": 5233 - }, - { - "epoch": 0.6293512896050021, - "flos": 46575768096840.0, - "grad_norm": 0.8787052518105709, - "learning_rate": 1.2760214335993392e-06, - "loss": 0.8272, - "num_input_tokens_seen": 111203550, - "step": 5234 - }, - { - "epoch": 0.6294715324956413, - "flos": 24740770815120.0, - "grad_norm": 4.054775935331345, - "learning_rate": 1.2752953449834514e-06, - "loss": 0.8053, - "num_input_tokens_seen": 111225720, - "step": 5235 - }, - { - "epoch": 0.6295917753862803, - "flos": 16193125445640.0, - "grad_norm": 3.2977846165380775, - "learning_rate": 1.2745693663046836e-06, - "loss": 1.019, - "num_input_tokens_seen": 111244510, - "step": 5236 - }, - { - "epoch": 0.6297120182769194, - "flos": 14803372144440.0, - "grad_norm": 3.974635048673224, - "learning_rate": 1.2738434976731662e-06, - "loss": 1.0313, - "num_input_tokens_seen": 111262415, - "step": 5237 - }, - { - "epoch": 0.6298322611675584, - "flos": 13830766151640.0, - "grad_norm": 5.737130732070277, - "learning_rate": 1.2731177391990125e-06, - "loss": 0.9758, - "num_input_tokens_seen": 111282060, - "step": 5238 - }, - { - "epoch": 0.6299525040581976, - "flos": 8877109085520.0, - "grad_norm": 4.570625072778143, - "learning_rate": 1.2723920909923203e-06, - "loss": 1.0433, - "num_input_tokens_seen": 111297525, - "step": 5239 - }, - { - "epoch": 0.6300727469488366, - "flos": 41308228935240.0, - "grad_norm": 0.9267428269295208, - "learning_rate": 1.2716665531631688e-06, - "loss": 0.8835, - "num_input_tokens_seen": 111351530, - "step": 5240 - }, - { - "epoch": 0.6301929898394757, - "flos": 16008289386960.0, - "grad_norm": 2.912636251905323, - "learning_rate": 1.270941125821623e-06, - "loss": 0.9915, - "num_input_tokens_seen": 111371675, - "step": 5241 - }, - { - "epoch": 0.6303132327301149, - "flos": 20152585232640.0, - "grad_norm": 5.098047421234455, - "learning_rate": 1.2702158090777278e-06, - "loss": 0.9818, - "num_input_tokens_seen": 111392485, - "step": 5242 - }, - { - "epoch": 0.6304334756207539, - "flos": 17975583243840.0, - "grad_norm": 4.138596754125934, - "learning_rate": 1.2694906030415148e-06, - "loss": 0.9753, - "num_input_tokens_seen": 111409950, - 
"step": 5243 - }, - { - "epoch": 0.630553718511393, - "flos": 12777902212080.0, - "grad_norm": 6.175293258313476, - "learning_rate": 1.2687655078229958e-06, - "loss": 1.0242, - "num_input_tokens_seen": 111424000, - "step": 5244 - }, - { - "epoch": 0.6306739614020321, - "flos": 19440903273120.0, - "grad_norm": 8.402638393398519, - "learning_rate": 1.2680405235321678e-06, - "loss": 0.9295, - "num_input_tokens_seen": 111445055, - "step": 5245 - }, - { - "epoch": 0.6307942042926712, - "flos": 10844188311480.0, - "grad_norm": 8.576548836298098, - "learning_rate": 1.267315650279011e-06, - "loss": 1.0109, - "num_input_tokens_seen": 111463245, - "step": 5246 - }, - { - "epoch": 0.6309144471833102, - "flos": 13908816466080.0, - "grad_norm": 6.57362193471576, - "learning_rate": 1.2665908881734874e-06, - "loss": 0.9581, - "num_input_tokens_seen": 111481800, - "step": 5247 - }, - { - "epoch": 0.6310346900739494, - "flos": 12390624811200.0, - "grad_norm": 3.4627492050666824, - "learning_rate": 1.2658662373255432e-06, - "loss": 1.0677, - "num_input_tokens_seen": 111499910, - "step": 5248 - }, - { - "epoch": 0.6311549329645885, - "flos": 39400828201200.0, - "grad_norm": 0.7992191243846684, - "learning_rate": 1.2651416978451063e-06, - "loss": 0.8012, - "num_input_tokens_seen": 111565015, - "step": 5249 - }, - { - "epoch": 0.6312751758552275, - "flos": 29933086073880.0, - "grad_norm": 5.886114766626491, - "learning_rate": 1.2644172698420903e-06, - "loss": 0.8743, - "num_input_tokens_seen": 111586840, - "step": 5250 - }, - { - "epoch": 0.6313954187458667, - "flos": 13938379758000.0, - "grad_norm": 3.2005229522640124, - "learning_rate": 1.2636929534263892e-06, - "loss": 1.0649, - "num_input_tokens_seen": 111605545, - "step": 5251 - }, - { - "epoch": 0.6315156616365057, - "flos": 16272310237800.0, - "grad_norm": 2.877890750659151, - "learning_rate": 1.2629687487078821e-06, - "loss": 0.9946, - "num_input_tokens_seen": 111624075, - "step": 5252 - }, - { - "epoch": 0.6316359045271448, - "flos": 16869265109040.0, - "grad_norm": 5.147564780747987, - "learning_rate": 1.2622446557964293e-06, - "loss": 0.9942, - "num_input_tokens_seen": 111641800, - "step": 5253 - }, - { - "epoch": 0.631756147417784, - "flos": 23612769409320.0, - "grad_norm": 5.923043732069007, - "learning_rate": 1.261520674801876e-06, - "loss": 0.935, - "num_input_tokens_seen": 111662115, - "step": 5254 - }, - { - "epoch": 0.631876390308423, - "flos": 22276133026200.0, - "grad_norm": 5.911199201598316, - "learning_rate": 1.2607968058340488e-06, - "loss": 0.9426, - "num_input_tokens_seen": 111681530, - "step": 5255 - }, - { - "epoch": 0.6319966331990621, - "flos": 17556381610920.0, - "grad_norm": 4.699704570819161, - "learning_rate": 1.2600730490027583e-06, - "loss": 0.953, - "num_input_tokens_seen": 111701490, - "step": 5256 - }, - { - "epoch": 0.6321168760897012, - "flos": 12389275702560.0, - "grad_norm": 3.2467814427427166, - "learning_rate": 1.2593494044177984e-06, - "loss": 1.0352, - "num_input_tokens_seen": 111719515, - "step": 5257 - }, - { - "epoch": 0.6322371189803403, - "flos": 12966938904480.0, - "grad_norm": 8.626476659003943, - "learning_rate": 1.2586258721889448e-06, - "loss": 1.0214, - "num_input_tokens_seen": 111736585, - "step": 5258 - }, - { - "epoch": 0.6323573618709794, - "flos": 14305200350280.0, - "grad_norm": 3.042919545394063, - "learning_rate": 1.2579024524259573e-06, - "loss": 1.0412, - "num_input_tokens_seen": 111752565, - "step": 5259 - }, - { - "epoch": 0.6324776047616185, - "flos": 14221906909080.0, - "grad_norm": 
4.227062936090948, - "learning_rate": 1.2571791452385768e-06, - "loss": 1.1376, - "num_input_tokens_seen": 111769550, - "step": 5260 - }, - { - "epoch": 0.6325978476522576, - "flos": 21988681195440.0, - "grad_norm": 4.179611290923223, - "learning_rate": 1.2564559507365301e-06, - "loss": 0.9943, - "num_input_tokens_seen": 111791675, - "step": 5261 - }, - { - "epoch": 0.6327180905428966, - "flos": 17451404898720.0, - "grad_norm": 3.9017467939537567, - "learning_rate": 1.2557328690295244e-06, - "loss": 1.0077, - "num_input_tokens_seen": 111809585, - "step": 5262 - }, - { - "epoch": 0.6328383334335358, - "flos": 15323625809880.0, - "grad_norm": 3.835249021320779, - "learning_rate": 1.255009900227251e-06, - "loss": 0.9847, - "num_input_tokens_seen": 111828330, - "step": 5263 - }, - { - "epoch": 0.6329585763241748, - "flos": 16297764880680.0, - "grad_norm": 16.005059396001354, - "learning_rate": 1.254287044439383e-06, - "loss": 1.0245, - "num_input_tokens_seen": 111847655, - "step": 5264 - }, - { - "epoch": 0.6330788192148139, - "flos": 50803332270240.0, - "grad_norm": 0.9791296519053674, - "learning_rate": 1.2535643017755776e-06, - "loss": 0.7983, - "num_input_tokens_seen": 111909565, - "step": 5265 - }, - { - "epoch": 0.6331990621054531, - "flos": 15085580848440.0, - "grad_norm": 4.4296827635832265, - "learning_rate": 1.2528416723454737e-06, - "loss": 0.9454, - "num_input_tokens_seen": 111925955, - "step": 5266 - }, - { - "epoch": 0.6333193049960921, - "flos": 24583842324120.0, - "grad_norm": 3.4614859462895216, - "learning_rate": 1.2521191562586945e-06, - "loss": 0.9314, - "num_input_tokens_seen": 111949325, - "step": 5267 - }, - { - "epoch": 0.6334395478867312, - "flos": 12991473700560.0, - "grad_norm": 9.750586467613235, - "learning_rate": 1.2513967536248445e-06, - "loss": 0.9963, - "num_input_tokens_seen": 111965365, - "step": 5268 - }, - { - "epoch": 0.6335597907773702, - "flos": 16796028659520.0, - "grad_norm": 2.601808461885476, - "learning_rate": 1.2506744645535117e-06, - "loss": 1.0449, - "num_input_tokens_seen": 111985515, - "step": 5269 - }, - { - "epoch": 0.6336800336680094, - "flos": 16140407127840.0, - "grad_norm": 8.954843619135069, - "learning_rate": 1.249952289154267e-06, - "loss": 0.8239, - "num_input_tokens_seen": 112005275, - "step": 5270 - }, - { - "epoch": 0.6338002765586485, - "flos": 16795691382360.0, - "grad_norm": 3.5743534917396085, - "learning_rate": 1.2492302275366635e-06, - "loss": 0.9956, - "num_input_tokens_seen": 112024670, - "step": 5271 - }, - { - "epoch": 0.6339205194492875, - "flos": 18867471366480.0, - "grad_norm": 5.36785147167813, - "learning_rate": 1.2485082798102377e-06, - "loss": 0.8596, - "num_input_tokens_seen": 112044805, - "step": 5272 - }, - { - "epoch": 0.6340407623399267, - "flos": 13144968096840.0, - "grad_norm": 3.263088719828077, - "learning_rate": 1.2477864460845084e-06, - "loss": 0.907, - "num_input_tokens_seen": 112060925, - "step": 5273 - }, - { - "epoch": 0.6341610052305657, - "flos": 12548227856520.0, - "grad_norm": 8.5982143943661, - "learning_rate": 1.2470647264689776e-06, - "loss": 0.9468, - "num_input_tokens_seen": 112079125, - "step": 5274 - }, - { - "epoch": 0.6342812481212048, - "flos": 16770114093240.0, - "grad_norm": 4.26930050518687, - "learning_rate": 1.2463431210731282e-06, - "loss": 0.9307, - "num_input_tokens_seen": 112098430, - "step": 5275 - }, - { - "epoch": 0.634401491011844, - "flos": 12626799417480.0, - "grad_norm": 20.173538340358686, - "learning_rate": 1.2456216300064289e-06, - "loss": 0.9813, - 
"num_input_tokens_seen": 112115700, - "step": 5276 - }, - { - "epoch": 0.634521733902483, - "flos": 15167831796600.0, - "grad_norm": 4.5315907317338935, - "learning_rate": 1.244900253378328e-06, - "loss": 0.9944, - "num_input_tokens_seen": 112135475, - "step": 5277 - }, - { - "epoch": 0.6346419767931221, - "flos": 11709150036360.0, - "grad_norm": 3.978286339543932, - "learning_rate": 1.2441789912982583e-06, - "loss": 0.9116, - "num_input_tokens_seen": 112152280, - "step": 5278 - }, - { - "epoch": 0.6347622196837612, - "flos": 17318857896000.0, - "grad_norm": 3.991316788111909, - "learning_rate": 1.2434578438756346e-06, - "loss": 0.8664, - "num_input_tokens_seen": 112172430, - "step": 5279 - }, - { - "epoch": 0.6348824625744003, - "flos": 46190034870120.0, - "grad_norm": 6.69642200869284, - "learning_rate": 1.242736811219855e-06, - "loss": 1.0051, - "num_input_tokens_seen": 112198110, - "step": 5280 - }, - { - "epoch": 0.6350027054650393, - "flos": 20388759838920.0, - "grad_norm": 3.189004580370346, - "learning_rate": 1.2420158934402988e-06, - "loss": 1.0493, - "num_input_tokens_seen": 112218445, - "step": 5281 - }, - { - "epoch": 0.6351229483556785, - "flos": 16349961951960.0, - "grad_norm": 3.170464563835576, - "learning_rate": 1.2412950906463286e-06, - "loss": 1.0601, - "num_input_tokens_seen": 112235470, - "step": 5282 - }, - { - "epoch": 0.6352431912463176, - "flos": 15585132412800.0, - "grad_norm": 6.045087525496878, - "learning_rate": 1.2405744029472902e-06, - "loss": 1.1203, - "num_input_tokens_seen": 112254675, - "step": 5283 - }, - { - "epoch": 0.6353634341369566, - "flos": 9689751092880.0, - "grad_norm": 3.859400328271202, - "learning_rate": 1.2398538304525108e-06, - "loss": 0.9712, - "num_input_tokens_seen": 112273020, - "step": 5284 - }, - { - "epoch": 0.6354836770275958, - "flos": 13700457442800.0, - "grad_norm": 5.362575987519788, - "learning_rate": 1.2391333732713016e-06, - "loss": 0.9751, - "num_input_tokens_seen": 112290545, - "step": 5285 - }, - { - "epoch": 0.6356039199182348, - "flos": 15351226761960.0, - "grad_norm": 7.500403584112028, - "learning_rate": 1.2384130315129543e-06, - "loss": 1.0008, - "num_input_tokens_seen": 112308590, - "step": 5286 - }, - { - "epoch": 0.6357241628088739, - "flos": 12835005132960.0, - "grad_norm": 9.325495107970893, - "learning_rate": 1.2376928052867447e-06, - "loss": 0.9436, - "num_input_tokens_seen": 112327430, - "step": 5287 - }, - { - "epoch": 0.6358444056995131, - "flos": 17738826067920.0, - "grad_norm": 9.892022249621748, - "learning_rate": 1.2369726947019299e-06, - "loss": 1.0146, - "num_input_tokens_seen": 112347625, - "step": 5288 - }, - { - "epoch": 0.6359646485901521, - "flos": 16558811560200.0, - "grad_norm": 4.502012226767695, - "learning_rate": 1.2362526998677511e-06, - "loss": 0.8887, - "num_input_tokens_seen": 112363710, - "step": 5289 - }, - { - "epoch": 0.6360848914807912, - "flos": 14830022588160.0, - "grad_norm": 4.442835434425598, - "learning_rate": 1.2355328208934301e-06, - "loss": 1.076, - "num_input_tokens_seen": 112382305, - "step": 5290 - }, - { - "epoch": 0.6362051343714303, - "flos": 13099731199680.0, - "grad_norm": 2.626872439368464, - "learning_rate": 1.2348130578881728e-06, - "loss": 0.9557, - "num_input_tokens_seen": 112400245, - "step": 5291 - }, - { - "epoch": 0.6363253772620694, - "flos": 17346888109920.0, - "grad_norm": 3.630332182090137, - "learning_rate": 1.2340934109611664e-06, - "loss": 0.9667, - "num_input_tokens_seen": 112420725, - "step": 5292 - }, - { - "epoch": 0.6364456201527084, - "flos": 
18474214961400.0, - "grad_norm": 8.225142513072436, - "learning_rate": 1.2333738802215798e-06, - "loss": 0.8925, - "num_input_tokens_seen": 112440665, - "step": 5293 - }, - { - "epoch": 0.6365658630433476, - "flos": 14724463306320.0, - "grad_norm": 3.0428938458568218, - "learning_rate": 1.2326544657785668e-06, - "loss": 1.0202, - "num_input_tokens_seen": 112460075, - "step": 5294 - }, - { - "epoch": 0.6366861059339867, - "flos": 15222849731400.0, - "grad_norm": 6.244926224235366, - "learning_rate": 1.2319351677412608e-06, - "loss": 0.9721, - "num_input_tokens_seen": 112476840, - "step": 5295 - }, - { - "epoch": 0.6368063488246257, - "flos": 15820632464760.0, - "grad_norm": 4.622483058339293, - "learning_rate": 1.2312159862187796e-06, - "loss": 0.9722, - "num_input_tokens_seen": 112494970, - "step": 5296 - }, - { - "epoch": 0.6369265917152649, - "flos": 15932078766120.0, - "grad_norm": 5.52897105710614, - "learning_rate": 1.2304969213202217e-06, - "loss": 0.9836, - "num_input_tokens_seen": 112515950, - "step": 5297 - }, - { - "epoch": 0.6370468346059039, - "flos": 17583062716200.0, - "grad_norm": 6.404971706974097, - "learning_rate": 1.2297779731546692e-06, - "loss": 1.0058, - "num_input_tokens_seen": 112534765, - "step": 5298 - }, - { - "epoch": 0.637167077496543, - "flos": 17998033053840.0, - "grad_norm": 4.179990269042739, - "learning_rate": 1.2290591418311853e-06, - "loss": 1.0043, - "num_input_tokens_seen": 112551880, - "step": 5299 - }, - { - "epoch": 0.637287320387182, - "flos": 19704954785520.0, - "grad_norm": 3.2599768458755243, - "learning_rate": 1.2283404274588172e-06, - "loss": 0.9394, - "num_input_tokens_seen": 112570545, - "step": 5300 - }, - { - "epoch": 0.6374075632778212, - "flos": 45224935411440.0, - "grad_norm": 0.7486914504504509, - "learning_rate": 1.227621830146592e-06, - "loss": 0.7781, - "num_input_tokens_seen": 112625630, - "step": 5301 - }, - { - "epoch": 0.6375278061684603, - "flos": 18186824453760.0, - "grad_norm": 4.185257724132079, - "learning_rate": 1.2269033500035217e-06, - "loss": 1.019, - "num_input_tokens_seen": 112645485, - "step": 5302 - }, - { - "epoch": 0.6376480490590993, - "flos": 18264445506360.0, - "grad_norm": 4.932976497598027, - "learning_rate": 1.2261849871385988e-06, - "loss": 0.9664, - "num_input_tokens_seen": 112666310, - "step": 5303 - }, - { - "epoch": 0.6377682919497385, - "flos": 22483664187360.0, - "grad_norm": 3.865005895059884, - "learning_rate": 1.2254667416607972e-06, - "loss": 0.8366, - "num_input_tokens_seen": 112687630, - "step": 5304 - }, - { - "epoch": 0.6378885348403776, - "flos": 16376336441640.0, - "grad_norm": 3.073099749439859, - "learning_rate": 1.2247486136790756e-06, - "loss": 1.061, - "num_input_tokens_seen": 112706830, - "step": 5305 - }, - { - "epoch": 0.6380087777310166, - "flos": 13255678520760.0, - "grad_norm": 4.722992461473732, - "learning_rate": 1.2240306033023726e-06, - "loss": 1.0221, - "num_input_tokens_seen": 112724375, - "step": 5306 - }, - { - "epoch": 0.6381290206216558, - "flos": 16586504496960.0, - "grad_norm": 3.4482366561305113, - "learning_rate": 1.223312710639611e-06, - "loss": 0.9479, - "num_input_tokens_seen": 112742815, - "step": 5307 - }, - { - "epoch": 0.6382492635122948, - "flos": 13387612292280.0, - "grad_norm": 4.134747865086445, - "learning_rate": 1.2225949357996928e-06, - "loss": 1.0737, - "num_input_tokens_seen": 112760660, - "step": 5308 - }, - { - "epoch": 0.6383695064029339, - "flos": 19811617883520.0, - "grad_norm": 2.2930821873225917, - "learning_rate": 1.221877278891505e-06, - 
"loss": 1.0315, - "num_input_tokens_seen": 112779635, - "step": 5309 - }, - { - "epoch": 0.638489749293573, - "flos": 18788501205240.0, - "grad_norm": 4.961972394516652, - "learning_rate": 1.221159740023915e-06, - "loss": 0.9239, - "num_input_tokens_seen": 112799185, - "step": 5310 - }, - { - "epoch": 0.6386099921842121, - "flos": 17058516432360.0, - "grad_norm": 5.919584853075735, - "learning_rate": 1.2204423193057735e-06, - "loss": 0.9524, - "num_input_tokens_seen": 112817735, - "step": 5311 - }, - { - "epoch": 0.6387302350748512, - "flos": 51373023466560.0, - "grad_norm": 0.9546355175594274, - "learning_rate": 1.2197250168459122e-06, - "loss": 0.9197, - "num_input_tokens_seen": 112873855, - "step": 5312 - }, - { - "epoch": 0.6388504779654903, - "flos": 10264777400640.0, - "grad_norm": 4.437015345796429, - "learning_rate": 1.2190078327531454e-06, - "loss": 0.9665, - "num_input_tokens_seen": 112889820, - "step": 5313 - }, - { - "epoch": 0.6389707208561294, - "flos": 15878256632160.0, - "grad_norm": 3.1064479219868795, - "learning_rate": 1.2182907671362697e-06, - "loss": 0.9542, - "num_input_tokens_seen": 112910235, - "step": 5314 - }, - { - "epoch": 0.6390909637467684, - "flos": 13780194143040.0, - "grad_norm": 3.3069001854712705, - "learning_rate": 1.2175738201040626e-06, - "loss": 1.0034, - "num_input_tokens_seen": 112926995, - "step": 5315 - }, - { - "epoch": 0.6392112066374076, - "flos": 17131262296920.0, - "grad_norm": 3.5216874811436276, - "learning_rate": 1.2168569917652855e-06, - "loss": 1.0154, - "num_input_tokens_seen": 112946570, - "step": 5316 - }, - { - "epoch": 0.6393314495280467, - "flos": 19075370466360.0, - "grad_norm": 4.144420798481112, - "learning_rate": 1.2161402822286797e-06, - "loss": 0.8698, - "num_input_tokens_seen": 112966975, - "step": 5317 - }, - { - "epoch": 0.6394516924186857, - "flos": 14378712753840.0, - "grad_norm": 7.130667055661161, - "learning_rate": 1.2154236916029703e-06, - "loss": 1.0058, - "num_input_tokens_seen": 112984670, - "step": 5318 - }, - { - "epoch": 0.6395719353093249, - "flos": 13019810530080.0, - "grad_norm": 7.677489919950873, - "learning_rate": 1.2147072199968627e-06, - "loss": 0.9429, - "num_input_tokens_seen": 113003025, - "step": 5319 - }, - { - "epoch": 0.6396921781999639, - "flos": 12443067174960.0, - "grad_norm": 4.032177722253558, - "learning_rate": 1.2139908675190454e-06, - "loss": 0.9405, - "num_input_tokens_seen": 113021955, - "step": 5320 - }, - { - "epoch": 0.639812421090603, - "flos": 15091345221720.0, - "grad_norm": 4.919231177015856, - "learning_rate": 1.2132746342781883e-06, - "loss": 0.9795, - "num_input_tokens_seen": 113042835, - "step": 5321 - }, - { - "epoch": 0.6399326639812422, - "flos": 7853992407240.0, - "grad_norm": 4.049042463724835, - "learning_rate": 1.2125585203829442e-06, - "loss": 1.0091, - "num_input_tokens_seen": 113058195, - "step": 5322 - }, - { - "epoch": 0.6400529068718812, - "flos": 17002394681400.0, - "grad_norm": 3.741907415878804, - "learning_rate": 1.211842525941946e-06, - "loss": 0.961, - "num_input_tokens_seen": 113077710, - "step": 5323 - }, - { - "epoch": 0.6401731497625203, - "flos": 31453945284480.0, - "grad_norm": 3.5779826706476765, - "learning_rate": 1.2111266510638105e-06, - "loss": 1.031, - "num_input_tokens_seen": 113100355, - "step": 5324 - }, - { - "epoch": 0.6402933926531594, - "flos": 14667299062320.0, - "grad_norm": 4.805338894486958, - "learning_rate": 1.2104108958571346e-06, - "loss": 1.0391, - "num_input_tokens_seen": 113118345, - "step": 5325 - }, - { - "epoch": 
0.6404136355437985, - "flos": 17687272889400.0, - "grad_norm": 2.921483141076003, - "learning_rate": 1.2096952604304975e-06, - "loss": 0.9796, - "num_input_tokens_seen": 113138495, - "step": 5326 - }, - { - "epoch": 0.6405338784344375, - "flos": 28910398657440.0, - "grad_norm": 3.0869015503641553, - "learning_rate": 1.2089797448924616e-06, - "loss": 0.9195, - "num_input_tokens_seen": 113162090, - "step": 5327 - }, - { - "epoch": 0.6406541213250767, - "flos": 14828826787320.0, - "grad_norm": 5.66335933135234, - "learning_rate": 1.2082643493515692e-06, - "loss": 0.882, - "num_input_tokens_seen": 113180130, - "step": 5328 - }, - { - "epoch": 0.6407743642157158, - "flos": 16561049854080.0, - "grad_norm": 3.0563290763251474, - "learning_rate": 1.207549073916346e-06, - "loss": 1.0496, - "num_input_tokens_seen": 113200785, - "step": 5329 - }, - { - "epoch": 0.6408946071063548, - "flos": 10607829735840.0, - "grad_norm": 4.293696889312859, - "learning_rate": 1.2068339186952976e-06, - "loss": 1.01, - "num_input_tokens_seen": 113218045, - "step": 5330 - }, - { - "epoch": 0.6410148499969939, - "flos": 16009393203120.0, - "grad_norm": 5.3794811996980965, - "learning_rate": 1.2061188837969136e-06, - "loss": 0.9518, - "num_input_tokens_seen": 113237375, - "step": 5331 - }, - { - "epoch": 0.641135092887633, - "flos": 8745972514560.0, - "grad_norm": 5.197441043638649, - "learning_rate": 1.2054039693296631e-06, - "loss": 1.0562, - "num_input_tokens_seen": 113255090, - "step": 5332 - }, - { - "epoch": 0.6412553357782721, - "flos": 15721021525560.0, - "grad_norm": 3.8377764425206053, - "learning_rate": 1.2046891754019992e-06, - "loss": 1.0433, - "num_input_tokens_seen": 113275420, - "step": 5333 - }, - { - "epoch": 0.6413755786689112, - "flos": 11237414055000.0, - "grad_norm": 4.694476619713025, - "learning_rate": 1.2039745021223548e-06, - "loss": 1.0444, - "num_input_tokens_seen": 113292560, - "step": 5334 - }, - { - "epoch": 0.6414958215595503, - "flos": 48723636055560.0, - "grad_norm": 0.9611004947034361, - "learning_rate": 1.2032599495991456e-06, - "loss": 0.8497, - "num_input_tokens_seen": 113357020, - "step": 5335 - }, - { - "epoch": 0.6416160644501894, - "flos": 31506050371080.0, - "grad_norm": 10.625487911016979, - "learning_rate": 1.2025455179407685e-06, - "loss": 0.9167, - "num_input_tokens_seen": 113377900, - "step": 5336 - }, - { - "epoch": 0.6417363073408284, - "flos": 14881974366960.0, - "grad_norm": 6.513948586232626, - "learning_rate": 1.2018312072556022e-06, - "loss": 0.9605, - "num_input_tokens_seen": 113396120, - "step": 5337 - }, - { - "epoch": 0.6418565502314676, - "flos": 15956521577520.0, - "grad_norm": 6.31497234613279, - "learning_rate": 1.2011170176520077e-06, - "loss": 0.9697, - "num_input_tokens_seen": 113416755, - "step": 5338 - }, - { - "epoch": 0.6419767931221066, - "flos": 17818164167880.0, - "grad_norm": 3.697049722549181, - "learning_rate": 1.2004029492383256e-06, - "loss": 1.0337, - "num_input_tokens_seen": 113437815, - "step": 5339 - }, - { - "epoch": 0.6420970360127457, - "flos": 13806691278960.0, - "grad_norm": 3.4511450705488507, - "learning_rate": 1.1996890021228814e-06, - "loss": 0.9642, - "num_input_tokens_seen": 113454310, - "step": 5340 - }, - { - "epoch": 0.6422172789033849, - "flos": 28857588354960.0, - "grad_norm": 3.5839718685919393, - "learning_rate": 1.1989751764139785e-06, - "loss": 0.922, - "num_input_tokens_seen": 113477680, - "step": 5341 - }, - { - "epoch": 0.6423375217940239, - "flos": 19706457201960.0, - "grad_norm": 15.299830337281827, - 
"learning_rate": 1.1982614722199044e-06, - "loss": 1.0419, - "num_input_tokens_seen": 113498575, - "step": 5342 - }, - { - "epoch": 0.642457764684663, - "flos": 13020147807240.0, - "grad_norm": 16.61540874794623, - "learning_rate": 1.1975478896489276e-06, - "loss": 1.0049, - "num_input_tokens_seen": 113516130, - "step": 5343 - }, - { - "epoch": 0.6425780075753021, - "flos": 14012137454040.0, - "grad_norm": 3.823259773067794, - "learning_rate": 1.1968344288092981e-06, - "loss": 0.9849, - "num_input_tokens_seen": 113532430, - "step": 5344 - }, - { - "epoch": 0.6426982504659412, - "flos": 14593572027840.0, - "grad_norm": 3.372756636280727, - "learning_rate": 1.1961210898092468e-06, - "loss": 0.8713, - "num_input_tokens_seen": 113551100, - "step": 5345 - }, - { - "epoch": 0.6428184933565803, - "flos": 12647532180120.0, - "grad_norm": 9.306538356927609, - "learning_rate": 1.1954078727569874e-06, - "loss": 1.02, - "num_input_tokens_seen": 113568120, - "step": 5346 - }, - { - "epoch": 0.6429387362472194, - "flos": 15958085317080.0, - "grad_norm": 3.1887653307693866, - "learning_rate": 1.1946947777607141e-06, - "loss": 1.0082, - "num_input_tokens_seen": 113588975, - "step": 5347 - }, - { - "epoch": 0.6430589791378585, - "flos": 17607505527600.0, - "grad_norm": 4.092318218950155, - "learning_rate": 1.1939818049286024e-06, - "loss": 1.0245, - "num_input_tokens_seen": 113606855, - "step": 5348 - }, - { - "epoch": 0.6431792220284975, - "flos": 17714720533680.0, - "grad_norm": 3.0929607579146556, - "learning_rate": 1.1932689543688101e-06, - "loss": 0.9665, - "num_input_tokens_seen": 113627680, - "step": 5349 - }, - { - "epoch": 0.6432994649191367, - "flos": 14933987468880.0, - "grad_norm": 2.7805949093268096, - "learning_rate": 1.1925562261894756e-06, - "loss": 0.9505, - "num_input_tokens_seen": 113646480, - "step": 5350 - }, - { - "epoch": 0.6434197078097758, - "flos": 22014657084840.0, - "grad_norm": 3.9440483258519126, - "learning_rate": 1.1918436204987207e-06, - "loss": 1.0096, - "num_input_tokens_seen": 113668060, - "step": 5351 - }, - { - "epoch": 0.6435399507004148, - "flos": 10607707089600.0, - "grad_norm": 3.7107232925429514, - "learning_rate": 1.191131137404645e-06, - "loss": 1.0343, - "num_input_tokens_seen": 113684520, - "step": 5352 - }, - { - "epoch": 0.643660193591054, - "flos": 14121744061800.0, - "grad_norm": 21.434711063574106, - "learning_rate": 1.190418777015333e-06, - "loss": 0.9935, - "num_input_tokens_seen": 113703150, - "step": 5353 - }, - { - "epoch": 0.643780436481693, - "flos": 17162849251800.0, - "grad_norm": 2.778323830868818, - "learning_rate": 1.1897065394388487e-06, - "loss": 0.9674, - "num_input_tokens_seen": 113723310, - "step": 5354 - }, - { - "epoch": 0.6439006793723321, - "flos": 16455122633520.0, - "grad_norm": 3.6411260824859624, - "learning_rate": 1.1889944247832385e-06, - "loss": 0.9885, - "num_input_tokens_seen": 113743270, - "step": 5355 - }, - { - "epoch": 0.6440209222629713, - "flos": 16791552071760.0, - "grad_norm": 3.133529923406475, - "learning_rate": 1.1882824331565283e-06, - "loss": 0.9279, - "num_input_tokens_seen": 113762450, - "step": 5356 - }, - { - "epoch": 0.6441411651536103, - "flos": 11708996728560.0, - "grad_norm": 6.189061439858846, - "learning_rate": 1.1875705646667287e-06, - "loss": 1.1101, - "num_input_tokens_seen": 113780060, - "step": 5357 - }, - { - "epoch": 0.6442614080442494, - "flos": 18080897233200.0, - "grad_norm": 5.338302601989715, - "learning_rate": 1.1868588194218282e-06, - "loss": 0.9722, - "num_input_tokens_seen": 113800160, - 
"step": 5358 - }, - { - "epoch": 0.6443816509348885, - "flos": 20152983832920.0, - "grad_norm": 2.800729152562015, - "learning_rate": 1.1861471975297979e-06, - "loss": 0.9592, - "num_input_tokens_seen": 113821575, - "step": 5359 - }, - { - "epoch": 0.6445018938255276, - "flos": 26187657698760.0, - "grad_norm": 3.8936564573560104, - "learning_rate": 1.185435699098591e-06, - "loss": 0.9406, - "num_input_tokens_seen": 113847490, - "step": 5360 - }, - { - "epoch": 0.6446221367161666, - "flos": 10240365250800.0, - "grad_norm": 6.758738477473793, - "learning_rate": 1.1847243242361403e-06, - "loss": 0.9904, - "num_input_tokens_seen": 113865800, - "step": 5361 - }, - { - "epoch": 0.6447423796068057, - "flos": 17504951078640.0, - "grad_norm": 5.116829103921232, - "learning_rate": 1.1840130730503624e-06, - "loss": 1.0058, - "num_input_tokens_seen": 113886800, - "step": 5362 - }, - { - "epoch": 0.6448626224974449, - "flos": 17819697245880.0, - "grad_norm": 2.95533451801015, - "learning_rate": 1.1833019456491518e-06, - "loss": 0.9696, - "num_input_tokens_seen": 113908050, - "step": 5363 - }, - { - "epoch": 0.6449828653880839, - "flos": 16010895619560.0, - "grad_norm": 3.4458426761769614, - "learning_rate": 1.1825909421403871e-06, - "loss": 0.9814, - "num_input_tokens_seen": 113926865, - "step": 5364 - }, - { - "epoch": 0.645103108278723, - "flos": 18284595699360.0, - "grad_norm": 11.181808302509147, - "learning_rate": 1.181880062631926e-06, - "loss": 0.9838, - "num_input_tokens_seen": 113945920, - "step": 5365 - }, - { - "epoch": 0.6452233511693621, - "flos": 19546033293120.0, - "grad_norm": 2.958863671991522, - "learning_rate": 1.1811693072316093e-06, - "loss": 1.078, - "num_input_tokens_seen": 113963320, - "step": 5366 - }, - { - "epoch": 0.6453435940600012, - "flos": 13623418959840.0, - "grad_norm": 4.828122234652323, - "learning_rate": 1.1804586760472574e-06, - "loss": 1.0504, - "num_input_tokens_seen": 113979505, - "step": 5367 - }, - { - "epoch": 0.6454638369506402, - "flos": 18315477438360.0, - "grad_norm": 10.885929272033815, - "learning_rate": 1.1797481691866736e-06, - "loss": 1.0073, - "num_input_tokens_seen": 113996450, - "step": 5368 - }, - { - "epoch": 0.6455840798412794, - "flos": 14902093898400.0, - "grad_norm": 3.5121604293728996, - "learning_rate": 1.1790377867576393e-06, - "loss": 1.056, - "num_input_tokens_seen": 114013920, - "step": 5369 - }, - { - "epoch": 0.6457043227319185, - "flos": 18552755860800.0, - "grad_norm": 3.1908157157129313, - "learning_rate": 1.1783275288679203e-06, - "loss": 0.983, - "num_input_tokens_seen": 114030805, - "step": 5370 - }, - { - "epoch": 0.6458245656225575, - "flos": 43211699441520.0, - "grad_norm": 0.9323328673282351, - "learning_rate": 1.177617395625262e-06, - "loss": 0.8833, - "num_input_tokens_seen": 114088500, - "step": 5371 - }, - { - "epoch": 0.6459448085131967, - "flos": 16403477470320.0, - "grad_norm": 3.1452359943967987, - "learning_rate": 1.1769073871373908e-06, - "loss": 0.9825, - "num_input_tokens_seen": 114108425, - "step": 5372 - }, - { - "epoch": 0.6460650514038357, - "flos": 16058953380240.0, - "grad_norm": 3.6406456942645478, - "learning_rate": 1.176197503512015e-06, - "loss": 1.0606, - "num_input_tokens_seen": 114127860, - "step": 5373 - }, - { - "epoch": 0.6461852942944748, - "flos": 14383863895920.0, - "grad_norm": 5.217944785490148, - "learning_rate": 1.1754877448568223e-06, - "loss": 1.03, - "num_input_tokens_seen": 114147035, - "step": 5374 - }, - { - "epoch": 0.646305537185114, - "flos": 16611928478280.0, - "grad_norm": 
16.196816138591675, - "learning_rate": 1.1747781112794837e-06, - "loss": 1.125, - "num_input_tokens_seen": 114163250, - "step": 5375 - }, - { - "epoch": 0.646425780075753, - "flos": 17265955608840.0, - "grad_norm": 3.3028030974887455, - "learning_rate": 1.1740686028876487e-06, - "loss": 1.051, - "num_input_tokens_seen": 114181835, - "step": 5376 - }, - { - "epoch": 0.6465460229663921, - "flos": 14777150962560.0, - "grad_norm": 11.776414764356273, - "learning_rate": 1.1733592197889507e-06, - "loss": 0.9678, - "num_input_tokens_seen": 114198465, - "step": 5377 - }, - { - "epoch": 0.6466662658570312, - "flos": 16242256360920.0, - "grad_norm": 4.556797173693134, - "learning_rate": 1.1726499620910014e-06, - "loss": 0.9511, - "num_input_tokens_seen": 114218465, - "step": 5378 - }, - { - "epoch": 0.6467865087476703, - "flos": 10817629852440.0, - "grad_norm": 3.6952376569786054, - "learning_rate": 1.1719408299013955e-06, - "loss": 0.9805, - "num_input_tokens_seen": 114236910, - "step": 5379 - }, - { - "epoch": 0.6469067516383094, - "flos": 13885784086440.0, - "grad_norm": 4.57353214417519, - "learning_rate": 1.1712318233277067e-06, - "loss": 0.9707, - "num_input_tokens_seen": 114255650, - "step": 5380 - }, - { - "epoch": 0.6470269945289485, - "flos": 46606870014840.0, - "grad_norm": 0.7335422745622411, - "learning_rate": 1.1705229424774916e-06, - "loss": 0.8202, - "num_input_tokens_seen": 114309640, - "step": 5381 - }, - { - "epoch": 0.6471472374195876, - "flos": 21879227895480.0, - "grad_norm": 3.126080898270507, - "learning_rate": 1.1698141874582867e-06, - "loss": 0.8608, - "num_input_tokens_seen": 114330405, - "step": 5382 - }, - { - "epoch": 0.6472674803102266, - "flos": 14567166876600.0, - "grad_norm": 3.218635186675369, - "learning_rate": 1.169105558377609e-06, - "loss": 0.9406, - "num_input_tokens_seen": 114350215, - "step": 5383 - }, - { - "epoch": 0.6473877232008658, - "flos": 17580732437640.0, - "grad_norm": 3.540855509298838, - "learning_rate": 1.1683970553429587e-06, - "loss": 1.0048, - "num_input_tokens_seen": 114371390, - "step": 5384 - }, - { - "epoch": 0.6475079660915048, - "flos": 11234807822400.0, - "grad_norm": 3.1524700531834178, - "learning_rate": 1.1676886784618128e-06, - "loss": 1.0485, - "num_input_tokens_seen": 114387775, - "step": 5385 - }, - { - "epoch": 0.6476282089821439, - "flos": 12309998925720.0, - "grad_norm": 32.3176090768688, - "learning_rate": 1.1669804278416332e-06, - "loss": 1.0435, - "num_input_tokens_seen": 114402220, - "step": 5386 - }, - { - "epoch": 0.6477484518727831, - "flos": 14331697486200.0, - "grad_norm": 4.73756809638611, - "learning_rate": 1.1662723035898602e-06, - "loss": 0.9383, - "num_input_tokens_seen": 114421700, - "step": 5387 - }, - { - "epoch": 0.6478686947634221, - "flos": 18080437309800.0, - "grad_norm": 4.18616202315807, - "learning_rate": 1.165564305813915e-06, - "loss": 1.0485, - "num_input_tokens_seen": 114440420, - "step": 5388 - }, - { - "epoch": 0.6479889376540612, - "flos": 14252144755320.0, - "grad_norm": 11.417141498427581, - "learning_rate": 1.1648564346212019e-06, - "loss": 1.0375, - "num_input_tokens_seen": 114459260, - "step": 5389 - }, - { - "epoch": 0.6481091805447003, - "flos": 19048658699520.0, - "grad_norm": 4.410876935758606, - "learning_rate": 1.164148690119104e-06, - "loss": 0.9901, - "num_input_tokens_seen": 114480260, - "step": 5390 - }, - { - "epoch": 0.6482294234353394, - "flos": 17032509881400.0, - "grad_norm": 12.141588955859397, - "learning_rate": 1.163441072414985e-06, - "loss": 0.9688, - 
"num_input_tokens_seen": 114500185, - "step": 5391 - }, - { - "epoch": 0.6483496663259785, - "flos": 18654513109200.0, - "grad_norm": 2.6045142488578676, - "learning_rate": 1.16273358161619e-06, - "loss": 0.9233, - "num_input_tokens_seen": 114520240, - "step": 5392 - }, - { - "epoch": 0.6484699092166175, - "flos": 14854802676720.0, - "grad_norm": 4.891734037092413, - "learning_rate": 1.1620262178300446e-06, - "loss": 1.072, - "num_input_tokens_seen": 114538575, - "step": 5393 - }, - { - "epoch": 0.6485901521072567, - "flos": 23587345428000.0, - "grad_norm": 3.1598790522257056, - "learning_rate": 1.1613189811638563e-06, - "loss": 0.9746, - "num_input_tokens_seen": 114560020, - "step": 5394 - }, - { - "epoch": 0.6487103949978957, - "flos": 15826580807400.0, - "grad_norm": 3.1978238037410613, - "learning_rate": 1.1606118717249117e-06, - "loss": 0.9991, - "num_input_tokens_seen": 114579840, - "step": 5395 - }, - { - "epoch": 0.6488306378885348, - "flos": 16298224804080.0, - "grad_norm": 3.3901001132822275, - "learning_rate": 1.1599048896204787e-06, - "loss": 0.8905, - "num_input_tokens_seen": 114599440, - "step": 5396 - }, - { - "epoch": 0.648950880779174, - "flos": 14462466118440.0, - "grad_norm": 4.4196122381737775, - "learning_rate": 1.1591980349578061e-06, - "loss": 1.0288, - "num_input_tokens_seen": 114617830, - "step": 5397 - }, - { - "epoch": 0.649071123669813, - "flos": 42895236226920.0, - "grad_norm": 0.9783643045889893, - "learning_rate": 1.158491307844123e-06, - "loss": 0.8047, - "num_input_tokens_seen": 114677470, - "step": 5398 - }, - { - "epoch": 0.6491913665604521, - "flos": 14512762173000.0, - "grad_norm": 4.308665907005713, - "learning_rate": 1.1577847083866387e-06, - "loss": 1.0745, - "num_input_tokens_seen": 114696225, - "step": 5399 - }, - { - "epoch": 0.6493116094510912, - "flos": 11997950975760.0, - "grad_norm": 5.895969061305784, - "learning_rate": 1.1570782366925453e-06, - "loss": 0.9458, - "num_input_tokens_seen": 114714460, - "step": 5400 - }, - { - "epoch": 0.6494318523417303, - "flos": 13331275910400.0, - "grad_norm": 2.624647843490179, - "learning_rate": 1.1563718928690132e-06, - "loss": 0.9864, - "num_input_tokens_seen": 114731615, - "step": 5401 - }, - { - "epoch": 0.6495520952323693, - "flos": 13460235510600.0, - "grad_norm": 3.239807873433654, - "learning_rate": 1.1556656770231942e-06, - "loss": 0.9488, - "num_input_tokens_seen": 114747530, - "step": 5402 - }, - { - "epoch": 0.6496723381230085, - "flos": 16164911262360.0, - "grad_norm": 2.7428400290329953, - "learning_rate": 1.1549595892622207e-06, - "loss": 0.9848, - "num_input_tokens_seen": 114766020, - "step": 5403 - }, - { - "epoch": 0.6497925810136476, - "flos": 44485161914880.0, - "grad_norm": 0.9248595981751337, - "learning_rate": 1.1542536296932047e-06, - "loss": 0.8615, - "num_input_tokens_seen": 114829275, - "step": 5404 - }, - { - "epoch": 0.6499128239042866, - "flos": 14304740426880.0, - "grad_norm": 3.9132001159019105, - "learning_rate": 1.1535477984232414e-06, - "loss": 0.9195, - "num_input_tokens_seen": 114848870, - "step": 5405 - }, - { - "epoch": 0.6500330667949258, - "flos": 17399207827440.0, - "grad_norm": 3.79811044694913, - "learning_rate": 1.152842095559404e-06, - "loss": 0.9906, - "num_input_tokens_seen": 114869250, - "step": 5406 - }, - { - "epoch": 0.6501533096855648, - "flos": 18127759193040.0, - "grad_norm": 6.2323294512890035, - "learning_rate": 1.1521365212087474e-06, - "loss": 0.9981, - "num_input_tokens_seen": 114888955, - "step": 5407 - }, - { - "epoch": 0.6502735525762039, - 
"flos": 32028971592240.0, - "grad_norm": 2.9906570863347794, - "learning_rate": 1.1514310754783062e-06, - "loss": 0.9192, - "num_input_tokens_seen": 114911625, - "step": 5408 - }, - { - "epoch": 0.6503937954668431, - "flos": 20414919697680.0, - "grad_norm": 2.543142661433886, - "learning_rate": 1.1507257584750964e-06, - "loss": 0.9425, - "num_input_tokens_seen": 114931525, - "step": 5409 - }, - { - "epoch": 0.6505140383574821, - "flos": 14855078630760.0, - "grad_norm": 3.3690037619393403, - "learning_rate": 1.150020570306113e-06, - "loss": 0.9868, - "num_input_tokens_seen": 114950385, - "step": 5410 - }, - { - "epoch": 0.6506342812481212, - "flos": 14619977179080.0, - "grad_norm": 4.311477097535512, - "learning_rate": 1.1493155110783338e-06, - "loss": 0.969, - "num_input_tokens_seen": 114968630, - "step": 5411 - }, - { - "epoch": 0.6507545241387603, - "flos": 21541633317960.0, - "grad_norm": 3.3849855275598375, - "learning_rate": 1.1486105808987155e-06, - "loss": 0.9327, - "num_input_tokens_seen": 114989840, - "step": 5412 - }, - { - "epoch": 0.6508747670293994, - "flos": 12128198361480.0, - "grad_norm": 5.072519442171803, - "learning_rate": 1.1479057798741947e-06, - "loss": 1.0355, - "num_input_tokens_seen": 115007615, - "step": 5413 - }, - { - "epoch": 0.6509950099200384, - "flos": 49094233567800.0, - "grad_norm": 0.8138265784073964, - "learning_rate": 1.14720110811169e-06, - "loss": 0.8095, - "num_input_tokens_seen": 115064565, - "step": 5414 - }, - { - "epoch": 0.6511152528106776, - "flos": 15879391109880.0, - "grad_norm": 3.6600115084858045, - "learning_rate": 1.146496565718098e-06, - "loss": 0.9914, - "num_input_tokens_seen": 115084855, - "step": 5415 - }, - { - "epoch": 0.6512354957013167, - "flos": 14567442830640.0, - "grad_norm": 3.227986831883985, - "learning_rate": 1.1457921528002996e-06, - "loss": 0.982, - "num_input_tokens_seen": 115103010, - "step": 5416 - }, - { - "epoch": 0.6513557385919557, - "flos": 23057954617680.0, - "grad_norm": 14.868758230379447, - "learning_rate": 1.1450878694651522e-06, - "loss": 0.95, - "num_input_tokens_seen": 115123295, - "step": 5417 - }, - { - "epoch": 0.6514759814825949, - "flos": 8509613938920.0, - "grad_norm": 4.018500460030536, - "learning_rate": 1.1443837158194954e-06, - "loss": 0.8455, - "num_input_tokens_seen": 115138160, - "step": 5418 - }, - { - "epoch": 0.651596224373234, - "flos": 16007952109800.0, - "grad_norm": 3.7792718930860367, - "learning_rate": 1.1436796919701484e-06, - "loss": 0.9678, - "num_input_tokens_seen": 115156595, - "step": 5419 - }, - { - "epoch": 0.651716467263873, - "flos": 19811464575720.0, - "grad_norm": 5.350821116529103, - "learning_rate": 1.1429757980239115e-06, - "loss": 0.8435, - "num_input_tokens_seen": 115176740, - "step": 5420 - }, - { - "epoch": 0.6518367101545122, - "flos": 17294169792120.0, - "grad_norm": 9.302032527521414, - "learning_rate": 1.1422720340875636e-06, - "loss": 1.0242, - "num_input_tokens_seen": 115195210, - "step": 5421 - }, - { - "epoch": 0.6519569530451512, - "flos": 14200836869280.0, - "grad_norm": 6.643232771512236, - "learning_rate": 1.1415684002678671e-06, - "loss": 1.0146, - "num_input_tokens_seen": 115213690, - "step": 5422 - }, - { - "epoch": 0.6520771959357903, - "flos": 15324300364200.0, - "grad_norm": 4.042343843236031, - "learning_rate": 1.1408648966715617e-06, - "loss": 1.0018, - "num_input_tokens_seen": 115230930, - "step": 5423 - }, - { - "epoch": 0.6521974388264293, - "flos": 16140499112520.0, - "grad_norm": 4.338291168613729, - "learning_rate": 1.1401615234053683e-06, 
- "loss": 0.9495, - "num_input_tokens_seen": 115249470, - "step": 5424 - }, - { - "epoch": 0.6523176817170685, - "flos": 16350299229120.0, - "grad_norm": 3.7709108348209353, - "learning_rate": 1.1394582805759885e-06, - "loss": 0.9767, - "num_input_tokens_seen": 115268470, - "step": 5425 - }, - { - "epoch": 0.6524379246077076, - "flos": 15405294188400.0, - "grad_norm": 2.9307979666281003, - "learning_rate": 1.1387551682901022e-06, - "loss": 0.9895, - "num_input_tokens_seen": 115288795, - "step": 5426 - }, - { - "epoch": 0.6525581674983466, - "flos": 13754432884560.0, - "grad_norm": 4.911017950007702, - "learning_rate": 1.138052186654373e-06, - "loss": 0.9333, - "num_input_tokens_seen": 115305985, - "step": 5427 - }, - { - "epoch": 0.6526784103889858, - "flos": 12155032774560.0, - "grad_norm": 5.870816303532603, - "learning_rate": 1.1373493357754417e-06, - "loss": 1.1024, - "num_input_tokens_seen": 115324610, - "step": 5428 - }, - { - "epoch": 0.6527986532796248, - "flos": 13465631945160.0, - "grad_norm": 3.86366620281753, - "learning_rate": 1.1366466157599303e-06, - "loss": 0.9947, - "num_input_tokens_seen": 115343605, - "step": 5429 - }, - { - "epoch": 0.6529188961702639, - "flos": 10050470034720.0, - "grad_norm": 3.701256002713083, - "learning_rate": 1.1359440267144412e-06, - "loss": 0.9769, - "num_input_tokens_seen": 115360780, - "step": 5430 - }, - { - "epoch": 0.653039139060903, - "flos": 26181556048320.0, - "grad_norm": 2.6600103352281375, - "learning_rate": 1.1352415687455556e-06, - "loss": 0.9679, - "num_input_tokens_seen": 115381760, - "step": 5431 - }, - { - "epoch": 0.6531593819515421, - "flos": 18056209129320.0, - "grad_norm": 8.793344876867685, - "learning_rate": 1.1345392419598362e-06, - "loss": 0.859, - "num_input_tokens_seen": 115400360, - "step": 5432 - }, - { - "epoch": 0.6532796248421812, - "flos": 15039117488880.0, - "grad_norm": 4.107586953931279, - "learning_rate": 1.1338370464638263e-06, - "loss": 0.9397, - "num_input_tokens_seen": 115419480, - "step": 5433 - }, - { - "epoch": 0.6533998677328203, - "flos": 12521884028400.0, - "grad_norm": 7.41416900526109, - "learning_rate": 1.1331349823640474e-06, - "loss": 0.858, - "num_input_tokens_seen": 115436630, - "step": 5434 - }, - { - "epoch": 0.6535201106234594, - "flos": 20229010484400.0, - "grad_norm": 5.631823999277489, - "learning_rate": 1.132433049767003e-06, - "loss": 1.0063, - "num_input_tokens_seen": 115454265, - "step": 5435 - }, - { - "epoch": 0.6536403535140984, - "flos": 16769807477640.0, - "grad_norm": 2.570479367297831, - "learning_rate": 1.1317312487791748e-06, - "loss": 1.0345, - "num_input_tokens_seen": 115475635, - "step": 5436 - }, - { - "epoch": 0.6537605964047376, - "flos": 15327213212400.0, - "grad_norm": 3.9563316638105337, - "learning_rate": 1.1310295795070253e-06, - "loss": 0.9406, - "num_input_tokens_seen": 115495295, - "step": 5437 - }, - { - "epoch": 0.6538808392953767, - "flos": 19103615311200.0, - "grad_norm": 3.741037721738934, - "learning_rate": 1.1303280420569982e-06, - "loss": 1.0225, - "num_input_tokens_seen": 115516900, - "step": 5438 - }, - { - "epoch": 0.6540010821860157, - "flos": 21909557726400.0, - "grad_norm": 5.685822291830562, - "learning_rate": 1.1296266365355158e-06, - "loss": 0.9966, - "num_input_tokens_seen": 115540005, - "step": 5439 - }, - { - "epoch": 0.6541213250766549, - "flos": 19156210982760.0, - "grad_norm": 3.883275700961462, - "learning_rate": 1.1289253630489806e-06, - "loss": 0.9412, - "num_input_tokens_seen": 115560775, - "step": 5440 - }, - { - "epoch": 
0.6542415679672939, - "flos": 13597412408880.0, - "grad_norm": 5.796020773903717, - "learning_rate": 1.1282242217037753e-06, - "loss": 0.9671, - "num_input_tokens_seen": 115577995, - "step": 5441 - }, - { - "epoch": 0.654361810857933, - "flos": 34439296662240.0, - "grad_norm": 10.115716516495006, - "learning_rate": 1.127523212606262e-06, - "loss": 0.8512, - "num_input_tokens_seen": 115600540, - "step": 5442 - }, - { - "epoch": 0.6544820537485722, - "flos": 19182033564360.0, - "grad_norm": 3.3385824041316168, - "learning_rate": 1.1268223358627835e-06, - "loss": 0.9484, - "num_input_tokens_seen": 115622750, - "step": 5443 - }, - { - "epoch": 0.6546022966392112, - "flos": 14829072079800.0, - "grad_norm": 8.594054948424693, - "learning_rate": 1.126121591579663e-06, - "loss": 0.9266, - "num_input_tokens_seen": 115641675, - "step": 5444 - }, - { - "epoch": 0.6547225395298503, - "flos": 17740052530320.0, - "grad_norm": 4.016334797509849, - "learning_rate": 1.1254209798632018e-06, - "loss": 0.9133, - "num_input_tokens_seen": 115662415, - "step": 5445 - }, - { - "epoch": 0.6548427824204894, - "flos": 16036350262440.0, - "grad_norm": 7.542109973446588, - "learning_rate": 1.124720500819683e-06, - "loss": 1.0757, - "num_input_tokens_seen": 115680290, - "step": 5446 - }, - { - "epoch": 0.6549630253111285, - "flos": 13072835463480.0, - "grad_norm": 4.800907390976564, - "learning_rate": 1.1240201545553682e-06, - "loss": 1.0484, - "num_input_tokens_seen": 115697810, - "step": 5447 - }, - { - "epoch": 0.6550832682017675, - "flos": 17919522816000.0, - "grad_norm": 5.863804315442722, - "learning_rate": 1.1233199411764987e-06, - "loss": 0.9646, - "num_input_tokens_seen": 115716965, - "step": 5448 - }, - { - "epoch": 0.6552035110924067, - "flos": 16167272202480.0, - "grad_norm": 5.695258581170523, - "learning_rate": 1.1226198607892978e-06, - "loss": 0.9113, - "num_input_tokens_seen": 115737245, - "step": 5449 - }, - { - "epoch": 0.6553237539830458, - "flos": 15485214858000.0, - "grad_norm": 3.6823633916291114, - "learning_rate": 1.1219199134999664e-06, - "loss": 1.0288, - "num_input_tokens_seen": 115755465, - "step": 5450 - }, - { - "epoch": 0.6554439968736848, - "flos": 14829838618800.0, - "grad_norm": 3.8789624289817066, - "learning_rate": 1.1212200994146863e-06, - "loss": 1.0016, - "num_input_tokens_seen": 115772940, - "step": 5451 - }, - { - "epoch": 0.655564239764324, - "flos": 11416731032880.0, - "grad_norm": 5.672251286911154, - "learning_rate": 1.120520418639618e-06, - "loss": 0.9797, - "num_input_tokens_seen": 115791195, - "step": 5452 - }, - { - "epoch": 0.655684482654963, - "flos": 21070173290640.0, - "grad_norm": 5.078962708848725, - "learning_rate": 1.119820871280903e-06, - "loss": 1.0557, - "num_input_tokens_seen": 115811990, - "step": 5453 - }, - { - "epoch": 0.6558047255456021, - "flos": 21227623028160.0, - "grad_norm": 2.913663207814901, - "learning_rate": 1.1191214574446614e-06, - "loss": 0.9371, - "num_input_tokens_seen": 115831955, - "step": 5454 - }, - { - "epoch": 0.6559249684362413, - "flos": 20702739467160.0, - "grad_norm": 2.2590373059924715, - "learning_rate": 1.118422177236995e-06, - "loss": 1.0282, - "num_input_tokens_seen": 115853500, - "step": 5455 - }, - { - "epoch": 0.6560452113268803, - "flos": 14353564726560.0, - "grad_norm": 3.2146874131374843, - "learning_rate": 1.1177230307639835e-06, - "loss": 1.0679, - "num_input_tokens_seen": 115870760, - "step": 5456 - }, - { - "epoch": 0.6561654542175194, - "flos": 17818194829440.0, - "grad_norm": 2.8297038860889274, - "learning_rate": 
1.1170240181316865e-06, - "loss": 1.0089, - "num_input_tokens_seen": 115891925, - "step": 5457 - }, - { - "epoch": 0.6562856971081584, - "flos": 16245169209120.0, - "grad_norm": 4.6330733073062875, - "learning_rate": 1.1163251394461442e-06, - "loss": 1.0212, - "num_input_tokens_seen": 115910125, - "step": 5458 - }, - { - "epoch": 0.6564059399987976, - "flos": 13380866749080.0, - "grad_norm": 3.99059716366564, - "learning_rate": 1.1156263948133746e-06, - "loss": 1.0533, - "num_input_tokens_seen": 115926500, - "step": 5459 - }, - { - "epoch": 0.6565261828894366, - "flos": 18133554227880.0, - "grad_norm": 3.138682813453248, - "learning_rate": 1.1149277843393787e-06, - "loss": 1.0027, - "num_input_tokens_seen": 115947380, - "step": 5460 - }, - { - "epoch": 0.6566464257800757, - "flos": 13964784909240.0, - "grad_norm": 6.734108196027205, - "learning_rate": 1.1142293081301342e-06, - "loss": 0.8556, - "num_input_tokens_seen": 115964980, - "step": 5461 - }, - { - "epoch": 0.6567666686707149, - "flos": 16717733052600.0, - "grad_norm": 2.974314403874564, - "learning_rate": 1.1135309662915995e-06, - "loss": 0.8915, - "num_input_tokens_seen": 115984865, - "step": 5462 - }, - { - "epoch": 0.6568869115613539, - "flos": 23377882588560.0, - "grad_norm": 4.323667358420341, - "learning_rate": 1.112832758929712e-06, - "loss": 0.8241, - "num_input_tokens_seen": 116007195, - "step": 5463 - }, - { - "epoch": 0.657007154451993, - "flos": 13072774140360.0, - "grad_norm": 4.048361015701558, - "learning_rate": 1.11213468615039e-06, - "loss": 0.9699, - "num_input_tokens_seen": 116026345, - "step": 5464 - }, - { - "epoch": 0.6571273973426321, - "flos": 17898330129960.0, - "grad_norm": 2.810717038748586, - "learning_rate": 1.1114367480595292e-06, - "loss": 0.9853, - "num_input_tokens_seen": 116047145, - "step": 5465 - }, - { - "epoch": 0.6572476402332712, - "flos": 12416845993080.0, - "grad_norm": 7.058481068812135, - "learning_rate": 1.1107389447630086e-06, - "loss": 1.0456, - "num_input_tokens_seen": 116065565, - "step": 5466 - }, - { - "epoch": 0.6573678831239103, - "flos": 12045671459280.0, - "grad_norm": 4.798991081380535, - "learning_rate": 1.1100412763666818e-06, - "loss": 1.0103, - "num_input_tokens_seen": 116080545, - "step": 5467 - }, - { - "epoch": 0.6574881260145494, - "flos": 17001566819280.0, - "grad_norm": 3.5264046433814573, - "learning_rate": 1.1093437429763865e-06, - "loss": 1.0248, - "num_input_tokens_seen": 116100530, - "step": 5468 - }, - { - "epoch": 0.6576083689051885, - "flos": 7881072112800.0, - "grad_norm": 8.452974272281418, - "learning_rate": 1.1086463446979361e-06, - "loss": 0.9565, - "num_input_tokens_seen": 116118600, - "step": 5469 - }, - { - "epoch": 0.6577286117958275, - "flos": 15956889516240.0, - "grad_norm": 3.2755363491081293, - "learning_rate": 1.1079490816371277e-06, - "loss": 0.9982, - "num_input_tokens_seen": 116138085, - "step": 5470 - }, - { - "epoch": 0.6578488546864667, - "flos": 15537687883320.0, - "grad_norm": 10.540432783648003, - "learning_rate": 1.1072519538997352e-06, - "loss": 0.9527, - "num_input_tokens_seen": 116156945, - "step": 5471 - }, - { - "epoch": 0.6579690975771058, - "flos": 16738097876520.0, - "grad_norm": 8.214591130804324, - "learning_rate": 1.1065549615915095e-06, - "loss": 1.0422, - "num_input_tokens_seen": 116176495, - "step": 5472 - }, - { - "epoch": 0.6580893404677448, - "flos": 23350925529240.0, - "grad_norm": 5.693657528642445, - "learning_rate": 1.105858104818187e-06, - "loss": 0.9878, - "num_input_tokens_seen": 116197370, - "step": 5473 - }, - 
{ - "epoch": 0.658209583358384, - "flos": 11237015454720.0, - "grad_norm": 13.557110959506062, - "learning_rate": 1.105161383685478e-06, - "loss": 0.969, - "num_input_tokens_seen": 116213475, - "step": 5474 - }, - { - "epoch": 0.658329826249023, - "flos": 44881331168160.0, - "grad_norm": 0.7685785350468484, - "learning_rate": 1.1044647982990771e-06, - "loss": 0.8279, - "num_input_tokens_seen": 116275080, - "step": 5475 - }, - { - "epoch": 0.6584500691396621, - "flos": 22616916405960.0, - "grad_norm": 5.224088941661095, - "learning_rate": 1.1037683487646536e-06, - "loss": 0.8609, - "num_input_tokens_seen": 116295085, - "step": 5476 - }, - { - "epoch": 0.6585703120303013, - "flos": 13046675604720.0, - "grad_norm": 4.01870644932905, - "learning_rate": 1.1030720351878583e-06, - "loss": 0.9917, - "num_input_tokens_seen": 116312925, - "step": 5477 - }, - { - "epoch": 0.6586905549209403, - "flos": 41728135784040.0, - "grad_norm": 0.8045401206743444, - "learning_rate": 1.102375857674323e-06, - "loss": 0.8432, - "num_input_tokens_seen": 116374560, - "step": 5478 - }, - { - "epoch": 0.6588107978115794, - "flos": 16191776337000.0, - "grad_norm": 3.7493658165327215, - "learning_rate": 1.1016798163296561e-06, - "loss": 1.13, - "num_input_tokens_seen": 116393480, - "step": 5479 - }, - { - "epoch": 0.6589310407022185, - "flos": 14671836973200.0, - "grad_norm": 2.6544627740591817, - "learning_rate": 1.1009839112594471e-06, - "loss": 0.8803, - "num_input_tokens_seen": 116411225, - "step": 5480 - }, - { - "epoch": 0.6590512835928576, - "flos": 18238837555680.0, - "grad_norm": 4.375812999935028, - "learning_rate": 1.1002881425692638e-06, - "loss": 0.9475, - "num_input_tokens_seen": 116431375, - "step": 5481 - }, - { - "epoch": 0.6591715264834966, - "flos": 16869142462800.0, - "grad_norm": 5.035612690837048, - "learning_rate": 1.0995925103646532e-06, - "loss": 0.992, - "num_input_tokens_seen": 116449695, - "step": 5482 - }, - { - "epoch": 0.6592917693741358, - "flos": 25528632733920.0, - "grad_norm": 2.8775242214691437, - "learning_rate": 1.0988970147511437e-06, - "loss": 0.8941, - "num_input_tokens_seen": 116471295, - "step": 5483 - }, - { - "epoch": 0.6594120122647749, - "flos": 15195923333640.0, - "grad_norm": 5.698469378102669, - "learning_rate": 1.0982016558342405e-06, - "loss": 1.0253, - "num_input_tokens_seen": 116489985, - "step": 5484 - }, - { - "epoch": 0.6595322551554139, - "flos": 13725973408800.0, - "grad_norm": 3.3281069472991898, - "learning_rate": 1.0975064337194291e-06, - "loss": 0.9388, - "num_input_tokens_seen": 116507750, - "step": 5485 - }, - { - "epoch": 0.6596524980460531, - "flos": 11919072799200.0, - "grad_norm": 3.9672756309981807, - "learning_rate": 1.0968113485121743e-06, - "loss": 0.9269, - "num_input_tokens_seen": 116527060, - "step": 5486 - }, - { - "epoch": 0.6597727409366921, - "flos": 15484693611480.0, - "grad_norm": 2.879875527182206, - "learning_rate": 1.0961164003179185e-06, - "loss": 1.018, - "num_input_tokens_seen": 116545290, - "step": 5487 - }, - { - "epoch": 0.6598929838273312, - "flos": 16873067142480.0, - "grad_norm": 11.847376768876346, - "learning_rate": 1.0954215892420884e-06, - "loss": 1.0533, - "num_input_tokens_seen": 116565710, - "step": 5488 - }, - { - "epoch": 0.6600132267179702, - "flos": 14173818486840.0, - "grad_norm": 5.87212739882134, - "learning_rate": 1.094726915390082e-06, - "loss": 0.9282, - "num_input_tokens_seen": 116583765, - "step": 5489 - }, - { - "epoch": 0.6601334696086094, - "flos": 15875282460840.0, - "grad_norm": 4.631587929694121, - 
"learning_rate": 1.0940323788672836e-06, - "loss": 0.9217, - "num_input_tokens_seen": 116602660, - "step": 5490 - }, - { - "epoch": 0.6602537124992485, - "flos": 18290881319160.0, - "grad_norm": 2.5175766097745633, - "learning_rate": 1.0933379797790522e-06, - "loss": 0.967, - "num_input_tokens_seen": 116621795, - "step": 5491 - }, - { - "epoch": 0.6603739553898875, - "flos": 18395367446400.0, - "grad_norm": 4.8311453942427045, - "learning_rate": 1.0926437182307293e-06, - "loss": 0.9343, - "num_input_tokens_seen": 116640325, - "step": 5492 - }, - { - "epoch": 0.6604941982805267, - "flos": 17476522264440.0, - "grad_norm": 3.874582206599287, - "learning_rate": 1.0919495943276338e-06, - "loss": 0.9981, - "num_input_tokens_seen": 116661065, - "step": 5493 - }, - { - "epoch": 0.6606144411711657, - "flos": 9349580944320.0, - "grad_norm": 6.215608502994699, - "learning_rate": 1.0912556081750611e-06, - "loss": 0.9672, - "num_input_tokens_seen": 116678715, - "step": 5494 - }, - { - "epoch": 0.6607346840618048, - "flos": 17897103667560.0, - "grad_norm": 5.204059322197921, - "learning_rate": 1.0905617598782909e-06, - "loss": 0.9849, - "num_input_tokens_seen": 116698640, - "step": 5495 - }, - { - "epoch": 0.660854926952444, - "flos": 12494436384120.0, - "grad_norm": 3.6754465467634314, - "learning_rate": 1.0898680495425775e-06, - "loss": 1.0344, - "num_input_tokens_seen": 116716650, - "step": 5496 - }, - { - "epoch": 0.660975169843083, - "flos": 11918122290840.0, - "grad_norm": 3.088266927626557, - "learning_rate": 1.0891744772731594e-06, - "loss": 1.0326, - "num_input_tokens_seen": 116734185, - "step": 5497 - }, - { - "epoch": 0.6610954127337221, - "flos": 19129775169960.0, - "grad_norm": 3.2154728770997676, - "learning_rate": 1.088481043175248e-06, - "loss": 0.8793, - "num_input_tokens_seen": 116754475, - "step": 5498 - }, - { - "epoch": 0.6612156556243612, - "flos": 18838459982640.0, - "grad_norm": 2.290096843454154, - "learning_rate": 1.0877877473540368e-06, - "loss": 0.9825, - "num_input_tokens_seen": 116774780, - "step": 5499 - }, - { - "epoch": 0.6613358985150003, - "flos": 14042559269640.0, - "grad_norm": 4.626459252346555, - "learning_rate": 1.0870945899147002e-06, - "loss": 0.9473, - "num_input_tokens_seen": 116791145, - "step": 5500 - }, - { - "epoch": 0.6614561414056394, - "flos": 19101591648240.0, - "grad_norm": 11.078852232829671, - "learning_rate": 1.0864015709623879e-06, - "loss": 0.9773, - "num_input_tokens_seen": 116811735, - "step": 5501 - }, - { - "epoch": 0.6615763842962785, - "flos": 16272586191840.0, - "grad_norm": 4.407926416395627, - "learning_rate": 1.0857086906022313e-06, - "loss": 1.0219, - "num_input_tokens_seen": 116829790, - "step": 5502 - }, - { - "epoch": 0.6616966271869176, - "flos": 17634799864080.0, - "grad_norm": 3.953321007253163, - "learning_rate": 1.0850159489393388e-06, - "loss": 0.9533, - "num_input_tokens_seen": 116848770, - "step": 5503 - }, - { - "epoch": 0.6618168700775566, - "flos": 12181407264240.0, - "grad_norm": 5.112192666066144, - "learning_rate": 1.0843233460787992e-06, - "loss": 1.0438, - "num_input_tokens_seen": 116865705, - "step": 5504 - }, - { - "epoch": 0.6619371129681958, - "flos": 18106811799480.0, - "grad_norm": 3.424274178329835, - "learning_rate": 1.0836308821256805e-06, - "loss": 1.0054, - "num_input_tokens_seen": 116886225, - "step": 5505 - }, - { - "epoch": 0.6620573558588349, - "flos": 12783789231600.0, - "grad_norm": 5.501029871840264, - "learning_rate": 1.0829385571850282e-06, - "loss": 1.0113, - "num_input_tokens_seen": 116902925, - 
"step": 5506 - }, - { - "epoch": 0.6621775987494739, - "flos": 12600700881840.0, - "grad_norm": 25.79067062730727, - "learning_rate": 1.0822463713618679e-06, - "loss": 1.0489, - "num_input_tokens_seen": 116919500, - "step": 5507 - }, - { - "epoch": 0.6622978416401131, - "flos": 12389980918440.0, - "grad_norm": 3.7460159755107476, - "learning_rate": 1.0815543247612034e-06, - "loss": 1.0792, - "num_input_tokens_seen": 116936290, - "step": 5508 - }, - { - "epoch": 0.6624180845307521, - "flos": 15376160158320.0, - "grad_norm": 3.2469757387543075, - "learning_rate": 1.0808624174880168e-06, - "loss": 1.0551, - "num_input_tokens_seen": 116956660, - "step": 5509 - }, - { - "epoch": 0.6625383274213912, - "flos": 16927410522960.0, - "grad_norm": 3.1838622933613787, - "learning_rate": 1.080170649647272e-06, - "loss": 1.0214, - "num_input_tokens_seen": 116976185, - "step": 5510 - }, - { - "epoch": 0.6626585703120303, - "flos": 23724982249680.0, - "grad_norm": 4.0722115272935495, - "learning_rate": 1.0794790213439068e-06, - "loss": 0.8898, - "num_input_tokens_seen": 117002805, - "step": 5511 - }, - { - "epoch": 0.6627788132026694, - "flos": 15690477063720.0, - "grad_norm": 4.427122204179843, - "learning_rate": 1.078787532682843e-06, - "loss": 1.0047, - "num_input_tokens_seen": 117020000, - "step": 5512 - }, - { - "epoch": 0.6628990560933085, - "flos": 25815716625960.0, - "grad_norm": 5.14054838082861, - "learning_rate": 1.0780961837689773e-06, - "loss": 0.9868, - "num_input_tokens_seen": 117039230, - "step": 5513 - }, - { - "epoch": 0.6630192989839476, - "flos": 13123652764560.0, - "grad_norm": 5.996709875205951, - "learning_rate": 1.0774049747071883e-06, - "loss": 0.9281, - "num_input_tokens_seen": 117056830, - "step": 5514 - }, - { - "epoch": 0.6631395418745867, - "flos": 25554148699920.0, - "grad_norm": 3.1104222360004523, - "learning_rate": 1.076713905602332e-06, - "loss": 0.904, - "num_input_tokens_seen": 117077125, - "step": 5515 - }, - { - "epoch": 0.6632597847652257, - "flos": 14226475481520.0, - "grad_norm": 10.638899743119145, - "learning_rate": 1.07602297655924e-06, - "loss": 1.0335, - "num_input_tokens_seen": 117095165, - "step": 5516 - }, - { - "epoch": 0.6633800276558649, - "flos": 15064878747360.0, - "grad_norm": 3.619174485683815, - "learning_rate": 1.0753321876827292e-06, - "loss": 1.0297, - "num_input_tokens_seen": 117114170, - "step": 5517 - }, - { - "epoch": 0.663500270546504, - "flos": 17058025847400.0, - "grad_norm": 12.965209183423223, - "learning_rate": 1.0746415390775893e-06, - "loss": 0.9618, - "num_input_tokens_seen": 117132020, - "step": 5518 - }, - { - "epoch": 0.663620513437143, - "flos": 12706321486800.0, - "grad_norm": 5.471875676231844, - "learning_rate": 1.0739510308485939e-06, - "loss": 1.0051, - "num_input_tokens_seen": 117148955, - "step": 5519 - }, - { - "epoch": 0.6637407563277821, - "flos": 41391246422400.0, - "grad_norm": 1.0359979626256286, - "learning_rate": 1.07326066310049e-06, - "loss": 0.872, - "num_input_tokens_seen": 117212800, - "step": 5520 - }, - { - "epoch": 0.6638609992184212, - "flos": 19442528335800.0, - "grad_norm": 3.7634587685353775, - "learning_rate": 1.0725704359380059e-06, - "loss": 1.013, - "num_input_tokens_seen": 117232375, - "step": 5521 - }, - { - "epoch": 0.6639812421090603, - "flos": 13203880049760.0, - "grad_norm": 4.537856554046474, - "learning_rate": 1.0718803494658497e-06, - "loss": 0.9452, - "num_input_tokens_seen": 117250985, - "step": 5522 - }, - { - "epoch": 0.6641014849996993, - "flos": 11263113990360.0, - "grad_norm": 
3.8438102903522404, - "learning_rate": 1.071190403788707e-06, - "loss": 1.06, - "num_input_tokens_seen": 117266010, - "step": 5523 - }, - { - "epoch": 0.6642217278903385, - "flos": 18867502028040.0, - "grad_norm": 3.741270331197824, - "learning_rate": 1.0705005990112415e-06, - "loss": 0.9651, - "num_input_tokens_seen": 117285510, - "step": 5524 - }, - { - "epoch": 0.6643419707809776, - "flos": 10869888246840.0, - "grad_norm": 6.9229479231594215, - "learning_rate": 1.0698109352380957e-06, - "loss": 0.9555, - "num_input_tokens_seen": 117302830, - "step": 5525 - }, - { - "epoch": 0.6644622136716166, - "flos": 17869870654200.0, - "grad_norm": 4.80479567491284, - "learning_rate": 1.0691214125738909e-06, - "loss": 1.006, - "num_input_tokens_seen": 117322755, - "step": 5526 - }, - { - "epoch": 0.6645824565622558, - "flos": 47401324169040.0, - "grad_norm": 0.7765150397244955, - "learning_rate": 1.0684320311232287e-06, - "loss": 0.8444, - "num_input_tokens_seen": 117385380, - "step": 5527 - }, - { - "epoch": 0.6647026994528948, - "flos": 17844507996000.0, - "grad_norm": 25.531729311978776, - "learning_rate": 1.0677427909906865e-06, - "loss": 1.0292, - "num_input_tokens_seen": 117405550, - "step": 5528 - }, - { - "epoch": 0.6648229423435339, - "flos": 12914312571360.0, - "grad_norm": 9.852711694471477, - "learning_rate": 1.0670536922808216e-06, - "loss": 0.9327, - "num_input_tokens_seen": 117425395, - "step": 5529 - }, - { - "epoch": 0.6649431852341731, - "flos": 12968226690000.0, - "grad_norm": 5.583447051906672, - "learning_rate": 1.06636473509817e-06, - "loss": 0.9413, - "num_input_tokens_seen": 117441495, - "step": 5530 - }, - { - "epoch": 0.6650634281248121, - "flos": 12050209370160.0, - "grad_norm": 3.6700447878066607, - "learning_rate": 1.0656759195472447e-06, - "loss": 1.0268, - "num_input_tokens_seen": 117458505, - "step": 5531 - }, - { - "epoch": 0.6651836710154512, - "flos": 49623164454720.0, - "grad_norm": 0.8073326633667417, - "learning_rate": 1.0649872457325414e-06, - "loss": 0.8722, - "num_input_tokens_seen": 117519510, - "step": 5532 - }, - { - "epoch": 0.6653039139060903, - "flos": 47889714925560.0, - "grad_norm": 0.8936276265376684, - "learning_rate": 1.0642987137585278e-06, - "loss": 0.8423, - "num_input_tokens_seen": 117578755, - "step": 5533 - }, - { - "epoch": 0.6654241567967294, - "flos": 15248580328320.0, - "grad_norm": 2.8320100349420896, - "learning_rate": 1.0636103237296561e-06, - "loss": 1.0436, - "num_input_tokens_seen": 117597400, - "step": 5534 - }, - { - "epoch": 0.6655443996873684, - "flos": 17871495716880.0, - "grad_norm": 5.98622489205442, - "learning_rate": 1.062922075750353e-06, - "loss": 1.0563, - "num_input_tokens_seen": 117617135, - "step": 5535 - }, - { - "epoch": 0.6656646425780076, - "flos": 12364802229600.0, - "grad_norm": 4.723614944824848, - "learning_rate": 1.0622339699250267e-06, - "loss": 0.9423, - "num_input_tokens_seen": 117634775, - "step": 5536 - }, - { - "epoch": 0.6657848854686467, - "flos": 16659342346200.0, - "grad_norm": 5.463592015105038, - "learning_rate": 1.0615460063580624e-06, - "loss": 1.0324, - "num_input_tokens_seen": 117652970, - "step": 5537 - }, - { - "epoch": 0.6659051283592857, - "flos": 8090473629120.0, - "grad_norm": 4.681905230515075, - "learning_rate": 1.060858185153821e-06, - "loss": 0.9571, - "num_input_tokens_seen": 117670790, - "step": 5538 - }, - { - "epoch": 0.6660253712499249, - "flos": 14619394609440.0, - "grad_norm": 6.0255773877052325, - "learning_rate": 1.0601705064166474e-06, - "loss": 0.9773, - 
"num_input_tokens_seen": 117688905, - "step": 5539 - }, - { - "epoch": 0.666145614140564, - "flos": 15091559852640.0, - "grad_norm": 7.96663981902681, - "learning_rate": 1.0594829702508596e-06, - "loss": 0.9604, - "num_input_tokens_seen": 117706340, - "step": 5540 - }, - { - "epoch": 0.666265857031203, - "flos": 24056413853640.0, - "grad_norm": 3.487336136444585, - "learning_rate": 1.0587955767607592e-06, - "loss": 0.7862, - "num_input_tokens_seen": 117727920, - "step": 5541 - }, - { - "epoch": 0.6663860999218422, - "flos": 12364219659960.0, - "grad_norm": 4.132810653945713, - "learning_rate": 1.0581083260506206e-06, - "loss": 0.9932, - "num_input_tokens_seen": 117744425, - "step": 5542 - }, - { - "epoch": 0.6665063428124812, - "flos": 12522558582720.0, - "grad_norm": 4.783195349179572, - "learning_rate": 1.0574212182246993e-06, - "loss": 0.9894, - "num_input_tokens_seen": 117762840, - "step": 5543 - }, - { - "epoch": 0.6666265857031203, - "flos": 19706794479120.0, - "grad_norm": 4.540271906503547, - "learning_rate": 1.0567342533872303e-06, - "loss": 0.9581, - "num_input_tokens_seen": 117782590, - "step": 5544 - }, - { - "epoch": 0.6667468285937594, - "flos": 17819298645600.0, - "grad_norm": 6.268802284334554, - "learning_rate": 1.0560474316424255e-06, - "loss": 1.0371, - "num_input_tokens_seen": 117802070, - "step": 5545 - }, - { - "epoch": 0.6668670714843985, - "flos": 16189936643400.0, - "grad_norm": 6.040592843575494, - "learning_rate": 1.0553607530944746e-06, - "loss": 0.9557, - "num_input_tokens_seen": 117819845, - "step": 5546 - }, - { - "epoch": 0.6669873143750376, - "flos": 15747304030560.0, - "grad_norm": 5.585339238067538, - "learning_rate": 1.0546742178475463e-06, - "loss": 1.1141, - "num_input_tokens_seen": 117838560, - "step": 5547 - }, - { - "epoch": 0.6671075572656767, - "flos": 14561187872400.0, - "grad_norm": 6.404776054713791, - "learning_rate": 1.0539878260057868e-06, - "loss": 1.0857, - "num_input_tokens_seen": 117857320, - "step": 5548 - }, - { - "epoch": 0.6672278001563158, - "flos": 12705309655320.0, - "grad_norm": 7.347427810776389, - "learning_rate": 1.0533015776733226e-06, - "loss": 0.9034, - "num_input_tokens_seen": 117873190, - "step": 5549 - }, - { - "epoch": 0.6673480430469548, - "flos": 15874853199000.0, - "grad_norm": 4.353721535058711, - "learning_rate": 1.0526154729542566e-06, - "loss": 1.0054, - "num_input_tokens_seen": 117892970, - "step": 5550 - }, - { - "epoch": 0.6674682859375939, - "flos": 14697904847280.0, - "grad_norm": 19.268096740517812, - "learning_rate": 1.0519295119526699e-06, - "loss": 1.0091, - "num_input_tokens_seen": 117908995, - "step": 5551 - }, - { - "epoch": 0.667588528828233, - "flos": 18651814891920.0, - "grad_norm": 2.5174060463822796, - "learning_rate": 1.0512436947726227e-06, - "loss": 1.0502, - "num_input_tokens_seen": 117930130, - "step": 5552 - }, - { - "epoch": 0.6677087717188721, - "flos": 16398571620720.0, - "grad_norm": 4.692396531099873, - "learning_rate": 1.0505580215181517e-06, - "loss": 0.8821, - "num_input_tokens_seen": 117948090, - "step": 5553 - }, - { - "epoch": 0.6678290146095112, - "flos": 50807103642120.0, - "grad_norm": 0.8103102005272371, - "learning_rate": 1.0498724922932753e-06, - "loss": 0.8224, - "num_input_tokens_seen": 118005925, - "step": 5554 - }, - { - "epoch": 0.6679492575001503, - "flos": 13229886600720.0, - "grad_norm": 7.064174487351239, - "learning_rate": 1.0491871072019851e-06, - "loss": 1.081, - "num_input_tokens_seen": 118023535, - "step": 5555 - }, - { - "epoch": 0.6680695003907894, - "flos": 
21171378630960.0, - "grad_norm": 7.523031602092602, - "learning_rate": 1.0485018663482555e-06, - "loss": 0.8581, - "num_input_tokens_seen": 118043275, - "step": 5556 - }, - { - "epoch": 0.6681897432814284, - "flos": 20098763098680.0, - "grad_norm": 21.57506498147985, - "learning_rate": 1.0478167698360354e-06, - "loss": 0.9252, - "num_input_tokens_seen": 118062295, - "step": 5557 - }, - { - "epoch": 0.6683099861720676, - "flos": 17819053353120.0, - "grad_norm": 3.702042948699887, - "learning_rate": 1.0471318177692556e-06, - "loss": 0.9193, - "num_input_tokens_seen": 118082315, - "step": 5558 - }, - { - "epoch": 0.6684302290627067, - "flos": 16345209410160.0, - "grad_norm": 6.262109850153591, - "learning_rate": 1.046447010251821e-06, - "loss": 0.9611, - "num_input_tokens_seen": 118099365, - "step": 5559 - }, - { - "epoch": 0.6685504719533457, - "flos": 18915989050560.0, - "grad_norm": 3.664559076491694, - "learning_rate": 1.0457623473876157e-06, - "loss": 0.9816, - "num_input_tokens_seen": 118118590, - "step": 5560 - }, - { - "epoch": 0.6686707148439849, - "flos": 20650205118720.0, - "grad_norm": 2.9381075291701473, - "learning_rate": 1.0450778292805046e-06, - "loss": 0.9289, - "num_input_tokens_seen": 118138295, - "step": 5561 - }, - { - "epoch": 0.6687909577346239, - "flos": 16796028659520.0, - "grad_norm": 9.702318634091617, - "learning_rate": 1.0443934560343267e-06, - "loss": 1.0157, - "num_input_tokens_seen": 118159425, - "step": 5562 - }, - { - "epoch": 0.668911200625263, - "flos": 16454938664160.0, - "grad_norm": 3.0933649613418748, - "learning_rate": 1.0437092277529034e-06, - "loss": 0.9953, - "num_input_tokens_seen": 118178400, - "step": 5563 - }, - { - "epoch": 0.6690314435159022, - "flos": 13151468347560.0, - "grad_norm": 3.1503376492174846, - "learning_rate": 1.0430251445400292e-06, - "loss": 0.9563, - "num_input_tokens_seen": 118196165, - "step": 5564 - }, - { - "epoch": 0.6691516864065412, - "flos": 22643505526560.0, - "grad_norm": 8.05581600893567, - "learning_rate": 1.0423412064994787e-06, - "loss": 0.8478, - "num_input_tokens_seen": 118216655, - "step": 5565 - }, - { - "epoch": 0.6692719292971803, - "flos": 24924840334800.0, - "grad_norm": 7.638020888826145, - "learning_rate": 1.0416574137350064e-06, - "loss": 0.9657, - "num_input_tokens_seen": 118237080, - "step": 5566 - }, - { - "epoch": 0.6693921721878194, - "flos": 14515000466880.0, - "grad_norm": 5.312187634428862, - "learning_rate": 1.0409737663503428e-06, - "loss": 1.0296, - "num_input_tokens_seen": 118255180, - "step": 5567 - }, - { - "epoch": 0.6695124150784585, - "flos": 11758618228800.0, - "grad_norm": 9.254771883320771, - "learning_rate": 1.040290264449196e-06, - "loss": 1.0592, - "num_input_tokens_seen": 118273005, - "step": 5568 - }, - { - "epoch": 0.6696326579690975, - "flos": 18972999986760.0, - "grad_norm": 4.521980011306579, - "learning_rate": 1.0396069081352532e-06, - "loss": 0.8597, - "num_input_tokens_seen": 118291880, - "step": 5569 - }, - { - "epoch": 0.6697529008597367, - "flos": 47949148125000.0, - "grad_norm": 0.8327413403310735, - "learning_rate": 1.0389236975121782e-06, - "loss": 0.8218, - "num_input_tokens_seen": 118346450, - "step": 5570 - }, - { - "epoch": 0.6698731437503758, - "flos": 14829593326320.0, - "grad_norm": 6.601994138070792, - "learning_rate": 1.0382406326836147e-06, - "loss": 0.9354, - "num_input_tokens_seen": 118365315, - "step": 5571 - }, - { - "epoch": 0.6699933866410148, - "flos": 14486019744600.0, - "grad_norm": 4.902716269150796, - "learning_rate": 1.0375577137531828e-06, - 
"loss": 0.9644, - "num_input_tokens_seen": 118383595, - "step": 5572 - }, - { - "epoch": 0.670113629531654, - "flos": 20676855562440.0, - "grad_norm": 2.849636817679716, - "learning_rate": 1.0368749408244802e-06, - "loss": 0.942, - "num_input_tokens_seen": 118406235, - "step": 5573 - }, - { - "epoch": 0.670233872422293, - "flos": 14042375300280.0, - "grad_norm": 8.11999592372247, - "learning_rate": 1.0361923140010836e-06, - "loss": 0.999, - "num_input_tokens_seen": 118424440, - "step": 5574 - }, - { - "epoch": 0.6703541153129321, - "flos": 17242034043960.0, - "grad_norm": 4.912559388556412, - "learning_rate": 1.0355098333865455e-06, - "loss": 0.8539, - "num_input_tokens_seen": 118443390, - "step": 5575 - }, - { - "epoch": 0.6704743582035713, - "flos": 18999129183960.0, - "grad_norm": 2.8013002135624676, - "learning_rate": 1.0348274990844006e-06, - "loss": 0.9238, - "num_input_tokens_seen": 118465870, - "step": 5576 - }, - { - "epoch": 0.6705946010942103, - "flos": 16717947683520.0, - "grad_norm": 5.158902867212656, - "learning_rate": 1.034145311198155e-06, - "loss": 0.9492, - "num_input_tokens_seen": 118485605, - "step": 5577 - }, - { - "epoch": 0.6707148439848494, - "flos": 17110713503640.0, - "grad_norm": 3.8334668315916827, - "learning_rate": 1.0334632698312989e-06, - "loss": 0.8686, - "num_input_tokens_seen": 118506120, - "step": 5578 - }, - { - "epoch": 0.6708350868754885, - "flos": 16009454526240.0, - "grad_norm": 8.129781401863758, - "learning_rate": 1.032781375087295e-06, - "loss": 0.9767, - "num_input_tokens_seen": 118525740, - "step": 5579 - }, - { - "epoch": 0.6709553297661276, - "flos": 17949116769480.0, - "grad_norm": 3.864209934233972, - "learning_rate": 1.0320996270695891e-06, - "loss": 0.8995, - "num_input_tokens_seen": 118546530, - "step": 5580 - }, - { - "epoch": 0.6710755726567667, - "flos": 14514632528160.0, - "grad_norm": 3.5194389847697445, - "learning_rate": 1.0314180258815998e-06, - "loss": 0.9566, - "num_input_tokens_seen": 118564890, - "step": 5581 - }, - { - "epoch": 0.6711958155474057, - "flos": 18501018712920.0, - "grad_norm": 5.188173916715581, - "learning_rate": 1.0307365716267247e-06, - "loss": 0.9632, - "num_input_tokens_seen": 118585055, - "step": 5582 - }, - { - "epoch": 0.6713160584380449, - "flos": 14147321350920.0, - "grad_norm": 5.231305580345233, - "learning_rate": 1.0300552644083423e-06, - "loss": 1.0011, - "num_input_tokens_seen": 118603700, - "step": 5583 - }, - { - "epoch": 0.6714363013286839, - "flos": 12915324402840.0, - "grad_norm": 3.92516113242072, - "learning_rate": 1.0293741043298036e-06, - "loss": 0.9379, - "num_input_tokens_seen": 118621770, - "step": 5584 - }, - { - "epoch": 0.671556544219323, - "flos": 18369391557000.0, - "grad_norm": 6.789257472567129, - "learning_rate": 1.0286930914944436e-06, - "loss": 0.9405, - "num_input_tokens_seen": 118641305, - "step": 5585 - }, - { - "epoch": 0.6716767871099621, - "flos": 11209751779800.0, - "grad_norm": 5.608371655511319, - "learning_rate": 1.0280122260055684e-06, - "loss": 1.0037, - "num_input_tokens_seen": 118656735, - "step": 5586 - }, - { - "epoch": 0.6717970300006012, - "flos": 14016920657400.0, - "grad_norm": 2.8788761049346108, - "learning_rate": 1.0273315079664652e-06, - "loss": 1.025, - "num_input_tokens_seen": 118674410, - "step": 5587 - }, - { - "epoch": 0.6719172728912403, - "flos": 18134290105320.0, - "grad_norm": 4.291605877985872, - "learning_rate": 1.0266509374803992e-06, - "loss": 0.9674, - "num_input_tokens_seen": 118695290, - "step": 5588 - }, - { - "epoch": 
0.6720375157818794, - "flos": 11230361896200.0, - "grad_norm": 7.175989637012292, - "learning_rate": 1.0259705146506123e-06, - "loss": 1.0701, - "num_input_tokens_seen": 118709905, - "step": 5589 - }, - { - "epoch": 0.6721577586725185, - "flos": 22823558381880.0, - "grad_norm": 4.999564533344008, - "learning_rate": 1.025290239580324e-06, - "loss": 1.0, - "num_input_tokens_seen": 118730295, - "step": 5590 - }, - { - "epoch": 0.6722780015631575, - "flos": 14722041043080.0, - "grad_norm": 5.504720396170422, - "learning_rate": 1.0246101123727313e-06, - "loss": 0.9798, - "num_input_tokens_seen": 118748995, - "step": 5591 - }, - { - "epoch": 0.6723982444537967, - "flos": 11971116562680.0, - "grad_norm": 3.430054803527226, - "learning_rate": 1.0239301331310085e-06, - "loss": 1.0131, - "num_input_tokens_seen": 118766335, - "step": 5592 - }, - { - "epoch": 0.6725184873444358, - "flos": 14672082265680.0, - "grad_norm": 3.193532732536452, - "learning_rate": 1.0232503019583088e-06, - "loss": 1.1096, - "num_input_tokens_seen": 118785665, - "step": 5593 - }, - { - "epoch": 0.6726387302350748, - "flos": 16870430248320.0, - "grad_norm": 3.9635663159172485, - "learning_rate": 1.0225706189577619e-06, - "loss": 0.9201, - "num_input_tokens_seen": 118803910, - "step": 5594 - }, - { - "epoch": 0.672758973125714, - "flos": 10733140610400.0, - "grad_norm": 3.685495769427403, - "learning_rate": 1.021891084232475e-06, - "loss": 0.9587, - "num_input_tokens_seen": 118821565, - "step": 5595 - }, - { - "epoch": 0.672879216016353, - "flos": 12810286367520.0, - "grad_norm": 3.314134158700096, - "learning_rate": 1.0212116978855325e-06, - "loss": 1.0153, - "num_input_tokens_seen": 118839300, - "step": 5596 - }, - { - "epoch": 0.6729994589069921, - "flos": 16690408054560.0, - "grad_norm": 4.864935438113912, - "learning_rate": 1.020532460019997e-06, - "loss": 1.0269, - "num_input_tokens_seen": 118858270, - "step": 5597 - }, - { - "epoch": 0.6731197017976313, - "flos": 18735506933400.0, - "grad_norm": 2.5681776091218125, - "learning_rate": 1.0198533707389096e-06, - "loss": 0.921, - "num_input_tokens_seen": 118878865, - "step": 5598 - }, - { - "epoch": 0.6732399446882703, - "flos": 15353832994560.0, - "grad_norm": 4.027678334261839, - "learning_rate": 1.0191744301452853e-06, - "loss": 0.9547, - "num_input_tokens_seen": 118897885, - "step": 5599 - }, - { - "epoch": 0.6733601875789094, - "flos": 18417418656120.0, - "grad_norm": 4.664906078836447, - "learning_rate": 1.0184956383421208e-06, - "loss": 0.9303, - "num_input_tokens_seen": 118916255, - "step": 5600 - }, - { - "epoch": 0.6734804304695485, - "flos": 16297519588200.0, - "grad_norm": 4.3113751257404545, - "learning_rate": 1.017816995432387e-06, - "loss": 0.8889, - "num_input_tokens_seen": 118935075, - "step": 5601 - }, - { - "epoch": 0.6736006733601876, - "flos": 13256445059760.0, - "grad_norm": 3.2136786054042505, - "learning_rate": 1.0171385015190353e-06, - "loss": 0.9636, - "num_input_tokens_seen": 118954655, - "step": 5602 - }, - { - "epoch": 0.6737209162508266, - "flos": 13780930020480.0, - "grad_norm": 3.82256651616526, - "learning_rate": 1.0164601567049908e-06, - "loss": 0.953, - "num_input_tokens_seen": 118972905, - "step": 5603 - }, - { - "epoch": 0.6738411591414658, - "flos": 14305874904600.0, - "grad_norm": 9.400103109889939, - "learning_rate": 1.015781961093158e-06, - "loss": 1.0264, - "num_input_tokens_seen": 118991945, - "step": 5604 - }, - { - "epoch": 0.6739614020321049, - "flos": 15380851377000.0, - "grad_norm": 2.4724507660780493, - "learning_rate": 
1.0151039147864197e-06, - "loss": 0.997, - "num_input_tokens_seen": 119011640, - "step": 5605 - }, - { - "epoch": 0.6740816449227439, - "flos": 13596829839240.0, - "grad_norm": 5.623161184905318, - "learning_rate": 1.0144260178876336e-06, - "loss": 0.8933, - "num_input_tokens_seen": 119030705, - "step": 5606 - }, - { - "epoch": 0.6742018878133831, - "flos": 14979377673840.0, - "grad_norm": 5.090322452966109, - "learning_rate": 1.0137482704996388e-06, - "loss": 0.9015, - "num_input_tokens_seen": 119044775, - "step": 5607 - }, - { - "epoch": 0.6743221307040221, - "flos": 16743402326400.0, - "grad_norm": 3.66976413519771, - "learning_rate": 1.0130706727252461e-06, - "loss": 1.0196, - "num_input_tokens_seen": 119061550, - "step": 5608 - }, - { - "epoch": 0.6744423735946612, - "flos": 11496283763760.0, - "grad_norm": 6.239208823689545, - "learning_rate": 1.0123932246672468e-06, - "loss": 0.9067, - "num_input_tokens_seen": 119075415, - "step": 5609 - }, - { - "epoch": 0.6745626164853004, - "flos": 41393821993440.0, - "grad_norm": 0.8368340435076979, - "learning_rate": 1.0117159264284114e-06, - "loss": 0.8109, - "num_input_tokens_seen": 119138305, - "step": 5610 - }, - { - "epoch": 0.6746828593759394, - "flos": 14540669740680.0, - "grad_norm": 4.996583343449024, - "learning_rate": 1.0110387781114837e-06, - "loss": 1.0034, - "num_input_tokens_seen": 119156640, - "step": 5611 - }, - { - "epoch": 0.6748031022665785, - "flos": 13623081682680.0, - "grad_norm": 3.77612436489004, - "learning_rate": 1.0103617798191872e-06, - "loss": 0.9851, - "num_input_tokens_seen": 119175835, - "step": 5612 - }, - { - "epoch": 0.6749233451572175, - "flos": 10738904983680.0, - "grad_norm": 6.268402473347662, - "learning_rate": 1.0096849316542217e-06, - "loss": 1.0332, - "num_input_tokens_seen": 119192105, - "step": 5613 - }, - { - "epoch": 0.6750435880478567, - "flos": 18862381547520.0, - "grad_norm": 6.763905648122186, - "learning_rate": 1.0090082337192643e-06, - "loss": 0.9597, - "num_input_tokens_seen": 119211470, - "step": 5614 - }, - { - "epoch": 0.6751638309384957, - "flos": 16638762891360.0, - "grad_norm": 4.840813909776127, - "learning_rate": 1.0083316861169705e-06, - "loss": 1.0025, - "num_input_tokens_seen": 119229925, - "step": 5615 - }, - { - "epoch": 0.6752840738291348, - "flos": 16665505319760.0, - "grad_norm": 4.372407417516792, - "learning_rate": 1.0076552889499713e-06, - "loss": 0.9375, - "num_input_tokens_seen": 119250410, - "step": 5616 - }, - { - "epoch": 0.675404316719774, - "flos": 21620204878920.0, - "grad_norm": 4.501453800603725, - "learning_rate": 1.006979042320876e-06, - "loss": 0.9592, - "num_input_tokens_seen": 119270345, - "step": 5617 - }, - { - "epoch": 0.675524559610413, - "flos": 16795568736120.0, - "grad_norm": 6.466868251267227, - "learning_rate": 1.0063029463322702e-06, - "loss": 0.8598, - "num_input_tokens_seen": 119290340, - "step": 5618 - }, - { - "epoch": 0.6756448025010521, - "flos": 15089106927840.0, - "grad_norm": 4.074710044608308, - "learning_rate": 1.0056270010867164e-06, - "loss": 0.9785, - "num_input_tokens_seen": 119307630, - "step": 5619 - }, - { - "epoch": 0.6757650453916912, - "flos": 15374596418760.0, - "grad_norm": 15.08072911045516, - "learning_rate": 1.004951206686758e-06, - "loss": 0.9988, - "num_input_tokens_seen": 119325625, - "step": 5620 - }, - { - "epoch": 0.6758852882823303, - "flos": 15482210025120.0, - "grad_norm": 3.066602800229433, - "learning_rate": 1.0042755632349087e-06, - "loss": 0.9339, - "num_input_tokens_seen": 119342235, - "step": 5621 - }, - { 
- "epoch": 0.6760055311729694, - "flos": 19286642337840.0, - "grad_norm": 4.333855646395476, - "learning_rate": 1.0036000708336653e-06, - "loss": 0.8481, - "num_input_tokens_seen": 119361085, - "step": 5622 - }, - { - "epoch": 0.6761257740636085, - "flos": 12754011308760.0, - "grad_norm": 2.751006549080001, - "learning_rate": 1.0029247295854984e-06, - "loss": 1.0282, - "num_input_tokens_seen": 119377425, - "step": 5623 - }, - { - "epoch": 0.6762460169542476, - "flos": 10686339973680.0, - "grad_norm": 5.54520943578938, - "learning_rate": 1.0022495395928588e-06, - "loss": 0.9435, - "num_input_tokens_seen": 119395625, - "step": 5624 - }, - { - "epoch": 0.6763662598448866, - "flos": 48613416216600.0, - "grad_norm": 0.782944481967829, - "learning_rate": 1.0015745009581697e-06, - "loss": 0.8668, - "num_input_tokens_seen": 119456950, - "step": 5625 - }, - { - "epoch": 0.6764865027355258, - "flos": 14646290345640.0, - "grad_norm": 3.4549657826033475, - "learning_rate": 1.0008996137838343e-06, - "loss": 0.8927, - "num_input_tokens_seen": 119475645, - "step": 5626 - }, - { - "epoch": 0.6766067456261649, - "flos": 15066013225080.0, - "grad_norm": 9.537591144331245, - "learning_rate": 1.000224878172234e-06, - "loss": 1.018, - "num_input_tokens_seen": 119494490, - "step": 5627 - }, - { - "epoch": 0.6767269885168039, - "flos": 14147781274320.0, - "grad_norm": 7.655501597669095, - "learning_rate": 9.99550294225724e-07, - "loss": 0.9588, - "num_input_tokens_seen": 119513365, - "step": 5628 - }, - { - "epoch": 0.6768472314074431, - "flos": 14777242947240.0, - "grad_norm": 2.986450751651702, - "learning_rate": 9.988758620466402e-07, - "loss": 0.9455, - "num_input_tokens_seen": 119531955, - "step": 5629 - }, - { - "epoch": 0.6769674742980821, - "flos": 16481957046600.0, - "grad_norm": 4.5999576898370504, - "learning_rate": 9.982015817372917e-07, - "loss": 0.9816, - "num_input_tokens_seen": 119552115, - "step": 5630 - }, - { - "epoch": 0.6770877171887212, - "flos": 17241390151200.0, - "grad_norm": 3.9328240377372845, - "learning_rate": 9.975274533999657e-07, - "loss": 1.0384, - "num_input_tokens_seen": 119571365, - "step": 5631 - }, - { - "epoch": 0.6772079600793603, - "flos": 12855339295320.0, - "grad_norm": 3.927506131483667, - "learning_rate": 9.96853477136929e-07, - "loss": 1.0463, - "num_input_tokens_seen": 119585830, - "step": 5632 - }, - { - "epoch": 0.6773282029699994, - "flos": 15953915344920.0, - "grad_norm": 5.4206149261286, - "learning_rate": 9.96179653050422e-07, - "loss": 0.9691, - "num_input_tokens_seen": 119605710, - "step": 5633 - }, - { - "epoch": 0.6774484458606385, - "flos": 13252826995680.0, - "grad_norm": 6.31340337653347, - "learning_rate": 9.955059812426635e-07, - "loss": 0.9573, - "num_input_tokens_seen": 119622960, - "step": 5634 - }, - { - "epoch": 0.6775686887512776, - "flos": 18499853573640.0, - "grad_norm": 7.545104393663436, - "learning_rate": 9.948324618158493e-07, - "loss": 1.0475, - "num_input_tokens_seen": 119643020, - "step": 5635 - }, - { - "epoch": 0.6776889316419167, - "flos": 9583731887640.0, - "grad_norm": 5.195539747732056, - "learning_rate": 9.941590948721502e-07, - "loss": 1.0003, - "num_input_tokens_seen": 119659940, - "step": 5636 - }, - { - "epoch": 0.6778091745325557, - "flos": 19655057331240.0, - "grad_norm": 4.467241792992691, - "learning_rate": 9.934858805137188e-07, - "loss": 0.9776, - "num_input_tokens_seen": 119680310, - "step": 5637 - }, - { - "epoch": 0.6779294174231949, - "flos": 13282328964480.0, - "grad_norm": 2.9246493137465963, - "learning_rate": 
9.92812818842677e-07, - "loss": 1.0359, - "num_input_tokens_seen": 119699205, - "step": 5638 - }, - { - "epoch": 0.678049660313834, - "flos": 32786381033880.0, - "grad_norm": 3.3695830212265845, - "learning_rate": 9.921399099611306e-07, - "loss": 0.854, - "num_input_tokens_seen": 119720090, - "step": 5639 - }, - { - "epoch": 0.678169903204473, - "flos": 14173849148400.0, - "grad_norm": 4.794168012912684, - "learning_rate": 9.914671539711588e-07, - "loss": 0.925, - "num_input_tokens_seen": 119739330, - "step": 5640 - }, - { - "epoch": 0.6782901460951122, - "flos": 15194880840600.0, - "grad_norm": 3.7613531550989796, - "learning_rate": 9.90794550974817e-07, - "loss": 1.0018, - "num_input_tokens_seen": 119759445, - "step": 5641 - }, - { - "epoch": 0.6784103889857512, - "flos": 15222727085160.0, - "grad_norm": 3.863159054326953, - "learning_rate": 9.901221010741407e-07, - "loss": 1.0316, - "num_input_tokens_seen": 119778485, - "step": 5642 - }, - { - "epoch": 0.6785306318763903, - "flos": 23299249704480.0, - "grad_norm": 2.7326119979144936, - "learning_rate": 9.894498043711375e-07, - "loss": 0.9737, - "num_input_tokens_seen": 119799950, - "step": 5643 - }, - { - "epoch": 0.6786508747670293, - "flos": 18240156002760.0, - "grad_norm": 8.813879892519845, - "learning_rate": 9.887776609677962e-07, - "loss": 0.9231, - "num_input_tokens_seen": 119821040, - "step": 5644 - }, - { - "epoch": 0.6787711176576685, - "flos": 13596983147040.0, - "grad_norm": 3.6295111637577033, - "learning_rate": 9.88105670966079e-07, - "loss": 0.947, - "num_input_tokens_seen": 119839220, - "step": 5645 - }, - { - "epoch": 0.6788913605483076, - "flos": 9868608147360.0, - "grad_norm": 2.777431395444825, - "learning_rate": 9.874338344679283e-07, - "loss": 1.0127, - "num_input_tokens_seen": 119854785, - "step": 5646 - }, - { - "epoch": 0.6790116034389466, - "flos": 15642112687440.0, - "grad_norm": 3.4113640148529294, - "learning_rate": 9.86762151575259e-07, - "loss": 0.9691, - "num_input_tokens_seen": 119874500, - "step": 5647 - }, - { - "epoch": 0.6791318463295858, - "flos": 14854986646080.0, - "grad_norm": 3.8686864548796707, - "learning_rate": 9.860906223899651e-07, - "loss": 1.0353, - "num_input_tokens_seen": 119893615, - "step": 5648 - }, - { - "epoch": 0.6792520892202248, - "flos": 20310157616400.0, - "grad_norm": 4.592956059154945, - "learning_rate": 9.854192470139184e-07, - "loss": 0.9755, - "num_input_tokens_seen": 119914815, - "step": 5649 - }, - { - "epoch": 0.6793723321108639, - "flos": 14200806207720.0, - "grad_norm": 10.27182957355144, - "learning_rate": 9.847480255489645e-07, - "loss": 0.9429, - "num_input_tokens_seen": 119933560, - "step": 5650 - }, - { - "epoch": 0.6794925750015031, - "flos": 18970731031320.0, - "grad_norm": 3.0156302180269785, - "learning_rate": 9.840769580969295e-07, - "loss": 0.9159, - "num_input_tokens_seen": 119953720, - "step": 5651 - }, - { - "epoch": 0.6796128178921421, - "flos": 15327979751400.0, - "grad_norm": 3.9958285992342075, - "learning_rate": 9.834060447596114e-07, - "loss": 1.0164, - "num_input_tokens_seen": 119972710, - "step": 5652 - }, - { - "epoch": 0.6797330607827812, - "flos": 15983080036560.0, - "grad_norm": 6.145702951201295, - "learning_rate": 9.827352856387868e-07, - "loss": 1.0018, - "num_input_tokens_seen": 119992140, - "step": 5653 - }, - { - "epoch": 0.6798533036734203, - "flos": 47476124358120.0, - "grad_norm": 0.806508420589261, - "learning_rate": 9.820646808362118e-07, - "loss": 0.8856, - "num_input_tokens_seen": 120058115, - "step": 5654 - }, - { - "epoch": 
0.6799735465640594, - "flos": 11446907556000.0, - "grad_norm": 5.3640699813795045, - "learning_rate": 9.813942304536154e-07, - "loss": 0.9339, - "num_input_tokens_seen": 120075805, - "step": 5655 - }, - { - "epoch": 0.6800937894546984, - "flos": 15719672416920.0, - "grad_norm": 5.258844826915339, - "learning_rate": 9.807239345927043e-07, - "loss": 0.8555, - "num_input_tokens_seen": 120095535, - "step": 5656 - }, - { - "epoch": 0.6802140323453376, - "flos": 22538068890960.0, - "grad_norm": 3.6476051312277997, - "learning_rate": 9.80053793355162e-07, - "loss": 0.9426, - "num_input_tokens_seen": 120113950, - "step": 5657 - }, - { - "epoch": 0.6803342752359767, - "flos": 12548534472120.0, - "grad_norm": 3.7240663881786547, - "learning_rate": 9.793838068426472e-07, - "loss": 0.9642, - "num_input_tokens_seen": 120131365, - "step": 5658 - }, - { - "epoch": 0.6804545181266157, - "flos": 7958785150080.0, - "grad_norm": 6.156799601696422, - "learning_rate": 9.78713975156799e-07, - "loss": 0.8419, - "num_input_tokens_seen": 120146950, - "step": 5659 - }, - { - "epoch": 0.6805747610172549, - "flos": 20912447599080.0, - "grad_norm": 17.091050403829776, - "learning_rate": 9.780442983992273e-07, - "loss": 0.9476, - "num_input_tokens_seen": 120165185, - "step": 5660 - }, - { - "epoch": 0.680695003907894, - "flos": 26863582731240.0, - "grad_norm": 3.266525655414773, - "learning_rate": 9.773747766715238e-07, - "loss": 0.9402, - "num_input_tokens_seen": 120185725, - "step": 5661 - }, - { - "epoch": 0.680815246798533, - "flos": 15721052187120.0, - "grad_norm": 4.983428960964967, - "learning_rate": 9.767054100752536e-07, - "loss": 1.0285, - "num_input_tokens_seen": 120205395, - "step": 5662 - }, - { - "epoch": 0.6809354896891722, - "flos": 12181161971760.0, - "grad_norm": 4.984856370025636, - "learning_rate": 9.760361987119584e-07, - "loss": 1.0359, - "num_input_tokens_seen": 120222850, - "step": 5663 - }, - { - "epoch": 0.6810557325798112, - "flos": 8771917742400.0, - "grad_norm": 5.163062571614661, - "learning_rate": 9.753671426831592e-07, - "loss": 0.9044, - "num_input_tokens_seen": 120238585, - "step": 5664 - }, - { - "epoch": 0.6811759754704503, - "flos": 15740711795160.0, - "grad_norm": 3.748755439227751, - "learning_rate": 9.746982420903483e-07, - "loss": 1.0185, - "num_input_tokens_seen": 120256500, - "step": 5665 - }, - { - "epoch": 0.6812962183610894, - "flos": 12413043959640.0, - "grad_norm": 6.3153036708714385, - "learning_rate": 9.740294970349993e-07, - "loss": 0.9747, - "num_input_tokens_seen": 120272635, - "step": 5666 - }, - { - "epoch": 0.6814164612517285, - "flos": 43139296402200.0, - "grad_norm": 1.034445291764208, - "learning_rate": 9.733609076185594e-07, - "loss": 0.906, - "num_input_tokens_seen": 120328760, - "step": 5667 - }, - { - "epoch": 0.6815367041423676, - "flos": 13701806551440.0, - "grad_norm": 4.9835885602389105, - "learning_rate": 9.72692473942455e-07, - "loss": 1.0635, - "num_input_tokens_seen": 120345705, - "step": 5668 - }, - { - "epoch": 0.6816569470330067, - "flos": 15745433675400.0, - "grad_norm": 4.703761466420613, - "learning_rate": 9.720241961080849e-07, - "loss": 1.0011, - "num_input_tokens_seen": 120364740, - "step": 5669 - }, - { - "epoch": 0.6817771899236458, - "flos": 29617389398280.0, - "grad_norm": 37.63389021059094, - "learning_rate": 9.713560742168259e-07, - "loss": 0.9378, - "num_input_tokens_seen": 120387085, - "step": 5670 - }, - { - "epoch": 0.6818974328142848, - "flos": 14987165710080.0, - "grad_norm": 6.00327720151278, - "learning_rate": 
9.706881083700333e-07, - "loss": 0.9378, - "num_input_tokens_seen": 120406490, - "step": 5671 - }, - { - "epoch": 0.682017675704924, - "flos": 14508776170200.0, - "grad_norm": 6.738772910052093, - "learning_rate": 9.700202986690357e-07, - "loss": 1.0418, - "num_input_tokens_seen": 120424510, - "step": 5672 - }, - { - "epoch": 0.682137918595563, - "flos": 14223654618000.0, - "grad_norm": 7.303590220894522, - "learning_rate": 9.693526452151413e-07, - "loss": 0.881, - "num_input_tokens_seen": 120443280, - "step": 5673 - }, - { - "epoch": 0.6822581614862021, - "flos": 22589346115440.0, - "grad_norm": 4.284633663724849, - "learning_rate": 9.686851481096305e-07, - "loss": 0.9909, - "num_input_tokens_seen": 120464310, - "step": 5674 - }, - { - "epoch": 0.6823784043768413, - "flos": 16691450547600.0, - "grad_norm": 5.790972807598876, - "learning_rate": 9.68017807453762e-07, - "loss": 0.9604, - "num_input_tokens_seen": 120482775, - "step": 5675 - }, - { - "epoch": 0.6824986472674803, - "flos": 9978644016960.0, - "grad_norm": 2.9443824169735175, - "learning_rate": 9.673506233487721e-07, - "loss": 0.9514, - "num_input_tokens_seen": 120500460, - "step": 5676 - }, - { - "epoch": 0.6826188901581194, - "flos": 15273483063120.0, - "grad_norm": 3.135681817047717, - "learning_rate": 9.666835958958717e-07, - "loss": 1.0856, - "num_input_tokens_seen": 120519500, - "step": 5677 - }, - { - "epoch": 0.6827391330487584, - "flos": 14773870175640.0, - "grad_norm": 3.893031590104022, - "learning_rate": 9.660167251962484e-07, - "loss": 1.0204, - "num_input_tokens_seen": 120537580, - "step": 5678 - }, - { - "epoch": 0.6828593759393976, - "flos": 15405754111800.0, - "grad_norm": 5.421547475810823, - "learning_rate": 9.653500113510654e-07, - "loss": 1.002, - "num_input_tokens_seen": 120556415, - "step": 5679 - }, - { - "epoch": 0.6829796188300367, - "flos": 18028209576960.0, - "grad_norm": 4.583326906141918, - "learning_rate": 9.646834544614627e-07, - "loss": 0.895, - "num_input_tokens_seen": 120576635, - "step": 5680 - }, - { - "epoch": 0.6830998617206757, - "flos": 14698732709400.0, - "grad_norm": 6.2781033201778484, - "learning_rate": 9.64017054628558e-07, - "loss": 0.9854, - "num_input_tokens_seen": 120595180, - "step": 5681 - }, - { - "epoch": 0.6832201046113149, - "flos": 15433140432960.0, - "grad_norm": 4.8296595236623, - "learning_rate": 9.63350811953441e-07, - "loss": 1.0255, - "num_input_tokens_seen": 120615275, - "step": 5682 - }, - { - "epoch": 0.6833403475019539, - "flos": 13859041658040.0, - "grad_norm": 7.130340181129706, - "learning_rate": 9.626847265371826e-07, - "loss": 0.9287, - "num_input_tokens_seen": 120634315, - "step": 5683 - }, - { - "epoch": 0.683460590392593, - "flos": 13726249362840.0, - "grad_norm": 3.3886845393315226, - "learning_rate": 9.620187984808262e-07, - "loss": 0.9999, - "num_input_tokens_seen": 120652835, - "step": 5684 - }, - { - "epoch": 0.6835808332832322, - "flos": 16554672249600.0, - "grad_norm": 6.80017178262383, - "learning_rate": 9.613530278853919e-07, - "loss": 1.083, - "num_input_tokens_seen": 120672530, - "step": 5685 - }, - { - "epoch": 0.6837010761738712, - "flos": 15380483438280.0, - "grad_norm": 6.9064922031377085, - "learning_rate": 9.60687414851879e-07, - "loss": 0.9648, - "num_input_tokens_seen": 120693255, - "step": 5686 - }, - { - "epoch": 0.6838213190645103, - "flos": 12443128498080.0, - "grad_norm": 3.6209045163185936, - "learning_rate": 9.600219594812575e-07, - "loss": 0.9837, - "num_input_tokens_seen": 120710915, - "step": 5687 - }, - { - "epoch": 
0.6839415619551494, - "flos": 16429085421000.0, - "grad_norm": 3.3821149639890526, - "learning_rate": 9.593566618744786e-07, - "loss": 0.9492, - "num_input_tokens_seen": 120730785, - "step": 5688 - }, - { - "epoch": 0.6840618048457885, - "flos": 15721481448960.0, - "grad_norm": 3.6527877456148694, - "learning_rate": 9.58691522132466e-07, - "loss": 0.9604, - "num_input_tokens_seen": 120749315, - "step": 5689 - }, - { - "epoch": 0.6841820477364275, - "flos": 15640671594120.0, - "grad_norm": 5.0409057084950994, - "learning_rate": 9.58026540356123e-07, - "loss": 1.0801, - "num_input_tokens_seen": 120767300, - "step": 5690 - }, - { - "epoch": 0.6843022906270667, - "flos": 17714107302480.0, - "grad_norm": 2.8186645765534406, - "learning_rate": 9.573617166463246e-07, - "loss": 1.0856, - "num_input_tokens_seen": 120788235, - "step": 5691 - }, - { - "epoch": 0.6844225335177058, - "flos": 14169893807160.0, - "grad_norm": 4.113959130691979, - "learning_rate": 9.56697051103924e-07, - "loss": 0.8259, - "num_input_tokens_seen": 120805395, - "step": 5692 - }, - { - "epoch": 0.6845427764083448, - "flos": 18420116873400.0, - "grad_norm": 5.699911471566151, - "learning_rate": 9.560325438297522e-07, - "loss": 1.0288, - "num_input_tokens_seen": 120823425, - "step": 5693 - }, - { - "epoch": 0.684663019298984, - "flos": 13387336338240.0, - "grad_norm": 4.177399253155445, - "learning_rate": 9.553681949246127e-07, - "loss": 1.0999, - "num_input_tokens_seen": 120840770, - "step": 5694 - }, - { - "epoch": 0.684783262189623, - "flos": 38765914318680.0, - "grad_norm": 6.616047118013402, - "learning_rate": 9.547040044892886e-07, - "loss": 0.9872, - "num_input_tokens_seen": 120868005, - "step": 5695 - }, - { - "epoch": 0.6849035050802621, - "flos": 45796834240080.0, - "grad_norm": 0.8813585444488445, - "learning_rate": 9.540399726245354e-07, - "loss": 0.8652, - "num_input_tokens_seen": 120924430, - "step": 5696 - }, - { - "epoch": 0.6850237479709013, - "flos": 17946019951920.0, - "grad_norm": 4.025401286148546, - "learning_rate": 9.533760994310859e-07, - "loss": 0.909, - "num_input_tokens_seen": 120944550, - "step": 5697 - }, - { - "epoch": 0.6851439908615403, - "flos": 13727935748640.0, - "grad_norm": 3.4152767021264037, - "learning_rate": 9.527123850096508e-07, - "loss": 0.9759, - "num_input_tokens_seen": 120962630, - "step": 5698 - }, - { - "epoch": 0.6852642337521794, - "flos": 16478645598120.0, - "grad_norm": 10.988392687956965, - "learning_rate": 9.520488294609142e-07, - "loss": 0.9409, - "num_input_tokens_seen": 120981130, - "step": 5699 - }, - { - "epoch": 0.6853844766428185, - "flos": 44844685055880.0, - "grad_norm": 0.8689557168122906, - "learning_rate": 9.513854328855368e-07, - "loss": 0.8266, - "num_input_tokens_seen": 121038725, - "step": 5700 - }, - { - "epoch": 0.6855047195334576, - "flos": 16661948578800.0, - "grad_norm": 3.024441022694253, - "learning_rate": 9.507221953841558e-07, - "loss": 1.0276, - "num_input_tokens_seen": 121056075, - "step": 5701 - }, - { - "epoch": 0.6856249624240967, - "flos": 14669537356200.0, - "grad_norm": 2.9366254113280297, - "learning_rate": 9.500591170573824e-07, - "loss": 1.002, - "num_input_tokens_seen": 121075815, - "step": 5702 - }, - { - "epoch": 0.6857452053147358, - "flos": 12098175146160.0, - "grad_norm": 4.278732562061631, - "learning_rate": 9.493961980058078e-07, - "loss": 0.9701, - "num_input_tokens_seen": 121093130, - "step": 5703 - }, - { - "epoch": 0.6858654482053749, - "flos": 21987853333320.0, - "grad_norm": 5.79415788747597, - "learning_rate": 
9.48733438329993e-07, - "loss": 0.9025, - "num_input_tokens_seen": 121113115, - "step": 5704 - }, - { - "epoch": 0.6859856910960139, - "flos": 20572124142720.0, - "grad_norm": 4.196779587741183, - "learning_rate": 9.480708381304807e-07, - "loss": 0.9582, - "num_input_tokens_seen": 121134130, - "step": 5705 - }, - { - "epoch": 0.6861059339866531, - "flos": 13728273025800.0, - "grad_norm": 5.65689167577005, - "learning_rate": 9.474083975077858e-07, - "loss": 1.0732, - "num_input_tokens_seen": 121150975, - "step": 5706 - }, - { - "epoch": 0.6862261768772921, - "flos": 15773187935280.0, - "grad_norm": 4.346906653919054, - "learning_rate": 9.467461165623994e-07, - "loss": 1.0222, - "num_input_tokens_seen": 121169745, - "step": 5707 - }, - { - "epoch": 0.6863464197679312, - "flos": 18709408397760.0, - "grad_norm": 9.013432968461746, - "learning_rate": 9.46083995394791e-07, - "loss": 1.0185, - "num_input_tokens_seen": 121187275, - "step": 5708 - }, - { - "epoch": 0.6864666626585703, - "flos": 26995485841200.0, - "grad_norm": 4.967057809639602, - "learning_rate": 9.454220341054012e-07, - "loss": 0.8519, - "num_input_tokens_seen": 121211780, - "step": 5709 - }, - { - "epoch": 0.6865869055492094, - "flos": 13754616853920.0, - "grad_norm": 4.912769238419999, - "learning_rate": 9.447602327946512e-07, - "loss": 1.0331, - "num_input_tokens_seen": 121230140, - "step": 5710 - }, - { - "epoch": 0.6867071484398485, - "flos": 14462343472200.0, - "grad_norm": 5.977937507780073, - "learning_rate": 9.440985915629338e-07, - "loss": 0.9905, - "num_input_tokens_seen": 121247190, - "step": 5711 - }, - { - "epoch": 0.6868273913304875, - "flos": 11237873978400.0, - "grad_norm": 3.0079735650869206, - "learning_rate": 9.434371105106223e-07, - "loss": 0.9527, - "num_input_tokens_seen": 121264510, - "step": 5712 - }, - { - "epoch": 0.6869476342211267, - "flos": 17399269150560.0, - "grad_norm": 3.6673263621034873, - "learning_rate": 9.427757897380602e-07, - "loss": 0.9107, - "num_input_tokens_seen": 121283630, - "step": 5713 - }, - { - "epoch": 0.6870678771117658, - "flos": 13072896786600.0, - "grad_norm": 3.5944690814214724, - "learning_rate": 9.421146293455695e-07, - "loss": 1.0679, - "num_input_tokens_seen": 121299090, - "step": 5714 - }, - { - "epoch": 0.6871881200024048, - "flos": 15773525212440.0, - "grad_norm": 8.227222325598367, - "learning_rate": 9.414536294334489e-07, - "loss": 0.9104, - "num_input_tokens_seen": 121318830, - "step": 5715 - }, - { - "epoch": 0.687308362893044, - "flos": 15721358802720.0, - "grad_norm": 6.419131544560911, - "learning_rate": 9.407927901019708e-07, - "loss": 0.9231, - "num_input_tokens_seen": 121337680, - "step": 5716 - }, - { - "epoch": 0.687428605783683, - "flos": 17813932872600.0, - "grad_norm": 3.2472171606831677, - "learning_rate": 9.401321114513854e-07, - "loss": 0.9983, - "num_input_tokens_seen": 121356295, - "step": 5717 - }, - { - "epoch": 0.6875488486743221, - "flos": 16901741249160.0, - "grad_norm": 3.481656369610563, - "learning_rate": 9.394715935819155e-07, - "loss": 0.9735, - "num_input_tokens_seen": 121376405, - "step": 5718 - }, - { - "epoch": 0.6876690915649613, - "flos": 18156586607520.0, - "grad_norm": 3.3915392790044865, - "learning_rate": 9.388112365937608e-07, - "loss": 0.8353, - "num_input_tokens_seen": 121395590, - "step": 5719 - }, - { - "epoch": 0.6877893344556003, - "flos": 13781144651400.0, - "grad_norm": 25.86924141460641, - "learning_rate": 9.381510405870985e-07, - "loss": 1.038, - "num_input_tokens_seen": 121414325, - "step": 5720 - }, - { - "epoch": 
0.6879095773462394, - "flos": 13230070570080.0, - "grad_norm": 3.592158354864571, - "learning_rate": 9.374910056620791e-07, - "loss": 0.9809, - "num_input_tokens_seen": 121433110, - "step": 5721 - }, - { - "epoch": 0.6880298202368785, - "flos": 14826343200960.0, - "grad_norm": 3.030991533409182, - "learning_rate": 9.368311319188293e-07, - "loss": 1.0333, - "num_input_tokens_seen": 121450645, - "step": 5722 - }, - { - "epoch": 0.6881500631275176, - "flos": 21489006984840.0, - "grad_norm": 3.3565974618646326, - "learning_rate": 9.361714194574515e-07, - "loss": 1.0203, - "num_input_tokens_seen": 121472700, - "step": 5723 - }, - { - "epoch": 0.6882703060181566, - "flos": 47389023335400.0, - "grad_norm": 0.7311620838754112, - "learning_rate": 9.355118683780228e-07, - "loss": 0.828, - "num_input_tokens_seen": 121542490, - "step": 5724 - }, - { - "epoch": 0.6883905489087958, - "flos": 12907873643760.0, - "grad_norm": 3.3153291532529887, - "learning_rate": 9.348524787805987e-07, - "loss": 1.0145, - "num_input_tokens_seen": 121557400, - "step": 5725 - }, - { - "epoch": 0.6885107917994349, - "flos": 9919456110000.0, - "grad_norm": 6.181978478622863, - "learning_rate": 9.341932507652053e-07, - "loss": 1.0692, - "num_input_tokens_seen": 121571610, - "step": 5726 - }, - { - "epoch": 0.6886310346900739, - "flos": 20437461492360.0, - "grad_norm": 3.296845060818222, - "learning_rate": 9.335341844318489e-07, - "loss": 0.9966, - "num_input_tokens_seen": 121591470, - "step": 5727 - }, - { - "epoch": 0.6887512775807131, - "flos": 17451588868080.0, - "grad_norm": 3.2467441389278133, - "learning_rate": 9.328752798805091e-07, - "loss": 0.9565, - "num_input_tokens_seen": 121609660, - "step": 5728 - }, - { - "epoch": 0.6888715204713521, - "flos": 15926651670000.0, - "grad_norm": 9.581834581909568, - "learning_rate": 9.322165372111399e-07, - "loss": 0.9679, - "num_input_tokens_seen": 121627525, - "step": 5729 - }, - { - "epoch": 0.6889917633619912, - "flos": 15668548500240.0, - "grad_norm": 3.4859705808430066, - "learning_rate": 9.315579565236747e-07, - "loss": 0.9803, - "num_input_tokens_seen": 121646350, - "step": 5730 - }, - { - "epoch": 0.6891120062526304, - "flos": 17030486218440.0, - "grad_norm": 3.4925075444090177, - "learning_rate": 9.308995379180162e-07, - "loss": 0.9669, - "num_input_tokens_seen": 121665625, - "step": 5731 - }, - { - "epoch": 0.6892322491432694, - "flos": 45902700137520.0, - "grad_norm": 0.8458107401894999, - "learning_rate": 9.302412814940488e-07, - "loss": 0.8659, - "num_input_tokens_seen": 121728120, - "step": 5732 - }, - { - "epoch": 0.6893524920339085, - "flos": 16349869967280.0, - "grad_norm": 3.4652019603710933, - "learning_rate": 9.295831873516276e-07, - "loss": 0.9234, - "num_input_tokens_seen": 121747115, - "step": 5733 - }, - { - "epoch": 0.6894727349245476, - "flos": 15195432748680.0, - "grad_norm": 3.3301431330714846, - "learning_rate": 9.289252555905873e-07, - "loss": 0.9931, - "num_input_tokens_seen": 121766915, - "step": 5734 - }, - { - "epoch": 0.6895929778151867, - "flos": 14095369572120.0, - "grad_norm": 5.935539484097899, - "learning_rate": 9.282674863107334e-07, - "loss": 0.9707, - "num_input_tokens_seen": 121784450, - "step": 5735 - }, - { - "epoch": 0.6897132207058257, - "flos": 12882510985560.0, - "grad_norm": 4.262332525906603, - "learning_rate": 9.276098796118488e-07, - "loss": 0.9846, - "num_input_tokens_seen": 121800655, - "step": 5736 - }, - { - "epoch": 0.6898334635964649, - "flos": 23219880942960.0, - "grad_norm": 4.672521438018783, - "learning_rate": 
9.269524355936938e-07, - "loss": 0.8913, - "num_input_tokens_seen": 121823555, - "step": 5737 - }, - { - "epoch": 0.689953706487104, - "flos": 16217905534200.0, - "grad_norm": 4.075688244323527, - "learning_rate": 9.262951543560002e-07, - "loss": 1.0726, - "num_input_tokens_seen": 121842500, - "step": 5738 - }, - { - "epoch": 0.690073949377743, - "flos": 13125185842560.0, - "grad_norm": 4.3360270173051045, - "learning_rate": 9.256380359984795e-07, - "loss": 1.0779, - "num_input_tokens_seen": 121859330, - "step": 5739 - }, - { - "epoch": 0.6901941922683821, - "flos": 24869239830360.0, - "grad_norm": 3.5104895488896943, - "learning_rate": 9.249810806208139e-07, - "loss": 0.9682, - "num_input_tokens_seen": 121878315, - "step": 5740 - }, - { - "epoch": 0.6903144351590212, - "flos": 11499809843160.0, - "grad_norm": 3.580855081829678, - "learning_rate": 9.243242883226627e-07, - "loss": 1.0303, - "num_input_tokens_seen": 121897130, - "step": 5741 - }, - { - "epoch": 0.6904346780496603, - "flos": 19966461388440.0, - "grad_norm": 3.7627389065246497, - "learning_rate": 9.236676592036628e-07, - "loss": 0.9199, - "num_input_tokens_seen": 121916525, - "step": 5742 - }, - { - "epoch": 0.6905549209402994, - "flos": 16796979167880.0, - "grad_norm": 5.376570183932227, - "learning_rate": 9.230111933634228e-07, - "loss": 0.9554, - "num_input_tokens_seen": 121937840, - "step": 5743 - }, - { - "epoch": 0.6906751638309385, - "flos": 16431139745520.0, - "grad_norm": 3.606436982753378, - "learning_rate": 9.223548909015288e-07, - "loss": 1.0368, - "num_input_tokens_seen": 121959250, - "step": 5744 - }, - { - "epoch": 0.6907954067215776, - "flos": 19442068412400.0, - "grad_norm": 3.533714042402367, - "learning_rate": 9.216987519175407e-07, - "loss": 0.9355, - "num_input_tokens_seen": 121979145, - "step": 5745 - }, - { - "epoch": 0.6909156496122166, - "flos": 15406275358320.0, - "grad_norm": 5.902062925049193, - "learning_rate": 9.210427765109942e-07, - "loss": 0.91, - "num_input_tokens_seen": 121998540, - "step": 5746 - }, - { - "epoch": 0.6910358925028558, - "flos": 16032517567440.0, - "grad_norm": 4.392003613357586, - "learning_rate": 9.20386964781402e-07, - "loss": 1.0298, - "num_input_tokens_seen": 122016280, - "step": 5747 - }, - { - "epoch": 0.6911561353934949, - "flos": 15668364530880.0, - "grad_norm": 3.701206788877212, - "learning_rate": 9.197313168282472e-07, - "loss": 1.0592, - "num_input_tokens_seen": 122033445, - "step": 5748 - }, - { - "epoch": 0.6912763782841339, - "flos": 17215260954000.0, - "grad_norm": 8.068796936891463, - "learning_rate": 9.190758327509935e-07, - "loss": 0.943, - "num_input_tokens_seen": 122051910, - "step": 5749 - }, - { - "epoch": 0.6913966211747731, - "flos": 37430713480800.0, - "grad_norm": 0.880503038958927, - "learning_rate": 9.184205126490767e-07, - "loss": 0.8993, - "num_input_tokens_seen": 122100525, - "step": 5750 - }, - { - "epoch": 0.6915168640654121, - "flos": 47788049661840.0, - "grad_norm": 0.9284631697458241, - "learning_rate": 9.177653566219075e-07, - "loss": 0.8667, - "num_input_tokens_seen": 122154970, - "step": 5751 - }, - { - "epoch": 0.6916371069560512, - "flos": 12967920074400.0, - "grad_norm": 4.218102073364081, - "learning_rate": 9.171103647688744e-07, - "loss": 0.978, - "num_input_tokens_seen": 122173430, - "step": 5752 - }, - { - "epoch": 0.6917573498466904, - "flos": 13937521234320.0, - "grad_norm": 17.98591147188847, - "learning_rate": 9.164555371893367e-07, - "loss": 0.9101, - "num_input_tokens_seen": 122193080, - "step": 5753 - }, - { - "epoch": 
0.6918775927373294, - "flos": 10031270350080.0, - "grad_norm": 3.419114902695502, - "learning_rate": 9.158008739826333e-07, - "loss": 0.98, - "num_input_tokens_seen": 122210400, - "step": 5754 - }, - { - "epoch": 0.6919978356279685, - "flos": 17056063507560.0, - "grad_norm": 3.2662660982455702, - "learning_rate": 9.151463752480744e-07, - "loss": 1.0879, - "num_input_tokens_seen": 122228850, - "step": 5755 - }, - { - "epoch": 0.6921180785186076, - "flos": 16795844690160.0, - "grad_norm": 4.034806449625248, - "learning_rate": 9.144920410849493e-07, - "loss": 1.0281, - "num_input_tokens_seen": 122249805, - "step": 5756 - }, - { - "epoch": 0.6922383214092467, - "flos": 15013386891960.0, - "grad_norm": 2.9833415374074757, - "learning_rate": 9.138378715925176e-07, - "loss": 1.0228, - "num_input_tokens_seen": 122268620, - "step": 5757 - }, - { - "epoch": 0.6923585642998857, - "flos": 15249101574840.0, - "grad_norm": 4.054718781866927, - "learning_rate": 9.131838668700167e-07, - "loss": 1.0282, - "num_input_tokens_seen": 122288410, - "step": 5758 - }, - { - "epoch": 0.6924788071905249, - "flos": 14986368509520.0, - "grad_norm": 3.7058254005159075, - "learning_rate": 9.125300270166598e-07, - "loss": 1.0907, - "num_input_tokens_seen": 122308735, - "step": 5759 - }, - { - "epoch": 0.692599050081164, - "flos": 18683892431760.0, - "grad_norm": 9.447539187751568, - "learning_rate": 9.118763521316324e-07, - "loss": 1.0892, - "num_input_tokens_seen": 122329030, - "step": 5760 - }, - { - "epoch": 0.692719292971803, - "flos": 14828121571440.0, - "grad_norm": 6.379468361638644, - "learning_rate": 9.112228423140987e-07, - "loss": 0.99, - "num_input_tokens_seen": 122347670, - "step": 5761 - }, - { - "epoch": 0.6928395358624422, - "flos": 18448147087320.0, - "grad_norm": 4.946570647679597, - "learning_rate": 9.105694976631932e-07, - "loss": 1.0991, - "num_input_tokens_seen": 122365300, - "step": 5762 - }, - { - "epoch": 0.6929597787530812, - "flos": 16769715492960.0, - "grad_norm": 6.470313136734755, - "learning_rate": 9.099163182780283e-07, - "loss": 0.9486, - "num_input_tokens_seen": 122383175, - "step": 5763 - }, - { - "epoch": 0.6930800216437203, - "flos": 12937958182200.0, - "grad_norm": 4.657371027620198, - "learning_rate": 9.092633042576916e-07, - "loss": 0.7211, - "num_input_tokens_seen": 122400160, - "step": 5764 - }, - { - "epoch": 0.6932002645343595, - "flos": 20781403012800.0, - "grad_norm": 4.548928966854, - "learning_rate": 9.086104557012446e-07, - "loss": 0.7943, - "num_input_tokens_seen": 122420450, - "step": 5765 - }, - { - "epoch": 0.6933205074249985, - "flos": 16953938320440.0, - "grad_norm": 4.50506666839077, - "learning_rate": 9.079577727077239e-07, - "loss": 0.892, - "num_input_tokens_seen": 122439000, - "step": 5766 - }, - { - "epoch": 0.6934407503156376, - "flos": 17186157585480.0, - "grad_norm": 6.408388850745054, - "learning_rate": 9.073052553761404e-07, - "loss": 0.9343, - "num_input_tokens_seen": 122458085, - "step": 5767 - }, - { - "epoch": 0.6935609932062767, - "flos": 14645861083800.0, - "grad_norm": 3.354699718380244, - "learning_rate": 9.066529038054805e-07, - "loss": 0.9984, - "num_input_tokens_seen": 122477870, - "step": 5768 - }, - { - "epoch": 0.6936812360969158, - "flos": 12936701058240.0, - "grad_norm": 4.282458726424608, - "learning_rate": 9.060007180947071e-07, - "loss": 0.9688, - "num_input_tokens_seen": 122495645, - "step": 5769 - }, - { - "epoch": 0.6938014789875548, - "flos": 22325110633680.0, - "grad_norm": 6.6323236256085005, - "learning_rate": 
9.053486983427534e-07, - "loss": 0.9545, - "num_input_tokens_seen": 122516615, - "step": 5770 - }, - { - "epoch": 0.6939217218781939, - "flos": 12415650192240.0, - "grad_norm": 3.756523053849339, - "learning_rate": 9.046968446485326e-07, - "loss": 0.9317, - "num_input_tokens_seen": 122534740, - "step": 5771 - }, - { - "epoch": 0.6940419647688331, - "flos": 13151345701320.0, - "grad_norm": 4.014551872812451, - "learning_rate": 9.040451571109295e-07, - "loss": 0.9283, - "num_input_tokens_seen": 122550080, - "step": 5772 - }, - { - "epoch": 0.6941622076594721, - "flos": 47921363203560.0, - "grad_norm": 0.9319811184775524, - "learning_rate": 9.033936358288042e-07, - "loss": 0.8763, - "num_input_tokens_seen": 122603535, - "step": 5773 - }, - { - "epoch": 0.6942824505501112, - "flos": 18919607114640.0, - "grad_norm": 10.731901325239974, - "learning_rate": 9.027422809009937e-07, - "loss": 1.0462, - "num_input_tokens_seen": 122623200, - "step": 5774 - }, - { - "epoch": 0.6944026934407503, - "flos": 15088830973800.0, - "grad_norm": 2.8903821688543627, - "learning_rate": 9.020910924263054e-07, - "loss": 1.0506, - "num_input_tokens_seen": 122641445, - "step": 5775 - }, - { - "epoch": 0.6945229363313894, - "flos": 50616104609880.0, - "grad_norm": 0.849516554398921, - "learning_rate": 9.014400705035261e-07, - "loss": 0.8534, - "num_input_tokens_seen": 122698070, - "step": 5776 - }, - { - "epoch": 0.6946431792220285, - "flos": 13439625394200.0, - "grad_norm": 4.019070701795413, - "learning_rate": 9.00789215231414e-07, - "loss": 0.9991, - "num_input_tokens_seen": 122716185, - "step": 5777 - }, - { - "epoch": 0.6947634221126676, - "flos": 14435570382240.0, - "grad_norm": 4.282651797052257, - "learning_rate": 9.001385267087056e-07, - "loss": 1.0472, - "num_input_tokens_seen": 122735050, - "step": 5778 - }, - { - "epoch": 0.6948836650033067, - "flos": 15509780315640.0, - "grad_norm": 5.434084115296629, - "learning_rate": 8.994880050341072e-07, - "loss": 0.9227, - "num_input_tokens_seen": 122754875, - "step": 5779 - }, - { - "epoch": 0.6950039078939457, - "flos": 16820440809360.0, - "grad_norm": 3.5563435144984568, - "learning_rate": 8.988376503063026e-07, - "loss": 1.0049, - "num_input_tokens_seen": 122775855, - "step": 5780 - }, - { - "epoch": 0.6951241507845849, - "flos": 15479481146280.0, - "grad_norm": 5.546507229500329, - "learning_rate": 8.981874626239521e-07, - "loss": 1.0422, - "num_input_tokens_seen": 122794150, - "step": 5781 - }, - { - "epoch": 0.695244393675224, - "flos": 10345433947680.0, - "grad_norm": 4.56944582294004, - "learning_rate": 8.975374420856872e-07, - "loss": 1.1074, - "num_input_tokens_seen": 122810765, - "step": 5782 - }, - { - "epoch": 0.695364636565863, - "flos": 11944956703920.0, - "grad_norm": 7.022148243129472, - "learning_rate": 8.968875887901157e-07, - "loss": 0.9465, - "num_input_tokens_seen": 122827865, - "step": 5783 - }, - { - "epoch": 0.6954848794565022, - "flos": 13728273025800.0, - "grad_norm": 4.568152701476157, - "learning_rate": 8.9623790283582e-07, - "loss": 0.8368, - "num_input_tokens_seen": 122845465, - "step": 5784 - }, - { - "epoch": 0.6956051223471412, - "flos": 13466521130400.0, - "grad_norm": 4.549527472807487, - "learning_rate": 8.955883843213561e-07, - "loss": 0.9863, - "num_input_tokens_seen": 122864200, - "step": 5785 - }, - { - "epoch": 0.6957253652377803, - "flos": 11394495853800.0, - "grad_norm": 3.580278706277491, - "learning_rate": 8.949390333452569e-07, - "loss": 1.1076, - "num_input_tokens_seen": 122881865, - "step": 5786 - }, - { - "epoch": 
0.6958456081284194, - "flos": 20939680612440.0, - "grad_norm": 4.6035297120064484, - "learning_rate": 8.942898500060279e-07, - "loss": 0.8993, - "num_input_tokens_seen": 122901300, - "step": 5787 - }, - { - "epoch": 0.6959658510190585, - "flos": 17896735728840.0, - "grad_norm": 6.915323364464414, - "learning_rate": 8.936408344021493e-07, - "loss": 0.9207, - "num_input_tokens_seen": 122917935, - "step": 5788 - }, - { - "epoch": 0.6960860939096976, - "flos": 30588370328400.0, - "grad_norm": 3.5451799955526515, - "learning_rate": 8.929919866320765e-07, - "loss": 0.932, - "num_input_tokens_seen": 122938470, - "step": 5789 - }, - { - "epoch": 0.6962063368003367, - "flos": 12443281805880.0, - "grad_norm": 4.185121914308121, - "learning_rate": 8.923433067942385e-07, - "loss": 1.0385, - "num_input_tokens_seen": 122956755, - "step": 5790 - }, - { - "epoch": 0.6963265796909758, - "flos": 15091682498880.0, - "grad_norm": 5.261623050354657, - "learning_rate": 8.916947949870417e-07, - "loss": 0.9096, - "num_input_tokens_seen": 122976140, - "step": 5791 - }, - { - "epoch": 0.6964468225816148, - "flos": 49288942648800.0, - "grad_norm": 0.799966611761495, - "learning_rate": 8.910464513088615e-07, - "loss": 0.8612, - "num_input_tokens_seen": 123039900, - "step": 5792 - }, - { - "epoch": 0.696567065472254, - "flos": 13437877685280.0, - "grad_norm": 5.499436262890472, - "learning_rate": 8.903982758580542e-07, - "loss": 1.0097, - "num_input_tokens_seen": 123058560, - "step": 5793 - }, - { - "epoch": 0.696687308362893, - "flos": 16245138547560.0, - "grad_norm": 4.371849233402782, - "learning_rate": 8.897502687329457e-07, - "loss": 1.0348, - "num_input_tokens_seen": 123078080, - "step": 5794 - }, - { - "epoch": 0.6968075512535321, - "flos": 17766059081280.0, - "grad_norm": 4.228493748139136, - "learning_rate": 8.891024300318382e-07, - "loss": 1.0252, - "num_input_tokens_seen": 123096370, - "step": 5795 - }, - { - "epoch": 0.6969277941441713, - "flos": 14932393067760.0, - "grad_norm": 6.686829150055447, - "learning_rate": 8.884547598530103e-07, - "loss": 0.9836, - "num_input_tokens_seen": 123116660, - "step": 5796 - }, - { - "epoch": 0.6970480370348103, - "flos": 15327151889280.0, - "grad_norm": 3.8366442493498583, - "learning_rate": 8.8780725829471e-07, - "loss": 0.9795, - "num_input_tokens_seen": 123134285, - "step": 5797 - }, - { - "epoch": 0.6971682799254494, - "flos": 15930760319040.0, - "grad_norm": 4.508432963757617, - "learning_rate": 8.87159925455165e-07, - "loss": 0.9922, - "num_input_tokens_seen": 123153835, - "step": 5798 - }, - { - "epoch": 0.6972885228160886, - "flos": 14195624404080.0, - "grad_norm": 2.6574159643715043, - "learning_rate": 8.865127614325738e-07, - "loss": 0.9583, - "num_input_tokens_seen": 123171985, - "step": 5799 - }, - { - "epoch": 0.6974087657067276, - "flos": 27023178777960.0, - "grad_norm": 3.123030537842275, - "learning_rate": 8.85865766325113e-07, - "loss": 0.8863, - "num_input_tokens_seen": 123195635, - "step": 5800 - }, - { - "epoch": 0.6975290085973667, - "flos": 21010801414320.0, - "grad_norm": 5.59826750522504, - "learning_rate": 8.852189402309287e-07, - "loss": 0.9314, - "num_input_tokens_seen": 123214540, - "step": 5801 - }, - { - "epoch": 0.6976492514880057, - "flos": 9086265309360.0, - "grad_norm": 6.4457557849456375, - "learning_rate": 8.845722832481441e-07, - "loss": 0.9369, - "num_input_tokens_seen": 123229690, - "step": 5802 - }, - { - "epoch": 0.6977694943786449, - "flos": 17320513620240.0, - "grad_norm": 4.551178354120034, - "learning_rate": 
8.83925795474858e-07, - "loss": 0.9961, - "num_input_tokens_seen": 123249535, - "step": 5803 - }, - { - "epoch": 0.6978897372692839, - "flos": 21305949296640.0, - "grad_norm": 5.633178360441977, - "learning_rate": 8.832794770091414e-07, - "loss": 0.8285, - "num_input_tokens_seen": 123270090, - "step": 5804 - }, - { - "epoch": 0.698009980159923, - "flos": 15458043167760.0, - "grad_norm": 5.242774350651161, - "learning_rate": 8.826333279490401e-07, - "loss": 1.0489, - "num_input_tokens_seen": 123290445, - "step": 5805 - }, - { - "epoch": 0.6981302230505622, - "flos": 14088409398000.0, - "grad_norm": 4.034892720778147, - "learning_rate": 8.819873483925748e-07, - "loss": 0.9031, - "num_input_tokens_seen": 123307285, - "step": 5806 - }, - { - "epoch": 0.6982504659412012, - "flos": 15772053457560.0, - "grad_norm": 5.779329878031823, - "learning_rate": 8.81341538437739e-07, - "loss": 0.9752, - "num_input_tokens_seen": 123325295, - "step": 5807 - }, - { - "epoch": 0.6983707088318403, - "flos": 25395012576600.0, - "grad_norm": 6.609794873908455, - "learning_rate": 8.80695898182503e-07, - "loss": 0.912, - "num_input_tokens_seen": 123345995, - "step": 5808 - }, - { - "epoch": 0.6984909517224794, - "flos": 46854542706120.0, - "grad_norm": 0.855758722359473, - "learning_rate": 8.800504277248093e-07, - "loss": 0.9151, - "num_input_tokens_seen": 123410465, - "step": 5809 - }, - { - "epoch": 0.6986111946131185, - "flos": 13147206390720.0, - "grad_norm": 3.8182521014742594, - "learning_rate": 8.794051271625753e-07, - "loss": 0.9707, - "num_input_tokens_seen": 123427820, - "step": 5810 - }, - { - "epoch": 0.6987314375037575, - "flos": 16376305780080.0, - "grad_norm": 2.2717861194881386, - "learning_rate": 8.787599965936925e-07, - "loss": 1.0607, - "num_input_tokens_seen": 123448470, - "step": 5811 - }, - { - "epoch": 0.6988516803943967, - "flos": 27416220552120.0, - "grad_norm": 2.878766122458958, - "learning_rate": 8.781150361160261e-07, - "loss": 0.9541, - "num_input_tokens_seen": 123470100, - "step": 5812 - }, - { - "epoch": 0.6989719232850358, - "flos": 17136505423680.0, - "grad_norm": 3.465652319866675, - "learning_rate": 8.774702458274181e-07, - "loss": 0.96, - "num_input_tokens_seen": 123490225, - "step": 5813 - }, - { - "epoch": 0.6990921661756748, - "flos": 10503067654560.0, - "grad_norm": 8.334099035053173, - "learning_rate": 8.768256258256799e-07, - "loss": 0.9143, - "num_input_tokens_seen": 123506570, - "step": 5814 - }, - { - "epoch": 0.699212409066314, - "flos": 14331390870600.0, - "grad_norm": 3.585013210213768, - "learning_rate": 8.76181176208602e-07, - "loss": 0.969, - "num_input_tokens_seen": 123524390, - "step": 5815 - }, - { - "epoch": 0.699332651956953, - "flos": 13781052666720.0, - "grad_norm": 3.640249601336999, - "learning_rate": 8.755368970739461e-07, - "loss": 0.9517, - "num_input_tokens_seen": 123543470, - "step": 5816 - }, - { - "epoch": 0.6994528948475921, - "flos": 11420717035680.0, - "grad_norm": 4.262332973395982, - "learning_rate": 8.748927885194479e-07, - "loss": 0.8289, - "num_input_tokens_seen": 123561495, - "step": 5817 - }, - { - "epoch": 0.6995731377382313, - "flos": 46502291902920.0, - "grad_norm": 0.7408182463452436, - "learning_rate": 8.742488506428209e-07, - "loss": 0.8123, - "num_input_tokens_seen": 123620305, - "step": 5818 - }, - { - "epoch": 0.6996933806288703, - "flos": 17713770025320.0, - "grad_norm": 3.823233955929836, - "learning_rate": 8.736050835417466e-07, - "loss": 1.0107, - "num_input_tokens_seen": 123640065, - "step": 5819 - }, - { - "epoch": 
0.6998136235195094, - "flos": 14751205734720.0, - "grad_norm": 4.375048609872251, - "learning_rate": 8.729614873138862e-07, - "loss": 0.8387, - "num_input_tokens_seen": 123657420, - "step": 5820 - }, - { - "epoch": 0.6999338664101485, - "flos": 16875428082600.0, - "grad_norm": 4.221677541456486, - "learning_rate": 8.723180620568716e-07, - "loss": 1.0034, - "num_input_tokens_seen": 123676395, - "step": 5821 - }, - { - "epoch": 0.7000541093007876, - "flos": 14094756340920.0, - "grad_norm": 8.433072206980356, - "learning_rate": 8.716748078683116e-07, - "loss": 1.0887, - "num_input_tokens_seen": 123692890, - "step": 5822 - }, - { - "epoch": 0.7001743521914267, - "flos": 21148192943520.0, - "grad_norm": 5.844746713754944, - "learning_rate": 8.710317248457855e-07, - "loss": 0.9081, - "num_input_tokens_seen": 123712225, - "step": 5823 - }, - { - "epoch": 0.7002945950820658, - "flos": 19574768722920.0, - "grad_norm": 7.464975081425831, - "learning_rate": 8.703888130868482e-07, - "loss": 0.9444, - "num_input_tokens_seen": 123733795, - "step": 5824 - }, - { - "epoch": 0.7004148379727049, - "flos": 15742674135000.0, - "grad_norm": 8.595524053392056, - "learning_rate": 8.697460726890307e-07, - "loss": 1.0442, - "num_input_tokens_seen": 123750660, - "step": 5825 - }, - { - "epoch": 0.7005350808633439, - "flos": 13777526587320.0, - "grad_norm": 4.698705681675307, - "learning_rate": 8.691035037498354e-07, - "loss": 1.1189, - "num_input_tokens_seen": 123766370, - "step": 5826 - }, - { - "epoch": 0.7006553237539831, - "flos": 16690714670160.0, - "grad_norm": 6.229039866029747, - "learning_rate": 8.684611063667391e-07, - "loss": 0.9592, - "num_input_tokens_seen": 123786555, - "step": 5827 - }, - { - "epoch": 0.7007755666446221, - "flos": 22250709044880.0, - "grad_norm": 3.091718372690617, - "learning_rate": 8.678188806371935e-07, - "loss": 0.9857, - "num_input_tokens_seen": 123808310, - "step": 5828 - }, - { - "epoch": 0.7008958095352612, - "flos": 12862452777240.0, - "grad_norm": 5.090845883274309, - "learning_rate": 8.671768266586228e-07, - "loss": 1.0779, - "num_input_tokens_seen": 123826155, - "step": 5829 - }, - { - "epoch": 0.7010160524259004, - "flos": 19547719678920.0, - "grad_norm": 7.782653436909738, - "learning_rate": 8.665349445284275e-07, - "loss": 1.0067, - "num_input_tokens_seen": 123845615, - "step": 5830 - }, - { - "epoch": 0.7011362953165394, - "flos": 16953355750800.0, - "grad_norm": 2.8433643121436734, - "learning_rate": 8.658932343439799e-07, - "loss": 1.0363, - "num_input_tokens_seen": 123865120, - "step": 5831 - }, - { - "epoch": 0.7012565382071785, - "flos": 17658138859320.0, - "grad_norm": 3.3650255099685733, - "learning_rate": 8.65251696202627e-07, - "loss": 0.9898, - "num_input_tokens_seen": 123881220, - "step": 5832 - }, - { - "epoch": 0.7013767810978175, - "flos": 15196444580160.0, - "grad_norm": 4.00794836933132, - "learning_rate": 8.646103302016896e-07, - "loss": 1.0988, - "num_input_tokens_seen": 123899910, - "step": 5833 - }, - { - "epoch": 0.7014970239884567, - "flos": 11810753976960.0, - "grad_norm": 6.312209018761435, - "learning_rate": 8.639691364384614e-07, - "loss": 1.1123, - "num_input_tokens_seen": 123917255, - "step": 5834 - }, - { - "epoch": 0.7016172668790958, - "flos": 8851133196120.0, - "grad_norm": 3.820383519797755, - "learning_rate": 8.633281150102136e-07, - "loss": 0.944, - "num_input_tokens_seen": 123933825, - "step": 5835 - }, - { - "epoch": 0.7017375097697348, - "flos": 12360877549920.0, - "grad_norm": 18.6626031176509, - "learning_rate": 
8.626872660141855e-07, - "loss": 0.9045, - "num_input_tokens_seen": 123951455, - "step": 5836 - }, - { - "epoch": 0.701857752660374, - "flos": 13123192841160.0, - "grad_norm": 7.5086665625371385, - "learning_rate": 8.620465895475957e-07, - "loss": 0.9743, - "num_input_tokens_seen": 123969395, - "step": 5837 - }, - { - "epoch": 0.701977995551013, - "flos": 17373017307120.0, - "grad_norm": 2.538742937372133, - "learning_rate": 8.614060857076333e-07, - "loss": 0.9795, - "num_input_tokens_seen": 123989785, - "step": 5838 - }, - { - "epoch": 0.7020982384416521, - "flos": 16350145921320.0, - "grad_norm": 3.2553739866704747, - "learning_rate": 8.60765754591462e-07, - "loss": 0.9665, - "num_input_tokens_seen": 124009200, - "step": 5839 - }, - { - "epoch": 0.7022184813322913, - "flos": 14514908482200.0, - "grad_norm": 4.452025588997976, - "learning_rate": 8.601255962962211e-07, - "loss": 0.9627, - "num_input_tokens_seen": 124027930, - "step": 5840 - }, - { - "epoch": 0.7023387242229303, - "flos": 14041363468800.0, - "grad_norm": 4.196506209655334, - "learning_rate": 8.594856109190194e-07, - "loss": 0.9556, - "num_input_tokens_seen": 124044680, - "step": 5841 - }, - { - "epoch": 0.7024589671135694, - "flos": 23721854770560.0, - "grad_norm": 4.012946872248286, - "learning_rate": 8.588457985569446e-07, - "loss": 0.9284, - "num_input_tokens_seen": 124067310, - "step": 5842 - }, - { - "epoch": 0.7025792100042085, - "flos": 13544602106400.0, - "grad_norm": 4.388679046013834, - "learning_rate": 8.582061593070542e-07, - "loss": 0.9397, - "num_input_tokens_seen": 124087760, - "step": 5843 - }, - { - "epoch": 0.7026994528948476, - "flos": 13439349440160.0, - "grad_norm": 5.807310679679731, - "learning_rate": 8.57566693266383e-07, - "loss": 0.9817, - "num_input_tokens_seen": 124105170, - "step": 5844 - }, - { - "epoch": 0.7028196957854866, - "flos": 13859133642720.0, - "grad_norm": 3.662633946804361, - "learning_rate": 8.569274005319354e-07, - "loss": 0.9255, - "num_input_tokens_seen": 124123290, - "step": 5845 - }, - { - "epoch": 0.7029399386761258, - "flos": 14798987541360.0, - "grad_norm": 4.263658901864138, - "learning_rate": 8.562882812006913e-07, - "loss": 1.0309, - "num_input_tokens_seen": 124140500, - "step": 5846 - }, - { - "epoch": 0.7030601815667649, - "flos": 15669131069880.0, - "grad_norm": 3.393810410778581, - "learning_rate": 8.556493353696066e-07, - "loss": 0.9956, - "num_input_tokens_seen": 124159220, - "step": 5847 - }, - { - "epoch": 0.7031804244574039, - "flos": 19365735145320.0, - "grad_norm": 7.219024785083942, - "learning_rate": 8.550105631356077e-07, - "loss": 0.8902, - "num_input_tokens_seen": 124178665, - "step": 5848 - }, - { - "epoch": 0.7033006673480431, - "flos": 15902116873920.0, - "grad_norm": 4.4476022840020395, - "learning_rate": 8.543719645955961e-07, - "loss": 0.9894, - "num_input_tokens_seen": 124196715, - "step": 5849 - }, - { - "epoch": 0.7034209102386821, - "flos": 17581192361040.0, - "grad_norm": 6.53372014256558, - "learning_rate": 8.537335398464467e-07, - "loss": 0.9692, - "num_input_tokens_seen": 124216755, - "step": 5850 - }, - { - "epoch": 0.7035411531293212, - "flos": 16028010318120.0, - "grad_norm": 7.058576185904406, - "learning_rate": 8.53095288985007e-07, - "loss": 1.076, - "num_input_tokens_seen": 124230210, - "step": 5851 - }, - { - "epoch": 0.7036613960199604, - "flos": 16114645869360.0, - "grad_norm": 4.788339382919773, - "learning_rate": 8.524572121081009e-07, - "loss": 1.0443, - "num_input_tokens_seen": 124250030, - "step": 5852 - }, - { - "epoch": 
0.7037816389105994, - "flos": 15983202682800.0, - "grad_norm": 4.912651405233808, - "learning_rate": 8.518193093125232e-07, - "loss": 0.8493, - "num_input_tokens_seen": 124268805, - "step": 5853 - }, - { - "epoch": 0.7039018818012385, - "flos": 19285538521680.0, - "grad_norm": 16.936160326689404, - "learning_rate": 8.511815806950436e-07, - "loss": 1.0319, - "num_input_tokens_seen": 124289555, - "step": 5854 - }, - { - "epoch": 0.7040221246918776, - "flos": 12575092931160.0, - "grad_norm": 3.7997869933811237, - "learning_rate": 8.505440263524044e-07, - "loss": 1.0007, - "num_input_tokens_seen": 124308120, - "step": 5855 - }, - { - "epoch": 0.7041423675825167, - "flos": 11517231157320.0, - "grad_norm": 4.260123593323967, - "learning_rate": 8.49906646381322e-07, - "loss": 1.0874, - "num_input_tokens_seen": 124320675, - "step": 5856 - }, - { - "epoch": 0.7042626104731557, - "flos": 18132695704200.0, - "grad_norm": 4.882092134361911, - "learning_rate": 8.492694408784884e-07, - "loss": 0.9455, - "num_input_tokens_seen": 124340650, - "step": 5857 - }, - { - "epoch": 0.7043828533637949, - "flos": 12652346045040.0, - "grad_norm": 5.138755759957662, - "learning_rate": 8.486324099405642e-07, - "loss": 0.8201, - "num_input_tokens_seen": 124357215, - "step": 5858 - }, - { - "epoch": 0.704503096254434, - "flos": 21015063371160.0, - "grad_norm": 2.702649113214014, - "learning_rate": 8.479955536641887e-07, - "loss": 0.9835, - "num_input_tokens_seen": 124378430, - "step": 5859 - }, - { - "epoch": 0.704623339145073, - "flos": 22040724958920.0, - "grad_norm": 63.400125612922686, - "learning_rate": 8.473588721459716e-07, - "loss": 0.8845, - "num_input_tokens_seen": 124398060, - "step": 5860 - }, - { - "epoch": 0.7047435820357122, - "flos": 17005552822080.0, - "grad_norm": 65.37021971050733, - "learning_rate": 8.467223654824967e-07, - "loss": 0.922, - "num_input_tokens_seen": 124417235, - "step": 5861 - }, - { - "epoch": 0.7048638249263512, - "flos": 33232110464280.0, - "grad_norm": 4.555528723371514, - "learning_rate": 8.460860337703233e-07, - "loss": 0.8463, - "num_input_tokens_seen": 124437560, - "step": 5862 - }, - { - "epoch": 0.7049840678169903, - "flos": 15406122050520.0, - "grad_norm": 3.4346377854475603, - "learning_rate": 8.454498771059797e-07, - "loss": 0.9246, - "num_input_tokens_seen": 124456655, - "step": 5863 - }, - { - "epoch": 0.7051043107076294, - "flos": 13045817081040.0, - "grad_norm": 4.755310904269525, - "learning_rate": 8.448138955859725e-07, - "loss": 1.0416, - "num_input_tokens_seen": 124472960, - "step": 5864 - }, - { - "epoch": 0.7052245535982685, - "flos": 13702910367600.0, - "grad_norm": 5.793148848241417, - "learning_rate": 8.44178089306778e-07, - "loss": 1.1306, - "num_input_tokens_seen": 124490615, - "step": 5865 - }, - { - "epoch": 0.7053447964889076, - "flos": 13518074308920.0, - "grad_norm": 6.157832063432353, - "learning_rate": 8.4354245836485e-07, - "loss": 1.0035, - "num_input_tokens_seen": 124508780, - "step": 5866 - }, - { - "epoch": 0.7054650393795466, - "flos": 19495185330480.0, - "grad_norm": 3.018318199093091, - "learning_rate": 8.429070028566108e-07, - "loss": 0.9518, - "num_input_tokens_seen": 124529810, - "step": 5867 - }, - { - "epoch": 0.7055852822701858, - "flos": 11390203235400.0, - "grad_norm": 3.574565599632243, - "learning_rate": 8.422717228784586e-07, - "loss": 0.9726, - "num_input_tokens_seen": 124546405, - "step": 5868 - }, - { - "epoch": 0.7057055251608249, - "flos": 8221548876960.0, - "grad_norm": 5.084955029734181, - "learning_rate": 
8.416366185267663e-07, - "loss": 0.9252, - "num_input_tokens_seen": 124563625, - "step": 5869 - }, - { - "epoch": 0.7058257680514639, - "flos": 16140959035920.0, - "grad_norm": 2.9874874481831313, - "learning_rate": 8.410016898978778e-07, - "loss": 1.004, - "num_input_tokens_seen": 124580820, - "step": 5870 - }, - { - "epoch": 0.7059460109421031, - "flos": 12416600700600.0, - "grad_norm": 5.639682849597393, - "learning_rate": 8.403669370881115e-07, - "loss": 1.0295, - "num_input_tokens_seen": 124599275, - "step": 5871 - }, - { - "epoch": 0.7060662538327421, - "flos": 16738895077080.0, - "grad_norm": 8.97121786264077, - "learning_rate": 8.397323601937587e-07, - "loss": 1.0015, - "num_input_tokens_seen": 124618895, - "step": 5872 - }, - { - "epoch": 0.7061864967233812, - "flos": 21565953483120.0, - "grad_norm": 3.3934914562454463, - "learning_rate": 8.390979593110838e-07, - "loss": 1.0011, - "num_input_tokens_seen": 124640745, - "step": 5873 - }, - { - "epoch": 0.7063067396140204, - "flos": 14695881184320.0, - "grad_norm": 3.705463684966494, - "learning_rate": 8.384637345363262e-07, - "loss": 1.0305, - "num_input_tokens_seen": 124659655, - "step": 5874 - }, - { - "epoch": 0.7064269825046594, - "flos": 23007045331920.0, - "grad_norm": 7.845227106233998, - "learning_rate": 8.378296859656964e-07, - "loss": 0.9949, - "num_input_tokens_seen": 124680530, - "step": 5875 - }, - { - "epoch": 0.7065472253952985, - "flos": 21542185226040.0, - "grad_norm": 4.208707257771228, - "learning_rate": 8.371958136953792e-07, - "loss": 0.908, - "num_input_tokens_seen": 124700280, - "step": 5876 - }, - { - "epoch": 0.7066674682859376, - "flos": 11473190061000.0, - "grad_norm": 14.189450302176251, - "learning_rate": 8.365621178215326e-07, - "loss": 0.8767, - "num_input_tokens_seen": 124716470, - "step": 5877 - }, - { - "epoch": 0.7067877111765767, - "flos": 10476938457360.0, - "grad_norm": 3.9439474936002115, - "learning_rate": 8.359285984402871e-07, - "loss": 0.9706, - "num_input_tokens_seen": 124733455, - "step": 5878 - }, - { - "epoch": 0.7069079540672157, - "flos": 18100924779960.0, - "grad_norm": 5.590364593189789, - "learning_rate": 8.352952556477489e-07, - "loss": 0.9723, - "num_input_tokens_seen": 124751085, - "step": 5879 - }, - { - "epoch": 0.7070281969578549, - "flos": 17503571308440.0, - "grad_norm": 3.6918092537949803, - "learning_rate": 8.34662089539993e-07, - "loss": 0.9932, - "num_input_tokens_seen": 124770315, - "step": 5880 - }, - { - "epoch": 0.707148439848494, - "flos": 19024890442440.0, - "grad_norm": 4.439710469471446, - "learning_rate": 8.340291002130722e-07, - "loss": 1.0056, - "num_input_tokens_seen": 124789225, - "step": 5881 - }, - { - "epoch": 0.707268682739133, - "flos": 10660394745840.0, - "grad_norm": 6.220904566499014, - "learning_rate": 8.3339628776301e-07, - "loss": 1.0124, - "num_input_tokens_seen": 124807085, - "step": 5882 - }, - { - "epoch": 0.7073889256297722, - "flos": 24478681642560.0, - "grad_norm": 5.1217248267361715, - "learning_rate": 8.327636522858033e-07, - "loss": 0.7912, - "num_input_tokens_seen": 124826410, - "step": 5883 - }, - { - "epoch": 0.7075091685204112, - "flos": 14252451370920.0, - "grad_norm": 3.9788562327352777, - "learning_rate": 8.321311938774225e-07, - "loss": 0.9878, - "num_input_tokens_seen": 124845220, - "step": 5884 - }, - { - "epoch": 0.7076294114110503, - "flos": 14750623165080.0, - "grad_norm": 3.975213262247282, - "learning_rate": 8.314989126338104e-07, - "loss": 1.0158, - "num_input_tokens_seen": 124864950, - "step": 5885 - }, - { - "epoch": 
0.7077496543016895, - "flos": 12731530837200.0, - "grad_norm": 3.597311626748704, - "learning_rate": 8.308668086508847e-07, - "loss": 1.0761, - "num_input_tokens_seen": 124882750, - "step": 5886 - }, - { - "epoch": 0.7078698971923285, - "flos": 32502761898120.0, - "grad_norm": 3.4251194731208088, - "learning_rate": 8.302348820245342e-07, - "loss": 0.9586, - "num_input_tokens_seen": 124905035, - "step": 5887 - }, - { - "epoch": 0.7079901400829676, - "flos": 19182493487760.0, - "grad_norm": 5.290039873748092, - "learning_rate": 8.296031328506232e-07, - "loss": 0.9229, - "num_input_tokens_seen": 124924505, - "step": 5888 - }, - { - "epoch": 0.7081103829736067, - "flos": 17370656367000.0, - "grad_norm": 3.4987514176115413, - "learning_rate": 8.289715612249857e-07, - "loss": 0.995, - "num_input_tokens_seen": 124944840, - "step": 5889 - }, - { - "epoch": 0.7082306258642458, - "flos": 13144109573160.0, - "grad_norm": 5.201022436421061, - "learning_rate": 8.283401672434305e-07, - "loss": 0.9927, - "num_input_tokens_seen": 124959785, - "step": 5890 - }, - { - "epoch": 0.7083508687548848, - "flos": 16691419886040.0, - "grad_norm": 3.1822231220827777, - "learning_rate": 8.277089510017412e-07, - "loss": 0.9284, - "num_input_tokens_seen": 124980310, - "step": 5891 - }, - { - "epoch": 0.708471111645524, - "flos": 15931005611520.0, - "grad_norm": 4.812144476899331, - "learning_rate": 8.270779125956719e-07, - "loss": 1.038, - "num_input_tokens_seen": 125000410, - "step": 5892 - }, - { - "epoch": 0.7085913545361631, - "flos": 14855354584800.0, - "grad_norm": 6.402195089125956, - "learning_rate": 8.264470521209505e-07, - "loss": 1.0287, - "num_input_tokens_seen": 125018495, - "step": 5893 - }, - { - "epoch": 0.7087115974268021, - "flos": 10607615104920.0, - "grad_norm": 5.6656921147086585, - "learning_rate": 8.258163696732785e-07, - "loss": 0.9897, - "num_input_tokens_seen": 125035805, - "step": 5894 - }, - { - "epoch": 0.7088318403174413, - "flos": 15297619258920.0, - "grad_norm": 6.861533553091125, - "learning_rate": 8.251858653483288e-07, - "loss": 0.9865, - "num_input_tokens_seen": 125053690, - "step": 5895 - }, - { - "epoch": 0.7089520832080803, - "flos": 10969253893560.0, - "grad_norm": 3.8797515532006672, - "learning_rate": 8.245555392417501e-07, - "loss": 1.0819, - "num_input_tokens_seen": 125068065, - "step": 5896 - }, - { - "epoch": 0.7090723260987194, - "flos": 14488748623440.0, - "grad_norm": 6.051837471866596, - "learning_rate": 8.239253914491613e-07, - "loss": 1.0139, - "num_input_tokens_seen": 125086110, - "step": 5897 - }, - { - "epoch": 0.7091925689893585, - "flos": 18266131892160.0, - "grad_norm": 3.6926231356514356, - "learning_rate": 8.232954220661556e-07, - "loss": 0.9715, - "num_input_tokens_seen": 125108565, - "step": 5898 - }, - { - "epoch": 0.7093128118799976, - "flos": 17215322277120.0, - "grad_norm": 4.775478894250669, - "learning_rate": 8.226656311882989e-07, - "loss": 0.8901, - "num_input_tokens_seen": 125127595, - "step": 5899 - }, - { - "epoch": 0.7094330547706367, - "flos": 11781221346600.0, - "grad_norm": 5.422130567700162, - "learning_rate": 8.22036018911129e-07, - "loss": 0.9882, - "num_input_tokens_seen": 125145345, - "step": 5900 - }, - { - "epoch": 0.7095532976612757, - "flos": 11520235990200.0, - "grad_norm": 5.551876596272145, - "learning_rate": 8.214065853301599e-07, - "loss": 1.0317, - "num_input_tokens_seen": 125160595, - "step": 5901 - }, - { - "epoch": 0.7096735405519149, - "flos": 50649500596800.0, - "grad_norm": 0.7959853797653006, - "learning_rate": 
8.207773305408734e-07, - "loss": 0.833, - "num_input_tokens_seen": 125227535, - "step": 5902 - }, - { - "epoch": 0.709793783442554, - "flos": 16794618227760.0, - "grad_norm": 5.808730348931818, - "learning_rate": 8.201482546387288e-07, - "loss": 1.022, - "num_input_tokens_seen": 125246730, - "step": 5903 - }, - { - "epoch": 0.709914026333193, - "flos": 18499362988680.0, - "grad_norm": 2.5620946796203428, - "learning_rate": 8.195193577191553e-07, - "loss": 1.1364, - "num_input_tokens_seen": 125268280, - "step": 5904 - }, - { - "epoch": 0.7100342692238322, - "flos": 17685739811400.0, - "grad_norm": 3.5392469480166837, - "learning_rate": 8.188906398775579e-07, - "loss": 1.0709, - "num_input_tokens_seen": 125288545, - "step": 5905 - }, - { - "epoch": 0.7101545121144712, - "flos": 17736219835320.0, - "grad_norm": 5.752170650598217, - "learning_rate": 8.18262101209311e-07, - "loss": 0.9074, - "num_input_tokens_seen": 125307475, - "step": 5906 - }, - { - "epoch": 0.7102747550051103, - "flos": 16900698756120.0, - "grad_norm": 34.47773532873697, - "learning_rate": 8.176337418097626e-07, - "loss": 0.9334, - "num_input_tokens_seen": 125327665, - "step": 5907 - }, - { - "epoch": 0.7103949978957494, - "flos": 10817077944360.0, - "grad_norm": 5.944782489105337, - "learning_rate": 8.170055617742364e-07, - "loss": 1.0231, - "num_input_tokens_seen": 125344665, - "step": 5908 - }, - { - "epoch": 0.7105152407863885, - "flos": 16088700641520.0, - "grad_norm": 4.0847184822635825, - "learning_rate": 8.163775611980252e-07, - "loss": 0.929, - "num_input_tokens_seen": 125363495, - "step": 5909 - }, - { - "epoch": 0.7106354836770276, - "flos": 12207628446120.0, - "grad_norm": 5.9379015335034, - "learning_rate": 8.157497401763982e-07, - "loss": 1.0154, - "num_input_tokens_seen": 125380880, - "step": 5910 - }, - { - "epoch": 0.7107557265676667, - "flos": 14331329547480.0, - "grad_norm": 5.649163121722551, - "learning_rate": 8.151220988045935e-07, - "loss": 1.0051, - "num_input_tokens_seen": 125399855, - "step": 5911 - }, - { - "epoch": 0.7108759694583058, - "flos": 15275261433600.0, - "grad_norm": 4.976495811219179, - "learning_rate": 8.144946371778234e-07, - "loss": 1.0531, - "num_input_tokens_seen": 125419685, - "step": 5912 - }, - { - "epoch": 0.7109962123489448, - "flos": 17111480042640.0, - "grad_norm": 5.370121005664446, - "learning_rate": 8.138673553912751e-07, - "loss": 1.0071, - "num_input_tokens_seen": 125439965, - "step": 5913 - }, - { - "epoch": 0.711116455239584, - "flos": 21723985790280.0, - "grad_norm": 7.276987293336632, - "learning_rate": 8.132402535401059e-07, - "loss": 0.7913, - "num_input_tokens_seen": 125460940, - "step": 5914 - }, - { - "epoch": 0.711236698130223, - "flos": 17818256152560.0, - "grad_norm": 3.350498412903394, - "learning_rate": 8.126133317194465e-07, - "loss": 0.9729, - "num_input_tokens_seen": 125480850, - "step": 5915 - }, - { - "epoch": 0.7113569410208621, - "flos": 17215383600240.0, - "grad_norm": 4.149566784043758, - "learning_rate": 8.11986590024401e-07, - "loss": 0.9597, - "num_input_tokens_seen": 125500310, - "step": 5916 - }, - { - "epoch": 0.7114771839115013, - "flos": 25287368308680.0, - "grad_norm": 3.550027395868034, - "learning_rate": 8.113600285500442e-07, - "loss": 0.9129, - "num_input_tokens_seen": 125520240, - "step": 5917 - }, - { - "epoch": 0.7115974268021403, - "flos": 14981462659920.0, - "grad_norm": 6.3967345490199135, - "learning_rate": 8.107336473914268e-07, - "loss": 0.9681, - "num_input_tokens_seen": 125538590, - "step": 5918 - }, - { - "epoch": 
0.7117176696927794, - "flos": 40610743278000.0, - "grad_norm": 0.8213757844513936, - "learning_rate": 8.101074466435694e-07, - "loss": 0.8373, - "num_input_tokens_seen": 125597785, - "step": 5919 - }, - { - "epoch": 0.7118379125834186, - "flos": 11263788544680.0, - "grad_norm": 4.31540327890624, - "learning_rate": 8.094814264014662e-07, - "loss": 0.9131, - "num_input_tokens_seen": 125616260, - "step": 5920 - }, - { - "epoch": 0.7119581554740576, - "flos": 14331084255000.0, - "grad_norm": 4.499156872919489, - "learning_rate": 8.088555867600844e-07, - "loss": 1.0414, - "num_input_tokens_seen": 125632145, - "step": 5921 - }, - { - "epoch": 0.7120783983646967, - "flos": 24767881182240.0, - "grad_norm": 3.6461017328194965, - "learning_rate": 8.08229927814362e-07, - "loss": 0.8434, - "num_input_tokens_seen": 125654755, - "step": 5922 - }, - { - "epoch": 0.7121986412553358, - "flos": 18762249361800.0, - "grad_norm": 4.222600465418409, - "learning_rate": 8.076044496592134e-07, - "loss": 0.8667, - "num_input_tokens_seen": 125676325, - "step": 5923 - }, - { - "epoch": 0.7123188841459749, - "flos": 7828507102800.0, - "grad_norm": 4.002609832038254, - "learning_rate": 8.069791523895204e-07, - "loss": 1.0087, - "num_input_tokens_seen": 125692495, - "step": 5924 - }, - { - "epoch": 0.7124391270366139, - "flos": 14775188622720.0, - "grad_norm": 10.629292776530301, - "learning_rate": 8.063540361001422e-07, - "loss": 0.9997, - "num_input_tokens_seen": 125710785, - "step": 5925 - }, - { - "epoch": 0.7125593699272531, - "flos": 12470085557400.0, - "grad_norm": 3.86810316554564, - "learning_rate": 8.057291008859069e-07, - "loss": 1.0112, - "num_input_tokens_seen": 125728665, - "step": 5926 - }, - { - "epoch": 0.7126796128178922, - "flos": 20411087002680.0, - "grad_norm": 3.344226838809542, - "learning_rate": 8.051043468416187e-07, - "loss": 0.9072, - "num_input_tokens_seen": 125749635, - "step": 5927 - }, - { - "epoch": 0.7127998557085312, - "flos": 11342206797840.0, - "grad_norm": 3.0819320759451765, - "learning_rate": 8.044797740620506e-07, - "loss": 1.046, - "num_input_tokens_seen": 125767960, - "step": 5928 - }, - { - "epoch": 0.7129200985991703, - "flos": 16637873706120.0, - "grad_norm": 5.998414147926022, - "learning_rate": 8.038553826419494e-07, - "loss": 1.0073, - "num_input_tokens_seen": 125786390, - "step": 5929 - }, - { - "epoch": 0.7130403414898094, - "flos": 15196444580160.0, - "grad_norm": 3.840260604320264, - "learning_rate": 8.032311726760364e-07, - "loss": 1.0345, - "num_input_tokens_seen": 125807330, - "step": 5930 - }, - { - "epoch": 0.7131605843804485, - "flos": 53715514069680.0, - "grad_norm": 6.273061092576048, - "learning_rate": 8.026071442590022e-07, - "loss": 0.9203, - "num_input_tokens_seen": 125833980, - "step": 5931 - }, - { - "epoch": 0.7132808272710875, - "flos": 13019718545400.0, - "grad_norm": 4.024470579976536, - "learning_rate": 8.019832974855134e-07, - "loss": 1.032, - "num_input_tokens_seen": 125851660, - "step": 5932 - }, - { - "epoch": 0.7134010701617267, - "flos": 16529616207000.0, - "grad_norm": 4.660707119840678, - "learning_rate": 8.013596324502052e-07, - "loss": 1.0497, - "num_input_tokens_seen": 125869845, - "step": 5933 - }, - { - "epoch": 0.7135213130523658, - "flos": 16816700099040.0, - "grad_norm": 3.746509580633788, - "learning_rate": 8.007361492476872e-07, - "loss": 1.0078, - "num_input_tokens_seen": 125888890, - "step": 5934 - }, - { - "epoch": 0.7136415559430048, - "flos": 17635075818120.0, - "grad_norm": 2.6963234594928833, - "learning_rate": 
8.001128479725426e-07, - "loss": 1.0103, - "num_input_tokens_seen": 125910515, - "step": 5935 - }, - { - "epoch": 0.713761798833644, - "flos": 12968288013120.0, - "grad_norm": 5.131690333775285, - "learning_rate": 7.994897287193248e-07, - "loss": 1.026, - "num_input_tokens_seen": 125929615, - "step": 5936 - }, - { - "epoch": 0.713882041724283, - "flos": 10999737032280.0, - "grad_norm": 3.6079265115532153, - "learning_rate": 7.988667915825605e-07, - "loss": 1.0499, - "num_input_tokens_seen": 125946400, - "step": 5937 - }, - { - "epoch": 0.7140022846149221, - "flos": 17110713503640.0, - "grad_norm": 3.9406017620420966, - "learning_rate": 7.982440366567491e-07, - "loss": 0.9712, - "num_input_tokens_seen": 125964610, - "step": 5938 - }, - { - "epoch": 0.7141225275055613, - "flos": 19863722970120.0, - "grad_norm": 4.365325913120209, - "learning_rate": 7.97621464036361e-07, - "loss": 0.9793, - "num_input_tokens_seen": 125986090, - "step": 5939 - }, - { - "epoch": 0.7142427703962003, - "flos": 13964018370240.0, - "grad_norm": 5.313043364618015, - "learning_rate": 7.969990738158417e-07, - "loss": 0.9039, - "num_input_tokens_seen": 126004220, - "step": 5940 - }, - { - "epoch": 0.7143630132868394, - "flos": 14934048792000.0, - "grad_norm": 5.617891355089371, - "learning_rate": 7.963768660896062e-07, - "loss": 1.0721, - "num_input_tokens_seen": 126022350, - "step": 5941 - }, - { - "epoch": 0.7144832561774785, - "flos": 17159691111120.0, - "grad_norm": 3.9440773418495754, - "learning_rate": 7.957548409520432e-07, - "loss": 1.0525, - "num_input_tokens_seen": 126041295, - "step": 5942 - }, - { - "epoch": 0.7146034990681176, - "flos": 11551546991040.0, - "grad_norm": 8.270754976325799, - "learning_rate": 7.951329984975135e-07, - "loss": 1.0617, - "num_input_tokens_seen": 126057955, - "step": 5943 - }, - { - "epoch": 0.7147237419587567, - "flos": 49862558524800.0, - "grad_norm": 0.7312759590431892, - "learning_rate": 7.94511338820349e-07, - "loss": 0.7922, - "num_input_tokens_seen": 126119980, - "step": 5944 - }, - { - "epoch": 0.7148439848493958, - "flos": 15820387172280.0, - "grad_norm": 4.926409663894556, - "learning_rate": 7.938898620148575e-07, - "loss": 1.0102, - "num_input_tokens_seen": 126137460, - "step": 5945 - }, - { - "epoch": 0.7149642277400349, - "flos": 12705462963120.0, - "grad_norm": 5.97864642743222, - "learning_rate": 7.932685681753135e-07, - "loss": 0.9489, - "num_input_tokens_seen": 126154460, - "step": 5946 - }, - { - "epoch": 0.7150844706306739, - "flos": 22587107821560.0, - "grad_norm": 3.133477423445816, - "learning_rate": 7.92647457395969e-07, - "loss": 0.8484, - "num_input_tokens_seen": 126176005, - "step": 5947 - }, - { - "epoch": 0.7152047135213131, - "flos": 7670996042160.0, - "grad_norm": 5.304084485124817, - "learning_rate": 7.920265297710444e-07, - "loss": 0.9712, - "num_input_tokens_seen": 126193115, - "step": 5948 - }, - { - "epoch": 0.7153249564119522, - "flos": 14907612979200.0, - "grad_norm": 2.759141293885919, - "learning_rate": 7.914057853947363e-07, - "loss": 0.9493, - "num_input_tokens_seen": 126212015, - "step": 5949 - }, - { - "epoch": 0.7154451993025912, - "flos": 17241543459000.0, - "grad_norm": 25.61482863110314, - "learning_rate": 7.907852243612089e-07, - "loss": 0.8596, - "num_input_tokens_seen": 126232140, - "step": 5950 - }, - { - "epoch": 0.7155654421932304, - "flos": 16585983250440.0, - "grad_norm": 3.497278517826397, - "learning_rate": 7.901648467646009e-07, - "loss": 0.9529, - "num_input_tokens_seen": 126250800, - "step": 5951 - }, - { - "epoch": 
0.7156856850838694, - "flos": 16141234989960.0, - "grad_norm": 27.940014480663525, - "learning_rate": 7.895446526990244e-07, - "loss": 0.9523, - "num_input_tokens_seen": 126270535, - "step": 5952 - }, - { - "epoch": 0.7158059279745085, - "flos": 14095461556800.0, - "grad_norm": 4.106960501087771, - "learning_rate": 7.889246422585609e-07, - "loss": 0.983, - "num_input_tokens_seen": 126289640, - "step": 5953 - }, - { - "epoch": 0.7159261708651476, - "flos": 17163830421720.0, - "grad_norm": 7.883275845447749, - "learning_rate": 7.883048155372675e-07, - "loss": 0.9674, - "num_input_tokens_seen": 126307875, - "step": 5954 - }, - { - "epoch": 0.7160464137557867, - "flos": 12024080172960.0, - "grad_norm": 5.585328310313045, - "learning_rate": 7.876851726291698e-07, - "loss": 0.9249, - "num_input_tokens_seen": 126325895, - "step": 5955 - }, - { - "epoch": 0.7161666566464258, - "flos": 17949546031320.0, - "grad_norm": 6.079715471637177, - "learning_rate": 7.870657136282666e-07, - "loss": 0.9977, - "num_input_tokens_seen": 126344475, - "step": 5956 - }, - { - "epoch": 0.7162868995370649, - "flos": 18840759599640.0, - "grad_norm": 5.075025346063169, - "learning_rate": 7.86446438628531e-07, - "loss": 1.0461, - "num_input_tokens_seen": 126365265, - "step": 5957 - }, - { - "epoch": 0.716407142427704, - "flos": 50130657363120.0, - "grad_norm": 0.7855974921825494, - "learning_rate": 7.858273477239059e-07, - "loss": 0.8273, - "num_input_tokens_seen": 126433405, - "step": 5958 - }, - { - "epoch": 0.716527385318343, - "flos": 14724248675400.0, - "grad_norm": 3.166478569735278, - "learning_rate": 7.852084410083067e-07, - "loss": 0.9358, - "num_input_tokens_seen": 126451945, - "step": 5959 - }, - { - "epoch": 0.7166476282089821, - "flos": 18052529742120.0, - "grad_norm": 4.594187190213379, - "learning_rate": 7.84589718575621e-07, - "loss": 0.8666, - "num_input_tokens_seen": 126472110, - "step": 5960 - }, - { - "epoch": 0.7167678710996213, - "flos": 17162941236480.0, - "grad_norm": 10.19868388941298, - "learning_rate": 7.83971180519708e-07, - "loss": 0.9136, - "num_input_tokens_seen": 126490685, - "step": 5961 - }, - { - "epoch": 0.7168881139902603, - "flos": 21541725302640.0, - "grad_norm": 4.503704665371679, - "learning_rate": 7.833528269344008e-07, - "loss": 0.9724, - "num_input_tokens_seen": 126510310, - "step": 5962 - }, - { - "epoch": 0.7170083568808994, - "flos": 10315226763000.0, - "grad_norm": 4.930116565247096, - "learning_rate": 7.827346579135023e-07, - "loss": 0.999, - "num_input_tokens_seen": 126527370, - "step": 5963 - }, - { - "epoch": 0.7171285997715385, - "flos": 16586105896680.0, - "grad_norm": 5.30017869036321, - "learning_rate": 7.821166735507885e-07, - "loss": 1.0586, - "num_input_tokens_seen": 126546120, - "step": 5964 - }, - { - "epoch": 0.7172488426621776, - "flos": 11707862250840.0, - "grad_norm": 3.151342221052714, - "learning_rate": 7.81498873940007e-07, - "loss": 0.9115, - "num_input_tokens_seen": 126563055, - "step": 5965 - }, - { - "epoch": 0.7173690855528166, - "flos": 18893416594320.0, - "grad_norm": 4.648858469646154, - "learning_rate": 7.808812591748768e-07, - "loss": 0.9912, - "num_input_tokens_seen": 126583155, - "step": 5966 - }, - { - "epoch": 0.7174893284434558, - "flos": 16193033460960.0, - "grad_norm": 5.224091314852562, - "learning_rate": 7.802638293490915e-07, - "loss": 0.8694, - "num_input_tokens_seen": 126602520, - "step": 5967 - }, - { - "epoch": 0.7176095713340949, - "flos": 16558995529560.0, - "grad_norm": 4.103238589827409, - "learning_rate": 
7.796465845563123e-07, - "loss": 1.0002, - "num_input_tokens_seen": 126621115, - "step": 5968 - }, - { - "epoch": 0.7177298142247339, - "flos": 18210500726160.0, - "grad_norm": 3.6076044140758383, - "learning_rate": 7.790295248901766e-07, - "loss": 1.0256, - "num_input_tokens_seen": 126641965, - "step": 5969 - }, - { - "epoch": 0.7178500571153731, - "flos": 22567141597920.0, - "grad_norm": 4.85436380447191, - "learning_rate": 7.784126504442902e-07, - "loss": 0.8419, - "num_input_tokens_seen": 126664915, - "step": 5970 - }, - { - "epoch": 0.7179703000060121, - "flos": 13780500758640.0, - "grad_norm": 3.129334304999044, - "learning_rate": 7.777959613122351e-07, - "loss": 0.9018, - "num_input_tokens_seen": 126684460, - "step": 5971 - }, - { - "epoch": 0.7180905428966512, - "flos": 20544553852200.0, - "grad_norm": 3.107364944137899, - "learning_rate": 7.771794575875604e-07, - "loss": 1.0009, - "num_input_tokens_seen": 126706050, - "step": 5972 - }, - { - "epoch": 0.7182107857872904, - "flos": 14226322173720.0, - "grad_norm": 4.8471407927505155, - "learning_rate": 7.765631393637888e-07, - "loss": 0.9898, - "num_input_tokens_seen": 126723965, - "step": 5973 - }, - { - "epoch": 0.7183310286779294, - "flos": 16166873602200.0, - "grad_norm": 5.619931459382249, - "learning_rate": 7.75947006734417e-07, - "loss": 0.6881, - "num_input_tokens_seen": 126741465, - "step": 5974 - }, - { - "epoch": 0.7184512715685685, - "flos": 12148900462560.0, - "grad_norm": 4.7245861619029785, - "learning_rate": 7.753310597929101e-07, - "loss": 1.0569, - "num_input_tokens_seen": 126757825, - "step": 5975 - }, - { - "epoch": 0.7185715144592076, - "flos": 46904072221680.0, - "grad_norm": 0.7848597104156397, - "learning_rate": 7.747152986327095e-07, - "loss": 0.8039, - "num_input_tokens_seen": 126818090, - "step": 5976 - }, - { - "epoch": 0.7186917573498467, - "flos": 11447214171600.0, - "grad_norm": 5.166271666327455, - "learning_rate": 7.740997233472228e-07, - "loss": 0.8995, - "num_input_tokens_seen": 126835430, - "step": 5977 - }, - { - "epoch": 0.7188120002404857, - "flos": 20834427946200.0, - "grad_norm": 5.672352500130981, - "learning_rate": 7.734843340298329e-07, - "loss": 0.9329, - "num_input_tokens_seen": 126854975, - "step": 5978 - }, - { - "epoch": 0.7189322431311249, - "flos": 23823274741800.0, - "grad_norm": 4.549914415833143, - "learning_rate": 7.72869130773895e-07, - "loss": 0.9688, - "num_input_tokens_seen": 126875295, - "step": 5979 - }, - { - "epoch": 0.719052486021764, - "flos": 43915685349480.0, - "grad_norm": 0.7933071898779529, - "learning_rate": 7.722541136727343e-07, - "loss": 0.8404, - "num_input_tokens_seen": 126931030, - "step": 5980 - }, - { - "epoch": 0.719172728912403, - "flos": 11027705923080.0, - "grad_norm": 3.204832338420032, - "learning_rate": 7.716392828196483e-07, - "loss": 1.0263, - "num_input_tokens_seen": 126948550, - "step": 5981 - }, - { - "epoch": 0.7192929718030422, - "flos": 10995383090760.0, - "grad_norm": 8.018291542939687, - "learning_rate": 7.710246383079064e-07, - "loss": 0.986, - "num_input_tokens_seen": 126963655, - "step": 5982 - }, - { - "epoch": 0.7194132146936812, - "flos": 15529409262120.0, - "grad_norm": 4.865999569743522, - "learning_rate": 7.704101802307492e-07, - "loss": 1.1417, - "num_input_tokens_seen": 126975675, - "step": 5983 - }, - { - "epoch": 0.7195334575843203, - "flos": 19465683361680.0, - "grad_norm": 4.136727741004748, - "learning_rate": 7.697959086813912e-07, - "loss": 1.0985, - "num_input_tokens_seen": 126991560, - "step": 5984 - }, - { - "epoch": 
0.7196537004749595, - "flos": 13308611469480.0, - "grad_norm": 3.177316142239676, - "learning_rate": 7.691818237530145e-07, - "loss": 1.029, - "num_input_tokens_seen": 127010140, - "step": 5985 - }, - { - "epoch": 0.7197739433655985, - "flos": 17447909480880.0, - "grad_norm": 4.653684997678419, - "learning_rate": 7.685679255387774e-07, - "loss": 1.0036, - "num_input_tokens_seen": 127028175, - "step": 5986 - }, - { - "epoch": 0.7198941862562376, - "flos": 12783911877840.0, - "grad_norm": 11.009331992912912, - "learning_rate": 7.679542141318065e-07, - "loss": 0.9896, - "num_input_tokens_seen": 127045000, - "step": 5987 - }, - { - "epoch": 0.7200144291468767, - "flos": 20676272992800.0, - "grad_norm": 3.957889264277576, - "learning_rate": 7.673406896252013e-07, - "loss": 0.9944, - "num_input_tokens_seen": 127066095, - "step": 5988 - }, - { - "epoch": 0.7201346720375158, - "flos": 18054492081960.0, - "grad_norm": 3.2591432922456245, - "learning_rate": 7.667273521120347e-07, - "loss": 1.0084, - "num_input_tokens_seen": 127085375, - "step": 5989 - }, - { - "epoch": 0.7202549149281549, - "flos": 10135603169520.0, - "grad_norm": 5.120059167102836, - "learning_rate": 7.661142016853468e-07, - "loss": 1.0255, - "num_input_tokens_seen": 127102455, - "step": 5990 - }, - { - "epoch": 0.7203751578187939, - "flos": 16349226074520.0, - "grad_norm": 4.1326350442614235, - "learning_rate": 7.655012384381543e-07, - "loss": 0.9689, - "num_input_tokens_seen": 127121660, - "step": 5991 - }, - { - "epoch": 0.7204954007094331, - "flos": 16845312882600.0, - "grad_norm": 4.11492406021816, - "learning_rate": 7.648884624634415e-07, - "loss": 1.0391, - "num_input_tokens_seen": 127139930, - "step": 5992 - }, - { - "epoch": 0.7206156436000721, - "flos": 11499257935080.0, - "grad_norm": 3.8639183329132165, - "learning_rate": 7.642758738541683e-07, - "loss": 1.1135, - "num_input_tokens_seen": 127156230, - "step": 5993 - }, - { - "epoch": 0.7207358864907112, - "flos": 38903361622920.0, - "grad_norm": 0.7747243714108053, - "learning_rate": 7.636634727032621e-07, - "loss": 0.8477, - "num_input_tokens_seen": 127213055, - "step": 5994 - }, - { - "epoch": 0.7208561293813504, - "flos": 13570884611400.0, - "grad_norm": 3.509451909976123, - "learning_rate": 7.630512591036231e-07, - "loss": 1.0184, - "num_input_tokens_seen": 127232085, - "step": 5995 - }, - { - "epoch": 0.7209763722719894, - "flos": 12573989115000.0, - "grad_norm": 6.6606425088253385, - "learning_rate": 7.624392331481255e-07, - "loss": 0.8689, - "num_input_tokens_seen": 127249460, - "step": 5996 - }, - { - "epoch": 0.7210966151626285, - "flos": 47844968613360.0, - "grad_norm": 0.7372448221117326, - "learning_rate": 7.618273949296115e-07, - "loss": 0.7706, - "num_input_tokens_seen": 127308690, - "step": 5997 - }, - { - "epoch": 0.7212168580532676, - "flos": 15012742999200.0, - "grad_norm": 5.564783891867104, - "learning_rate": 7.612157445408987e-07, - "loss": 0.9087, - "num_input_tokens_seen": 127326220, - "step": 5998 - }, - { - "epoch": 0.7213371009439067, - "flos": 15877919355000.0, - "grad_norm": 11.696385560533821, - "learning_rate": 7.606042820747716e-07, - "loss": 0.9593, - "num_input_tokens_seen": 127342345, - "step": 5999 - }, - { - "epoch": 0.7214573438345457, - "flos": 13125645765960.0, - "grad_norm": 5.137320804487274, - "learning_rate": 7.599930076239889e-07, - "loss": 1.0776, - "num_input_tokens_seen": 127359350, - "step": 6000 - }, - { - "epoch": 0.7215775867251849, - "flos": 25501338397440.0, - "grad_norm": 4.563739347004009, - "learning_rate": 
7.593819212812818e-07, - "loss": 0.9353, - "num_input_tokens_seen": 127380650, - "step": 6001 - }, - { - "epoch": 0.721697829615824, - "flos": 14459307977760.0, - "grad_norm": 3.423654311403846, - "learning_rate": 7.587710231393508e-07, - "loss": 0.9505, - "num_input_tokens_seen": 127398725, - "step": 6002 - }, - { - "epoch": 0.721818072506463, - "flos": 14357274775320.0, - "grad_norm": 7.960162872871791, - "learning_rate": 7.581603132908685e-07, - "loss": 1.0537, - "num_input_tokens_seen": 127416415, - "step": 6003 - }, - { - "epoch": 0.7219383153971022, - "flos": 12889103220960.0, - "grad_norm": 7.808539523479797, - "learning_rate": 7.575497918284795e-07, - "loss": 1.0024, - "num_input_tokens_seen": 127433680, - "step": 6004 - }, - { - "epoch": 0.7220585582877412, - "flos": 12281232834360.0, - "grad_norm": 5.814994030832128, - "learning_rate": 7.569394588447984e-07, - "loss": 0.976, - "num_input_tokens_seen": 127450415, - "step": 6005 - }, - { - "epoch": 0.7221788011783803, - "flos": 12018223815000.0, - "grad_norm": 5.9801542771255205, - "learning_rate": 7.563293144324146e-07, - "loss": 1.0054, - "num_input_tokens_seen": 127465685, - "step": 6006 - }, - { - "epoch": 0.7222990440690195, - "flos": 18709990967400.0, - "grad_norm": 3.6366773166611273, - "learning_rate": 7.557193586838834e-07, - "loss": 1.0264, - "num_input_tokens_seen": 127480770, - "step": 6007 - }, - { - "epoch": 0.7224192869596585, - "flos": 12468337848480.0, - "grad_norm": 6.440134638924909, - "learning_rate": 7.551095916917371e-07, - "loss": 0.9228, - "num_input_tokens_seen": 127497820, - "step": 6008 - }, - { - "epoch": 0.7225395298502976, - "flos": 9112670460600.0, - "grad_norm": 5.078350543368684, - "learning_rate": 7.545000135484758e-07, - "loss": 0.8882, - "num_input_tokens_seen": 127514975, - "step": 6009 - }, - { - "epoch": 0.7226597727409367, - "flos": 21123106239360.0, - "grad_norm": 4.5592725652073645, - "learning_rate": 7.538906243465714e-07, - "loss": 0.8563, - "num_input_tokens_seen": 127534830, - "step": 6010 - }, - { - "epoch": 0.7227800156315758, - "flos": 9715880290080.0, - "grad_norm": 4.375024849957328, - "learning_rate": 7.5328142417847e-07, - "loss": 1.0067, - "num_input_tokens_seen": 127551315, - "step": 6011 - }, - { - "epoch": 0.7229002585222148, - "flos": 14408950600080.0, - "grad_norm": 6.606569673279802, - "learning_rate": 7.526724131365838e-07, - "loss": 0.9188, - "num_input_tokens_seen": 127571990, - "step": 6012 - }, - { - "epoch": 0.723020501412854, - "flos": 11735125925760.0, - "grad_norm": 3.325845430206064, - "learning_rate": 7.520635913133017e-07, - "loss": 0.9266, - "num_input_tokens_seen": 127590340, - "step": 6013 - }, - { - "epoch": 0.7231407443034931, - "flos": 20335581597720.0, - "grad_norm": 11.727648314361732, - "learning_rate": 7.514549588009798e-07, - "loss": 1.0443, - "num_input_tokens_seen": 127610935, - "step": 6014 - }, - { - "epoch": 0.7232609871941321, - "flos": 21385134088800.0, - "grad_norm": 3.703619429018052, - "learning_rate": 7.508465156919492e-07, - "loss": 0.9435, - "num_input_tokens_seen": 127634165, - "step": 6015 - }, - { - "epoch": 0.7233812300847713, - "flos": 11787384320160.0, - "grad_norm": 9.411613076054465, - "learning_rate": 7.502382620785083e-07, - "loss": 0.8331, - "num_input_tokens_seen": 127650435, - "step": 6016 - }, - { - "epoch": 0.7235014729754103, - "flos": 48160879919880.0, - "grad_norm": 0.889012097637907, - "learning_rate": 7.496301980529289e-07, - "loss": 0.9228, - "num_input_tokens_seen": 127713365, - "step": 6017 - }, - { - "epoch": 
0.7236217158660494, - "flos": 19182094887480.0, - "grad_norm": 4.568247897715323, - "learning_rate": 7.490223237074547e-07, - "loss": 0.9676, - "num_input_tokens_seen": 127732795, - "step": 6018 - }, - { - "epoch": 0.7237419587566886, - "flos": 20964399377880.0, - "grad_norm": 15.716852092491855, - "learning_rate": 7.484146391342989e-07, - "loss": 0.8893, - "num_input_tokens_seen": 127752310, - "step": 6019 - }, - { - "epoch": 0.7238622016473276, - "flos": 12626431478760.0, - "grad_norm": 4.978158739721566, - "learning_rate": 7.478071444256484e-07, - "loss": 0.786, - "num_input_tokens_seen": 127769790, - "step": 6020 - }, - { - "epoch": 0.7239824445379667, - "flos": 18316703900760.0, - "grad_norm": 6.821445855951202, - "learning_rate": 7.471998396736579e-07, - "loss": 1.0006, - "num_input_tokens_seen": 127789890, - "step": 6021 - }, - { - "epoch": 0.7241026874286057, - "flos": 16455214618200.0, - "grad_norm": 2.346351298938372, - "learning_rate": 7.465927249704549e-07, - "loss": 0.9825, - "num_input_tokens_seen": 127807495, - "step": 6022 - }, - { - "epoch": 0.7242229303192449, - "flos": 14384017203720.0, - "grad_norm": 9.275903671126368, - "learning_rate": 7.459858004081398e-07, - "loss": 0.993, - "num_input_tokens_seen": 127825185, - "step": 6023 - }, - { - "epoch": 0.724343173209884, - "flos": 44607493070040.0, - "grad_norm": 0.6622418746478581, - "learning_rate": 7.453790660787815e-07, - "loss": 0.8299, - "num_input_tokens_seen": 127893000, - "step": 6024 - }, - { - "epoch": 0.724463416100523, - "flos": 24976914759840.0, - "grad_norm": 4.238145114101063, - "learning_rate": 7.447725220744214e-07, - "loss": 0.8615, - "num_input_tokens_seen": 127914965, - "step": 6025 - }, - { - "epoch": 0.7245836589911622, - "flos": 15537503913960.0, - "grad_norm": 4.342243399848778, - "learning_rate": 7.441661684870717e-07, - "loss": 0.9768, - "num_input_tokens_seen": 127934940, - "step": 6026 - }, - { - "epoch": 0.7247039018818012, - "flos": 16353334723560.0, - "grad_norm": 3.3568777339102422, - "learning_rate": 7.435600054087152e-07, - "loss": 1.0416, - "num_input_tokens_seen": 127956825, - "step": 6027 - }, - { - "epoch": 0.7248241447724403, - "flos": 22616885744400.0, - "grad_norm": 3.6721615943257873, - "learning_rate": 7.42954032931308e-07, - "loss": 0.9743, - "num_input_tokens_seen": 127977585, - "step": 6028 - }, - { - "epoch": 0.7249443876630794, - "flos": 24898281875760.0, - "grad_norm": 5.459835961920385, - "learning_rate": 7.423482511467733e-07, - "loss": 0.9732, - "num_input_tokens_seen": 127998075, - "step": 6029 - }, - { - "epoch": 0.7250646305537185, - "flos": 18762402669600.0, - "grad_norm": 4.953298860500992, - "learning_rate": 7.417426601470099e-07, - "loss": 0.8795, - "num_input_tokens_seen": 128018155, - "step": 6030 - }, - { - "epoch": 0.7251848734443576, - "flos": 21437668437240.0, - "grad_norm": 15.22924993921032, - "learning_rate": 7.411372600238841e-07, - "loss": 1.0018, - "num_input_tokens_seen": 128038490, - "step": 6031 - }, - { - "epoch": 0.7253051163349967, - "flos": 12600424927800.0, - "grad_norm": 4.5733795047649926, - "learning_rate": 7.405320508692346e-07, - "loss": 0.9495, - "num_input_tokens_seen": 128056950, - "step": 6032 - }, - { - "epoch": 0.7254253592256358, - "flos": 8903667544560.0, - "grad_norm": 3.98136902156641, - "learning_rate": 7.399270327748727e-07, - "loss": 0.9641, - "num_input_tokens_seen": 128074330, - "step": 6033 - }, - { - "epoch": 0.7255456021162748, - "flos": 19366133745600.0, - "grad_norm": 5.655400333551928, - "learning_rate": 
7.39322205832577e-07, - "loss": 0.9707, - "num_input_tokens_seen": 128094940, - "step": 6034 - }, - { - "epoch": 0.725665845006914, - "flos": 15117965003880.0, - "grad_norm": 3.3973341981516425, - "learning_rate": 7.387175701341009e-07, - "loss": 1.0339, - "num_input_tokens_seen": 128113330, - "step": 6035 - }, - { - "epoch": 0.7257860878975531, - "flos": 11341562905080.0, - "grad_norm": 7.544487301020124, - "learning_rate": 7.381131257711659e-07, - "loss": 0.9459, - "num_input_tokens_seen": 128130155, - "step": 6036 - }, - { - "epoch": 0.7259063307881921, - "flos": 8536049751720.0, - "grad_norm": 5.297899715362964, - "learning_rate": 7.375088728354677e-07, - "loss": 1.0657, - "num_input_tokens_seen": 128144905, - "step": 6037 - }, - { - "epoch": 0.7260265736788313, - "flos": 21697641962160.0, - "grad_norm": 3.9940360430990376, - "learning_rate": 7.369048114186691e-07, - "loss": 0.8944, - "num_input_tokens_seen": 128165670, - "step": 6038 - }, - { - "epoch": 0.7261468165694703, - "flos": 15013080276360.0, - "grad_norm": 2.8467976095858742, - "learning_rate": 7.363009416124055e-07, - "loss": 1.0629, - "num_input_tokens_seen": 128184715, - "step": 6039 - }, - { - "epoch": 0.7262670594601094, - "flos": 15849183925200.0, - "grad_norm": 4.5059192721284385, - "learning_rate": 7.356972635082852e-07, - "loss": 0.8536, - "num_input_tokens_seen": 128203290, - "step": 6040 - }, - { - "epoch": 0.7263873023507486, - "flos": 18025940621520.0, - "grad_norm": 4.7504467001228425, - "learning_rate": 7.35093777197884e-07, - "loss": 0.9829, - "num_input_tokens_seen": 128223080, - "step": 6041 - }, - { - "epoch": 0.7265075452413876, - "flos": 16979638255800.0, - "grad_norm": 5.311985664153, - "learning_rate": 7.344904827727525e-07, - "loss": 1.076, - "num_input_tokens_seen": 128239980, - "step": 6042 - }, - { - "epoch": 0.7266277881320267, - "flos": 20461413718800.0, - "grad_norm": 9.482116329719439, - "learning_rate": 7.338873803244076e-07, - "loss": 0.9349, - "num_input_tokens_seen": 128254935, - "step": 6043 - }, - { - "epoch": 0.7267480310226658, - "flos": 17687426197200.0, - "grad_norm": 4.930924298223859, - "learning_rate": 7.332844699443401e-07, - "loss": 1.0275, - "num_input_tokens_seen": 128273255, - "step": 6044 - }, - { - "epoch": 0.7268682739133049, - "flos": 19365520514400.0, - "grad_norm": 6.1191901854241, - "learning_rate": 7.326817517240121e-07, - "loss": 0.9768, - "num_input_tokens_seen": 128294680, - "step": 6045 - }, - { - "epoch": 0.7269885168039439, - "flos": 23899822639800.0, - "grad_norm": 6.224942445297437, - "learning_rate": 7.320792257548545e-07, - "loss": 1.0569, - "num_input_tokens_seen": 128315575, - "step": 6046 - }, - { - "epoch": 0.7271087596945831, - "flos": 17291992821360.0, - "grad_norm": 8.284052536544095, - "learning_rate": 7.314768921282704e-07, - "loss": 0.9817, - "num_input_tokens_seen": 128335950, - "step": 6047 - }, - { - "epoch": 0.7272290025852222, - "flos": 16926766630200.0, - "grad_norm": 4.733486583180975, - "learning_rate": 7.30874750935633e-07, - "loss": 0.9397, - "num_input_tokens_seen": 128355355, - "step": 6048 - }, - { - "epoch": 0.7273492454758612, - "flos": 11834184956880.0, - "grad_norm": 5.664893190701882, - "learning_rate": 7.30272802268286e-07, - "loss": 1.012, - "num_input_tokens_seen": 128372070, - "step": 6049 - }, - { - "epoch": 0.7274694883665004, - "flos": 19963211263080.0, - "grad_norm": 2.9285267882982176, - "learning_rate": 7.29671046217547e-07, - "loss": 0.9917, - "num_input_tokens_seen": 128390900, - "step": 6050 - }, - { - "epoch": 
0.7275897312571394, - "flos": 21646763337960.0, - "grad_norm": 5.129291133573818, - "learning_rate": 7.290694828746988e-07, - "loss": 1.0421, - "num_input_tokens_seen": 128410285, - "step": 6051 - }, - { - "epoch": 0.7277099741477785, - "flos": 13620138172920.0, - "grad_norm": 3.2748069356839067, - "learning_rate": 7.284681123310004e-07, - "loss": 1.0821, - "num_input_tokens_seen": 128428720, - "step": 6052 - }, - { - "epoch": 0.7278302170384175, - "flos": 14672143588800.0, - "grad_norm": 3.12662662133422, - "learning_rate": 7.27866934677678e-07, - "loss": 1.012, - "num_input_tokens_seen": 128448110, - "step": 6053 - }, - { - "epoch": 0.7279504599290567, - "flos": 13539665595240.0, - "grad_norm": 3.950137008029234, - "learning_rate": 7.272659500059297e-07, - "loss": 1.0016, - "num_input_tokens_seen": 128465170, - "step": 6054 - }, - { - "epoch": 0.7280707028196958, - "flos": 13518104970480.0, - "grad_norm": 4.658263110528342, - "learning_rate": 7.266651584069264e-07, - "loss": 1.0305, - "num_input_tokens_seen": 128482555, - "step": 6055 - }, - { - "epoch": 0.7281909457103348, - "flos": 26550860226960.0, - "grad_norm": 5.350012606311813, - "learning_rate": 7.260645599718045e-07, - "loss": 0.804, - "num_input_tokens_seen": 128508630, - "step": 6056 - }, - { - "epoch": 0.728311188600974, - "flos": 14671469034480.0, - "grad_norm": 6.085940320622742, - "learning_rate": 7.254641547916767e-07, - "loss": 0.8993, - "num_input_tokens_seen": 128525845, - "step": 6057 - }, - { - "epoch": 0.728431431491613, - "flos": 20545780314600.0, - "grad_norm": 5.18284121974915, - "learning_rate": 7.248639429576226e-07, - "loss": 0.9152, - "num_input_tokens_seen": 128545020, - "step": 6058 - }, - { - "epoch": 0.7285516743822521, - "flos": 18497983218480.0, - "grad_norm": 5.816376408691918, - "learning_rate": 7.242639245606959e-07, - "loss": 0.948, - "num_input_tokens_seen": 128564530, - "step": 6059 - }, - { - "epoch": 0.7286719172728913, - "flos": 11604786555360.0, - "grad_norm": 4.424761473697886, - "learning_rate": 7.236640996919168e-07, - "loss": 1.0493, - "num_input_tokens_seen": 128583295, - "step": 6060 - }, - { - "epoch": 0.7287921601635303, - "flos": 15642603272400.0, - "grad_norm": 2.4080849128909203, - "learning_rate": 7.230644684422782e-07, - "loss": 0.9264, - "num_input_tokens_seen": 128603245, - "step": 6061 - }, - { - "epoch": 0.7289124030541694, - "flos": 17497745612040.0, - "grad_norm": 4.287551585963127, - "learning_rate": 7.224650309027451e-07, - "loss": 1.0489, - "num_input_tokens_seen": 128622715, - "step": 6062 - }, - { - "epoch": 0.7290326459448085, - "flos": 15193194454800.0, - "grad_norm": 3.0808917164208394, - "learning_rate": 7.218657871642506e-07, - "loss": 0.9082, - "num_input_tokens_seen": 128641240, - "step": 6063 - }, - { - "epoch": 0.7291528888354476, - "flos": 13177076298240.0, - "grad_norm": 5.683141694329138, - "learning_rate": 7.212667373177012e-07, - "loss": 0.8388, - "num_input_tokens_seen": 128655955, - "step": 6064 - }, - { - "epoch": 0.7292731317260867, - "flos": 13438030993080.0, - "grad_norm": 11.359317910576967, - "learning_rate": 7.206678814539704e-07, - "loss": 0.9829, - "num_input_tokens_seen": 128673975, - "step": 6065 - }, - { - "epoch": 0.7293933746167258, - "flos": 14959166157720.0, - "grad_norm": 2.5653268991708202, - "learning_rate": 7.20069219663904e-07, - "loss": 0.9645, - "num_input_tokens_seen": 128693580, - "step": 6066 - }, - { - "epoch": 0.7295136175073649, - "flos": 15954988499520.0, - "grad_norm": 10.7348760629508, - "learning_rate": 
7.1947075203832e-07, - "loss": 1.0122, - "num_input_tokens_seen": 128713280, - "step": 6067 - }, - { - "epoch": 0.7296338603980039, - "flos": 40158912197160.0, - "grad_norm": 0.8611294264195802, - "learning_rate": 7.188724786680049e-07, - "loss": 0.8424, - "num_input_tokens_seen": 128773470, - "step": 6068 - }, - { - "epoch": 0.7297541032886431, - "flos": 17949576692880.0, - "grad_norm": 3.466504719203728, - "learning_rate": 7.182743996437162e-07, - "loss": 0.9721, - "num_input_tokens_seen": 128792725, - "step": 6069 - }, - { - "epoch": 0.7298743461792822, - "flos": 18840085045320.0, - "grad_norm": 5.277740034448061, - "learning_rate": 7.176765150561819e-07, - "loss": 0.9081, - "num_input_tokens_seen": 128811050, - "step": 6070 - }, - { - "epoch": 0.7299945890699212, - "flos": 13881859406760.0, - "grad_norm": 8.072579640562406, - "learning_rate": 7.170788249961002e-07, - "loss": 1.0084, - "num_input_tokens_seen": 128829280, - "step": 6071 - }, - { - "epoch": 0.7301148319605604, - "flos": 16296998341680.0, - "grad_norm": 11.736522980835828, - "learning_rate": 7.164813295541418e-07, - "loss": 1.0991, - "num_input_tokens_seen": 128848565, - "step": 6072 - }, - { - "epoch": 0.7302350748511994, - "flos": 18050598063840.0, - "grad_norm": 3.4806068470077887, - "learning_rate": 7.15884028820944e-07, - "loss": 0.9206, - "num_input_tokens_seen": 128867340, - "step": 6073 - }, - { - "epoch": 0.7303553177418385, - "flos": 19811648545080.0, - "grad_norm": 4.241452991272313, - "learning_rate": 7.152869228871185e-07, - "loss": 0.809, - "num_input_tokens_seen": 128889545, - "step": 6074 - }, - { - "epoch": 0.7304755606324776, - "flos": 17373477230520.0, - "grad_norm": 3.2857533387416282, - "learning_rate": 7.146900118432457e-07, - "loss": 0.942, - "num_input_tokens_seen": 128909010, - "step": 6075 - }, - { - "epoch": 0.7305958035231167, - "flos": 16952006642160.0, - "grad_norm": 2.5099425969645943, - "learning_rate": 7.140932957798753e-07, - "loss": 1.0881, - "num_input_tokens_seen": 128927170, - "step": 6076 - }, - { - "epoch": 0.7307160464137558, - "flos": 11839796022360.0, - "grad_norm": 6.999125834786218, - "learning_rate": 7.134967747875309e-07, - "loss": 0.9336, - "num_input_tokens_seen": 128945100, - "step": 6077 - }, - { - "epoch": 0.7308362893043949, - "flos": 15484448319000.0, - "grad_norm": 4.286446195502572, - "learning_rate": 7.129004489567014e-07, - "loss": 1.0478, - "num_input_tokens_seen": 128962300, - "step": 6078 - }, - { - "epoch": 0.730956532195034, - "flos": 7513730274000.0, - "grad_norm": 5.315389408405397, - "learning_rate": 7.123043183778512e-07, - "loss": 1.0064, - "num_input_tokens_seen": 128979350, - "step": 6079 - }, - { - "epoch": 0.731076775085673, - "flos": 14042620592760.0, - "grad_norm": 8.853901028107742, - "learning_rate": 7.117083831414114e-07, - "loss": 0.8725, - "num_input_tokens_seen": 128998345, - "step": 6080 - }, - { - "epoch": 0.7311970179763122, - "flos": 14513160773280.0, - "grad_norm": 5.17252882774731, - "learning_rate": 7.11112643337787e-07, - "loss": 0.9272, - "num_input_tokens_seen": 129017110, - "step": 6081 - }, - { - "epoch": 0.7313172608669513, - "flos": 13123867395480.0, - "grad_norm": 17.459221986476, - "learning_rate": 7.10517099057349e-07, - "loss": 0.9948, - "num_input_tokens_seen": 129033780, - "step": 6082 - }, - { - "epoch": 0.7314375037575903, - "flos": 11447398140960.0, - "grad_norm": 5.648973030672879, - "learning_rate": 7.099217503904411e-07, - "loss": 0.8069, - "num_input_tokens_seen": 129051355, - "step": 6083 - }, - { - "epoch": 
0.7315577466482295, - "flos": 12731745468120.0, - "grad_norm": 9.699253753586657, - "learning_rate": 7.093265974273788e-07, - "loss": 1.1319, - "num_input_tokens_seen": 129068970, - "step": 6084 - }, - { - "epoch": 0.7316779895388685, - "flos": 13046123696640.0, - "grad_norm": 4.954072975874797, - "learning_rate": 7.087316402584447e-07, - "loss": 0.953, - "num_input_tokens_seen": 129087515, - "step": 6085 - }, - { - "epoch": 0.7317982324295076, - "flos": 12702826068960.0, - "grad_norm": 5.729998090121232, - "learning_rate": 7.081368789738953e-07, - "loss": 1.0822, - "num_input_tokens_seen": 129104435, - "step": 6086 - }, - { - "epoch": 0.7319184753201466, - "flos": 19387357093200.0, - "grad_norm": 6.868471131985485, - "learning_rate": 7.075423136639537e-07, - "loss": 1.0068, - "num_input_tokens_seen": 129123410, - "step": 6087 - }, - { - "epoch": 0.7320387182107858, - "flos": 26731710282840.0, - "grad_norm": 4.072172417248658, - "learning_rate": 7.069479444188149e-07, - "loss": 0.9592, - "num_input_tokens_seen": 129143720, - "step": 6088 - }, - { - "epoch": 0.7321589611014249, - "flos": 12653541845880.0, - "grad_norm": 4.144589465970141, - "learning_rate": 7.063537713286453e-07, - "loss": 1.0435, - "num_input_tokens_seen": 129161120, - "step": 6089 - }, - { - "epoch": 0.7322792039920639, - "flos": 18576033532920.0, - "grad_norm": 3.6947530762548433, - "learning_rate": 7.057597944835803e-07, - "loss": 1.0312, - "num_input_tokens_seen": 129180115, - "step": 6090 - }, - { - "epoch": 0.7323994468827031, - "flos": 18050076817320.0, - "grad_norm": 3.5365374654119948, - "learning_rate": 7.051660139737253e-07, - "loss": 0.9738, - "num_input_tokens_seen": 129198055, - "step": 6091 - }, - { - "epoch": 0.7325196897733421, - "flos": 19156149659640.0, - "grad_norm": 5.006333345440935, - "learning_rate": 7.045724298891565e-07, - "loss": 0.9832, - "num_input_tokens_seen": 129217245, - "step": 6092 - }, - { - "epoch": 0.7326399326639812, - "flos": 18343078390440.0, - "grad_norm": 8.44990768833638, - "learning_rate": 7.039790423199192e-07, - "loss": 0.9205, - "num_input_tokens_seen": 129236605, - "step": 6093 - }, - { - "epoch": 0.7327601755546204, - "flos": 14933956807320.0, - "grad_norm": 5.213850634016849, - "learning_rate": 7.033858513560322e-07, - "loss": 0.998, - "num_input_tokens_seen": 129252620, - "step": 6094 - }, - { - "epoch": 0.7328804184452594, - "flos": 11525233824480.0, - "grad_norm": 6.029772641781136, - "learning_rate": 7.027928570874794e-07, - "loss": 0.9813, - "num_input_tokens_seen": 129270530, - "step": 6095 - }, - { - "epoch": 0.7330006613358985, - "flos": 12652867291560.0, - "grad_norm": 3.5864440959049895, - "learning_rate": 7.022000596042194e-07, - "loss": 1.0759, - "num_input_tokens_seen": 129287350, - "step": 6096 - }, - { - "epoch": 0.7331209042265376, - "flos": 15983141359680.0, - "grad_norm": 16.926058462563724, - "learning_rate": 7.016074589961784e-07, - "loss": 1.0393, - "num_input_tokens_seen": 129305635, - "step": 6097 - }, - { - "epoch": 0.7332411471171767, - "flos": 23587253443320.0, - "grad_norm": 3.8830387041912253, - "learning_rate": 7.01015055353253e-07, - "loss": 0.8971, - "num_input_tokens_seen": 129327780, - "step": 6098 - }, - { - "epoch": 0.7333613900078157, - "flos": 16162887599400.0, - "grad_norm": 3.694872711012646, - "learning_rate": 7.004228487653123e-07, - "loss": 0.9983, - "num_input_tokens_seen": 129348305, - "step": 6099 - }, - { - "epoch": 0.7334816328984549, - "flos": 15878256632160.0, - "grad_norm": 4.2292356939586275, - "learning_rate": 
6.998308393221906e-07, - "loss": 1.0072, - "num_input_tokens_seen": 129366430, - "step": 6100 - }, - { - "epoch": 0.733601875789094, - "flos": 14720569288200.0, - "grad_norm": 5.520154784147124, - "learning_rate": 6.992390271136977e-07, - "loss": 0.9402, - "num_input_tokens_seen": 129381860, - "step": 6101 - }, - { - "epoch": 0.733722118679733, - "flos": 16035001153800.0, - "grad_norm": 3.9402283192816183, - "learning_rate": 6.986474122296094e-07, - "loss": 1.083, - "num_input_tokens_seen": 129400695, - "step": 6102 - }, - { - "epoch": 0.7338423615703722, - "flos": 14252666001840.0, - "grad_norm": 3.497555151307551, - "learning_rate": 6.980559947596751e-07, - "loss": 0.9474, - "num_input_tokens_seen": 129418955, - "step": 6103 - }, - { - "epoch": 0.7339626044610112, - "flos": 15404803603440.0, - "grad_norm": 5.873061814081432, - "learning_rate": 6.974647747936109e-07, - "loss": 0.9806, - "num_input_tokens_seen": 129437060, - "step": 6104 - }, - { - "epoch": 0.7340828473516503, - "flos": 10791561978360.0, - "grad_norm": 3.640598673582053, - "learning_rate": 6.968737524211039e-07, - "loss": 1.0491, - "num_input_tokens_seen": 129453590, - "step": 6105 - }, - { - "epoch": 0.7342030902422895, - "flos": 16297979511600.0, - "grad_norm": 4.915362017969805, - "learning_rate": 6.962829277318132e-07, - "loss": 1.0097, - "num_input_tokens_seen": 129472905, - "step": 6106 - }, - { - "epoch": 0.7343233331329285, - "flos": 18394784876760.0, - "grad_norm": 6.848409381715605, - "learning_rate": 6.956923008153652e-07, - "loss": 1.0493, - "num_input_tokens_seen": 129492390, - "step": 6107 - }, - { - "epoch": 0.7344435760235676, - "flos": 13099056645360.0, - "grad_norm": 6.925940979193053, - "learning_rate": 6.951018717613593e-07, - "loss": 1.0651, - "num_input_tokens_seen": 129511125, - "step": 6108 - }, - { - "epoch": 0.7345638189142067, - "flos": 12496153431480.0, - "grad_norm": 3.607605603655743, - "learning_rate": 6.945116406593614e-07, - "loss": 0.9903, - "num_input_tokens_seen": 129529700, - "step": 6109 - }, - { - "epoch": 0.7346840618048458, - "flos": 14378314153560.0, - "grad_norm": 10.672116993161321, - "learning_rate": 6.939216075989089e-07, - "loss": 0.955, - "num_input_tokens_seen": 129547350, - "step": 6110 - }, - { - "epoch": 0.7348043046954849, - "flos": 20676947547120.0, - "grad_norm": 2.904047469799374, - "learning_rate": 6.933317726695109e-07, - "loss": 0.8922, - "num_input_tokens_seen": 129568300, - "step": 6111 - }, - { - "epoch": 0.734924547586124, - "flos": 12705309655320.0, - "grad_norm": 4.48938410317657, - "learning_rate": 6.92742135960644e-07, - "loss": 1.0112, - "num_input_tokens_seen": 129585720, - "step": 6112 - }, - { - "epoch": 0.7350447904767631, - "flos": 45524958481800.0, - "grad_norm": 0.917500737785022, - "learning_rate": 6.921526975617556e-07, - "loss": 0.8263, - "num_input_tokens_seen": 129644900, - "step": 6113 - }, - { - "epoch": 0.7351650333674021, - "flos": 15327673135800.0, - "grad_norm": 4.546713914672332, - "learning_rate": 6.915634575622631e-07, - "loss": 0.9694, - "num_input_tokens_seen": 129663135, - "step": 6114 - }, - { - "epoch": 0.7352852762580413, - "flos": 12888643297560.0, - "grad_norm": 8.607295787382027, - "learning_rate": 6.909744160515532e-07, - "loss": 0.9391, - "num_input_tokens_seen": 129680995, - "step": 6115 - }, - { - "epoch": 0.7354055191486804, - "flos": 27782366590080.0, - "grad_norm": 3.4874334896404156, - "learning_rate": 6.903855731189849e-07, - "loss": 0.8957, - "num_input_tokens_seen": 129703350, - "step": 6116 - }, - { - "epoch": 
0.7355257620393194, - "flos": 11525877717240.0, - "grad_norm": 7.32679642140965, - "learning_rate": 6.897969288538825e-07, - "loss": 1.0309, - "num_input_tokens_seen": 129721015, - "step": 6117 - }, - { - "epoch": 0.7356460049299585, - "flos": 12836936811240.0, - "grad_norm": 3.506654544006783, - "learning_rate": 6.892084833455452e-07, - "loss": 1.0317, - "num_input_tokens_seen": 129740305, - "step": 6118 - }, - { - "epoch": 0.7357662478205976, - "flos": 15144615447600.0, - "grad_norm": 3.5719037339330497, - "learning_rate": 6.886202366832384e-07, - "loss": 1.0648, - "num_input_tokens_seen": 129761710, - "step": 6119 - }, - { - "epoch": 0.7358864907112367, - "flos": 10057522193520.0, - "grad_norm": 7.937126571401594, - "learning_rate": 6.880321889561987e-07, - "loss": 0.9584, - "num_input_tokens_seen": 129779405, - "step": 6120 - }, - { - "epoch": 0.7360067336018757, - "flos": 15852066111840.0, - "grad_norm": 4.929631978007956, - "learning_rate": 6.874443402536338e-07, - "loss": 0.8684, - "num_input_tokens_seen": 129798215, - "step": 6121 - }, - { - "epoch": 0.7361269764925149, - "flos": 18183819620880.0, - "grad_norm": 4.949300450687135, - "learning_rate": 6.868566906647177e-07, - "loss": 1.039, - "num_input_tokens_seen": 129818885, - "step": 6122 - }, - { - "epoch": 0.736247219383154, - "flos": 14462803395600.0, - "grad_norm": 4.859439567308683, - "learning_rate": 6.862692402785984e-07, - "loss": 1.0548, - "num_input_tokens_seen": 129838855, - "step": 6123 - }, - { - "epoch": 0.736367462273793, - "flos": 50377103592000.0, - "grad_norm": 0.6980839476643212, - "learning_rate": 6.856819891843899e-07, - "loss": 0.7428, - "num_input_tokens_seen": 129903280, - "step": 6124 - }, - { - "epoch": 0.7364877051644322, - "flos": 15925241238240.0, - "grad_norm": 4.550742480722733, - "learning_rate": 6.8509493747118e-07, - "loss": 0.9433, - "num_input_tokens_seen": 129921810, - "step": 6125 - }, - { - "epoch": 0.7366079480550712, - "flos": 8536325705760.0, - "grad_norm": 5.409236546093791, - "learning_rate": 6.845080852280221e-07, - "loss": 1.0962, - "num_input_tokens_seen": 129938600, - "step": 6126 - }, - { - "epoch": 0.7367281909457103, - "flos": 10634234887080.0, - "grad_norm": 3.182711276512702, - "learning_rate": 6.839214325439409e-07, - "loss": 0.9733, - "num_input_tokens_seen": 129956015, - "step": 6127 - }, - { - "epoch": 0.7368484338363495, - "flos": 16714850865960.0, - "grad_norm": 3.1258627654232165, - "learning_rate": 6.833349795079327e-07, - "loss": 0.9401, - "num_input_tokens_seen": 129974845, - "step": 6128 - }, - { - "epoch": 0.7369686767269885, - "flos": 19523215544400.0, - "grad_norm": 11.949270467156797, - "learning_rate": 6.827487262089613e-07, - "loss": 0.9033, - "num_input_tokens_seen": 129995070, - "step": 6129 - }, - { - "epoch": 0.7370889196176276, - "flos": 50342603788920.0, - "grad_norm": 0.8799188502782238, - "learning_rate": 6.821626727359606e-07, - "loss": 0.8275, - "num_input_tokens_seen": 130060350, - "step": 6130 - }, - { - "epoch": 0.7372091625082667, - "flos": 12784126508760.0, - "grad_norm": 8.490096212859621, - "learning_rate": 6.815768191778348e-07, - "loss": 0.9882, - "num_input_tokens_seen": 130078150, - "step": 6131 - }, - { - "epoch": 0.7373294053989058, - "flos": 24056536499880.0, - "grad_norm": 6.172526728347007, - "learning_rate": 6.809911656234569e-07, - "loss": 0.9564, - "num_input_tokens_seen": 130099845, - "step": 6132 - }, - { - "epoch": 0.7374496482895448, - "flos": 15274617540840.0, - "grad_norm": 11.305840486835534, - "learning_rate": 
6.804057121616707e-07, - "loss": 1.0055, - "num_input_tokens_seen": 130117770, - "step": 6133 - }, - { - "epoch": 0.737569891180184, - "flos": 17740236499680.0, - "grad_norm": 3.440866521107243, - "learning_rate": 6.798204588812888e-07, - "loss": 0.943, - "num_input_tokens_seen": 130136905, - "step": 6134 - }, - { - "epoch": 0.7376901340708231, - "flos": 14669292063720.0, - "grad_norm": 4.496969049993193, - "learning_rate": 6.792354058710937e-07, - "loss": 0.9787, - "num_input_tokens_seen": 130154095, - "step": 6135 - }, - { - "epoch": 0.7378103769614621, - "flos": 16927318538280.0, - "grad_norm": 2.998728641689958, - "learning_rate": 6.786505532198374e-07, - "loss": 0.8828, - "num_input_tokens_seen": 130172760, - "step": 6136 - }, - { - "epoch": 0.7379306198521013, - "flos": 15800053009920.0, - "grad_norm": 5.52079241275617, - "learning_rate": 6.780659010162411e-07, - "loss": 1.0776, - "num_input_tokens_seen": 130191430, - "step": 6137 - }, - { - "epoch": 0.7380508627427403, - "flos": 10529104867080.0, - "grad_norm": 4.88507271906786, - "learning_rate": 6.774814493489975e-07, - "loss": 1.0615, - "num_input_tokens_seen": 130208825, - "step": 6138 - }, - { - "epoch": 0.7381711056333794, - "flos": 15403239863880.0, - "grad_norm": 5.853898808153717, - "learning_rate": 6.768971983067655e-07, - "loss": 0.8978, - "num_input_tokens_seen": 130228875, - "step": 6139 - }, - { - "epoch": 0.7382913485240186, - "flos": 37484566276320.0, - "grad_norm": 1.023358698715894, - "learning_rate": 6.763131479781772e-07, - "loss": 0.9324, - "num_input_tokens_seen": 130278355, - "step": 6140 - }, - { - "epoch": 0.7384115914146576, - "flos": 15484908242400.0, - "grad_norm": 3.7181385723718496, - "learning_rate": 6.757292984518316e-07, - "loss": 0.9997, - "num_input_tokens_seen": 130297475, - "step": 6141 - }, - { - "epoch": 0.7385318343052967, - "flos": 44018117152200.0, - "grad_norm": 2.780861902483619, - "learning_rate": 6.751456498162981e-07, - "loss": 0.8429, - "num_input_tokens_seen": 130356230, - "step": 6142 - }, - { - "epoch": 0.7386520771959358, - "flos": 12045242197440.0, - "grad_norm": 4.555979000870402, - "learning_rate": 6.745622021601174e-07, - "loss": 1.0785, - "num_input_tokens_seen": 130372975, - "step": 6143 - }, - { - "epoch": 0.7387723200865749, - "flos": 13308734115720.0, - "grad_norm": 4.154436447173493, - "learning_rate": 6.739789555717954e-07, - "loss": 0.9234, - "num_input_tokens_seen": 130389670, - "step": 6144 - }, - { - "epoch": 0.738892562977214, - "flos": 16006786970520.0, - "grad_norm": 3.667515670857302, - "learning_rate": 6.733959101398124e-07, - "loss": 1.009, - "num_input_tokens_seen": 130407520, - "step": 6145 - }, - { - "epoch": 0.7390128058678531, - "flos": 15270478230240.0, - "grad_norm": 5.330450451050868, - "learning_rate": 6.728130659526143e-07, - "loss": 1.0402, - "num_input_tokens_seen": 130425050, - "step": 6146 - }, - { - "epoch": 0.7391330487584922, - "flos": 18343415667600.0, - "grad_norm": 4.529498583935643, - "learning_rate": 6.7223042309862e-07, - "loss": 0.9235, - "num_input_tokens_seen": 130444970, - "step": 6147 - }, - { - "epoch": 0.7392532916491312, - "flos": 20205211565760.0, - "grad_norm": 3.9672119284107215, - "learning_rate": 6.716479816662144e-07, - "loss": 0.9641, - "num_input_tokens_seen": 130466420, - "step": 6148 - }, - { - "epoch": 0.7393735345397703, - "flos": 16769132923320.0, - "grad_norm": 7.345376212065474, - "learning_rate": 6.710657417437531e-07, - "loss": 0.9577, - "num_input_tokens_seen": 130485845, - "step": 6149 - }, - { - "epoch": 
0.7394937774304094, - "flos": 14174033117760.0, - "grad_norm": 3.6859891997006797, - "learning_rate": 6.704837034195628e-07, - "loss": 1.0062, - "num_input_tokens_seen": 130504030, - "step": 6150 - }, - { - "epoch": 0.7396140203210485, - "flos": 16691849147880.0, - "grad_norm": 5.7762244573712325, - "learning_rate": 6.699018667819376e-07, - "loss": 1.0736, - "num_input_tokens_seen": 130523150, - "step": 6151 - }, - { - "epoch": 0.7397342632116876, - "flos": 18184524836760.0, - "grad_norm": 4.494457858683453, - "learning_rate": 6.693202319191415e-07, - "loss": 0.9538, - "num_input_tokens_seen": 130544605, - "step": 6152 - }, - { - "epoch": 0.7398545061023267, - "flos": 17609590513680.0, - "grad_norm": 3.3114553279729075, - "learning_rate": 6.687387989194084e-07, - "loss": 0.9601, - "num_input_tokens_seen": 130563840, - "step": 6153 - }, - { - "epoch": 0.7399747489929658, - "flos": 11682959516040.0, - "grad_norm": 3.839636113091582, - "learning_rate": 6.681575678709404e-07, - "loss": 1.016, - "num_input_tokens_seen": 130582250, - "step": 6154 - }, - { - "epoch": 0.7400949918836048, - "flos": 17136566746800.0, - "grad_norm": 4.434345252251127, - "learning_rate": 6.67576538861911e-07, - "loss": 0.9359, - "num_input_tokens_seen": 130600545, - "step": 6155 - }, - { - "epoch": 0.740215234774244, - "flos": 15488066383080.0, - "grad_norm": 2.7159870182957477, - "learning_rate": 6.669957119804612e-07, - "loss": 1.0414, - "num_input_tokens_seen": 130621900, - "step": 6156 - }, - { - "epoch": 0.7403354776648831, - "flos": 13281593087040.0, - "grad_norm": 6.575011325144714, - "learning_rate": 6.66415087314702e-07, - "loss": 0.9202, - "num_input_tokens_seen": 130636575, - "step": 6157 - }, - { - "epoch": 0.7404557205555221, - "flos": 11971085901120.0, - "grad_norm": 4.27054789760086, - "learning_rate": 6.65834664952714e-07, - "loss": 0.945, - "num_input_tokens_seen": 130653745, - "step": 6158 - }, - { - "epoch": 0.7405759634461613, - "flos": 15064817424240.0, - "grad_norm": 7.754723370830546, - "learning_rate": 6.652544449825457e-07, - "loss": 0.9815, - "num_input_tokens_seen": 130673720, - "step": 6159 - }, - { - "epoch": 0.7406962063368003, - "flos": 14536469106960.0, - "grad_norm": 3.3896841686391017, - "learning_rate": 6.646744274922182e-07, - "loss": 0.9842, - "num_input_tokens_seen": 130691885, - "step": 6160 - }, - { - "epoch": 0.7408164492274394, - "flos": 14042559269640.0, - "grad_norm": 9.484313059672123, - "learning_rate": 6.640946125697171e-07, - "loss": 0.9701, - "num_input_tokens_seen": 130709135, - "step": 6161 - }, - { - "epoch": 0.7409366921180786, - "flos": 20807409563760.0, - "grad_norm": 4.407051392312926, - "learning_rate": 6.635150003030017e-07, - "loss": 0.9806, - "num_input_tokens_seen": 130727380, - "step": 6162 - }, - { - "epoch": 0.7410569350087176, - "flos": 16297887526920.0, - "grad_norm": 6.605156600678172, - "learning_rate": 6.629355907799981e-07, - "loss": 1.0815, - "num_input_tokens_seen": 130746905, - "step": 6163 - }, - { - "epoch": 0.7411771778993567, - "flos": 21694698452400.0, - "grad_norm": 5.060390833661398, - "learning_rate": 6.623563840886015e-07, - "loss": 0.9192, - "num_input_tokens_seen": 130767550, - "step": 6164 - }, - { - "epoch": 0.7412974207899958, - "flos": 14567350845960.0, - "grad_norm": 4.229370538216784, - "learning_rate": 6.617773803166795e-07, - "loss": 0.9246, - "num_input_tokens_seen": 130785595, - "step": 6165 - }, - { - "epoch": 0.7414176636806349, - "flos": 15694677697440.0, - "grad_norm": 12.049663136945663, - "learning_rate": 
6.611985795520634e-07, - "loss": 1.0507, - "num_input_tokens_seen": 130803860, - "step": 6166 - }, - { - "epoch": 0.7415379065712739, - "flos": 17897073006000.0, - "grad_norm": 4.603757584063508, - "learning_rate": 6.606199818825588e-07, - "loss": 0.9971, - "num_input_tokens_seen": 130824035, - "step": 6167 - }, - { - "epoch": 0.7416581494619131, - "flos": 11943699579960.0, - "grad_norm": 40.448450507250584, - "learning_rate": 6.600415873959377e-07, - "loss": 1.0413, - "num_input_tokens_seen": 130841630, - "step": 6168 - }, - { - "epoch": 0.7417783923525522, - "flos": 20256948713640.0, - "grad_norm": 29.909289740697098, - "learning_rate": 6.594633961799437e-07, - "loss": 0.8677, - "num_input_tokens_seen": 130860390, - "step": 6169 - }, - { - "epoch": 0.7418986352431912, - "flos": 14252757986520.0, - "grad_norm": 4.387322646683683, - "learning_rate": 6.588854083222857e-07, - "loss": 1.0431, - "num_input_tokens_seen": 130879545, - "step": 6170 - }, - { - "epoch": 0.7420188781338304, - "flos": 12941300292240.0, - "grad_norm": 6.031691777153411, - "learning_rate": 6.583076239106444e-07, - "loss": 1.0177, - "num_input_tokens_seen": 130897770, - "step": 6171 - }, - { - "epoch": 0.7421391210244694, - "flos": 9716186905680.0, - "grad_norm": 5.159178116274841, - "learning_rate": 6.577300430326707e-07, - "loss": 0.9786, - "num_input_tokens_seen": 130912435, - "step": 6172 - }, - { - "epoch": 0.7422593639151085, - "flos": 11289948403440.0, - "grad_norm": 3.6739105426278322, - "learning_rate": 6.571526657759821e-07, - "loss": 0.948, - "num_input_tokens_seen": 130927895, - "step": 6173 - }, - { - "epoch": 0.7423796068057477, - "flos": 21460731478440.0, - "grad_norm": 3.4916441492585224, - "learning_rate": 6.565754922281663e-07, - "loss": 0.9377, - "num_input_tokens_seen": 130949860, - "step": 6174 - }, - { - "epoch": 0.7424998496963867, - "flos": 14567228199720.0, - "grad_norm": 3.7339340651932313, - "learning_rate": 6.559985224767801e-07, - "loss": 1.0186, - "num_input_tokens_seen": 130967455, - "step": 6175 - }, - { - "epoch": 0.7426200925870258, - "flos": 15537166636800.0, - "grad_norm": 10.912158141838855, - "learning_rate": 6.55421756609349e-07, - "loss": 0.9804, - "num_input_tokens_seen": 130985430, - "step": 6176 - }, - { - "epoch": 0.7427403354776649, - "flos": 18814661064000.0, - "grad_norm": 12.730897243670858, - "learning_rate": 6.54845194713369e-07, - "loss": 1.016, - "num_input_tokens_seen": 131006100, - "step": 6177 - }, - { - "epoch": 0.742860578368304, - "flos": 14118677905800.0, - "grad_norm": 4.77016145508904, - "learning_rate": 6.542688368763034e-07, - "loss": 1.0288, - "num_input_tokens_seen": 131024225, - "step": 6178 - }, - { - "epoch": 0.742980821258943, - "flos": 17661358323120.0, - "grad_norm": 4.669865306733399, - "learning_rate": 6.536926831855854e-07, - "loss": 1.0001, - "num_input_tokens_seen": 131043110, - "step": 6179 - }, - { - "epoch": 0.7431010641495821, - "flos": 17949883308480.0, - "grad_norm": 6.8584022614301245, - "learning_rate": 6.531167337286165e-07, - "loss": 0.9504, - "num_input_tokens_seen": 131062850, - "step": 6180 - }, - { - "epoch": 0.7432213070402213, - "flos": 15458717722080.0, - "grad_norm": 2.275249375310019, - "learning_rate": 6.52540988592768e-07, - "loss": 1.0228, - "num_input_tokens_seen": 131083590, - "step": 6181 - }, - { - "epoch": 0.7433415499308603, - "flos": 10450594629240.0, - "grad_norm": 3.6482962146007574, - "learning_rate": 6.519654478653814e-07, - "loss": 1.0553, - "num_input_tokens_seen": 131101675, - "step": 6182 - }, - { - "epoch": 
0.7434617928214994, - "flos": 48087551485680.0, - "grad_norm": 0.7531664682582058, - "learning_rate": 6.51390111633763e-07, - "loss": 0.8053, - "num_input_tokens_seen": 131166670, - "step": 6183 - }, - { - "epoch": 0.7435820357121385, - "flos": 19493774898720.0, - "grad_norm": 4.4950419769194125, - "learning_rate": 6.508149799851932e-07, - "loss": 0.9837, - "num_input_tokens_seen": 131188055, - "step": 6184 - }, - { - "epoch": 0.7437022786027776, - "flos": 17057933862720.0, - "grad_norm": 6.3073213693647565, - "learning_rate": 6.502400530069183e-07, - "loss": 0.8434, - "num_input_tokens_seen": 131207660, - "step": 6185 - }, - { - "epoch": 0.7438225214934167, - "flos": 15533057987760.0, - "grad_norm": 5.550361161036671, - "learning_rate": 6.496653307861535e-07, - "loss": 0.9076, - "num_input_tokens_seen": 131228050, - "step": 6186 - }, - { - "epoch": 0.7439427643840558, - "flos": 14357520067800.0, - "grad_norm": 2.9775079942623153, - "learning_rate": 6.490908134100857e-07, - "loss": 0.8732, - "num_input_tokens_seen": 131246235, - "step": 6187 - }, - { - "epoch": 0.7440630072746949, - "flos": 14802912221040.0, - "grad_norm": 4.640910849453288, - "learning_rate": 6.48516500965866e-07, - "loss": 0.9158, - "num_input_tokens_seen": 131265890, - "step": 6188 - }, - { - "epoch": 0.7441832501653339, - "flos": 18866030273160.0, - "grad_norm": 3.8068083505490606, - "learning_rate": 6.479423935406192e-07, - "loss": 1.0411, - "num_input_tokens_seen": 131285595, - "step": 6189 - }, - { - "epoch": 0.7443034930559731, - "flos": 49126035153600.0, - "grad_norm": 1.1009886308718828, - "learning_rate": 6.473684912214357e-07, - "loss": 0.9399, - "num_input_tokens_seen": 131348875, - "step": 6190 - }, - { - "epoch": 0.7444237359466122, - "flos": 13224674135520.0, - "grad_norm": 7.847975850183814, - "learning_rate": 6.467947940953778e-07, - "loss": 0.9302, - "num_input_tokens_seen": 131367120, - "step": 6191 - }, - { - "epoch": 0.7445439788372512, - "flos": 16216709733360.0, - "grad_norm": 23.423168795570366, - "learning_rate": 6.462213022494732e-07, - "loss": 0.9486, - "num_input_tokens_seen": 131386085, - "step": 6192 - }, - { - "epoch": 0.7446642217278904, - "flos": 48008520001320.0, - "grad_norm": 0.857798604383643, - "learning_rate": 6.456480157707201e-07, - "loss": 0.8795, - "num_input_tokens_seen": 131450580, - "step": 6193 - }, - { - "epoch": 0.7447844646185294, - "flos": 12335698861080.0, - "grad_norm": 4.808107625775451, - "learning_rate": 6.450749347460866e-07, - "loss": 1.0759, - "num_input_tokens_seen": 131467275, - "step": 6194 - }, - { - "epoch": 0.7449047075091685, - "flos": 18946962774240.0, - "grad_norm": 4.234287838197127, - "learning_rate": 6.445020592625083e-07, - "loss": 1.0255, - "num_input_tokens_seen": 131487645, - "step": 6195 - }, - { - "epoch": 0.7450249503998077, - "flos": 10002166981560.0, - "grad_norm": 11.489440050151078, - "learning_rate": 6.4392938940689e-07, - "loss": 1.029, - "num_input_tokens_seen": 131502780, - "step": 6196 - }, - { - "epoch": 0.7451451932904467, - "flos": 13909061758560.0, - "grad_norm": 6.270035634890201, - "learning_rate": 6.433569252661049e-07, - "loss": 0.9241, - "num_input_tokens_seen": 131520500, - "step": 6197 - }, - { - "epoch": 0.7452654361810858, - "flos": 8798782817040.0, - "grad_norm": 3.4818181120525926, - "learning_rate": 6.427846669269952e-07, - "loss": 0.923, - "num_input_tokens_seen": 131537840, - "step": 6198 - }, - { - "epoch": 0.7453856790717249, - "flos": 15721021525560.0, - "grad_norm": 4.036624608050439, - "learning_rate": 
6.422126144763729e-07, - "loss": 1.0341, - "num_input_tokens_seen": 131556950, - "step": 6199 - }, - { - "epoch": 0.745505921962364, - "flos": 14199763714680.0, - "grad_norm": 3.7788615604429285, - "learning_rate": 6.416407680010174e-07, - "loss": 0.9913, - "num_input_tokens_seen": 131571030, - "step": 6200 - }, - { - "epoch": 0.745626164853003, - "flos": 17553591408960.0, - "grad_norm": 13.690682376548908, - "learning_rate": 6.410691275876774e-07, - "loss": 1.0349, - "num_input_tokens_seen": 131590170, - "step": 6201 - }, - { - "epoch": 0.7457464077436422, - "flos": 10267383633240.0, - "grad_norm": 7.9678799023290665, - "learning_rate": 6.404976933230704e-07, - "loss": 0.9768, - "num_input_tokens_seen": 131606410, - "step": 6202 - }, - { - "epoch": 0.7458666506342813, - "flos": 24268850864400.0, - "grad_norm": 4.550881629458921, - "learning_rate": 6.399264652938813e-07, - "loss": 0.9534, - "num_input_tokens_seen": 131627035, - "step": 6203 - }, - { - "epoch": 0.7459868935249203, - "flos": 17267274055920.0, - "grad_norm": 3.2019085318696976, - "learning_rate": 6.393554435867679e-07, - "loss": 0.9674, - "num_input_tokens_seen": 131647605, - "step": 6204 - }, - { - "epoch": 0.7461071364155595, - "flos": 15564001049880.0, - "grad_norm": 4.554659233201845, - "learning_rate": 6.387846282883502e-07, - "loss": 1.048, - "num_input_tokens_seen": 131663855, - "step": 6205 - }, - { - "epoch": 0.7462273793061985, - "flos": 16268416219680.0, - "grad_norm": 3.4536978820336572, - "learning_rate": 6.38214019485223e-07, - "loss": 0.9918, - "num_input_tokens_seen": 131682400, - "step": 6206 - }, - { - "epoch": 0.7463476221968376, - "flos": 14168636683200.0, - "grad_norm": 9.980432439390896, - "learning_rate": 6.376436172639461e-07, - "loss": 0.9395, - "num_input_tokens_seen": 131699965, - "step": 6207 - }, - { - "epoch": 0.7464678650874768, - "flos": 11918643537360.0, - "grad_norm": 15.156374838403394, - "learning_rate": 6.370734217110487e-07, - "loss": 0.8711, - "num_input_tokens_seen": 131718430, - "step": 6208 - }, - { - "epoch": 0.7465881079781158, - "flos": 34387130252520.0, - "grad_norm": 4.321996433099478, - "learning_rate": 6.36503432913031e-07, - "loss": 0.8698, - "num_input_tokens_seen": 131741295, - "step": 6209 - }, - { - "epoch": 0.7467083508687549, - "flos": 13959541782480.0, - "grad_norm": 5.017078604952436, - "learning_rate": 6.359336509563569e-07, - "loss": 0.909, - "num_input_tokens_seen": 131757035, - "step": 6210 - }, - { - "epoch": 0.7468285937593939, - "flos": 12679701704640.0, - "grad_norm": 3.2803529512263068, - "learning_rate": 6.353640759274641e-07, - "loss": 1.0323, - "num_input_tokens_seen": 131775645, - "step": 6211 - }, - { - "epoch": 0.7469488366500331, - "flos": 16448837013720.0, - "grad_norm": 5.0247093002726135, - "learning_rate": 6.347947079127556e-07, - "loss": 0.9659, - "num_input_tokens_seen": 131793265, - "step": 6212 - }, - { - "epoch": 0.7470690795406721, - "flos": 11813912117640.0, - "grad_norm": 7.121410871298718, - "learning_rate": 6.342255469986053e-07, - "loss": 0.9862, - "num_input_tokens_seen": 131811730, - "step": 6213 - }, - { - "epoch": 0.7471893224313112, - "flos": 17924122050000.0, - "grad_norm": 14.376068871606448, - "learning_rate": 6.336565932713533e-07, - "loss": 0.9927, - "num_input_tokens_seen": 131830875, - "step": 6214 - }, - { - "epoch": 0.7473095653219504, - "flos": 16007369540160.0, - "grad_norm": 3.9137677902163364, - "learning_rate": 6.330878468173088e-07, - "loss": 0.9978, - "num_input_tokens_seen": 131850660, - "step": 6215 - }, - { - 
"epoch": 0.7474298082125894, - "flos": 12836722180320.0, - "grad_norm": 2.8129575569105163, - "learning_rate": 6.32519307722752e-07, - "loss": 0.9588, - "num_input_tokens_seen": 131868275, - "step": 6216 - }, - { - "epoch": 0.7475500511032285, - "flos": 44444861528880.0, - "grad_norm": 0.7717185522134439, - "learning_rate": 6.31950976073929e-07, - "loss": 0.806, - "num_input_tokens_seen": 131922085, - "step": 6217 - }, - { - "epoch": 0.7476702939938676, - "flos": 12600700881840.0, - "grad_norm": 4.35144349625485, - "learning_rate": 6.31382851957055e-07, - "loss": 1.0235, - "num_input_tokens_seen": 131938625, - "step": 6218 - }, - { - "epoch": 0.7477905368845067, - "flos": 19889729521080.0, - "grad_norm": 3.1722673445961047, - "learning_rate": 6.308149354583143e-07, - "loss": 0.9267, - "num_input_tokens_seen": 131957750, - "step": 6219 - }, - { - "epoch": 0.7479107797751458, - "flos": 19129744508400.0, - "grad_norm": 4.200048519035924, - "learning_rate": 6.302472266638586e-07, - "loss": 1.033, - "num_input_tokens_seen": 131978010, - "step": 6220 - }, - { - "epoch": 0.7480310226657849, - "flos": 24212269190040.0, - "grad_norm": 9.875832051823295, - "learning_rate": 6.296797256598101e-07, - "loss": 0.9319, - "num_input_tokens_seen": 131999210, - "step": 6221 - }, - { - "epoch": 0.748151265556424, - "flos": 17660806415040.0, - "grad_norm": 2.8072913363898278, - "learning_rate": 6.291124325322576e-07, - "loss": 1.0384, - "num_input_tokens_seen": 132019055, - "step": 6222 - }, - { - "epoch": 0.748271508447063, - "flos": 27415331366880.0, - "grad_norm": 3.4472327038973356, - "learning_rate": 6.285453473672595e-07, - "loss": 0.8527, - "num_input_tokens_seen": 132041345, - "step": 6223 - }, - { - "epoch": 0.7483917513377022, - "flos": 15300010860600.0, - "grad_norm": 6.284850042552571, - "learning_rate": 6.279784702508415e-07, - "loss": 0.9655, - "num_input_tokens_seen": 132061815, - "step": 6224 - }, - { - "epoch": 0.7485119942283412, - "flos": 44609945994840.0, - "grad_norm": 0.8305812971158343, - "learning_rate": 6.274118012689979e-07, - "loss": 0.8733, - "num_input_tokens_seen": 132123435, - "step": 6225 - }, - { - "epoch": 0.7486322371189803, - "flos": 21334439433960.0, - "grad_norm": 2.818370287709512, - "learning_rate": 6.268453405076943e-07, - "loss": 0.9105, - "num_input_tokens_seen": 132145550, - "step": 6226 - }, - { - "epoch": 0.7487524800096195, - "flos": 13436620561320.0, - "grad_norm": 4.837889442243497, - "learning_rate": 6.262790880528592e-07, - "loss": 1.0553, - "num_input_tokens_seen": 132162890, - "step": 6227 - }, - { - "epoch": 0.7488727229002585, - "flos": 13255954474800.0, - "grad_norm": 7.337751040477678, - "learning_rate": 6.257130439903951e-07, - "loss": 1.0116, - "num_input_tokens_seen": 132179105, - "step": 6228 - }, - { - "epoch": 0.7489929657908976, - "flos": 16796212628880.0, - "grad_norm": 9.665463295936602, - "learning_rate": 6.251472084061695e-07, - "loss": 1.0333, - "num_input_tokens_seen": 132197745, - "step": 6229 - }, - { - "epoch": 0.7491132086815367, - "flos": 14587593023640.0, - "grad_norm": 6.221270639831017, - "learning_rate": 6.245815813860191e-07, - "loss": 1.1249, - "num_input_tokens_seen": 132212975, - "step": 6230 - }, - { - "epoch": 0.7492334515721758, - "flos": 16350360552240.0, - "grad_norm": 5.831178576000316, - "learning_rate": 6.240161630157495e-07, - "loss": 0.9139, - "num_input_tokens_seen": 132232050, - "step": 6231 - }, - { - "epoch": 0.7493536944628149, - "flos": 11604020016360.0, - "grad_norm": 5.975022141164193, - "learning_rate": 
6.23450953381133e-07, - "loss": 0.9158, - "num_input_tokens_seen": 132249860, - "step": 6232 - }, - { - "epoch": 0.749473937353454, - "flos": 10841735386680.0, - "grad_norm": 10.15513290130484, - "learning_rate": 6.228859525679131e-07, - "loss": 0.9006, - "num_input_tokens_seen": 132263995, - "step": 6233 - }, - { - "epoch": 0.7495941802440931, - "flos": 13438092316200.0, - "grad_norm": 12.09859588294721, - "learning_rate": 6.223211606617986e-07, - "loss": 1.0312, - "num_input_tokens_seen": 132282135, - "step": 6234 - }, - { - "epoch": 0.7497144231347321, - "flos": 15983478636840.0, - "grad_norm": 2.9030020007140718, - "learning_rate": 6.217565777484701e-07, - "loss": 1.06, - "num_input_tokens_seen": 132300950, - "step": 6235 - }, - { - "epoch": 0.7498346660253713, - "flos": 17241880736160.0, - "grad_norm": 4.346436033702597, - "learning_rate": 6.211922039135722e-07, - "loss": 1.0214, - "num_input_tokens_seen": 132320815, - "step": 6236 - }, - { - "epoch": 0.7499549089160104, - "flos": 17344741800720.0, - "grad_norm": 6.77278642704228, - "learning_rate": 6.206280392427201e-07, - "loss": 1.0313, - "num_input_tokens_seen": 132340120, - "step": 6237 - }, - { - "epoch": 0.7500751518066494, - "flos": 24294765430680.0, - "grad_norm": 4.304856755690219, - "learning_rate": 6.200640838214983e-07, - "loss": 0.9585, - "num_input_tokens_seen": 132362615, - "step": 6238 - }, - { - "epoch": 0.7501953946972886, - "flos": 13361053833240.0, - "grad_norm": 11.85094577540056, - "learning_rate": 6.195003377354578e-07, - "loss": 0.8946, - "num_input_tokens_seen": 132381605, - "step": 6239 - }, - { - "epoch": 0.7503156375879276, - "flos": 14376137182800.0, - "grad_norm": 11.101025411994781, - "learning_rate": 6.189368010701183e-07, - "loss": 0.9394, - "num_input_tokens_seen": 132398385, - "step": 6240 - }, - { - "epoch": 0.7504358804785667, - "flos": 9506356127520.0, - "grad_norm": 6.925906279716045, - "learning_rate": 6.183734739109683e-07, - "loss": 0.9862, - "num_input_tokens_seen": 132415925, - "step": 6241 - }, - { - "epoch": 0.7505561233692057, - "flos": 20991632391240.0, - "grad_norm": 6.420866578461836, - "learning_rate": 6.178103563434629e-07, - "loss": 0.9191, - "num_input_tokens_seen": 132434645, - "step": 6242 - }, - { - "epoch": 0.7506763662598449, - "flos": 14409441185040.0, - "grad_norm": 4.286793037161583, - "learning_rate": 6.172474484530283e-07, - "loss": 1.0747, - "num_input_tokens_seen": 132453100, - "step": 6243 - }, - { - "epoch": 0.750796609150484, - "flos": 26575517669280.0, - "grad_norm": 4.1130025539190855, - "learning_rate": 6.166847503250563e-07, - "loss": 0.9797, - "num_input_tokens_seen": 132475060, - "step": 6244 - }, - { - "epoch": 0.750916852041123, - "flos": 13911392037120.0, - "grad_norm": 4.318926901168474, - "learning_rate": 6.161222620449078e-07, - "loss": 1.0155, - "num_input_tokens_seen": 132493555, - "step": 6245 - }, - { - "epoch": 0.7510370949317622, - "flos": 17865670020480.0, - "grad_norm": 5.356979145999984, - "learning_rate": 6.155599836979117e-07, - "loss": 1.0217, - "num_input_tokens_seen": 132511960, - "step": 6246 - }, - { - "epoch": 0.7511573378224012, - "flos": 13518196955160.0, - "grad_norm": 5.302707218220056, - "learning_rate": 6.149979153693649e-07, - "loss": 1.0354, - "num_input_tokens_seen": 132528935, - "step": 6247 - }, - { - "epoch": 0.7512775807130403, - "flos": 14147290689360.0, - "grad_norm": 10.3029911475704, - "learning_rate": 6.144360571445343e-07, - "loss": 0.9776, - "num_input_tokens_seen": 132547800, - "step": 6248 - }, - { - "epoch": 
0.7513978236036795, - "flos": 14723788752000.0, - "grad_norm": 3.851552471898393, - "learning_rate": 6.138744091086509e-07, - "loss": 1.0279, - "num_input_tokens_seen": 132567105, - "step": 6249 - }, - { - "epoch": 0.7515180664943185, - "flos": 19627579025400.0, - "grad_norm": 4.931468901713206, - "learning_rate": 6.133129713469183e-07, - "loss": 0.9469, - "num_input_tokens_seen": 132586030, - "step": 6250 - }, - { - "epoch": 0.7516383093849576, - "flos": 24112290312120.0, - "grad_norm": 4.629358866650473, - "learning_rate": 6.127517439445053e-07, - "loss": 0.8603, - "num_input_tokens_seen": 132606595, - "step": 6251 - }, - { - "epoch": 0.7517585522755967, - "flos": 21195637473000.0, - "grad_norm": 8.32515722361155, - "learning_rate": 6.121907269865498e-07, - "loss": 1.0444, - "num_input_tokens_seen": 132625805, - "step": 6252 - }, - { - "epoch": 0.7518787951662358, - "flos": 49992284664000.0, - "grad_norm": 0.9488107879926774, - "learning_rate": 6.116299205581577e-07, - "loss": 0.957, - "num_input_tokens_seen": 132680355, - "step": 6253 - }, - { - "epoch": 0.7519990380568748, - "flos": 24399619496640.0, - "grad_norm": 3.797987409983614, - "learning_rate": 6.110693247444018e-07, - "loss": 0.9016, - "num_input_tokens_seen": 132701910, - "step": 6254 - }, - { - "epoch": 0.752119280947514, - "flos": 15432343232400.0, - "grad_norm": 4.26421715835777, - "learning_rate": 6.105089396303258e-07, - "loss": 1.0503, - "num_input_tokens_seen": 132720020, - "step": 6255 - }, - { - "epoch": 0.7522395238381531, - "flos": 23349883036200.0, - "grad_norm": 11.008591851070555, - "learning_rate": 6.099487653009383e-07, - "loss": 0.9819, - "num_input_tokens_seen": 132739085, - "step": 6256 - }, - { - "epoch": 0.7523597667287921, - "flos": 16690132100520.0, - "grad_norm": 4.006491400576757, - "learning_rate": 6.093888018412192e-07, - "loss": 1.0506, - "num_input_tokens_seen": 132754995, - "step": 6257 - }, - { - "epoch": 0.7524800096194313, - "flos": 48226108154160.0, - "grad_norm": 0.7327809585257712, - "learning_rate": 6.088290493361125e-07, - "loss": 0.8058, - "num_input_tokens_seen": 132819600, - "step": 6258 - }, - { - "epoch": 0.7526002525100703, - "flos": 9165756717120.0, - "grad_norm": 5.335919448576256, - "learning_rate": 6.082695078705322e-07, - "loss": 0.9309, - "num_input_tokens_seen": 132836800, - "step": 6259 - }, - { - "epoch": 0.7527204954007094, - "flos": 15196260610800.0, - "grad_norm": 4.910385041532233, - "learning_rate": 6.077101775293618e-07, - "loss": 0.9082, - "num_input_tokens_seen": 132855345, - "step": 6260 - }, - { - "epoch": 0.7528407382913486, - "flos": 13434719544600.0, - "grad_norm": 6.607069691138761, - "learning_rate": 6.071510583974504e-07, - "loss": 1.0532, - "num_input_tokens_seen": 132870250, - "step": 6261 - }, - { - "epoch": 0.7529609811819876, - "flos": 10765371458040.0, - "grad_norm": 4.310494661555089, - "learning_rate": 6.065921505596161e-07, - "loss": 0.9363, - "num_input_tokens_seen": 132888250, - "step": 6262 - }, - { - "epoch": 0.7530812240726267, - "flos": 13728242364240.0, - "grad_norm": 2.975021484040976, - "learning_rate": 6.060334541006445e-07, - "loss": 0.9978, - "num_input_tokens_seen": 132906465, - "step": 6263 - }, - { - "epoch": 0.7532014669632658, - "flos": 19760953890240.0, - "grad_norm": 6.0146458212823095, - "learning_rate": 6.05474969105289e-07, - "loss": 0.9128, - "num_input_tokens_seen": 132929175, - "step": 6264 - }, - { - "epoch": 0.7533217098539049, - "flos": 9978889309440.0, - "grad_norm": 5.5299136169198615, - "learning_rate": 
6.049166956582725e-07, - "loss": 0.9628, - "num_input_tokens_seen": 132947160, - "step": 6265 - }, - { - "epoch": 0.753441952744544, - "flos": 18812116154520.0, - "grad_norm": 11.90681067721032, - "learning_rate": 6.043586338442841e-07, - "loss": 1.0929, - "num_input_tokens_seen": 132965935, - "step": 6266 - }, - { - "epoch": 0.7535621956351831, - "flos": 16979638255800.0, - "grad_norm": 2.971022689655656, - "learning_rate": 6.038007837479815e-07, - "loss": 0.9551, - "num_input_tokens_seen": 132986760, - "step": 6267 - }, - { - "epoch": 0.7536824385258222, - "flos": 15482393994480.0, - "grad_norm": 5.846871424663191, - "learning_rate": 6.032431454539897e-07, - "loss": 0.8556, - "num_input_tokens_seen": 133005325, - "step": 6268 - }, - { - "epoch": 0.7538026814164612, - "flos": 20596505631000.0, - "grad_norm": 2.7587471477734056, - "learning_rate": 6.026857190469014e-07, - "loss": 1.0353, - "num_input_tokens_seen": 133026800, - "step": 6269 - }, - { - "epoch": 0.7539229243071004, - "flos": 14985939247680.0, - "grad_norm": 10.010722705768893, - "learning_rate": 6.0212850461128e-07, - "loss": 0.9639, - "num_input_tokens_seen": 133045640, - "step": 6270 - }, - { - "epoch": 0.7540431671977395, - "flos": 10712622478680.0, - "grad_norm": 6.726215136483211, - "learning_rate": 6.015715022316516e-07, - "loss": 0.9706, - "num_input_tokens_seen": 133063340, - "step": 6271 - }, - { - "epoch": 0.7541634100883785, - "flos": 13308580807920.0, - "grad_norm": 7.071229114444309, - "learning_rate": 6.010147119925154e-07, - "loss": 0.997, - "num_input_tokens_seen": 133080815, - "step": 6272 - }, - { - "epoch": 0.7542836529790176, - "flos": 14619578578800.0, - "grad_norm": 3.5807818414686206, - "learning_rate": 6.004581339783348e-07, - "loss": 0.8771, - "num_input_tokens_seen": 133098855, - "step": 6273 - }, - { - "epoch": 0.7544038958696567, - "flos": 13541290657920.0, - "grad_norm": 8.377855411960207, - "learning_rate": 5.999017682735425e-07, - "loss": 0.8969, - "num_input_tokens_seen": 133114965, - "step": 6274 - }, - { - "epoch": 0.7545241387602958, - "flos": 22617498975600.0, - "grad_norm": 4.5870987885612955, - "learning_rate": 5.993456149625387e-07, - "loss": 0.8897, - "num_input_tokens_seen": 133135835, - "step": 6275 - }, - { - "epoch": 0.7546443816509348, - "flos": 14404351366080.0, - "grad_norm": 3.288512412710313, - "learning_rate": 5.987896741296909e-07, - "loss": 1.0516, - "num_input_tokens_seen": 133153295, - "step": 6276 - }, - { - "epoch": 0.754764624541574, - "flos": 16848501684840.0, - "grad_norm": 5.658429984988728, - "learning_rate": 5.982339458593361e-07, - "loss": 1.0017, - "num_input_tokens_seen": 133172955, - "step": 6277 - }, - { - "epoch": 0.7548848674322131, - "flos": 18027780315120.0, - "grad_norm": 5.542883756065245, - "learning_rate": 5.976784302357767e-07, - "loss": 1.0723, - "num_input_tokens_seen": 133193240, - "step": 6278 - }, - { - "epoch": 0.7550051103228521, - "flos": 13885354824600.0, - "grad_norm": 6.765543807249532, - "learning_rate": 5.971231273432855e-07, - "loss": 0.9498, - "num_input_tokens_seen": 133212445, - "step": 6279 - }, - { - "epoch": 0.7551253532134913, - "flos": 45925701855600.0, - "grad_norm": 0.8253976066066611, - "learning_rate": 5.965680372661e-07, - "loss": 0.8136, - "num_input_tokens_seen": 133269730, - "step": 6280 - }, - { - "epoch": 0.7552455961041303, - "flos": 18553123799520.0, - "grad_norm": 4.466725137432249, - "learning_rate": 5.960131600884266e-07, - "loss": 0.7747, - "num_input_tokens_seen": 133288720, - "step": 6281 - }, - { - "epoch": 
0.7553658389947694, - "flos": 17424846439680.0, - "grad_norm": 3.5531510941781073, - "learning_rate": 5.954584958944413e-07, - "loss": 0.9861, - "num_input_tokens_seen": 133307105, - "step": 6282 - }, - { - "epoch": 0.7554860818854086, - "flos": 15485460150480.0, - "grad_norm": 7.002342377168503, - "learning_rate": 5.949040447682854e-07, - "loss": 1.0313, - "num_input_tokens_seen": 133326650, - "step": 6283 - }, - { - "epoch": 0.7556063247760476, - "flos": 11577614865120.0, - "grad_norm": 3.9120918231633226, - "learning_rate": 5.943498067940686e-07, - "loss": 0.9091, - "num_input_tokens_seen": 133343395, - "step": 6284 - }, - { - "epoch": 0.7557265676666867, - "flos": 19391895004080.0, - "grad_norm": 2.8330535937571137, - "learning_rate": 5.937957820558686e-07, - "loss": 1.0353, - "num_input_tokens_seen": 133362460, - "step": 6285 - }, - { - "epoch": 0.7558468105573258, - "flos": 44517668716560.0, - "grad_norm": 0.8386174319772002, - "learning_rate": 5.932419706377296e-07, - "loss": 0.8978, - "num_input_tokens_seen": 133420485, - "step": 6286 - }, - { - "epoch": 0.7559670534479649, - "flos": 23713146887520.0, - "grad_norm": 22.339623471658353, - "learning_rate": 5.92688372623666e-07, - "loss": 0.9641, - "num_input_tokens_seen": 133438910, - "step": 6287 - }, - { - "epoch": 0.7560872963386039, - "flos": 9926385622560.0, - "grad_norm": 4.852366996565733, - "learning_rate": 5.921349880976574e-07, - "loss": 0.966, - "num_input_tokens_seen": 133456465, - "step": 6288 - }, - { - "epoch": 0.7562075392292431, - "flos": 14488442007840.0, - "grad_norm": 20.434198982027883, - "learning_rate": 5.915818171436515e-07, - "loss": 1.0329, - "num_input_tokens_seen": 133475520, - "step": 6289 - }, - { - "epoch": 0.7563277821198822, - "flos": 14463048688080.0, - "grad_norm": 4.012052735686641, - "learning_rate": 5.910288598455642e-07, - "loss": 0.9673, - "num_input_tokens_seen": 133494590, - "step": 6290 - }, - { - "epoch": 0.7564480250105212, - "flos": 13177566883200.0, - "grad_norm": 3.5174429707082755, - "learning_rate": 5.90476116287278e-07, - "loss": 0.9549, - "num_input_tokens_seen": 133511910, - "step": 6291 - }, - { - "epoch": 0.7565682679011604, - "flos": 15065767932600.0, - "grad_norm": 4.059359466311576, - "learning_rate": 5.899235865526456e-07, - "loss": 0.9077, - "num_input_tokens_seen": 133530925, - "step": 6292 - }, - { - "epoch": 0.7566885107917994, - "flos": 14514847159080.0, - "grad_norm": 3.22357734232607, - "learning_rate": 5.893712707254825e-07, - "loss": 1.0404, - "num_input_tokens_seen": 133548105, - "step": 6293 - }, - { - "epoch": 0.7568087536824385, - "flos": 13492006434840.0, - "grad_norm": 6.651113901565045, - "learning_rate": 5.888191688895769e-07, - "loss": 0.8695, - "num_input_tokens_seen": 133565085, - "step": 6294 - }, - { - "epoch": 0.7569289965730777, - "flos": 10759944361920.0, - "grad_norm": 7.952939134385736, - "learning_rate": 5.882672811286813e-07, - "loss": 0.8301, - "num_input_tokens_seen": 133581085, - "step": 6295 - }, - { - "epoch": 0.7570492394637167, - "flos": 14744000268120.0, - "grad_norm": 5.227626173411364, - "learning_rate": 5.877156075265166e-07, - "loss": 0.9199, - "num_input_tokens_seen": 133597070, - "step": 6296 - }, - { - "epoch": 0.7571694823543558, - "flos": 11075303760360.0, - "grad_norm": 14.303593025442943, - "learning_rate": 5.871641481667715e-07, - "loss": 0.9166, - "num_input_tokens_seen": 133611235, - "step": 6297 - }, - { - "epoch": 0.7572897252449949, - "flos": 18079425478320.0, - "grad_norm": 3.316860280224398, - "learning_rate": 
5.866129031331011e-07, - "loss": 1.0676, - "num_input_tokens_seen": 133630610, - "step": 6298 - }, - { - "epoch": 0.757409968135634, - "flos": 17267550009960.0, - "grad_norm": 8.727603259474227, - "learning_rate": 5.8606187250913e-07, - "loss": 1.0455, - "num_input_tokens_seen": 133648380, - "step": 6299 - }, - { - "epoch": 0.757530211026273, - "flos": 17162971898040.0, - "grad_norm": 6.454599133608991, - "learning_rate": 5.855110563784482e-07, - "loss": 1.0584, - "num_input_tokens_seen": 133666635, - "step": 6300 - }, - { - "epoch": 0.7576504539169122, - "flos": 17032264588920.0, - "grad_norm": 3.9242206261349537, - "learning_rate": 5.849604548246156e-07, - "loss": 0.8666, - "num_input_tokens_seen": 133687465, - "step": 6301 - }, - { - "epoch": 0.7577706968075513, - "flos": 15091437206400.0, - "grad_norm": 10.590246470809827, - "learning_rate": 5.844100679311565e-07, - "loss": 1.0249, - "num_input_tokens_seen": 133706145, - "step": 6302 - }, - { - "epoch": 0.7578909396981903, - "flos": 12967981397520.0, - "grad_norm": 5.735456567365218, - "learning_rate": 5.838598957815637e-07, - "loss": 0.9957, - "num_input_tokens_seen": 133723095, - "step": 6303 - }, - { - "epoch": 0.7580111825888295, - "flos": 18054154804800.0, - "grad_norm": 2.4174018158250092, - "learning_rate": 5.833099384592996e-07, - "loss": 1.0794, - "num_input_tokens_seen": 133743390, - "step": 6304 - }, - { - "epoch": 0.7581314254794685, - "flos": 16900545448320.0, - "grad_norm": 7.306488957343949, - "learning_rate": 5.827601960477913e-07, - "loss": 0.9314, - "num_input_tokens_seen": 133761035, - "step": 6305 - }, - { - "epoch": 0.7582516683701076, - "flos": 15668793792720.0, - "grad_norm": 4.122403339018488, - "learning_rate": 5.822106686304344e-07, - "loss": 0.9415, - "num_input_tokens_seen": 133780045, - "step": 6306 - }, - { - "epoch": 0.7583719112607467, - "flos": 22564535365320.0, - "grad_norm": 6.260718348423066, - "learning_rate": 5.816613562905919e-07, - "loss": 0.8024, - "num_input_tokens_seen": 133800950, - "step": 6307 - }, - { - "epoch": 0.7584921541513858, - "flos": 23583512733000.0, - "grad_norm": 4.579883433028819, - "learning_rate": 5.811122591115933e-07, - "loss": 0.9343, - "num_input_tokens_seen": 133821655, - "step": 6308 - }, - { - "epoch": 0.7586123970420249, - "flos": 16582058570760.0, - "grad_norm": 6.71101338495731, - "learning_rate": 5.805633771767376e-07, - "loss": 0.9192, - "num_input_tokens_seen": 133838770, - "step": 6309 - }, - { - "epoch": 0.7587326399326639, - "flos": 12995183749320.0, - "grad_norm": 3.1003009527003105, - "learning_rate": 5.800147105692888e-07, - "loss": 1.0007, - "num_input_tokens_seen": 133858065, - "step": 6310 - }, - { - "epoch": 0.7588528828233031, - "flos": 12233849628000.0, - "grad_norm": 3.939819016342091, - "learning_rate": 5.794662593724795e-07, - "loss": 1.0173, - "num_input_tokens_seen": 133876790, - "step": 6311 - }, - { - "epoch": 0.7589731257139422, - "flos": 12549117041760.0, - "grad_norm": 4.105191846632292, - "learning_rate": 5.789180236695091e-07, - "loss": 0.9792, - "num_input_tokens_seen": 133893365, - "step": 6312 - }, - { - "epoch": 0.7590933686045812, - "flos": 11289427156920.0, - "grad_norm": 4.59504006884085, - "learning_rate": 5.78370003543544e-07, - "loss": 1.083, - "num_input_tokens_seen": 133911840, - "step": 6313 - }, - { - "epoch": 0.7592136114952204, - "flos": 14959595419560.0, - "grad_norm": 4.006763938277382, - "learning_rate": 5.778221990777203e-07, - "loss": 1.0579, - "num_input_tokens_seen": 133929300, - "step": 6314 - }, - { - "epoch": 
0.7593338543858594, - "flos": 17998952900640.0, - "grad_norm": 5.505981314054098, - "learning_rate": 5.772746103551372e-07, - "loss": 1.0528, - "num_input_tokens_seen": 133944415, - "step": 6315 - }, - { - "epoch": 0.7594540972764985, - "flos": 22695825244080.0, - "grad_norm": 6.3256442258339325, - "learning_rate": 5.767272374588648e-07, - "loss": 0.9507, - "num_input_tokens_seen": 133965540, - "step": 6316 - }, - { - "epoch": 0.7595743401671377, - "flos": 26838955950480.0, - "grad_norm": 3.2078227507979786, - "learning_rate": 5.76180080471939e-07, - "loss": 1.011, - "num_input_tokens_seen": 133988430, - "step": 6317 - }, - { - "epoch": 0.7596945830577767, - "flos": 12960407992200.0, - "grad_norm": 5.316051059268157, - "learning_rate": 5.756331394773631e-07, - "loss": 0.9441, - "num_input_tokens_seen": 134004365, - "step": 6318 - }, - { - "epoch": 0.7598148259484158, - "flos": 15796557592080.0, - "grad_norm": 3.9775084923558235, - "learning_rate": 5.750864145581071e-07, - "loss": 0.997, - "num_input_tokens_seen": 134023305, - "step": 6319 - }, - { - "epoch": 0.7599350688390549, - "flos": 19339054040040.0, - "grad_norm": 4.090470492808751, - "learning_rate": 5.745399057971085e-07, - "loss": 1.082, - "num_input_tokens_seen": 134044160, - "step": 6320 - }, - { - "epoch": 0.760055311729694, - "flos": 11001822018360.0, - "grad_norm": 6.116942730874856, - "learning_rate": 5.739936132772738e-07, - "loss": 0.9749, - "num_input_tokens_seen": 134062445, - "step": 6321 - }, - { - "epoch": 0.760175554620333, - "flos": 17897287636920.0, - "grad_norm": 5.983773861166716, - "learning_rate": 5.734475370814733e-07, - "loss": 0.9763, - "num_input_tokens_seen": 134081845, - "step": 6322 - }, - { - "epoch": 0.7602957975109722, - "flos": 17321249497680.0, - "grad_norm": 3.2787256612394278, - "learning_rate": 5.729016772925483e-07, - "loss": 1.0042, - "num_input_tokens_seen": 134103140, - "step": 6323 - }, - { - "epoch": 0.7604160404016113, - "flos": 17924091388440.0, - "grad_norm": 3.4178158447941254, - "learning_rate": 5.723560339933038e-07, - "loss": 0.9343, - "num_input_tokens_seen": 134123195, - "step": 6324 - }, - { - "epoch": 0.7605362832922503, - "flos": 21279605468520.0, - "grad_norm": 7.089420291245115, - "learning_rate": 5.71810607266513e-07, - "loss": 0.8672, - "num_input_tokens_seen": 134141500, - "step": 6325 - }, - { - "epoch": 0.7606565261828895, - "flos": 9821071633200.0, - "grad_norm": 7.596779552567818, - "learning_rate": 5.712653971949184e-07, - "loss": 0.825, - "num_input_tokens_seen": 134159340, - "step": 6326 - }, - { - "epoch": 0.7607767690735285, - "flos": 13150916439480.0, - "grad_norm": 4.3401806114340635, - "learning_rate": 5.707204038612268e-07, - "loss": 0.9853, - "num_input_tokens_seen": 134176490, - "step": 6327 - }, - { - "epoch": 0.7608970119641676, - "flos": 14855446569480.0, - "grad_norm": 3.703729250254653, - "learning_rate": 5.701756273481138e-07, - "loss": 0.9554, - "num_input_tokens_seen": 134193630, - "step": 6328 - }, - { - "epoch": 0.7610172548548068, - "flos": 16928606323800.0, - "grad_norm": 4.7596043311689495, - "learning_rate": 5.696310677382212e-07, - "loss": 0.9599, - "num_input_tokens_seen": 134214745, - "step": 6329 - }, - { - "epoch": 0.7611374977454458, - "flos": 47613638533560.0, - "grad_norm": 2.0746715791312327, - "learning_rate": 5.690867251141576e-07, - "loss": 0.8754, - "num_input_tokens_seen": 134281120, - "step": 6330 - }, - { - "epoch": 0.7612577406360849, - "flos": 11053804458720.0, - "grad_norm": 3.5421377130550797, - "learning_rate": 
5.685425995585013e-07, - "loss": 1.1341, - "num_input_tokens_seen": 134298765, - "step": 6331 - }, - { - "epoch": 0.761377983526724, - "flos": 43324678820880.0, - "grad_norm": 0.7844996197555525, - "learning_rate": 5.679986911537935e-07, - "loss": 0.8498, - "num_input_tokens_seen": 134366015, - "step": 6332 - }, - { - "epoch": 0.7614982264173631, - "flos": 25527498256200.0, - "grad_norm": 3.8375946629680957, - "learning_rate": 5.674549999825462e-07, - "loss": 0.8931, - "num_input_tokens_seen": 134388550, - "step": 6333 - }, - { - "epoch": 0.7616184693080021, - "flos": 48641385107400.0, - "grad_norm": 0.984493490686785, - "learning_rate": 5.669115261272363e-07, - "loss": 1.0032, - "num_input_tokens_seen": 134448590, - "step": 6334 - }, - { - "epoch": 0.7617387121986413, - "flos": 14567289522840.0, - "grad_norm": 13.858630722687307, - "learning_rate": 5.663682696703081e-07, - "loss": 0.9464, - "num_input_tokens_seen": 134466575, - "step": 6335 - }, - { - "epoch": 0.7618589550892804, - "flos": 13203573434160.0, - "grad_norm": 4.786706540960846, - "learning_rate": 5.658252306941746e-07, - "loss": 1.0554, - "num_input_tokens_seen": 134485615, - "step": 6336 - }, - { - "epoch": 0.7619791979799194, - "flos": 12361736073600.0, - "grad_norm": 3.787464552105996, - "learning_rate": 5.65282409281212e-07, - "loss": 0.9815, - "num_input_tokens_seen": 134502800, - "step": 6337 - }, - { - "epoch": 0.7620994408705585, - "flos": 9978889309440.0, - "grad_norm": 3.48951294481637, - "learning_rate": 5.64739805513768e-07, - "loss": 0.9208, - "num_input_tokens_seen": 134520065, - "step": 6338 - }, - { - "epoch": 0.7622196837611976, - "flos": 50638155819600.0, - "grad_norm": 0.817263358626903, - "learning_rate": 5.641974194741541e-07, - "loss": 0.8157, - "num_input_tokens_seen": 134575470, - "step": 6339 - }, - { - "epoch": 0.7623399266518367, - "flos": 43434842884800.0, - "grad_norm": 0.8349679333131632, - "learning_rate": 5.636552512446502e-07, - "loss": 0.8792, - "num_input_tokens_seen": 134636245, - "step": 6340 - }, - { - "epoch": 0.7624601695424758, - "flos": 18841188861480.0, - "grad_norm": 4.52643408206367, - "learning_rate": 5.631133009075027e-07, - "loss": 1.0068, - "num_input_tokens_seen": 134655150, - "step": 6341 - }, - { - "epoch": 0.7625804124331149, - "flos": 13571068580760.0, - "grad_norm": 6.693055029373411, - "learning_rate": 5.625715685449242e-07, - "loss": 0.9189, - "num_input_tokens_seen": 134672975, - "step": 6342 - }, - { - "epoch": 0.762700655323754, - "flos": 18657303311160.0, - "grad_norm": 3.3579311372967653, - "learning_rate": 5.620300542390966e-07, - "loss": 0.9462, - "num_input_tokens_seen": 134693740, - "step": 6343 - }, - { - "epoch": 0.762820898214393, - "flos": 15904447152480.0, - "grad_norm": 13.733390069739416, - "learning_rate": 5.614887580721659e-07, - "loss": 1.07, - "num_input_tokens_seen": 134713605, - "step": 6344 - }, - { - "epoch": 0.7629411411050322, - "flos": 11101463619120.0, - "grad_norm": 5.407990103921357, - "learning_rate": 5.609476801262481e-07, - "loss": 0.9568, - "num_input_tokens_seen": 134728185, - "step": 6345 - }, - { - "epoch": 0.7630613839956712, - "flos": 9714776473920.0, - "grad_norm": 8.439942861065285, - "learning_rate": 5.604068204834223e-07, - "loss": 0.8608, - "num_input_tokens_seen": 134744800, - "step": 6346 - }, - { - "epoch": 0.7631816268863103, - "flos": 10288698965520.0, - "grad_norm": 5.814445908208487, - "learning_rate": 5.598661792257367e-07, - "loss": 0.9726, - "num_input_tokens_seen": 134761565, - "step": 6347 - }, - { - "epoch": 
0.7633018697769495, - "flos": 13518135632040.0, - "grad_norm": 11.169866720171138, - "learning_rate": 5.593257564352071e-07, - "loss": 0.9919, - "num_input_tokens_seen": 134779725, - "step": 6348 - }, - { - "epoch": 0.7634221126675885, - "flos": 15667383360960.0, - "grad_norm": 3.762988548624303, - "learning_rate": 5.58785552193815e-07, - "loss": 0.9814, - "num_input_tokens_seen": 134799690, - "step": 6349 - }, - { - "epoch": 0.7635423555582276, - "flos": 20934621455040.0, - "grad_norm": 4.031931738169654, - "learning_rate": 5.582455665835086e-07, - "loss": 0.9777, - "num_input_tokens_seen": 134819705, - "step": 6350 - }, - { - "epoch": 0.7636625984488667, - "flos": 12600118312200.0, - "grad_norm": 6.477920003427086, - "learning_rate": 5.577057996862036e-07, - "loss": 0.9499, - "num_input_tokens_seen": 134837050, - "step": 6351 - }, - { - "epoch": 0.7637828413395058, - "flos": 16876378590960.0, - "grad_norm": 5.101375745827989, - "learning_rate": 5.571662515837814e-07, - "loss": 0.9778, - "num_input_tokens_seen": 134858730, - "step": 6352 - }, - { - "epoch": 0.7639030842301449, - "flos": 25894564140960.0, - "grad_norm": 4.547296773218743, - "learning_rate": 5.566269223580926e-07, - "loss": 1.0597, - "num_input_tokens_seen": 134880160, - "step": 6353 - }, - { - "epoch": 0.764023327120784, - "flos": 20572798697040.0, - "grad_norm": 2.428711330166836, - "learning_rate": 5.560878120909511e-07, - "loss": 0.9792, - "num_input_tokens_seen": 134902480, - "step": 6354 - }, - { - "epoch": 0.7641435700114231, - "flos": 46388019189960.0, - "grad_norm": 1.1660883128450785, - "learning_rate": 5.55548920864141e-07, - "loss": 0.8811, - "num_input_tokens_seen": 134962855, - "step": 6355 - }, - { - "epoch": 0.7642638129020621, - "flos": 11917938321480.0, - "grad_norm": 4.740048072023917, - "learning_rate": 5.550102487594113e-07, - "loss": 1.0016, - "num_input_tokens_seen": 134981245, - "step": 6356 - }, - { - "epoch": 0.7643840557927013, - "flos": 21672769888920.0, - "grad_norm": 3.613360693805704, - "learning_rate": 5.54471795858477e-07, - "loss": 0.9396, - "num_input_tokens_seen": 135001035, - "step": 6357 - }, - { - "epoch": 0.7645042986833404, - "flos": 12024110834520.0, - "grad_norm": 3.74133392984173, - "learning_rate": 5.539335622430235e-07, - "loss": 1.0514, - "num_input_tokens_seen": 135019375, - "step": 6358 - }, - { - "epoch": 0.7646245415739794, - "flos": 12260009486760.0, - "grad_norm": 3.8177916277879818, - "learning_rate": 5.533955479946975e-07, - "loss": 0.9706, - "num_input_tokens_seen": 135037875, - "step": 6359 - }, - { - "epoch": 0.7647447844646186, - "flos": 50420868734280.0, - "grad_norm": 0.8706371325149327, - "learning_rate": 5.528577531951173e-07, - "loss": 0.9023, - "num_input_tokens_seen": 135098000, - "step": 6360 - }, - { - "epoch": 0.7648650273552576, - "flos": 12521853366840.0, - "grad_norm": 9.342356498173512, - "learning_rate": 5.523201779258653e-07, - "loss": 0.9684, - "num_input_tokens_seen": 135116695, - "step": 6361 - }, - { - "epoch": 0.7649852702458967, - "flos": 15745709629440.0, - "grad_norm": 3.8976505685513687, - "learning_rate": 5.517828222684912e-07, - "loss": 1.0676, - "num_input_tokens_seen": 135137070, - "step": 6362 - }, - { - "epoch": 0.7651055131365359, - "flos": 50021817294360.0, - "grad_norm": 0.7709753361058409, - "learning_rate": 5.512456863045117e-07, - "loss": 0.835, - "num_input_tokens_seen": 135197480, - "step": 6363 - }, - { - "epoch": 0.7652257560271749, - "flos": 13806967233000.0, - "grad_norm": 5.843371047518257, - "learning_rate": 
5.507087701154089e-07, - "loss": 0.963, - "num_input_tokens_seen": 135217120, - "step": 6364 - }, - { - "epoch": 0.765345998917814, - "flos": 11289457818480.0, - "grad_norm": 5.833449807593689, - "learning_rate": 5.50172073782634e-07, - "loss": 0.9625, - "num_input_tokens_seen": 135234820, - "step": 6365 - }, - { - "epoch": 0.7654662418084531, - "flos": 16822433810760.0, - "grad_norm": 6.338127764246174, - "learning_rate": 5.496355973876023e-07, - "loss": 1.101, - "num_input_tokens_seen": 135253795, - "step": 6366 - }, - { - "epoch": 0.7655864846990922, - "flos": 29617481382960.0, - "grad_norm": 3.948450513726946, - "learning_rate": 5.490993410116984e-07, - "loss": 0.9313, - "num_input_tokens_seen": 135276505, - "step": 6367 - }, - { - "epoch": 0.7657067275897312, - "flos": 30824299642200.0, - "grad_norm": 8.235851756217485, - "learning_rate": 5.485633047362704e-07, - "loss": 0.9305, - "num_input_tokens_seen": 135298230, - "step": 6368 - }, - { - "epoch": 0.7658269704803703, - "flos": 12260254779240.0, - "grad_norm": 4.136891881041225, - "learning_rate": 5.480274886426341e-07, - "loss": 1.0122, - "num_input_tokens_seen": 135314590, - "step": 6369 - }, - { - "epoch": 0.7659472133710095, - "flos": 8851041211440.0, - "grad_norm": 5.8108508323859285, - "learning_rate": 5.474918928120744e-07, - "loss": 0.9962, - "num_input_tokens_seen": 135330805, - "step": 6370 - }, - { - "epoch": 0.7660674562616485, - "flos": 16137432956520.0, - "grad_norm": 3.1300676074054232, - "learning_rate": 5.469565173258392e-07, - "loss": 1.1024, - "num_input_tokens_seen": 135349040, - "step": 6371 - }, - { - "epoch": 0.7661876991522876, - "flos": 12076767829200.0, - "grad_norm": 2.86944840184649, - "learning_rate": 5.464213622651454e-07, - "loss": 0.8607, - "num_input_tokens_seen": 135366575, - "step": 6372 - }, - { - "epoch": 0.7663079420429267, - "flos": 14252696663400.0, - "grad_norm": 6.202947272918372, - "learning_rate": 5.458864277111753e-07, - "loss": 1.0611, - "num_input_tokens_seen": 135384130, - "step": 6373 - }, - { - "epoch": 0.7664281849335658, - "flos": 8929582110840.0, - "grad_norm": 11.03591534999724, - "learning_rate": 5.453517137450769e-07, - "loss": 0.9182, - "num_input_tokens_seen": 135400425, - "step": 6374 - }, - { - "epoch": 0.7665484278242048, - "flos": 15877520754720.0, - "grad_norm": 3.019475188728041, - "learning_rate": 5.448172204479684e-07, - "loss": 0.9845, - "num_input_tokens_seen": 135419425, - "step": 6375 - }, - { - "epoch": 0.766668670714844, - "flos": 16791552071760.0, - "grad_norm": 2.7437996757720127, - "learning_rate": 5.442829479009294e-07, - "loss": 0.9702, - "num_input_tokens_seen": 135437925, - "step": 6376 - }, - { - "epoch": 0.7667889136054831, - "flos": 13780408773960.0, - "grad_norm": 12.970000645645584, - "learning_rate": 5.437488961850103e-07, - "loss": 0.9425, - "num_input_tokens_seen": 135457445, - "step": 6377 - }, - { - "epoch": 0.7669091564961221, - "flos": 19126310413680.0, - "grad_norm": 5.325373769904284, - "learning_rate": 5.432150653812258e-07, - "loss": 0.9778, - "num_input_tokens_seen": 135477200, - "step": 6378 - }, - { - "epoch": 0.7670293993867613, - "flos": 8719567363320.0, - "grad_norm": 13.652096184271826, - "learning_rate": 5.42681455570557e-07, - "loss": 1.0495, - "num_input_tokens_seen": 135493450, - "step": 6379 - }, - { - "epoch": 0.7671496422774003, - "flos": 15459208307040.0, - "grad_norm": 4.7534905708775606, - "learning_rate": 5.42148066833954e-07, - "loss": 0.8735, - "num_input_tokens_seen": 135512415, - "step": 6380 - }, - { - "epoch": 
0.7672698851680394, - "flos": 14960576589480.0, - "grad_norm": 7.130498805421071, - "learning_rate": 5.416148992523289e-07, - "loss": 0.9747, - "num_input_tokens_seen": 135530710, - "step": 6381 - }, - { - "epoch": 0.7673901280586786, - "flos": 12018530430600.0, - "grad_norm": 3.863078452439156, - "learning_rate": 5.410819529065644e-07, - "loss": 1.0069, - "num_input_tokens_seen": 135548385, - "step": 6382 - }, - { - "epoch": 0.7675103709493176, - "flos": 20834765223360.0, - "grad_norm": 3.0121390795266443, - "learning_rate": 5.405492278775079e-07, - "loss": 0.875, - "num_input_tokens_seen": 135567885, - "step": 6383 - }, - { - "epoch": 0.7676306138399567, - "flos": 20676794239320.0, - "grad_norm": 5.9243674153837675, - "learning_rate": 5.400167242459732e-07, - "loss": 1.0166, - "num_input_tokens_seen": 135586565, - "step": 6384 - }, - { - "epoch": 0.7677508567305958, - "flos": 16035645046560.0, - "grad_norm": 3.4444124944975707, - "learning_rate": 5.394844420927405e-07, - "loss": 1.0328, - "num_input_tokens_seen": 135605895, - "step": 6385 - }, - { - "epoch": 0.7678710996212349, - "flos": 18081295833480.0, - "grad_norm": 5.482837080988732, - "learning_rate": 5.389523814985562e-07, - "loss": 0.947, - "num_input_tokens_seen": 135625035, - "step": 6386 - }, - { - "epoch": 0.767991342511874, - "flos": 19047585544920.0, - "grad_norm": 3.242431484372055, - "learning_rate": 5.384205425441344e-07, - "loss": 0.9837, - "num_input_tokens_seen": 135645665, - "step": 6387 - }, - { - "epoch": 0.7681115854025131, - "flos": 18762586638960.0, - "grad_norm": 4.288199916463733, - "learning_rate": 5.378889253101537e-07, - "loss": 1.0631, - "num_input_tokens_seen": 135665940, - "step": 6388 - }, - { - "epoch": 0.7682318282931522, - "flos": 16532743686120.0, - "grad_norm": 2.759039241283572, - "learning_rate": 5.373575298772617e-07, - "loss": 1.0304, - "num_input_tokens_seen": 135684780, - "step": 6389 - }, - { - "epoch": 0.7683520711837912, - "flos": 50183590311840.0, - "grad_norm": 0.7338581701475868, - "learning_rate": 5.368263563260689e-07, - "loss": 0.8539, - "num_input_tokens_seen": 135749635, - "step": 6390 - }, - { - "epoch": 0.7684723140744304, - "flos": 13203910711320.0, - "grad_norm": 4.207953306860099, - "learning_rate": 5.362954047371537e-07, - "loss": 0.8568, - "num_input_tokens_seen": 135768465, - "step": 6391 - }, - { - "epoch": 0.7685925569650695, - "flos": 19548302248560.0, - "grad_norm": 3.0044652768390328, - "learning_rate": 5.357646751910627e-07, - "loss": 0.9516, - "num_input_tokens_seen": 135789365, - "step": 6392 - }, - { - "epoch": 0.7687127998557085, - "flos": 17451926145240.0, - "grad_norm": 5.803642825850746, - "learning_rate": 5.352341677683061e-07, - "loss": 1.0194, - "num_input_tokens_seen": 135810385, - "step": 6393 - }, - { - "epoch": 0.7688330427463477, - "flos": 17897686237200.0, - "grad_norm": 3.855315509753843, - "learning_rate": 5.347038825493617e-07, - "loss": 1.0086, - "num_input_tokens_seen": 135831635, - "step": 6394 - }, - { - "epoch": 0.7689532856369867, - "flos": 15062149868520.0, - "grad_norm": 6.55565555230436, - "learning_rate": 5.341738196146732e-07, - "loss": 0.8925, - "num_input_tokens_seen": 135849700, - "step": 6395 - }, - { - "epoch": 0.7690735285276258, - "flos": 17871127778160.0, - "grad_norm": 4.16786601289442, - "learning_rate": 5.336439790446503e-07, - "loss": 0.9461, - "num_input_tokens_seen": 135868520, - "step": 6396 - }, - { - "epoch": 0.769193771418265, - "flos": 39161807617920.0, - "grad_norm": 18.852082139025452, - "learning_rate": 
5.331143609196711e-07, - "loss": 0.8383, - "num_input_tokens_seen": 135892055, - "step": 6397 - }, - { - "epoch": 0.769314014308904, - "flos": 26655438338880.0, - "grad_norm": 4.322919559094791, - "learning_rate": 5.325849653200758e-07, - "loss": 0.9982, - "num_input_tokens_seen": 135915725, - "step": 6398 - }, - { - "epoch": 0.7694342571995431, - "flos": 14646167699400.0, - "grad_norm": 8.05830640923132, - "learning_rate": 5.32055792326175e-07, - "loss": 0.9822, - "num_input_tokens_seen": 135933870, - "step": 6399 - }, - { - "epoch": 0.7695545000901821, - "flos": 17216242123920.0, - "grad_norm": 4.878648028782679, - "learning_rate": 5.315268420182437e-07, - "loss": 0.9486, - "num_input_tokens_seen": 135952265, - "step": 6400 - }, - { - "epoch": 0.7696747429808213, - "flos": 19942478500440.0, - "grad_norm": 5.513257261410279, - "learning_rate": 5.309981144765221e-07, - "loss": 0.9804, - "num_input_tokens_seen": 135972130, - "step": 6401 - }, - { - "epoch": 0.7697949858714603, - "flos": 8090167013520.0, - "grad_norm": 4.922412467554138, - "learning_rate": 5.304696097812196e-07, - "loss": 0.9756, - "num_input_tokens_seen": 135988450, - "step": 6402 - }, - { - "epoch": 0.7699152287620994, - "flos": 18998914553040.0, - "grad_norm": 6.372705083923932, - "learning_rate": 5.299413280125078e-07, - "loss": 0.8101, - "num_input_tokens_seen": 136006480, - "step": 6403 - }, - { - "epoch": 0.7700354716527386, - "flos": 11709058051680.0, - "grad_norm": 7.21930035231284, - "learning_rate": 5.294132692505284e-07, - "loss": 0.9521, - "num_input_tokens_seen": 136024610, - "step": 6404 - }, - { - "epoch": 0.7701557145433776, - "flos": 13647616478760.0, - "grad_norm": 3.524186488219311, - "learning_rate": 5.288854335753861e-07, - "loss": 1.0015, - "num_input_tokens_seen": 136042590, - "step": 6405 - }, - { - "epoch": 0.7702759574340167, - "flos": 22590357946920.0, - "grad_norm": 3.360719917238207, - "learning_rate": 5.283578210671551e-07, - "loss": 0.983, - "num_input_tokens_seen": 136064550, - "step": 6406 - }, - { - "epoch": 0.7703962003246558, - "flos": 11892391693920.0, - "grad_norm": 10.271679537091066, - "learning_rate": 5.278304318058719e-07, - "loss": 0.9855, - "num_input_tokens_seen": 136082125, - "step": 6407 - }, - { - "epoch": 0.7705164432152949, - "flos": 25501307735880.0, - "grad_norm": 4.789191339200354, - "learning_rate": 5.273032658715411e-07, - "loss": 1.0115, - "num_input_tokens_seen": 136104655, - "step": 6408 - }, - { - "epoch": 0.7706366861059339, - "flos": 16610947308360.0, - "grad_norm": 3.725908969858545, - "learning_rate": 5.267763233441347e-07, - "loss": 0.9971, - "num_input_tokens_seen": 136125005, - "step": 6409 - }, - { - "epoch": 0.7707569289965731, - "flos": 16297795542240.0, - "grad_norm": 8.035571171530645, - "learning_rate": 5.26249604303588e-07, - "loss": 0.9112, - "num_input_tokens_seen": 136143230, - "step": 6410 - }, - { - "epoch": 0.7708771718872122, - "flos": 12338427739920.0, - "grad_norm": 7.7210336619733875, - "learning_rate": 5.257231088298057e-07, - "loss": 0.9985, - "num_input_tokens_seen": 136161360, - "step": 6411 - }, - { - "epoch": 0.7709974147778512, - "flos": 51739568104800.0, - "grad_norm": 0.8398698669631012, - "learning_rate": 5.25196837002655e-07, - "loss": 0.7948, - "num_input_tokens_seen": 136220790, - "step": 6412 - }, - { - "epoch": 0.7711176576684904, - "flos": 28464975842640.0, - "grad_norm": 6.955182060529918, - "learning_rate": 5.24670788901971e-07, - "loss": 0.918, - "num_input_tokens_seen": 136243600, - "step": 6413 - }, - { - "epoch": 
0.7712379005591294, - "flos": 26392674612000.0, - "grad_norm": 6.817807990721053, - "learning_rate": 5.241449646075557e-07, - "loss": 0.8958, - "num_input_tokens_seen": 136266545, - "step": 6414 - }, - { - "epoch": 0.7713581434497685, - "flos": 16186778502720.0, - "grad_norm": 4.0885743482057, - "learning_rate": 5.236193641991762e-07, - "loss": 0.9521, - "num_input_tokens_seen": 136284195, - "step": 6415 - }, - { - "epoch": 0.7714783863404077, - "flos": 17136904023960.0, - "grad_norm": 5.73758019617751, - "learning_rate": 5.23093987756565e-07, - "loss": 0.9257, - "num_input_tokens_seen": 136302610, - "step": 6416 - }, - { - "epoch": 0.7715986292310467, - "flos": 14955824047680.0, - "grad_norm": 6.59280461047011, - "learning_rate": 5.225688353594217e-07, - "loss": 0.9853, - "num_input_tokens_seen": 136321960, - "step": 6417 - }, - { - "epoch": 0.7717188721216858, - "flos": 14619670563480.0, - "grad_norm": 7.07491568255645, - "learning_rate": 5.220439070874108e-07, - "loss": 0.9937, - "num_input_tokens_seen": 136340920, - "step": 6418 - }, - { - "epoch": 0.7718391150123249, - "flos": 18684689632320.0, - "grad_norm": 5.421547299906372, - "learning_rate": 5.215192030201652e-07, - "loss": 0.9349, - "num_input_tokens_seen": 136361630, - "step": 6419 - }, - { - "epoch": 0.771959357902964, - "flos": 15664654482120.0, - "grad_norm": 3.964367226213089, - "learning_rate": 5.209947232372798e-07, - "loss": 1.081, - "num_input_tokens_seen": 136378840, - "step": 6420 - }, - { - "epoch": 0.772079600793603, - "flos": 21699481655760.0, - "grad_norm": 3.5404761314239384, - "learning_rate": 5.204704678183196e-07, - "loss": 1.032, - "num_input_tokens_seen": 136397295, - "step": 6421 - }, - { - "epoch": 0.7721998436842422, - "flos": 9139780827720.0, - "grad_norm": 3.2944727819409447, - "learning_rate": 5.19946436842813e-07, - "loss": 1.0737, - "num_input_tokens_seen": 136414145, - "step": 6422 - }, - { - "epoch": 0.7723200865748813, - "flos": 23273089845720.0, - "grad_norm": 13.363814480738123, - "learning_rate": 5.194226303902546e-07, - "loss": 0.9177, - "num_input_tokens_seen": 136433600, - "step": 6423 - }, - { - "epoch": 0.7724403294655203, - "flos": 14986797771360.0, - "grad_norm": 3.4189093292879367, - "learning_rate": 5.188990485401072e-07, - "loss": 0.9269, - "num_input_tokens_seen": 136452525, - "step": 6424 - }, - { - "epoch": 0.7725605723561595, - "flos": 15694708359000.0, - "grad_norm": 4.9906198251944645, - "learning_rate": 5.183756913717954e-07, - "loss": 1.0786, - "num_input_tokens_seen": 136472020, - "step": 6425 - }, - { - "epoch": 0.7726808152467985, - "flos": 24609818213520.0, - "grad_norm": 4.869118492666807, - "learning_rate": 5.178525589647136e-07, - "loss": 0.9596, - "num_input_tokens_seen": 136493380, - "step": 6426 - }, - { - "epoch": 0.7728010581374376, - "flos": 15849214586760.0, - "grad_norm": 4.319915366933225, - "learning_rate": 5.173296513982197e-07, - "loss": 1.0135, - "num_input_tokens_seen": 136511625, - "step": 6427 - }, - { - "epoch": 0.7729213010280768, - "flos": 19313875351200.0, - "grad_norm": 10.124353082028987, - "learning_rate": 5.168069687516398e-07, - "loss": 0.8758, - "num_input_tokens_seen": 136531115, - "step": 6428 - }, - { - "epoch": 0.7730415439187158, - "flos": 12862728731280.0, - "grad_norm": 3.5209752041074434, - "learning_rate": 5.16284511104263e-07, - "loss": 0.936, - "num_input_tokens_seen": 136549970, - "step": 6429 - }, - { - "epoch": 0.7731617868093549, - "flos": 8404913180760.0, - "grad_norm": 6.829449208784068, - "learning_rate": 
5.157622785353457e-07, - "loss": 1.0013, - "num_input_tokens_seen": 136567805, - "step": 6430 - },
[trainer_state log history omitted: deleted entries for steps 6431-6759 span epoch 0.7733-0.8127; each entry records "epoch", "flos", "grad_norm", "learning_rate" (decaying from 5.152e-07 to 3.566e-07), "loss" (approx. 0.77-1.20), "num_input_tokens_seen" (136,635,430 -> 144,049,020), and "step"]
- { - "epoch": 0.812841940720255, - "flos": 17896858375080.0, - "grad_norm": 2.1463279771275383,
- "learning_rate": 3.561796527331706e-07, - "loss": 0.9485, - "num_input_tokens_seen": 144070415, - "step": 6760 - }, - { - "epoch": 0.812962183610894, - "flos": 18969841846080.0, - "grad_norm": 3.900013576386125, - "learning_rate": 3.5573606183910163e-07, - "loss": 0.9933, - "num_input_tokens_seen": 144090140, - "step": 6761 - }, - { - "epoch": 0.8130824265015331, - "flos": 17760969262320.0, - "grad_norm": 3.0442460676131655, - "learning_rate": 3.5529272037856493e-07, - "loss": 1.0021, - "num_input_tokens_seen": 144108075, - "step": 6762 - }, - { - "epoch": 0.8132026693921722, - "flos": 48422478507480.0, - "grad_norm": 0.7424971888068314, - "learning_rate": 3.548496284188149e-07, - "loss": 0.8004, - "num_input_tokens_seen": 144168000, - "step": 6763 - }, - { - "epoch": 0.8133229122828113, - "flos": 13828619842440.0, - "grad_norm": 4.875160605890594, - "learning_rate": 3.544067860270681e-07, - "loss": 1.0113, - "num_input_tokens_seen": 144185295, - "step": 6764 - }, - { - "epoch": 0.8134431551734503, - "flos": 14672358219720.0, - "grad_norm": 5.852505413119727, - "learning_rate": 3.539641932705029e-07, - "loss": 0.9272, - "num_input_tokens_seen": 144203495, - "step": 6765 - }, - { - "epoch": 0.8135633980640895, - "flos": 15275476064520.0, - "grad_norm": 4.502133605384499, - "learning_rate": 3.53521850216262e-07, - "loss": 0.9677, - "num_input_tokens_seen": 144222785, - "step": 6766 - }, - { - "epoch": 0.8136836409547286, - "flos": 14590076610000.0, - "grad_norm": 4.159159853534829, - "learning_rate": 3.530797569314461e-07, - "loss": 0.9961, - "num_input_tokens_seen": 144241530, - "step": 6767 - }, - { - "epoch": 0.8138038838453676, - "flos": 14406528336840.0, - "grad_norm": 4.715632337156216, - "learning_rate": 3.5263791348312235e-07, - "loss": 0.9928, - "num_input_tokens_seen": 144260445, - "step": 6768 - }, - { - "epoch": 0.8139241267360068, - "flos": 21227990966880.0, - "grad_norm": 3.275579875831029, - "learning_rate": 3.521963199383171e-07, - "loss": 0.9283, - "num_input_tokens_seen": 144283120, - "step": 6769 - }, - { - "epoch": 0.8140443696266458, - "flos": 13985272379400.0, - "grad_norm": 6.2082903901050805, - "learning_rate": 3.517549763640197e-07, - "loss": 0.979, - "num_input_tokens_seen": 144300480, - "step": 6770 - }, - { - "epoch": 0.8141646125172849, - "flos": 19338072870120.0, - "grad_norm": 9.678923717596636, - "learning_rate": 3.513138828271829e-07, - "loss": 0.9407, - "num_input_tokens_seen": 144320070, - "step": 6771 - }, - { - "epoch": 0.8142848554079241, - "flos": 28330313192280.0, - "grad_norm": 3.2494971546560363, - "learning_rate": 3.508730393947179e-07, - "loss": 0.9373, - "num_input_tokens_seen": 144343045, - "step": 6772 - }, - { - "epoch": 0.8144050982985631, - "flos": 15800114333040.0, - "grad_norm": 4.650493365950857, - "learning_rate": 3.504324461335024e-07, - "loss": 0.9337, - "num_input_tokens_seen": 144362875, - "step": 6773 - }, - { - "epoch": 0.8145253411892022, - "flos": 16375508579520.0, - "grad_norm": 3.6581022347439753, - "learning_rate": 3.499921031103732e-07, - "loss": 1.1045, - "num_input_tokens_seen": 144383365, - "step": 6774 - }, - { - "epoch": 0.8146455840798413, - "flos": 17661756923400.0, - "grad_norm": 5.5568500070392775, - "learning_rate": 3.4955201039212987e-07, - "loss": 1.0049, - "num_input_tokens_seen": 144404005, - "step": 6775 - }, - { - "epoch": 0.8147658269704804, - "flos": 14173818486840.0, - "grad_norm": 6.973957117796135, - "learning_rate": 3.4911216804553465e-07, - "loss": 0.8709, - "num_input_tokens_seen": 144422625, - 
"step": 6776 - }, - { - "epoch": 0.8148860698611194, - "flos": 15038688227040.0, - "grad_norm": 3.174716972892417, - "learning_rate": 3.4867257613731017e-07, - "loss": 0.9235, - "num_input_tokens_seen": 144441540, - "step": 6777 - }, - { - "epoch": 0.8150063127517585, - "flos": 13909245727920.0, - "grad_norm": 3.2237430104138856, - "learning_rate": 3.4823323473414343e-07, - "loss": 1.0704, - "num_input_tokens_seen": 144460780, - "step": 6778 - }, - { - "epoch": 0.8151265556423977, - "flos": 16088332702800.0, - "grad_norm": 10.009539059935918, - "learning_rate": 3.477941439026812e-07, - "loss": 0.9673, - "num_input_tokens_seen": 144478720, - "step": 6779 - }, - { - "epoch": 0.8152467985330367, - "flos": 12731837452800.0, - "grad_norm": 5.057195639623482, - "learning_rate": 3.473553037095349e-07, - "loss": 0.9436, - "num_input_tokens_seen": 144497465, - "step": 6780 - }, - { - "epoch": 0.8153670414236758, - "flos": 17762716971240.0, - "grad_norm": 3.4718248640092058, - "learning_rate": 3.469167142212743e-07, - "loss": 1.055, - "num_input_tokens_seen": 144519030, - "step": 6781 - }, - { - "epoch": 0.8154872843143149, - "flos": 22145088439920.0, - "grad_norm": 6.009795458164496, - "learning_rate": 3.4647837550443337e-07, - "loss": 0.8607, - "num_input_tokens_seen": 144537315, - "step": 6782 - }, - { - "epoch": 0.815607527204954, - "flos": 13755046115760.0, - "grad_norm": 4.965999777822685, - "learning_rate": 3.460402876255086e-07, - "loss": 0.9667, - "num_input_tokens_seen": 144554425, - "step": 6783 - }, - { - "epoch": 0.815727770095593, - "flos": 18605106239880.0, - "grad_norm": 8.991015294062535, - "learning_rate": 3.456024506509574e-07, - "loss": 0.9331, - "num_input_tokens_seen": 144575065, - "step": 6784 - }, - { - "epoch": 0.8158480129862322, - "flos": 18028056269160.0, - "grad_norm": 3.122548171466676, - "learning_rate": 3.4516486464719873e-07, - "loss": 0.968, - "num_input_tokens_seen": 144594175, - "step": 6785 - }, - { - "epoch": 0.8159682558768713, - "flos": 24558234373440.0, - "grad_norm": 5.03654570899868, - "learning_rate": 3.4472752968061445e-07, - "loss": 0.8528, - "num_input_tokens_seen": 144618325, - "step": 6786 - }, - { - "epoch": 0.8160884987675103, - "flos": 13223171719080.0, - "grad_norm": 6.72147530573791, - "learning_rate": 3.442904458175475e-07, - "loss": 0.9566, - "num_input_tokens_seen": 144635365, - "step": 6787 - }, - { - "epoch": 0.8162087416581495, - "flos": 22406962981560.0, - "grad_norm": 3.180440977458345, - "learning_rate": 3.438536131243044e-07, - "loss": 0.9832, - "num_input_tokens_seen": 144656245, - "step": 6788 - }, - { - "epoch": 0.8163289845487885, - "flos": 26836104425400.0, - "grad_norm": 4.506411329706795, - "learning_rate": 3.434170316671503e-07, - "loss": 0.8469, - "num_input_tokens_seen": 144680995, - "step": 6789 - }, - { - "epoch": 0.8164492274394276, - "flos": 9846924876360.0, - "grad_norm": 10.807508429992339, - "learning_rate": 3.4298070151231583e-07, - "loss": 1.1118, - "num_input_tokens_seen": 144696115, - "step": 6790 - }, - { - "epoch": 0.8165694703300668, - "flos": 20650603719000.0, - "grad_norm": 3.247499237412854, - "learning_rate": 3.425446227259916e-07, - "loss": 0.8278, - "num_input_tokens_seen": 144716800, - "step": 6791 - }, - { - "epoch": 0.8166897132207058, - "flos": 17870514546960.0, - "grad_norm": 4.3270218197241785, - "learning_rate": 3.421087953743296e-07, - "loss": 1.0519, - "num_input_tokens_seen": 144736285, - "step": 6792 - }, - { - "epoch": 0.8168099561113449, - "flos": 16454938664160.0, - "grad_norm": 
7.044405553144138, - "learning_rate": 3.416732195234464e-07, - "loss": 1.0169, - "num_input_tokens_seen": 144756060, - "step": 6793 - }, - { - "epoch": 0.816930199001984, - "flos": 13048024713360.0, - "grad_norm": 2.6096951408214992, - "learning_rate": 3.4123789523941613e-07, - "loss": 1.0191, - "num_input_tokens_seen": 144775605, - "step": 6794 - }, - { - "epoch": 0.8170504418926231, - "flos": 15091406544840.0, - "grad_norm": 3.5752737353970563, - "learning_rate": 3.4080282258827884e-07, - "loss": 0.8684, - "num_input_tokens_seen": 144793700, - "step": 6795 - }, - { - "epoch": 0.8171706847832622, - "flos": 13544816737320.0, - "grad_norm": 4.920574876337872, - "learning_rate": 3.403680016360342e-07, - "loss": 0.9375, - "num_input_tokens_seen": 144812025, - "step": 6796 - }, - { - "epoch": 0.8172909276739013, - "flos": 15248641651440.0, - "grad_norm": 3.393723719935944, - "learning_rate": 3.3993343244864403e-07, - "loss": 0.8934, - "num_input_tokens_seen": 144831335, - "step": 6797 - }, - { - "epoch": 0.8174111705645404, - "flos": 19653922853520.0, - "grad_norm": 6.172549903763689, - "learning_rate": 3.394991150920323e-07, - "loss": 0.9476, - "num_input_tokens_seen": 144854175, - "step": 6798 - }, - { - "epoch": 0.8175314134551794, - "flos": 9926569591920.0, - "grad_norm": 16.199220162918916, - "learning_rate": 3.3906504963208396e-07, - "loss": 0.9668, - "num_input_tokens_seen": 144870590, - "step": 6799 - }, - { - "epoch": 0.8176516563458186, - "flos": 16137984864600.0, - "grad_norm": 5.606122532376764, - "learning_rate": 3.3863123613464774e-07, - "loss": 0.8809, - "num_input_tokens_seen": 144889210, - "step": 6800 - }, - { - "epoch": 0.8177718992364577, - "flos": 15590344878000.0, - "grad_norm": 3.3559700676875597, - "learning_rate": 3.381976746655317e-07, - "loss": 0.9662, - "num_input_tokens_seen": 144908685, - "step": 6801 - }, - { - "epoch": 0.8178921421270967, - "flos": 15641990041200.0, - "grad_norm": 3.5525981421876986, - "learning_rate": 3.3776436529050756e-07, - "loss": 0.8944, - "num_input_tokens_seen": 144927955, - "step": 6802 - }, - { - "epoch": 0.8180123850177359, - "flos": 23666683527960.0, - "grad_norm": 8.56844908382774, - "learning_rate": 3.373313080753073e-07, - "loss": 0.9474, - "num_input_tokens_seen": 144951735, - "step": 6803 - }, - { - "epoch": 0.8181326279083749, - "flos": 15695076297720.0, - "grad_norm": 5.050726115079968, - "learning_rate": 3.3689850308562527e-07, - "loss": 1.0081, - "num_input_tokens_seen": 144971900, - "step": 6804 - }, - { - "epoch": 0.818252870799014, - "flos": 11105940206880.0, - "grad_norm": 3.358063197728924, - "learning_rate": 3.364659503871183e-07, - "loss": 1.0, - "num_input_tokens_seen": 144989555, - "step": 6805 - }, - { - "epoch": 0.8183731136896532, - "flos": 13308672792600.0, - "grad_norm": 4.7398488847168085, - "learning_rate": 3.3603365004540417e-07, - "loss": 1.0586, - "num_input_tokens_seen": 145007570, - "step": 6806 - }, - { - "epoch": 0.8184933565802922, - "flos": 18893569902120.0, - "grad_norm": 4.642688639171592, - "learning_rate": 3.356016021260624e-07, - "loss": 0.9837, - "num_input_tokens_seen": 145027620, - "step": 6807 - }, - { - "epoch": 0.8186135994709313, - "flos": 12417091285560.0, - "grad_norm": 4.906596760735585, - "learning_rate": 3.35169806694634e-07, - "loss": 0.8617, - "num_input_tokens_seen": 145045590, - "step": 6808 - }, - { - "epoch": 0.8187338423615703, - "flos": 45447521398560.0, - "grad_norm": 0.730282071172992, - "learning_rate": 3.3473826381662186e-07, - "loss": 0.8515, - "num_input_tokens_seen": 
145116450, - "step": 6809 - }, - { - "epoch": 0.8188540852522095, - "flos": 12416968639320.0, - "grad_norm": 3.7371892181093966, - "learning_rate": 3.3430697355749216e-07, - "loss": 1.0425, - "num_input_tokens_seen": 145133860, - "step": 6810 - }, - { - "epoch": 0.8189743281428485, - "flos": 10162192290120.0, - "grad_norm": 4.998240161182964, - "learning_rate": 3.3387593598266907e-07, - "loss": 0.9705, - "num_input_tokens_seen": 145150190, - "step": 6811 - }, - { - "epoch": 0.8190945710334876, - "flos": 17843097564240.0, - "grad_norm": 5.266336907956097, - "learning_rate": 3.3344515115754225e-07, - "loss": 1.0064, - "num_input_tokens_seen": 145168890, - "step": 6812 - }, - { - "epoch": 0.8192148139241268, - "flos": 15275322756720.0, - "grad_norm": 4.604528536607585, - "learning_rate": 3.33014619147461e-07, - "loss": 1.0156, - "num_input_tokens_seen": 145186635, - "step": 6813 - }, - { - "epoch": 0.8193350568147658, - "flos": 17032571204520.0, - "grad_norm": 3.098290704355595, - "learning_rate": 3.325843400177362e-07, - "loss": 0.9283, - "num_input_tokens_seen": 145207695, - "step": 6814 - }, - { - "epoch": 0.8194552997054049, - "flos": 14594369228400.0, - "grad_norm": 3.3716203227482544, - "learning_rate": 3.32154313833642e-07, - "loss": 0.955, - "num_input_tokens_seen": 145227570, - "step": 6815 - }, - { - "epoch": 0.819575542596044, - "flos": 18527117248560.0, - "grad_norm": 5.6616374118795125, - "learning_rate": 3.3172454066041164e-07, - "loss": 0.8127, - "num_input_tokens_seen": 145246795, - "step": 6816 - }, - { - "epoch": 0.8196957854866831, - "flos": 20730064465200.0, - "grad_norm": 5.391593514387898, - "learning_rate": 3.3129502056324234e-07, - "loss": 0.9737, - "num_input_tokens_seen": 145267880, - "step": 6817 - }, - { - "epoch": 0.8198160283773221, - "flos": 49434679670400.0, - "grad_norm": 0.7828783993574877, - "learning_rate": 3.3086575360729165e-07, - "loss": 0.8519, - "num_input_tokens_seen": 145325135, - "step": 6818 - }, - { - "epoch": 0.8199362712679613, - "flos": 11761623061680.0, - "grad_norm": 5.945505307012423, - "learning_rate": 3.3043673985767906e-07, - "loss": 0.938, - "num_input_tokens_seen": 145343920, - "step": 6819 - }, - { - "epoch": 0.8200565141586004, - "flos": 15454701057720.0, - "grad_norm": 3.4238655884733746, - "learning_rate": 3.3000797937948564e-07, - "loss": 0.9929, - "num_input_tokens_seen": 145361935, - "step": 6820 - }, - { - "epoch": 0.8201767570492394, - "flos": 49991548786560.0, - "grad_norm": 0.9509182068882469, - "learning_rate": 3.295794722377534e-07, - "loss": 0.9069, - "num_input_tokens_seen": 145425260, - "step": 6821 - }, - { - "epoch": 0.8202969999398786, - "flos": 16428533512920.0, - "grad_norm": 12.257571624664857, - "learning_rate": 3.291512184974876e-07, - "loss": 1.0233, - "num_input_tokens_seen": 145445370, - "step": 6822 - }, - { - "epoch": 0.8204172428305176, - "flos": 20100173530440.0, - "grad_norm": 2.4655710347389608, - "learning_rate": 3.2872321822365346e-07, - "loss": 0.8941, - "num_input_tokens_seen": 145465305, - "step": 6823 - }, - { - "epoch": 0.8205374857211567, - "flos": 14829654649440.0, - "grad_norm": 57.667592888304696, - "learning_rate": 3.282954714811783e-07, - "loss": 0.9553, - "num_input_tokens_seen": 145483930, - "step": 6824 - }, - { - "epoch": 0.8206577286117959, - "flos": 9270764090880.0, - "grad_norm": 10.226161124189792, - "learning_rate": 3.2786797833495093e-07, - "loss": 0.933, - "num_input_tokens_seen": 145499005, - "step": 6825 - }, - { - "epoch": 0.8207779715024349, - "flos": 17976533752200.0, - 
"grad_norm": 2.7886936146820798, - "learning_rate": 3.274407388498213e-07, - "loss": 0.9498, - "num_input_tokens_seen": 145516855, - "step": 6826 - }, - { - "epoch": 0.820898214393074, - "flos": 13912097253000.0, - "grad_norm": 3.432769294505076, - "learning_rate": 3.270137530906021e-07, - "loss": 0.9714, - "num_input_tokens_seen": 145535810, - "step": 6827 - }, - { - "epoch": 0.8210184572837131, - "flos": 11027552615280.0, - "grad_norm": 8.721182316411792, - "learning_rate": 3.265870211220665e-07, - "loss": 1.0556, - "num_input_tokens_seen": 145553365, - "step": 6828 - }, - { - "epoch": 0.8211387001743522, - "flos": 14777120301000.0, - "grad_norm": 3.14270603448322, - "learning_rate": 3.2616054300894934e-07, - "loss": 1.0354, - "num_input_tokens_seen": 145572535, - "step": 6829 - }, - { - "epoch": 0.8212589430649913, - "flos": 19728600396360.0, - "grad_norm": 11.711287988834613, - "learning_rate": 3.2573431881594693e-07, - "loss": 1.0759, - "num_input_tokens_seen": 145591800, - "step": 6830 - }, - { - "epoch": 0.8213791859556304, - "flos": 15956950839360.0, - "grad_norm": 6.126311181305235, - "learning_rate": 3.2530834860771663e-07, - "loss": 0.8699, - "num_input_tokens_seen": 145610900, - "step": 6831 - }, - { - "epoch": 0.8214994288462695, - "flos": 11787782920440.0, - "grad_norm": 3.426179312499053, - "learning_rate": 3.248826324488794e-07, - "loss": 0.9611, - "num_input_tokens_seen": 145627915, - "step": 6832 - }, - { - "epoch": 0.8216196717369085, - "flos": 17944026950520.0, - "grad_norm": 5.474496492548317, - "learning_rate": 3.244571704040138e-07, - "loss": 1.1032, - "num_input_tokens_seen": 145647795, - "step": 6833 - }, - { - "epoch": 0.8217399146275477, - "flos": 18052499080560.0, - "grad_norm": 44.55917983774499, - "learning_rate": 3.2403196253766374e-07, - "loss": 0.9762, - "num_input_tokens_seen": 145666595, - "step": 6834 - }, - { - "epoch": 0.8218601575181868, - "flos": 18238009693560.0, - "grad_norm": 6.172366352079026, - "learning_rate": 3.2360700891433254e-07, - "loss": 1.0111, - "num_input_tokens_seen": 145685340, - "step": 6835 - }, - { - "epoch": 0.8219804004088258, - "flos": 48449404905240.0, - "grad_norm": 0.8051307346380812, - "learning_rate": 3.231823095984847e-07, - "loss": 0.8211, - "num_input_tokens_seen": 145739700, - "step": 6836 - }, - { - "epoch": 0.822100643299465, - "flos": 13807181863920.0, - "grad_norm": 3.3314586294949877, - "learning_rate": 3.2275786465454814e-07, - "loss": 0.9815, - "num_input_tokens_seen": 145756070, - "step": 6837 - }, - { - "epoch": 0.822220886190104, - "flos": 17556565580280.0, - "grad_norm": 2.9555460375996674, - "learning_rate": 3.2233367414690917e-07, - "loss": 0.9833, - "num_input_tokens_seen": 145777980, - "step": 6838 - }, - { - "epoch": 0.8223411290807431, - "flos": 19811556560400.0, - "grad_norm": 6.073109889241808, - "learning_rate": 3.219097381399183e-07, - "loss": 1.0586, - "num_input_tokens_seen": 145794875, - "step": 6839 - }, - { - "epoch": 0.8224613719713821, - "flos": 16504805456880.0, - "grad_norm": 3.960416680579643, - "learning_rate": 3.2148605669788584e-07, - "loss": 1.0295, - "num_input_tokens_seen": 145814485, - "step": 6840 - }, - { - "epoch": 0.8225816148620213, - "flos": 11106369468720.0, - "grad_norm": 7.108595585280426, - "learning_rate": 3.2106262988508405e-07, - "loss": 0.9868, - "num_input_tokens_seen": 145832255, - "step": 6841 - }, - { - "epoch": 0.8227018577526604, - "flos": 13124971211640.0, - "grad_norm": 5.939873471951484, - "learning_rate": 3.206394577657465e-07, - "loss": 0.9645, - 
"num_input_tokens_seen": 145849755, - "step": 6842 - }, - { - "epoch": 0.8228221006432994, - "flos": 15799685071200.0, - "grad_norm": 3.620257333492409, - "learning_rate": 3.202165404040675e-07, - "loss": 0.9248, - "num_input_tokens_seen": 145867395, - "step": 6843 - }, - { - "epoch": 0.8229423435339386, - "flos": 17136965347080.0, - "grad_norm": 5.729344627621954, - "learning_rate": 3.1979387786420396e-07, - "loss": 0.9674, - "num_input_tokens_seen": 145887355, - "step": 6844 - }, - { - "epoch": 0.8230625864245776, - "flos": 16979576932680.0, - "grad_norm": 10.660672967506187, - "learning_rate": 3.1937147021027346e-07, - "loss": 1.0471, - "num_input_tokens_seen": 145905530, - "step": 6845 - }, - { - "epoch": 0.8231828293152167, - "flos": 11735003279520.0, - "grad_norm": 3.792442968916652, - "learning_rate": 3.189493175063547e-07, - "loss": 0.9923, - "num_input_tokens_seen": 145922485, - "step": 6846 - }, - { - "epoch": 0.8233030722058559, - "flos": 13387275015120.0, - "grad_norm": 3.0589311006885667, - "learning_rate": 3.1852741981648776e-07, - "loss": 0.8979, - "num_input_tokens_seen": 145940855, - "step": 6847 - }, - { - "epoch": 0.8234233150964949, - "flos": 20257071359880.0, - "grad_norm": 33.55272749659761, - "learning_rate": 3.1810577720467404e-07, - "loss": 0.9122, - "num_input_tokens_seen": 145962305, - "step": 6848 - }, - { - "epoch": 0.823543557987134, - "flos": 24085425237480.0, - "grad_norm": 3.178671334718737, - "learning_rate": 3.176843897348769e-07, - "loss": 0.7971, - "num_input_tokens_seen": 145985220, - "step": 6849 - }, - { - "epoch": 0.8236638008777731, - "flos": 12102498426120.0, - "grad_norm": 4.580121851470374, - "learning_rate": 3.1726325747102034e-07, - "loss": 0.9808, - "num_input_tokens_seen": 146003315, - "step": 6850 - }, - { - "epoch": 0.8237840437684122, - "flos": 44117825624160.0, - "grad_norm": 9.14794753771752, - "learning_rate": 3.1684238047698974e-07, - "loss": 0.8712, - "num_input_tokens_seen": 146031305, - "step": 6851 - }, - { - "epoch": 0.8239042866590512, - "flos": 19445103906840.0, - "grad_norm": 3.313633562860567, - "learning_rate": 3.1642175881663155e-07, - "loss": 0.745, - "num_input_tokens_seen": 146050755, - "step": 6852 - }, - { - "epoch": 0.8240245295496904, - "flos": 15432895140480.0, - "grad_norm": 7.734194975019772, - "learning_rate": 3.160013925537537e-07, - "loss": 1.0659, - "num_input_tokens_seen": 146071310, - "step": 6853 - }, - { - "epoch": 0.8241447724403295, - "flos": 14198598575400.0, - "grad_norm": 4.749557072919117, - "learning_rate": 3.155812817521266e-07, - "loss": 0.9758, - "num_input_tokens_seen": 146091405, - "step": 6854 - }, - { - "epoch": 0.8242650153309685, - "flos": 15825660960600.0, - "grad_norm": 5.6722025290676745, - "learning_rate": 3.151614264754787e-07, - "loss": 0.9923, - "num_input_tokens_seen": 146109070, - "step": 6855 - }, - { - "epoch": 0.8243852582216077, - "flos": 15852311404320.0, - "grad_norm": 4.623970329519467, - "learning_rate": 3.147418267875035e-07, - "loss": 1.0214, - "num_input_tokens_seen": 146126920, - "step": 6856 - }, - { - "epoch": 0.8245055011122467, - "flos": 17530804321800.0, - "grad_norm": 3.8991719320486844, - "learning_rate": 3.1432248275185315e-07, - "loss": 0.8672, - "num_input_tokens_seen": 146147150, - "step": 6857 - }, - { - "epoch": 0.8246257440028858, - "flos": 12385443007560.0, - "grad_norm": 5.860578977866482, - "learning_rate": 3.139033944321412e-07, - "loss": 1.0049, - "num_input_tokens_seen": 146164230, - "step": 6858 - }, - { - "epoch": 0.824745986893525, - "flos": 
17792893494360.0, - "grad_norm": 3.3069830963284126, - "learning_rate": 3.1348456189194507e-07, - "loss": 1.0143, - "num_input_tokens_seen": 146184410, - "step": 6859 - }, - { - "epoch": 0.824866229784164, - "flos": 13310389839960.0, - "grad_norm": 3.071151812811673, - "learning_rate": 3.1306598519479876e-07, - "loss": 1.056, - "num_input_tokens_seen": 146203950, - "step": 6860 - }, - { - "epoch": 0.8249864726748031, - "flos": 16953447735480.0, - "grad_norm": 4.336880747979172, - "learning_rate": 3.1264766440420177e-07, - "loss": 1.0058, - "num_input_tokens_seen": 146226140, - "step": 6861 - }, - { - "epoch": 0.8251067155654422, - "flos": 14409993093120.0, - "grad_norm": 5.445139287002528, - "learning_rate": 3.122295995836124e-07, - "loss": 0.9065, - "num_input_tokens_seen": 146245730, - "step": 6862 - }, - { - "epoch": 0.8252269584560813, - "flos": 17792525555640.0, - "grad_norm": 3.673478965249411, - "learning_rate": 3.118117907964508e-07, - "loss": 0.997, - "num_input_tokens_seen": 146267395, - "step": 6863 - }, - { - "epoch": 0.8253472013467203, - "flos": 12128474315520.0, - "grad_norm": 3.198240994751686, - "learning_rate": 3.1139423810609856e-07, - "loss": 1.0428, - "num_input_tokens_seen": 146283810, - "step": 6864 - }, - { - "epoch": 0.8254674442373595, - "flos": 15927019608720.0, - "grad_norm": 4.956823273993586, - "learning_rate": 3.1097694157589714e-07, - "loss": 0.9758, - "num_input_tokens_seen": 146303415, - "step": 6865 - }, - { - "epoch": 0.8255876871279986, - "flos": 17631764369640.0, - "grad_norm": 3.316840153538602, - "learning_rate": 3.105599012691511e-07, - "loss": 0.9696, - "num_input_tokens_seen": 146321565, - "step": 6866 - }, - { - "epoch": 0.8257079300186376, - "flos": 19550387234640.0, - "grad_norm": 2.4644986015280255, - "learning_rate": 3.101431172491249e-07, - "loss": 1.0472, - "num_input_tokens_seen": 146342830, - "step": 6867 - }, - { - "epoch": 0.8258281729092768, - "flos": 11656523703240.0, - "grad_norm": 10.69451566950432, - "learning_rate": 3.097265895790444e-07, - "loss": 0.9392, - "num_input_tokens_seen": 146360760, - "step": 6868 - }, - { - "epoch": 0.8259484157999158, - "flos": 15222573777360.0, - "grad_norm": 6.1220992089613535, - "learning_rate": 3.093103183220962e-07, - "loss": 1.0528, - "num_input_tokens_seen": 146380525, - "step": 6869 - }, - { - "epoch": 0.8260686586905549, - "flos": 41738775345360.0, - "grad_norm": 0.9341267935461148, - "learning_rate": 3.0889430354142796e-07, - "loss": 0.8719, - "num_input_tokens_seen": 146441755, - "step": 6870 - }, - { - "epoch": 0.826188901581194, - "flos": 19601879090040.0, - "grad_norm": 5.9496486656259595, - "learning_rate": 3.084785453001497e-07, - "loss": 0.924, - "num_input_tokens_seen": 146462390, - "step": 6871 - }, - { - "epoch": 0.8263091444718331, - "flos": 16848409700160.0, - "grad_norm": 21.1166318311557, - "learning_rate": 3.080630436613314e-07, - "loss": 1.0366, - "num_input_tokens_seen": 146479880, - "step": 6872 - }, - { - "epoch": 0.8264293873624722, - "flos": 12154787482080.0, - "grad_norm": 4.837403501982361, - "learning_rate": 3.076477986880039e-07, - "loss": 1.0755, - "num_input_tokens_seen": 146497395, - "step": 6873 - }, - { - "epoch": 0.8265496302531112, - "flos": 17137915855440.0, - "grad_norm": 3.7062066369953524, - "learning_rate": 3.0723281044315986e-07, - "loss": 0.9189, - "num_input_tokens_seen": 146519070, - "step": 6874 - }, - { - "epoch": 0.8266698731437504, - "flos": 9951778942320.0, - "grad_norm": 4.463718596267583, - "learning_rate": 3.068180789897521e-07, - "loss": 
0.9863, - "num_input_tokens_seen": 146537200, - "step": 6875 - }, - { - "epoch": 0.8267901160343895, - "flos": 21938201171520.0, - "grad_norm": 17.670592729507106, - "learning_rate": 3.064036043906966e-07, - "loss": 1.0443, - "num_input_tokens_seen": 146560360, - "step": 6876 - }, - { - "epoch": 0.8269103589250285, - "flos": 29014792800000.0, - "grad_norm": 3.4668161311084935, - "learning_rate": 3.059893867088668e-07, - "loss": 0.9021, - "num_input_tokens_seen": 146584225, - "step": 6877 - }, - { - "epoch": 0.8270306018156677, - "flos": 21568437069480.0, - "grad_norm": 3.9763255470291954, - "learning_rate": 3.055754260071004e-07, - "loss": 0.8942, - "num_input_tokens_seen": 146606240, - "step": 6878 - }, - { - "epoch": 0.8271508447063067, - "flos": 17948135599560.0, - "grad_norm": 5.180120895919118, - "learning_rate": 3.051617223481948e-07, - "loss": 0.9685, - "num_input_tokens_seen": 146627280, - "step": 6879 - }, - { - "epoch": 0.8272710875969458, - "flos": 12443312467440.0, - "grad_norm": 3.9232707788433334, - "learning_rate": 3.047482757949078e-07, - "loss": 0.9793, - "num_input_tokens_seen": 146644630, - "step": 6880 - }, - { - "epoch": 0.827391330487585, - "flos": 14016368749320.0, - "grad_norm": 3.745299635997542, - "learning_rate": 3.043350864099605e-07, - "loss": 1.0757, - "num_input_tokens_seen": 146662910, - "step": 6881 - }, - { - "epoch": 0.827511573378224, - "flos": 11917417074960.0, - "grad_norm": 6.260840817860013, - "learning_rate": 3.039221542560315e-07, - "loss": 1.0352, - "num_input_tokens_seen": 146679195, - "step": 6882 - }, - { - "epoch": 0.8276318162688631, - "flos": 13020147807240.0, - "grad_norm": 2.6591141461866488, - "learning_rate": 3.0350947939576356e-07, - "loss": 0.973, - "num_input_tokens_seen": 146698070, - "step": 6883 - }, - { - "epoch": 0.8277520591595022, - "flos": 13726709286240.0, - "grad_norm": 4.804622792374231, - "learning_rate": 3.0309706189175876e-07, - "loss": 0.9502, - "num_input_tokens_seen": 146717625, - "step": 6884 - }, - { - "epoch": 0.8278723020501413, - "flos": 48635712718800.0, - "grad_norm": 0.8002810148564787, - "learning_rate": 3.0268490180658045e-07, - "loss": 0.8231, - "num_input_tokens_seen": 146780125, - "step": 6885 - }, - { - "epoch": 0.8279925449407803, - "flos": 12888122051040.0, - "grad_norm": 5.190635928257893, - "learning_rate": 3.0227299920275305e-07, - "loss": 1.0041, - "num_input_tokens_seen": 146796160, - "step": 6886 - }, - { - "epoch": 0.8281127878314195, - "flos": 14645953068480.0, - "grad_norm": 3.522790413791248, - "learning_rate": 3.018613541427613e-07, - "loss": 1.0764, - "num_input_tokens_seen": 146815400, - "step": 6887 - }, - { - "epoch": 0.8282330307220586, - "flos": 12757844003760.0, - "grad_norm": 2.98279997321926, - "learning_rate": 3.0144996668905243e-07, - "loss": 0.9649, - "num_input_tokens_seen": 146832500, - "step": 6888 - }, - { - "epoch": 0.8283532736126976, - "flos": 14252604678720.0, - "grad_norm": 16.16199501942172, - "learning_rate": 3.010388369040331e-07, - "loss": 1.0507, - "num_input_tokens_seen": 146850880, - "step": 6889 - }, - { - "epoch": 0.8284735165033368, - "flos": 22721678487240.0, - "grad_norm": 3.914941258872833, - "learning_rate": 3.0062796485007156e-07, - "loss": 1.0609, - "num_input_tokens_seen": 146871540, - "step": 6890 - }, - { - "epoch": 0.8285937593939758, - "flos": 18972294770880.0, - "grad_norm": 6.139581567379797, - "learning_rate": 3.002173505894965e-07, - "loss": 0.8737, - "num_input_tokens_seen": 146890410, - "step": 6891 - }, - { - "epoch": 0.8287140022846149, - 
"flos": 14331360209040.0, - "grad_norm": 14.431303205429206, - "learning_rate": 2.998069941845973e-07, - "loss": 0.8405, - "num_input_tokens_seen": 146909200, - "step": 6892 - }, - { - "epoch": 0.8288342451752541, - "flos": 50673514146360.0, - "grad_norm": 1.0879687515778154, - "learning_rate": 2.993968956976258e-07, - "loss": 0.848, - "num_input_tokens_seen": 146976665, - "step": 6893 - }, - { - "epoch": 0.8289544880658931, - "flos": 17478177988680.0, - "grad_norm": 4.150504823796546, - "learning_rate": 2.9898705519079313e-07, - "loss": 0.9246, - "num_input_tokens_seen": 146995490, - "step": 6894 - }, - { - "epoch": 0.8290747309565322, - "flos": 15825998237760.0, - "grad_norm": 5.001922047260445, - "learning_rate": 2.985774727262715e-07, - "loss": 0.9688, - "num_input_tokens_seen": 147014055, - "step": 6895 - }, - { - "epoch": 0.8291949738471713, - "flos": 16531486562160.0, - "grad_norm": 4.147347689655007, - "learning_rate": 2.981681483661949e-07, - "loss": 1.03, - "num_input_tokens_seen": 147033360, - "step": 6896 - }, - { - "epoch": 0.8293152167378104, - "flos": 37589272582560.0, - "grad_norm": 4.412116940218724, - "learning_rate": 2.9775908217265633e-07, - "loss": 0.9306, - "num_input_tokens_seen": 147058315, - "step": 6897 - }, - { - "epoch": 0.8294354596284494, - "flos": 45356317274880.0, - "grad_norm": 0.8279381667213869, - "learning_rate": 2.9735027420771253e-07, - "loss": 0.751, - "num_input_tokens_seen": 147118370, - "step": 6898 - }, - { - "epoch": 0.8295557025190886, - "flos": 17661205015320.0, - "grad_norm": 3.2273728505329875, - "learning_rate": 2.969417245333774e-07, - "loss": 0.9339, - "num_input_tokens_seen": 147137470, - "step": 6899 - }, - { - "epoch": 0.8296759454097277, - "flos": 17870698516320.0, - "grad_norm": 6.242299333126814, - "learning_rate": 2.9653343321162915e-07, - "loss": 0.9961, - "num_input_tokens_seen": 147156700, - "step": 6900 - }, - { - "epoch": 0.8297961883003667, - "flos": 17161960066560.0, - "grad_norm": 4.527939211744016, - "learning_rate": 2.9612540030440446e-07, - "loss": 0.8677, - "num_input_tokens_seen": 147176965, - "step": 6901 - }, - { - "epoch": 0.8299164311910058, - "flos": 48296554401720.0, - "grad_norm": 0.836636082031956, - "learning_rate": 2.9571762587360206e-07, - "loss": 0.8717, - "num_input_tokens_seen": 147233070, - "step": 6902 - }, - { - "epoch": 0.8300366740816449, - "flos": 17949913970040.0, - "grad_norm": 3.3024011372763873, - "learning_rate": 2.953101099810806e-07, - "loss": 0.9637, - "num_input_tokens_seen": 147252395, - "step": 6903 - }, - { - "epoch": 0.830156916972284, - "flos": 12784279816560.0, - "grad_norm": 27.70611786745224, - "learning_rate": 2.9490285268865965e-07, - "loss": 1.0645, - "num_input_tokens_seen": 147269605, - "step": 6904 - }, - { - "epoch": 0.830277159862923, - "flos": 18735905533680.0, - "grad_norm": 4.702332084452427, - "learning_rate": 2.9449585405812085e-07, - "loss": 1.0253, - "num_input_tokens_seen": 147286705, - "step": 6905 - }, - { - "epoch": 0.8303974027535622, - "flos": 14148302520840.0, - "grad_norm": 6.547830225193609, - "learning_rate": 2.940891141512043e-07, - "loss": 0.9638, - "num_input_tokens_seen": 147304445, - "step": 6906 - }, - { - "epoch": 0.8305176456442013, - "flos": 12155124759240.0, - "grad_norm": 5.05884410577391, - "learning_rate": 2.9368263302961385e-07, - "loss": 0.9352, - "num_input_tokens_seen": 147322865, - "step": 6907 - }, - { - "epoch": 0.8306378885348403, - "flos": 18236292646200.0, - "grad_norm": 5.106352025557103, - "learning_rate": 2.9327641075501075e-07, - 
"loss": 1.0236, - "num_input_tokens_seen": 147341575, - "step": 6908 - }, - { - "epoch": 0.8307581314254795, - "flos": 24216101885040.0, - "grad_norm": 3.7379974289380615, - "learning_rate": 2.9287044738901866e-07, - "loss": 0.8825, - "num_input_tokens_seen": 147359280, - "step": 6909 - }, - { - "epoch": 0.8308783743161186, - "flos": 12440001018960.0, - "grad_norm": 3.92776848898195, - "learning_rate": 2.9246474299322274e-07, - "loss": 1.1454, - "num_input_tokens_seen": 147374560, - "step": 6910 - }, - { - "epoch": 0.8309986172067576, - "flos": 49709186774760.0, - "grad_norm": 0.9048667413909329, - "learning_rate": 2.920592976291678e-07, - "loss": 0.8926, - "num_input_tokens_seen": 147431610, - "step": 6911 - }, - { - "epoch": 0.8311188600973968, - "flos": 15852127434960.0, - "grad_norm": 4.617306057461589, - "learning_rate": 2.916541113583595e-07, - "loss": 1.0319, - "num_input_tokens_seen": 147449830, - "step": 6912 - }, - { - "epoch": 0.8312391029880358, - "flos": 13308151546080.0, - "grad_norm": 4.125184257322375, - "learning_rate": 2.912491842422642e-07, - "loss": 0.8814, - "num_input_tokens_seen": 147467255, - "step": 6913 - }, - { - "epoch": 0.8313593458786749, - "flos": 14462926041840.0, - "grad_norm": 2.598985283252577, - "learning_rate": 2.9084451634230857e-07, - "loss": 0.9241, - "num_input_tokens_seen": 147486275, - "step": 6914 - }, - { - "epoch": 0.831479588769314, - "flos": 22905441391320.0, - "grad_norm": 7.255603597863925, - "learning_rate": 2.9044010771988125e-07, - "loss": 0.9448, - "num_input_tokens_seen": 147505810, - "step": 6915 - }, - { - "epoch": 0.8315998316599531, - "flos": 32292747150600.0, - "grad_norm": 3.7212663359406797, - "learning_rate": 2.900359584363303e-07, - "loss": 0.9661, - "num_input_tokens_seen": 147528635, - "step": 6916 - }, - { - "epoch": 0.8317200745505922, - "flos": 13015456588560.0, - "grad_norm": 13.727822469030219, - "learning_rate": 2.8963206855296494e-07, - "loss": 1.0632, - "num_input_tokens_seen": 147544595, - "step": 6917 - }, - { - "epoch": 0.8318403174412313, - "flos": 17215260954000.0, - "grad_norm": 3.211645934574642, - "learning_rate": 2.892284381310548e-07, - "loss": 1.0008, - "num_input_tokens_seen": 147565730, - "step": 6918 - }, - { - "epoch": 0.8319605603318704, - "flos": 15930423041880.0, - "grad_norm": 5.825452258190017, - "learning_rate": 2.888250672318302e-07, - "loss": 0.9375, - "num_input_tokens_seen": 147582850, - "step": 6919 - }, - { - "epoch": 0.8320808032225094, - "flos": 26707574087040.0, - "grad_norm": 4.756573395129822, - "learning_rate": 2.884219559164831e-07, - "loss": 0.9215, - "num_input_tokens_seen": 147605715, - "step": 6920 - }, - { - "epoch": 0.8322010461131486, - "flos": 9008644256760.0, - "grad_norm": 7.532213837929255, - "learning_rate": 2.880191042461635e-07, - "loss": 1.034, - "num_input_tokens_seen": 147621375, - "step": 6921 - }, - { - "epoch": 0.8323212890037877, - "flos": 11184818383440.0, - "grad_norm": 2.484720937767524, - "learning_rate": 2.876165122819849e-07, - "loss": 1.0298, - "num_input_tokens_seen": 147639075, - "step": 6922 - }, - { - "epoch": 0.8324415318944267, - "flos": 15426854813160.0, - "grad_norm": 3.4444095873008966, - "learning_rate": 2.872141800850201e-07, - "loss": 1.015, - "num_input_tokens_seen": 147655970, - "step": 6923 - }, - { - "epoch": 0.8325617747850659, - "flos": 24395756140080.0, - "grad_norm": 3.4855664871828607, - "learning_rate": 2.868121077163024e-07, - "loss": 0.9606, - "num_input_tokens_seen": 147675245, - "step": 6924 - }, - { - "epoch": 0.8326820176757049, 
- "flos": 13020331776600.0, - "grad_norm": 4.592181710067271, - "learning_rate": 2.864102952368257e-07, - "loss": 0.9511, - "num_input_tokens_seen": 147692890, - "step": 6925 - }, - { - "epoch": 0.832802260566344, - "flos": 25684733362800.0, - "grad_norm": 2.595832815726419, - "learning_rate": 2.860087427075444e-07, - "loss": 0.8201, - "num_input_tokens_seen": 147716860, - "step": 6926 - }, - { - "epoch": 0.8329225034569832, - "flos": 10055467869000.0, - "grad_norm": 5.205977107815086, - "learning_rate": 2.856074501893744e-07, - "loss": 1.0793, - "num_input_tokens_seen": 147731780, - "step": 6927 - }, - { - "epoch": 0.8330427463476222, - "flos": 12810470336880.0, - "grad_norm": 3.9223008855460555, - "learning_rate": 2.8520641774319054e-07, - "loss": 1.0434, - "num_input_tokens_seen": 147749590, - "step": 6928 - }, - { - "epoch": 0.8331629892382613, - "flos": 12784126508760.0, - "grad_norm": 7.179452653599259, - "learning_rate": 2.848056454298309e-07, - "loss": 0.9725, - "num_input_tokens_seen": 147766635, - "step": 6929 - }, - { - "epoch": 0.8332832321289004, - "flos": 12364863552720.0, - "grad_norm": 6.220086802278636, - "learning_rate": 2.844051333100905e-07, - "loss": 0.8711, - "num_input_tokens_seen": 147783900, - "step": 6930 - }, - { - "epoch": 0.8334034750195395, - "flos": 10659628206840.0, - "grad_norm": 6.484476185204505, - "learning_rate": 2.840048814447269e-07, - "loss": 1.0698, - "num_input_tokens_seen": 147801785, - "step": 6931 - }, - { - "epoch": 0.8335237179101785, - "flos": 13780439435520.0, - "grad_norm": 6.589803807488612, - "learning_rate": 2.836048898944587e-07, - "loss": 0.9533, - "num_input_tokens_seen": 147819930, - "step": 6932 - }, - { - "epoch": 0.8336439608008177, - "flos": 15458441768040.0, - "grad_norm": 4.859262544857851, - "learning_rate": 2.832051587199642e-07, - "loss": 0.9445, - "num_input_tokens_seen": 147836905, - "step": 6933 - }, - { - "epoch": 0.8337642036914568, - "flos": 42730059776280.0, - "grad_norm": 0.8620193026476884, - "learning_rate": 2.828056879818821e-07, - "loss": 0.8254, - "num_input_tokens_seen": 147895700, - "step": 6934 - }, - { - "epoch": 0.8338844465820958, - "flos": 19339452640320.0, - "grad_norm": 5.5175741496113995, - "learning_rate": 2.824064777408117e-07, - "loss": 1.0605, - "num_input_tokens_seen": 147915210, - "step": 6935 - }, - { - "epoch": 0.8340046894727349, - "flos": 21724660344600.0, - "grad_norm": 4.212602671835068, - "learning_rate": 2.8200752805731263e-07, - "loss": 0.9861, - "num_input_tokens_seen": 147937920, - "step": 6936 - }, - { - "epoch": 0.834124932363374, - "flos": 19313630058720.0, - "grad_norm": 2.452668846143228, - "learning_rate": 2.8160883899190625e-07, - "loss": 1.0303, - "num_input_tokens_seen": 147960910, - "step": 6937 - }, - { - "epoch": 0.8342451752540131, - "flos": 17476154325720.0, - "grad_norm": 6.732526698125065, - "learning_rate": 2.8121041060507234e-07, - "loss": 0.97, - "num_input_tokens_seen": 147979660, - "step": 6938 - }, - { - "epoch": 0.8343654181446521, - "flos": 18946288219920.0, - "grad_norm": 2.7011740816208, - "learning_rate": 2.808122429572528e-07, - "loss": 0.9366, - "num_input_tokens_seen": 147999585, - "step": 6939 - }, - { - "epoch": 0.8344856610352913, - "flos": 14750745811320.0, - "grad_norm": 11.953654757460033, - "learning_rate": 2.804143361088489e-07, - "loss": 0.9788, - "num_input_tokens_seen": 148018485, - "step": 6940 - }, - { - "epoch": 0.8346059039259304, - "flos": 18572078191680.0, - "grad_norm": 4.85141113831419, - "learning_rate": 2.8001669012022277e-07, - 
"loss": 0.9784, - "num_input_tokens_seen": 148036175, - "step": 6941 - }, - { - "epoch": 0.8347261468165694, - "flos": 20680810903680.0, - "grad_norm": 2.995567066596252, - "learning_rate": 2.7961930505169795e-07, - "loss": 0.9193, - "num_input_tokens_seen": 148060140, - "step": 6942 - }, - { - "epoch": 0.8348463897072086, - "flos": 18789206421120.0, - "grad_norm": 8.215178086990967, - "learning_rate": 2.792221809635558e-07, - "loss": 0.9727, - "num_input_tokens_seen": 148080490, - "step": 6943 - }, - { - "epoch": 0.8349666325978476, - "flos": 16612449724800.0, - "grad_norm": 3.497636542010173, - "learning_rate": 2.788253179160411e-07, - "loss": 0.9803, - "num_input_tokens_seen": 148101370, - "step": 6944 - }, - { - "epoch": 0.8350868754884867, - "flos": 9087031848360.0, - "grad_norm": 3.53676222261946, - "learning_rate": 2.7842871596935725e-07, - "loss": 0.8671, - "num_input_tokens_seen": 148119605, - "step": 6945 - }, - { - "epoch": 0.8352071183791259, - "flos": 18867532689600.0, - "grad_norm": 5.256965920479884, - "learning_rate": 2.780323751836682e-07, - "loss": 0.9124, - "num_input_tokens_seen": 148140540, - "step": 6946 - }, - { - "epoch": 0.8353273612697649, - "flos": 14672296896600.0, - "grad_norm": 2.2982597069661344, - "learning_rate": 2.7763629561909876e-07, - "loss": 1.0132, - "num_input_tokens_seen": 148161090, - "step": 6947 - }, - { - "epoch": 0.835447604160404, - "flos": 14015234271600.0, - "grad_norm": 3.0191050002851147, - "learning_rate": 2.772404773357335e-07, - "loss": 0.9927, - "num_input_tokens_seen": 148180215, - "step": 6948 - }, - { - "epoch": 0.8355678470510431, - "flos": 16660752777960.0, - "grad_norm": 5.044835673624757, - "learning_rate": 2.7684492039361853e-07, - "loss": 0.9969, - "num_input_tokens_seen": 148199160, - "step": 6949 - }, - { - "epoch": 0.8356880899416822, - "flos": 15064970732040.0, - "grad_norm": 4.895563624102115, - "learning_rate": 2.764496248527586e-07, - "loss": 1.0628, - "num_input_tokens_seen": 148217855, - "step": 6950 - }, - { - "epoch": 0.8358083328323213, - "flos": 19968515712960.0, - "grad_norm": 3.7978337337264727, - "learning_rate": 2.760545907731211e-07, - "loss": 0.9921, - "num_input_tokens_seen": 148238150, - "step": 6951 - }, - { - "epoch": 0.8359285757229604, - "flos": 19786255225320.0, - "grad_norm": 19.579442107032985, - "learning_rate": 2.75659818214631e-07, - "loss": 0.8991, - "num_input_tokens_seen": 148258975, - "step": 6952 - }, - { - "epoch": 0.8360488186135995, - "flos": 15223677593520.0, - "grad_norm": 5.659842851701541, - "learning_rate": 2.752653072371749e-07, - "loss": 1.0073, - "num_input_tokens_seen": 148278130, - "step": 6953 - }, - { - "epoch": 0.8361690615042385, - "flos": 19676771263800.0, - "grad_norm": 2.812721667561279, - "learning_rate": 2.7487105790060105e-07, - "loss": 0.9758, - "num_input_tokens_seen": 148297260, - "step": 6954 - }, - { - "epoch": 0.8362893043948777, - "flos": 27992626630080.0, - "grad_norm": 5.691718580568669, - "learning_rate": 2.7447707026471587e-07, - "loss": 0.9255, - "num_input_tokens_seen": 148319955, - "step": 6955 - }, - { - "epoch": 0.8364095472855168, - "flos": 17630537907240.0, - "grad_norm": 3.7191182443069377, - "learning_rate": 2.740833443892874e-07, - "loss": 1.0328, - "num_input_tokens_seen": 148337845, - "step": 6956 - }, - { - "epoch": 0.8365297901761558, - "flos": 16163224876560.0, - "grad_norm": 5.246078843403058, - "learning_rate": 2.7368988033404327e-07, - "loss": 1.0204, - "num_input_tokens_seen": 148355080, - "step": 6957 - }, - { - "epoch": 
0.836650033066795, - "flos": 20225269774080.0, - "grad_norm": 13.610590883723658, - "learning_rate": 2.732966781586712e-07, - "loss": 1.0795, - "num_input_tokens_seen": 148374545, - "step": 6958 - }, - { - "epoch": 0.836770275957434, - "flos": 15799746394320.0, - "grad_norm": 7.010342450667782, - "learning_rate": 2.729037379228205e-07, - "loss": 0.8874, - "num_input_tokens_seen": 148394450, - "step": 6959 - }, - { - "epoch": 0.8368905188480731, - "flos": 15983018713440.0, - "grad_norm": 2.7288288654808124, - "learning_rate": 2.725110596860998e-07, - "loss": 1.0289, - "num_input_tokens_seen": 148414850, - "step": 6960 - }, - { - "epoch": 0.8370107617387123, - "flos": 9427539274080.0, - "grad_norm": 3.5566596628231553, - "learning_rate": 2.7211864350807776e-07, - "loss": 0.9337, - "num_input_tokens_seen": 148432770, - "step": 6961 - }, - { - "epoch": 0.8371310046293513, - "flos": 17972823703440.0, - "grad_norm": 7.18402652567361, - "learning_rate": 2.717264894482836e-07, - "loss": 0.9556, - "num_input_tokens_seen": 148452830, - "step": 6962 - }, - { - "epoch": 0.8372512475199904, - "flos": 14043233823960.0, - "grad_norm": 3.1177424269134506, - "learning_rate": 2.7133459756620646e-07, - "loss": 1.0289, - "num_input_tokens_seen": 148469745, - "step": 6963 - }, - { - "epoch": 0.8373714904106295, - "flos": 13754678177040.0, - "grad_norm": 3.4683500867002417, - "learning_rate": 2.7094296792129733e-07, - "loss": 0.9488, - "num_input_tokens_seen": 148489065, - "step": 6964 - }, - { - "epoch": 0.8374917333012686, - "flos": 10581424584600.0, - "grad_norm": 3.6949958262336087, - "learning_rate": 2.7055160057296424e-07, - "loss": 0.983, - "num_input_tokens_seen": 148506025, - "step": 6965 - }, - { - "epoch": 0.8376119761919076, - "flos": 21616801445760.0, - "grad_norm": 3.0712817654424445, - "learning_rate": 2.7016049558057896e-07, - "loss": 0.9425, - "num_input_tokens_seen": 148527705, - "step": 6966 - }, - { - "epoch": 0.8377322190825467, - "flos": 20964368716320.0, - "grad_norm": 3.2855945707511767, - "learning_rate": 2.6976965300347074e-07, - "loss": 0.9394, - "num_input_tokens_seen": 148550035, - "step": 6967 - }, - { - "epoch": 0.8378524619731859, - "flos": 18997902721560.0, - "grad_norm": 4.656944274346134, - "learning_rate": 2.693790729009309e-07, - "loss": 0.9086, - "num_input_tokens_seen": 148571365, - "step": 6968 - }, - { - "epoch": 0.8379727048638249, - "flos": 14697935508840.0, - "grad_norm": 2.799353753171308, - "learning_rate": 2.6898875533220946e-07, - "loss": 1.0997, - "num_input_tokens_seen": 148590390, - "step": 6969 - }, - { - "epoch": 0.838092947754464, - "flos": 14252574017160.0, - "grad_norm": 2.838958392336522, - "learning_rate": 2.685987003565171e-07, - "loss": 1.0488, - "num_input_tokens_seen": 148608150, - "step": 6970 - }, - { - "epoch": 0.8382131906451031, - "flos": 12836108949120.0, - "grad_norm": 3.63300583847925, - "learning_rate": 2.6820890803302566e-07, - "loss": 0.971, - "num_input_tokens_seen": 148623395, - "step": 6971 - }, - { - "epoch": 0.8383334335357422, - "flos": 12102743718600.0, - "grad_norm": 11.325247479359998, - "learning_rate": 2.6781937842086557e-07, - "loss": 1.0457, - "num_input_tokens_seen": 148641905, - "step": 6972 - }, - { - "epoch": 0.8384536764263812, - "flos": 14698671386280.0, - "grad_norm": 4.929351650108733, - "learning_rate": 2.6743011157912933e-07, - "loss": 0.9126, - "num_input_tokens_seen": 148661345, - "step": 6973 - }, - { - "epoch": 0.8385739193170204, - "flos": 20650849011480.0, - "grad_norm": 3.3450293544513348, - "learning_rate": 
2.6704110756686725e-07, - "loss": 0.8716, - "num_input_tokens_seen": 148681890, - "step": 6974 - }, - { - "epoch": 0.8386941622076595, - "flos": 16662653794680.0, - "grad_norm": 3.8760784248236386, - "learning_rate": 2.6665236644309085e-07, - "loss": 1.0675, - "num_input_tokens_seen": 148701920, - "step": 6975 - }, - { - "epoch": 0.8388144050982985, - "flos": 16350115259760.0, - "grad_norm": 6.7293078837817015, - "learning_rate": 2.662638882667727e-07, - "loss": 1.0174, - "num_input_tokens_seen": 148720580, - "step": 6976 - }, - { - "epoch": 0.8389346479889377, - "flos": 17268408533640.0, - "grad_norm": 4.935663581920519, - "learning_rate": 2.658756730968443e-07, - "loss": 0.9557, - "num_input_tokens_seen": 148738765, - "step": 6977 - }, - { - "epoch": 0.8390548908795767, - "flos": 15065338670760.0, - "grad_norm": 2.8152089953717594, - "learning_rate": 2.654877209921975e-07, - "loss": 1.1151, - "num_input_tokens_seen": 148756020, - "step": 6978 - }, - { - "epoch": 0.8391751337702158, - "flos": 25423226759880.0, - "grad_norm": 4.315819830781583, - "learning_rate": 2.651000320116843e-07, - "loss": 0.8535, - "num_input_tokens_seen": 148776625, - "step": 6979 - }, - { - "epoch": 0.839295376660855, - "flos": 15144646109160.0, - "grad_norm": 4.664470201113957, - "learning_rate": 2.647126062141163e-07, - "loss": 0.974, - "num_input_tokens_seen": 148795420, - "step": 6980 - }, - { - "epoch": 0.839415619551494, - "flos": 13072866125040.0, - "grad_norm": 6.786553765477641, - "learning_rate": 2.643254436582669e-07, - "loss": 1.0601, - "num_input_tokens_seen": 148814630, - "step": 6981 - }, - { - "epoch": 0.8395358624421331, - "flos": 16507718305080.0, - "grad_norm": 6.8283124326212725, - "learning_rate": 2.6393854440286743e-07, - "loss": 1.0364, - "num_input_tokens_seen": 148833520, - "step": 6982 - }, - { - "epoch": 0.8396561053327722, - "flos": 17340111905160.0, - "grad_norm": 25.665722453927362, - "learning_rate": 2.6355190850661045e-07, - "loss": 0.9345, - "num_input_tokens_seen": 148850075, - "step": 6983 - }, - { - "epoch": 0.8397763482234113, - "flos": 15800512933320.0, - "grad_norm": 3.1645380863640793, - "learning_rate": 2.631655360281486e-07, - "loss": 1.0882, - "num_input_tokens_seen": 148869470, - "step": 6984 - }, - { - "epoch": 0.8398965911140504, - "flos": 15747181384320.0, - "grad_norm": 3.47777791583542, - "learning_rate": 2.6277942702609323e-07, - "loss": 0.8857, - "num_input_tokens_seen": 148888670, - "step": 6985 - }, - { - "epoch": 0.8400168340046895, - "flos": 15300808061160.0, - "grad_norm": 4.278222343056282, - "learning_rate": 2.623935815590186e-07, - "loss": 1.098, - "num_input_tokens_seen": 148906770, - "step": 6986 - }, - { - "epoch": 0.8401370768953286, - "flos": 15983049375000.0, - "grad_norm": 10.038188495514735, - "learning_rate": 2.6200799968545516e-07, - "loss": 1.0292, - "num_input_tokens_seen": 148926785, - "step": 6987 - }, - { - "epoch": 0.8402573197859676, - "flos": 42398076264240.0, - "grad_norm": 0.8011059730688053, - "learning_rate": 2.616226814638969e-07, - "loss": 0.8228, - "num_input_tokens_seen": 148991610, - "step": 6988 - }, - { - "epoch": 0.8403775626766068, - "flos": 16114339253760.0, - "grad_norm": 4.526097386949639, - "learning_rate": 2.612376269527954e-07, - "loss": 0.9913, - "num_input_tokens_seen": 149011035, - "step": 6989 - }, - { - "epoch": 0.8404978055672458, - "flos": 13911637329600.0, - "grad_norm": 3.2991170626774537, - "learning_rate": 2.608528362105635e-07, - "loss": 0.897, - "num_input_tokens_seen": 149030125, - "step": 6990 - }, - { - 
"epoch": 0.8406180484578849, - "flos": 19601572474440.0, - "grad_norm": 3.1543177364828723, - "learning_rate": 2.6046830929557374e-07, - "loss": 0.9483, - "num_input_tokens_seen": 149049495, - "step": 6991 - }, - { - "epoch": 0.8407382913485241, - "flos": 15721144171800.0, - "grad_norm": 6.303142599403727, - "learning_rate": 2.6008404626615776e-07, - "loss": 1.0602, - "num_input_tokens_seen": 149067715, - "step": 6992 - }, - { - "epoch": 0.8408585342391631, - "flos": 9821531556600.0, - "grad_norm": 10.119298389223273, - "learning_rate": 2.597000471806092e-07, - "loss": 0.9589, - "num_input_tokens_seen": 149084000, - "step": 6993 - }, - { - "epoch": 0.8409787771298022, - "flos": 14326393036320.0, - "grad_norm": 4.114482302169759, - "learning_rate": 2.593163120971793e-07, - "loss": 0.9519, - "num_input_tokens_seen": 149102585, - "step": 6994 - }, - { - "epoch": 0.8410990200204413, - "flos": 16449756860520.0, - "grad_norm": 5.617950769596453, - "learning_rate": 2.5893284107408165e-07, - "loss": 0.9199, - "num_input_tokens_seen": 149119675, - "step": 6995 - }, - { - "epoch": 0.8412192629110804, - "flos": 17086791938760.0, - "grad_norm": 5.075136026899015, - "learning_rate": 2.5854963416948726e-07, - "loss": 1.0134, - "num_input_tokens_seen": 149141660, - "step": 6996 - }, - { - "epoch": 0.8413395058017195, - "flos": 18210562049280.0, - "grad_norm": 3.876172288338938, - "learning_rate": 2.5816669144152816e-07, - "loss": 0.9088, - "num_input_tokens_seen": 149162560, - "step": 6997 - }, - { - "epoch": 0.8414597486923585, - "flos": 45555293860800.0, - "grad_norm": 0.9061200607802988, - "learning_rate": 2.5778401294829777e-07, - "loss": 0.9624, - "num_input_tokens_seen": 149221020, - "step": 6998 - }, - { - "epoch": 0.8415799915829977, - "flos": 13544387475480.0, - "grad_norm": 7.527454226939668, - "learning_rate": 2.574015987478473e-07, - "loss": 0.8783, - "num_input_tokens_seen": 149238870, - "step": 6999 - }, - { - "epoch": 0.8417002344736367, - "flos": 14062985416680.0, - "grad_norm": 4.136912859143928, - "learning_rate": 2.570194488981887e-07, - "loss": 1.08, - "num_input_tokens_seen": 149255135, - "step": 7000 - }, - { - "epoch": 0.8418204773642758, - "flos": 44499020940000.0, - "grad_norm": 0.8537197959997104, - "learning_rate": 2.566375634572939e-07, - "loss": 0.8649, - "num_input_tokens_seen": 149315495, - "step": 7001 - }, - { - "epoch": 0.841940720254915, - "flos": 12102958349520.0, - "grad_norm": 5.036295380926384, - "learning_rate": 2.562559424830943e-07, - "loss": 0.9735, - "num_input_tokens_seen": 149333175, - "step": 7002 - }, - { - "epoch": 0.842060963145554, - "flos": 11520419959560.0, - "grad_norm": 4.142126192713388, - "learning_rate": 2.5587458603348256e-07, - "loss": 0.9154, - "num_input_tokens_seen": 149350185, - "step": 7003 - }, - { - "epoch": 0.8421812060361931, - "flos": 15563541126480.0, - "grad_norm": 4.496826960537029, - "learning_rate": 2.554934941663085e-07, - "loss": 1.0711, - "num_input_tokens_seen": 149367440, - "step": 7004 - }, - { - "epoch": 0.8423014489268322, - "flos": 19781165406360.0, - "grad_norm": 3.204865964089344, - "learning_rate": 2.5511266693938484e-07, - "loss": 0.9445, - "num_input_tokens_seen": 149385620, - "step": 7005 - }, - { - "epoch": 0.8424216918174713, - "flos": 17869870654200.0, - "grad_norm": 6.2365391543531015, - "learning_rate": 2.547321044104822e-07, - "loss": 0.9927, - "num_input_tokens_seen": 149406835, - "step": 7006 - }, - { - "epoch": 0.8425419347081103, - "flos": 17603979448200.0, - "grad_norm": 4.648838776009772, - 
"learning_rate": 2.5435180663733113e-07, - "loss": 0.9896, - "num_input_tokens_seen": 149426855, - "step": 7007 - }, - { - "epoch": 0.8426621775987495, - "flos": 17655992550120.0, - "grad_norm": 11.229736389164136, - "learning_rate": 2.539717736776241e-07, - "loss": 0.9368, - "num_input_tokens_seen": 149442800, - "step": 7008 - }, - { - "epoch": 0.8427824204893886, - "flos": 16743494311080.0, - "grad_norm": 2.4775355981449962, - "learning_rate": 2.535920055890097e-07, - "loss": 0.9878, - "num_input_tokens_seen": 149463815, - "step": 7009 - }, - { - "epoch": 0.8429026633800276, - "flos": 11420839681920.0, - "grad_norm": 6.146841227327627, - "learning_rate": 2.5321250242910006e-07, - "loss": 0.8698, - "num_input_tokens_seen": 149481450, - "step": 7010 - }, - { - "epoch": 0.8430229062706668, - "flos": 15771961472880.0, - "grad_norm": 3.675478072438213, - "learning_rate": 2.5283326425546493e-07, - "loss": 1.0878, - "num_input_tokens_seen": 149500280, - "step": 7011 - }, - { - "epoch": 0.8431431491613058, - "flos": 25209103363320.0, - "grad_norm": 5.9659912764267755, - "learning_rate": 2.5245429112563443e-07, - "loss": 0.9149, - "num_input_tokens_seen": 149520675, - "step": 7012 - }, - { - "epoch": 0.8432633920519449, - "flos": 18369391557000.0, - "grad_norm": 3.6471052003984528, - "learning_rate": 2.5207558309709865e-07, - "loss": 1.0497, - "num_input_tokens_seen": 149540130, - "step": 7013 - }, - { - "epoch": 0.8433836349425841, - "flos": 46508362891800.0, - "grad_norm": 0.6756580659466035, - "learning_rate": 2.516971402273065e-07, - "loss": 0.8099, - "num_input_tokens_seen": 149605915, - "step": 7014 - }, - { - "epoch": 0.8435038778332231, - "flos": 14357428083120.0, - "grad_norm": 3.417095452051373, - "learning_rate": 2.513189625736687e-07, - "loss": 0.8929, - "num_input_tokens_seen": 149622530, - "step": 7015 - }, - { - "epoch": 0.8436241207238622, - "flos": 14904700131000.0, - "grad_norm": 3.5168990475741038, - "learning_rate": 2.509410501935534e-07, - "loss": 0.9434, - "num_input_tokens_seen": 149637885, - "step": 7016 - }, - { - "epoch": 0.8437443636145013, - "flos": 10370122051560.0, - "grad_norm": 3.873710417740683, - "learning_rate": 2.5056340314429116e-07, - "loss": 0.9677, - "num_input_tokens_seen": 149655070, - "step": 7017 - }, - { - "epoch": 0.8438646065051404, - "flos": 15347332743840.0, - "grad_norm": 4.756898588654089, - "learning_rate": 2.5018602148316904e-07, - "loss": 1.0186, - "num_input_tokens_seen": 149670825, - "step": 7018 - }, - { - "epoch": 0.8439848493957794, - "flos": 16555898712000.0, - "grad_norm": 2.205294359415523, - "learning_rate": 2.498089052674359e-07, - "loss": 1.0183, - "num_input_tokens_seen": 149688520, - "step": 7019 - }, - { - "epoch": 0.8441050922864186, - "flos": 13990454183040.0, - "grad_norm": 13.687530099496014, - "learning_rate": 2.494320545543007e-07, - "loss": 0.9778, - "num_input_tokens_seen": 149707810, - "step": 7020 - }, - { - "epoch": 0.8442253351770577, - "flos": 15511098762720.0, - "grad_norm": 4.076001312240674, - "learning_rate": 2.490554694009308e-07, - "loss": 0.8919, - "num_input_tokens_seen": 149728395, - "step": 7021 - }, - { - "epoch": 0.8443455780676967, - "flos": 24502112622480.0, - "grad_norm": 10.120984823685367, - "learning_rate": 2.4867914986445426e-07, - "loss": 1.0206, - "num_input_tokens_seen": 149750505, - "step": 7022 - }, - { - "epoch": 0.8444658209583359, - "flos": 34469442523800.0, - "grad_norm": 4.06419478057801, - "learning_rate": 2.483030960019581e-07, - "loss": 0.9221, - "num_input_tokens_seen": 149774155, - 
"step": 7023 - }, - { - "epoch": 0.8445860638489749, - "flos": 49040012833560.0, - "grad_norm": 0.7411570975510352, - "learning_rate": 2.479273078704891e-07, - "loss": 0.7959, - "num_input_tokens_seen": 149827240, - "step": 7024 - }, - { - "epoch": 0.844706306739614, - "flos": 44978949105960.0, - "grad_norm": 0.7876420407639594, - "learning_rate": 2.475517855270552e-07, - "loss": 0.8955, - "num_input_tokens_seen": 149887040, - "step": 7025 - }, - { - "epoch": 0.8448265496302532, - "flos": 10581577892400.0, - "grad_norm": 5.426449299957098, - "learning_rate": 2.4717652902862143e-07, - "loss": 0.9469, - "num_input_tokens_seen": 149905735, - "step": 7026 - }, - { - "epoch": 0.8449467925208922, - "flos": 16665382673520.0, - "grad_norm": 3.274733111739795, - "learning_rate": 2.4680153843211495e-07, - "loss": 1.0467, - "num_input_tokens_seen": 149925385, - "step": 7027 - }, - { - "epoch": 0.8450670354115313, - "flos": 16167180217800.0, - "grad_norm": 4.955877556101868, - "learning_rate": 2.464268137944212e-07, - "loss": 0.9425, - "num_input_tokens_seen": 149946400, - "step": 7028 - }, - { - "epoch": 0.8451872783021703, - "flos": 21253568256000.0, - "grad_norm": 3.4671892658657204, - "learning_rate": 2.46052355172385e-07, - "loss": 1.0066, - "num_input_tokens_seen": 149964160, - "step": 7029 - }, - { - "epoch": 0.8453075211928095, - "flos": 15535848189720.0, - "grad_norm": 4.840436823655364, - "learning_rate": 2.456781626228128e-07, - "loss": 0.9623, - "num_input_tokens_seen": 149983385, - "step": 7030 - }, - { - "epoch": 0.8454277640834486, - "flos": 42046346707560.0, - "grad_norm": 1.0649192979171496, - "learning_rate": 2.453042362024675e-07, - "loss": 0.9728, - "num_input_tokens_seen": 150036350, - "step": 7031 - }, - { - "epoch": 0.8455480069740876, - "flos": 19287746154000.0, - "grad_norm": 6.990363163279932, - "learning_rate": 2.449305759680751e-07, - "loss": 0.9603, - "num_input_tokens_seen": 150057395, - "step": 7032 - }, - { - "epoch": 0.8456682498647268, - "flos": 19365673822200.0, - "grad_norm": 3.3341431269871054, - "learning_rate": 2.445571819763188e-07, - "loss": 0.9695, - "num_input_tokens_seen": 150079415, - "step": 7033 - }, - { - "epoch": 0.8457884927553658, - "flos": 14646351668760.0, - "grad_norm": 4.109035695503731, - "learning_rate": 2.4418405428384227e-07, - "loss": 0.8186, - "num_input_tokens_seen": 150099345, - "step": 7034 - }, - { - "epoch": 0.8459087356460049, - "flos": 10812907972200.0, - "grad_norm": 3.147277466873744, - "learning_rate": 2.4381119294724864e-07, - "loss": 0.9494, - "num_input_tokens_seen": 150116510, - "step": 7035 - }, - { - "epoch": 0.846028978536644, - "flos": 13360685894520.0, - "grad_norm": 3.8864771782261323, - "learning_rate": 2.434385980231004e-07, - "loss": 0.7686, - "num_input_tokens_seen": 150135070, - "step": 7036 - }, - { - "epoch": 0.8461492214272831, - "flos": 37664961956880.0, - "grad_norm": 3.8498651505671297, - "learning_rate": 2.4306626956792043e-07, - "loss": 0.8785, - "num_input_tokens_seen": 150159735, - "step": 7037 - }, - { - "epoch": 0.8462694643179222, - "flos": 13177689529440.0, - "grad_norm": 7.462403222355875, - "learning_rate": 2.4269420763819017e-07, - "loss": 0.9771, - "num_input_tokens_seen": 150177500, - "step": 7038 - }, - { - "epoch": 0.8463897072085613, - "flos": 17344741800720.0, - "grad_norm": 4.315206590571479, - "learning_rate": 2.4232241229035223e-07, - "loss": 1.0511, - "num_input_tokens_seen": 150194975, - "step": 7039 - }, - { - "epoch": 0.8465099500992004, - "flos": 49196021477760.0, - "grad_norm": 
0.840819603612869, - "learning_rate": 2.419508835808064e-07, - "loss": 0.8222, - "num_input_tokens_seen": 150251250, - "step": 7040 - }, - { - "epoch": 0.8466301929898394, - "flos": 9690149693160.0, - "grad_norm": 4.2703902348109075, - "learning_rate": 2.415796215659134e-07, - "loss": 0.8477, - "num_input_tokens_seen": 150267675, - "step": 7041 - }, - { - "epoch": 0.8467504358804786, - "flos": 13646359354800.0, - "grad_norm": 6.079240789695828, - "learning_rate": 2.412086263019939e-07, - "loss": 1.0031, - "num_input_tokens_seen": 150285420, - "step": 7042 - }, - { - "epoch": 0.8468706787711177, - "flos": 15143695600800.0, - "grad_norm": 5.272426480051555, - "learning_rate": 2.408378978453276e-07, - "loss": 1.0305, - "num_input_tokens_seen": 150305260, - "step": 7043 - }, - { - "epoch": 0.8469909216617567, - "flos": 46448132491800.0, - "grad_norm": 0.8148890730486291, - "learning_rate": 2.404674362521533e-07, - "loss": 0.8903, - "num_input_tokens_seen": 150363475, - "step": 7044 - }, - { - "epoch": 0.8471111645523959, - "flos": 13675002799920.0, - "grad_norm": 3.8951309033238304, - "learning_rate": 2.4009724157866997e-07, - "loss": 0.964, - "num_input_tokens_seen": 150380255, - "step": 7045 - }, - { - "epoch": 0.8472314074430349, - "flos": 15640610271000.0, - "grad_norm": 4.110856518007592, - "learning_rate": 2.3972731388103564e-07, - "loss": 0.9864, - "num_input_tokens_seen": 150398455, - "step": 7046 - }, - { - "epoch": 0.847351650333674, - "flos": 41422465438560.0, - "grad_norm": 0.8343691507770237, - "learning_rate": 2.393576532153687e-07, - "loss": 0.8782, - "num_input_tokens_seen": 150461960, - "step": 7047 - }, - { - "epoch": 0.8474718932243132, - "flos": 29493912669240.0, - "grad_norm": 1.029586953795266, - "learning_rate": 2.389882596377453e-07, - "loss": 0.8533, - "num_input_tokens_seen": 150515945, - "step": 7048 - }, - { - "epoch": 0.8475921361149522, - "flos": 27258402875880.0, - "grad_norm": 9.098510335204562, - "learning_rate": 2.386191332042031e-07, - "loss": 0.9899, - "num_input_tokens_seen": 150537560, - "step": 7049 - }, - { - "epoch": 0.8477123790055913, - "flos": 18055166636280.0, - "grad_norm": 4.754031829303755, - "learning_rate": 2.3825027397073794e-07, - "loss": 0.9531, - "num_input_tokens_seen": 150557755, - "step": 7050 - }, - { - "epoch": 0.8478326218962304, - "flos": 21540529501800.0, - "grad_norm": 3.6989681557808076, - "learning_rate": 2.3788168199330515e-07, - "loss": 0.898, - "num_input_tokens_seen": 150579035, - "step": 7051 - }, - { - "epoch": 0.8479528647868695, - "flos": 27281557901760.0, - "grad_norm": 3.9562943008618228, - "learning_rate": 2.3751335732782074e-07, - "loss": 0.9586, - "num_input_tokens_seen": 150600015, - "step": 7052 - }, - { - "epoch": 0.8480731076775085, - "flos": 14880226658040.0, - "grad_norm": 3.144733379809858, - "learning_rate": 2.371453000301582e-07, - "loss": 1.0227, - "num_input_tokens_seen": 150618420, - "step": 7053 - }, - { - "epoch": 0.8481933505681477, - "flos": 23244722354640.0, - "grad_norm": 4.179115669849594, - "learning_rate": 2.3677751015615222e-07, - "loss": 0.973, - "num_input_tokens_seen": 150640215, - "step": 7054 - }, - { - "epoch": 0.8483135934587868, - "flos": 14724861906600.0, - "grad_norm": 3.1267512183018886, - "learning_rate": 2.3640998776159593e-07, - "loss": 1.0756, - "num_input_tokens_seen": 150657440, - "step": 7055 - }, - { - "epoch": 0.8484338363494258, - "flos": 15380176822680.0, - "grad_norm": 3.707052602595907, - "learning_rate": 2.3604273290224253e-07, - "loss": 1.0388, - 
"num_input_tokens_seen": 150677875, - "step": 7056 - }, - { - "epoch": 0.848554079240065, - "flos": 10608964213560.0, - "grad_norm": 3.493945743761852, - "learning_rate": 2.356757456338039e-07, - "loss": 0.9661, - "num_input_tokens_seen": 150695080, - "step": 7057 - }, - { - "epoch": 0.848674322130704, - "flos": 48736887397560.0, - "grad_norm": 0.81632385568217, - "learning_rate": 2.3530902601195147e-07, - "loss": 0.8795, - "num_input_tokens_seen": 150763290, - "step": 7058 - }, - { - "epoch": 0.8487945650213431, - "flos": 13096327766520.0, - "grad_norm": 9.072379784332878, - "learning_rate": 2.34942574092317e-07, - "loss": 1.009, - "num_input_tokens_seen": 150778260, - "step": 7059 - }, - { - "epoch": 0.8489148079119821, - "flos": 16687709837280.0, - "grad_norm": 6.047612423744765, - "learning_rate": 2.3457638993049045e-07, - "loss": 0.9811, - "num_input_tokens_seen": 150795970, - "step": 7060 - }, - { - "epoch": 0.8490350508026213, - "flos": 14147229366240.0, - "grad_norm": 7.5004129931864005, - "learning_rate": 2.3421047358202252e-07, - "loss": 0.8658, - "num_input_tokens_seen": 150814540, - "step": 7061 - }, - { - "epoch": 0.8491552936932604, - "flos": 17268071256480.0, - "grad_norm": 4.0684301669609, - "learning_rate": 2.3384482510242144e-07, - "loss": 1.039, - "num_input_tokens_seen": 150832120, - "step": 7062 - }, - { - "epoch": 0.8492755365838994, - "flos": 16004211399480.0, - "grad_norm": 3.706346615437451, - "learning_rate": 2.3347944454715575e-07, - "loss": 0.995, - "num_input_tokens_seen": 150848230, - "step": 7063 - }, - { - "epoch": 0.8493957794745386, - "flos": 19208929300560.0, - "grad_norm": 3.4558735261310245, - "learning_rate": 2.331143319716542e-07, - "loss": 0.8884, - "num_input_tokens_seen": 150867480, - "step": 7064 - }, - { - "epoch": 0.8495160223651776, - "flos": 21279728114760.0, - "grad_norm": 4.763882477919426, - "learning_rate": 2.3274948743130363e-07, - "loss": 0.8739, - "num_input_tokens_seen": 150887035, - "step": 7065 - }, - { - "epoch": 0.8496362652558167, - "flos": 16402342992600.0, - "grad_norm": 3.4950398311064363, - "learning_rate": 2.3238491098145085e-07, - "loss": 1.0247, - "num_input_tokens_seen": 150906285, - "step": 7066 - }, - { - "epoch": 0.8497565081464559, - "flos": 10317526380000.0, - "grad_norm": 3.848316984035897, - "learning_rate": 2.3202060267740141e-07, - "loss": 0.9401, - "num_input_tokens_seen": 150923530, - "step": 7067 - }, - { - "epoch": 0.8498767510370949, - "flos": 15008113103640.0, - "grad_norm": 6.5652770932967925, - "learning_rate": 2.3165656257442044e-07, - "loss": 0.9952, - "num_input_tokens_seen": 150941770, - "step": 7068 - }, - { - "epoch": 0.849996993927734, - "flos": 16817895899880.0, - "grad_norm": 3.949141353524174, - "learning_rate": 2.31292790727734e-07, - "loss": 1.1322, - "num_input_tokens_seen": 150959055, - "step": 7069 - }, - { - "epoch": 0.8501172368183731, - "flos": 14593173427560.0, - "grad_norm": 13.145657803693084, - "learning_rate": 2.3092928719252392e-07, - "loss": 1.0198, - "num_input_tokens_seen": 150977175, - "step": 7070 - }, - { - "epoch": 0.8502374797090122, - "flos": 15825354345000.0, - "grad_norm": 8.297273155759612, - "learning_rate": 2.3056605202393475e-07, - "loss": 1.0044, - "num_input_tokens_seen": 150994455, - "step": 7071 - }, - { - "epoch": 0.8503577225996513, - "flos": 16822403149200.0, - "grad_norm": 10.215608913144072, - "learning_rate": 2.3020308527706888e-07, - "loss": 0.9027, - "num_input_tokens_seen": 151013590, - "step": 7072 - }, - { - "epoch": 0.8504779654902904, - "flos": 
19049026638240.0, - "grad_norm": 4.054075691062305, - "learning_rate": 2.2984038700698715e-07, - "loss": 1.1066, - "num_input_tokens_seen": 151032620, - "step": 7073 - }, - { - "epoch": 0.8505982083809295, - "flos": 18840636953400.0, - "grad_norm": 2.566731285751513, - "learning_rate": 2.2947795726871222e-07, - "loss": 1.0178, - "num_input_tokens_seen": 151053365, - "step": 7074 - }, - { - "epoch": 0.8507184512715685, - "flos": 14410238385600.0, - "grad_norm": 9.010194303087728, - "learning_rate": 2.2911579611722253e-07, - "loss": 1.0813, - "num_input_tokens_seen": 151072230, - "step": 7075 - }, - { - "epoch": 0.8508386941622077, - "flos": 13491454526760.0, - "grad_norm": 6.405823335162964, - "learning_rate": 2.2875390360745905e-07, - "loss": 1.1001, - "num_input_tokens_seen": 151091355, - "step": 7076 - }, - { - "epoch": 0.8509589370528468, - "flos": 11628616135560.0, - "grad_norm": 6.911518831618305, - "learning_rate": 2.2839227979432008e-07, - "loss": 1.0044, - "num_input_tokens_seen": 151108725, - "step": 7077 - }, - { - "epoch": 0.8510791799434858, - "flos": 12941422938480.0, - "grad_norm": 4.164827602156958, - "learning_rate": 2.2803092473266373e-07, - "loss": 1.0715, - "num_input_tokens_seen": 151125970, - "step": 7078 - }, - { - "epoch": 0.851199422834125, - "flos": 16665474658200.0, - "grad_norm": 6.877325323639833, - "learning_rate": 2.2766983847730724e-07, - "loss": 1.0736, - "num_input_tokens_seen": 151145360, - "step": 7079 - }, - { - "epoch": 0.851319665724764, - "flos": 11525387132280.0, - "grad_norm": 5.0940400023490415, - "learning_rate": 2.2730902108302663e-07, - "loss": 0.89, - "num_input_tokens_seen": 151161995, - "step": 7080 - }, - { - "epoch": 0.8514399086154031, - "flos": 13465785252960.0, - "grad_norm": 11.46885909774404, - "learning_rate": 2.269484726045583e-07, - "loss": 0.9172, - "num_input_tokens_seen": 151180630, - "step": 7081 - }, - { - "epoch": 0.8515601515060423, - "flos": 17477288803440.0, - "grad_norm": 6.045683514215141, - "learning_rate": 2.2658819309659672e-07, - "loss": 1.0161, - "num_input_tokens_seen": 151200550, - "step": 7082 - }, - { - "epoch": 0.8516803943966813, - "flos": 13852970669160.0, - "grad_norm": 3.3670212423607886, - "learning_rate": 2.2622818261379706e-07, - "loss": 1.0682, - "num_input_tokens_seen": 151217290, - "step": 7083 - }, - { - "epoch": 0.8518006372873204, - "flos": 14383066695360.0, - "grad_norm": 7.668388864335077, - "learning_rate": 2.2586844121077142e-07, - "loss": 0.98, - "num_input_tokens_seen": 151235520, - "step": 7084 - }, - { - "epoch": 0.8519208801779595, - "flos": 17162511974640.0, - "grad_norm": 4.128296892859779, - "learning_rate": 2.2550896894209215e-07, - "loss": 0.9433, - "num_input_tokens_seen": 151254755, - "step": 7085 - }, - { - "epoch": 0.8520411230685986, - "flos": 45126673580880.0, - "grad_norm": 0.6925889153670014, - "learning_rate": 2.2514976586229184e-07, - "loss": 0.8192, - "num_input_tokens_seen": 151322420, - "step": 7086 - }, - { - "epoch": 0.8521613659592376, - "flos": 47139112350240.0, - "grad_norm": 0.8789596880456058, - "learning_rate": 2.247908320258609e-07, - "loss": 0.8547, - "num_input_tokens_seen": 151382230, - "step": 7087 - }, - { - "epoch": 0.8522816088498768, - "flos": 16428901451640.0, - "grad_norm": 3.649399559067715, - "learning_rate": 2.2443216748724914e-07, - "loss": 1.0052, - "num_input_tokens_seen": 151402660, - "step": 7088 - }, - { - "epoch": 0.8524018517405159, - "flos": 22642309725720.0, - "grad_norm": 2.828919019988956, - "learning_rate": 2.2407377230086588e-07, - 
"loss": 0.9695, - "num_input_tokens_seen": 151424735, - "step": 7089 - }, - { - "epoch": 0.8525220946311549, - "flos": 13249914147480.0, - "grad_norm": 8.069437989585982, - "learning_rate": 2.23715646521079e-07, - "loss": 1.0584, - "num_input_tokens_seen": 151441975, - "step": 7090 - }, - { - "epoch": 0.852642337521794, - "flos": 15480125039040.0, - "grad_norm": 6.086590282605734, - "learning_rate": 2.2335779020221724e-07, - "loss": 1.0624, - "num_input_tokens_seen": 151458315, - "step": 7091 - }, - { - "epoch": 0.8527625804124331, - "flos": 49440229412760.0, - "grad_norm": 0.9424711986461748, - "learning_rate": 2.2300020339856497e-07, - "loss": 0.852, - "num_input_tokens_seen": 151520720, - "step": 7092 - }, - { - "epoch": 0.8528828233030722, - "flos": 19208040115320.0, - "grad_norm": 11.714376788167446, - "learning_rate": 2.2264288616436966e-07, - "loss": 1.0055, - "num_input_tokens_seen": 151540695, - "step": 7093 - }, - { - "epoch": 0.8530030661937112, - "flos": 12385718961600.0, - "grad_norm": 11.2530198176796, - "learning_rate": 2.222858385538351e-07, - "loss": 0.9654, - "num_input_tokens_seen": 151557215, - "step": 7094 - }, - { - "epoch": 0.8531233090843504, - "flos": 15744667136400.0, - "grad_norm": 7.4862025185947285, - "learning_rate": 2.2192906062112527e-07, - "loss": 0.9031, - "num_input_tokens_seen": 151576810, - "step": 7095 - }, - { - "epoch": 0.8532435519749895, - "flos": 26866587564120.0, - "grad_norm": 100.05160289664279, - "learning_rate": 2.2157255242036377e-07, - "loss": 0.9295, - "num_input_tokens_seen": 151600195, - "step": 7096 - }, - { - "epoch": 0.8533637948656285, - "flos": 15197057811360.0, - "grad_norm": 3.1155940598584397, - "learning_rate": 2.2121631400563135e-07, - "loss": 0.9613, - "num_input_tokens_seen": 151619745, - "step": 7097 - }, - { - "epoch": 0.8534840377562677, - "flos": 38159694108240.0, - "grad_norm": 0.8328460500518722, - "learning_rate": 2.208603454309701e-07, - "loss": 0.8333, - "num_input_tokens_seen": 151677555, - "step": 7098 - }, - { - "epoch": 0.8536042806469067, - "flos": 14777304270360.0, - "grad_norm": 4.311891374624655, - "learning_rate": 2.2050464675037994e-07, - "loss": 0.9235, - "num_input_tokens_seen": 151695900, - "step": 7099 - }, - { - "epoch": 0.8537245235375458, - "flos": 17556565580280.0, - "grad_norm": 6.639898570101823, - "learning_rate": 2.2014921801782016e-07, - "loss": 0.9454, - "num_input_tokens_seen": 151715110, - "step": 7100 - }, - { - "epoch": 0.853844766428185, - "flos": 17342810122440.0, - "grad_norm": 14.497322624583134, - "learning_rate": 2.1979405928720872e-07, - "loss": 0.9707, - "num_input_tokens_seen": 151734485, - "step": 7101 - }, - { - "epoch": 0.853965009318824, - "flos": 14878448287560.0, - "grad_norm": 5.357304387155061, - "learning_rate": 2.1943917061242257e-07, - "loss": 1.0171, - "num_input_tokens_seen": 151754060, - "step": 7102 - }, - { - "epoch": 0.8540852522094631, - "flos": 17211213628080.0, - "grad_norm": 3.2144672826922696, - "learning_rate": 2.1908455204729903e-07, - "loss": 0.8839, - "num_input_tokens_seen": 151772930, - "step": 7103 - }, - { - "epoch": 0.8542054951001022, - "flos": 17845182550320.0, - "grad_norm": 17.7687124557179, - "learning_rate": 2.1873020364563265e-07, - "loss": 0.9996, - "num_input_tokens_seen": 151791715, - "step": 7104 - }, - { - "epoch": 0.8543257379907413, - "flos": 17294384423040.0, - "grad_norm": 4.3889939071602555, - "learning_rate": 2.183761254611789e-07, - "loss": 0.981, - "num_input_tokens_seen": 151811760, - "step": 7105 - }, - { - "epoch": 
0.8544459808813804, - "flos": 39551139343320.0, - "grad_norm": 3.6058256854993065, - "learning_rate": 2.1802231754764987e-07, - "loss": 0.9141, - "num_input_tokens_seen": 151836920, - "step": 7106 - }, - { - "epoch": 0.8545662237720195, - "flos": 18343476990720.0, - "grad_norm": 4.413122134346188, - "learning_rate": 2.17668779958718e-07, - "loss": 0.9915, - "num_input_tokens_seen": 151859220, - "step": 7107 - }, - { - "epoch": 0.8546864666626586, - "flos": 7801825997520.0, - "grad_norm": 4.606412705093146, - "learning_rate": 2.1731551274801553e-07, - "loss": 1.0129, - "num_input_tokens_seen": 151875380, - "step": 7108 - }, - { - "epoch": 0.8548067095532976, - "flos": 18159346147920.0, - "grad_norm": 6.013733722583068, - "learning_rate": 2.169625159691324e-07, - "loss": 0.8268, - "num_input_tokens_seen": 151894975, - "step": 7109 - }, - { - "epoch": 0.8549269524439368, - "flos": 17582786762160.0, - "grad_norm": 5.018846849621053, - "learning_rate": 2.1660978967561784e-07, - "loss": 0.9594, - "num_input_tokens_seen": 151914030, - "step": 7110 - }, - { - "epoch": 0.8550471953345758, - "flos": 14066388849840.0, - "grad_norm": 8.749265803460847, - "learning_rate": 2.1625733392098035e-07, - "loss": 1.0169, - "num_input_tokens_seen": 151929360, - "step": 7111 - }, - { - "epoch": 0.8551674382252149, - "flos": 16219131996600.0, - "grad_norm": 13.311744632692427, - "learning_rate": 2.159051487586867e-07, - "loss": 1.024, - "num_input_tokens_seen": 151949210, - "step": 7112 - }, - { - "epoch": 0.8552876811158541, - "flos": 14645799760680.0, - "grad_norm": 6.561662311676857, - "learning_rate": 2.155532342421642e-07, - "loss": 0.9524, - "num_input_tokens_seen": 151966930, - "step": 7113 - }, - { - "epoch": 0.8554079240064931, - "flos": 16428932113200.0, - "grad_norm": 3.9437845122796547, - "learning_rate": 2.1520159042479636e-07, - "loss": 1.0133, - "num_input_tokens_seen": 151984940, - "step": 7114 - }, - { - "epoch": 0.8555281668971322, - "flos": 15721634756760.0, - "grad_norm": 3.7804210163972822, - "learning_rate": 2.148502173599287e-07, - "loss": 0.9386, - "num_input_tokens_seen": 152002800, - "step": 7115 - }, - { - "epoch": 0.8556484097877713, - "flos": 22197530803680.0, - "grad_norm": 3.2045895089540624, - "learning_rate": 2.1449911510086372e-07, - "loss": 0.8805, - "num_input_tokens_seen": 152021990, - "step": 7116 - }, - { - "epoch": 0.8557686526784104, - "flos": 17294384423040.0, - "grad_norm": 3.4026691618385057, - "learning_rate": 2.141482837008628e-07, - "loss": 0.9979, - "num_input_tokens_seen": 152042250, - "step": 7117 - }, - { - "epoch": 0.8558888955690495, - "flos": 12548350502760.0, - "grad_norm": 4.752118090154477, - "learning_rate": 2.1379772321314826e-07, - "loss": 0.9436, - "num_input_tokens_seen": 152060015, - "step": 7118 - }, - { - "epoch": 0.8560091384596886, - "flos": 13596829839240.0, - "grad_norm": 3.3951950565535847, - "learning_rate": 2.1344743369089802e-07, - "loss": 1.0418, - "num_input_tokens_seen": 152075515, - "step": 7119 - }, - { - "epoch": 0.8561293813503277, - "flos": 17005644806760.0, - "grad_norm": 3.5523448560690767, - "learning_rate": 2.130974151872522e-07, - "loss": 1.0521, - "num_input_tokens_seen": 152095570, - "step": 7120 - }, - { - "epoch": 0.8562496242409667, - "flos": 16009914449640.0, - "grad_norm": 3.3916249009418724, - "learning_rate": 2.1274766775530773e-07, - "loss": 1.0068, - "num_input_tokens_seen": 152115155, - "step": 7121 - }, - { - "epoch": 0.8563698671316058, - "flos": 10391222752920.0, - "grad_norm": 5.305119312896956, - 
"learning_rate": 2.1239819144812077e-07, - "loss": 1.0257, - "num_input_tokens_seen": 152129335, - "step": 7122 - }, - { - "epoch": 0.856490110022245, - "flos": 27967877203080.0, - "grad_norm": 2.5485503882145064, - "learning_rate": 2.1204898631870716e-07, - "loss": 0.9257, - "num_input_tokens_seen": 152153945, - "step": 7123 - }, - { - "epoch": 0.856610352912884, - "flos": 20702892774960.0, - "grad_norm": 3.4179994418264865, - "learning_rate": 2.1170005242004006e-07, - "loss": 0.9805, - "num_input_tokens_seen": 152175015, - "step": 7124 - }, - { - "epoch": 0.8567305958035231, - "flos": 16979392963320.0, - "grad_norm": 4.432935274270765, - "learning_rate": 2.1135138980505384e-07, - "loss": 0.9959, - "num_input_tokens_seen": 152195405, - "step": 7125 - }, - { - "epoch": 0.8568508386941622, - "flos": 15773310581520.0, - "grad_norm": 5.316072227865679, - "learning_rate": 2.110029985266395e-07, - "loss": 0.9469, - "num_input_tokens_seen": 152214830, - "step": 7126 - }, - { - "epoch": 0.8569710815848013, - "flos": 12256606053600.0, - "grad_norm": 2.6452177838551285, - "learning_rate": 2.1065487863764787e-07, - "loss": 0.9686, - "num_input_tokens_seen": 152232895, - "step": 7127 - }, - { - "epoch": 0.8570913244754403, - "flos": 16874416251120.0, - "grad_norm": 2.742905221245236, - "learning_rate": 2.1030703019088846e-07, - "loss": 1.084, - "num_input_tokens_seen": 152253245, - "step": 7128 - }, - { - "epoch": 0.8572115673660795, - "flos": 14227334005200.0, - "grad_norm": 3.375571308524758, - "learning_rate": 2.099594532391291e-07, - "loss": 0.9275, - "num_input_tokens_seen": 152271650, - "step": 7129 - }, - { - "epoch": 0.8573318102567186, - "flos": 19337919562320.0, - "grad_norm": 4.051305047272986, - "learning_rate": 2.0961214783509806e-07, - "loss": 1.018, - "num_input_tokens_seen": 152294250, - "step": 7130 - }, - { - "epoch": 0.8574520531473576, - "flos": 17739960545640.0, - "grad_norm": 6.698973323674949, - "learning_rate": 2.0926511403148051e-07, - "loss": 0.9764, - "num_input_tokens_seen": 152312935, - "step": 7131 - }, - { - "epoch": 0.8575722960379968, - "flos": 13309071392880.0, - "grad_norm": 2.819879472199465, - "learning_rate": 2.0891835188092143e-07, - "loss": 0.9701, - "num_input_tokens_seen": 152329655, - "step": 7132 - }, - { - "epoch": 0.8576925389286358, - "flos": 15773463889320.0, - "grad_norm": 3.042260691149764, - "learning_rate": 2.0857186143602434e-07, - "loss": 1.0401, - "num_input_tokens_seen": 152348020, - "step": 7133 - }, - { - "epoch": 0.8578127818192749, - "flos": 16272371560920.0, - "grad_norm": 2.870400938147027, - "learning_rate": 2.0822564274935094e-07, - "loss": 0.903, - "num_input_tokens_seen": 152367165, - "step": 7134 - }, - { - "epoch": 0.8579330247099141, - "flos": 24846544727880.0, - "grad_norm": 8.805569568172286, - "learning_rate": 2.078796958734239e-07, - "loss": 0.894, - "num_input_tokens_seen": 152389605, - "step": 7135 - }, - { - "epoch": 0.8580532676005531, - "flos": 14016552718680.0, - "grad_norm": 5.5069411433967534, - "learning_rate": 2.0753402086072124e-07, - "loss": 0.9654, - "num_input_tokens_seen": 152407955, - "step": 7136 - }, - { - "epoch": 0.8581735104911922, - "flos": 16159729458720.0, - "grad_norm": 6.2561154773132595, - "learning_rate": 2.071886177636828e-07, - "loss": 0.9846, - "num_input_tokens_seen": 152424460, - "step": 7137 - }, - { - "epoch": 0.8582937533818313, - "flos": 16455398587560.0, - "grad_norm": 3.8696455108575134, - "learning_rate": 2.0684348663470575e-07, - "loss": 1.0509, - "num_input_tokens_seen": 152444360, - 
"step": 7138 - }, - { - "epoch": 0.8584139962724704, - "flos": 13831624675320.0, - "grad_norm": 6.3784134645846935, - "learning_rate": 2.0649862752614555e-07, - "loss": 0.8466, - "num_input_tokens_seen": 152462790, - "step": 7139 - }, - { - "epoch": 0.8585342391631094, - "flos": 51048981298560.0, - "grad_norm": 0.7845994863672299, - "learning_rate": 2.0615404049031838e-07, - "loss": 0.8232, - "num_input_tokens_seen": 152519480, - "step": 7140 - }, - { - "epoch": 0.8586544820537486, - "flos": 7591198018800.0, - "grad_norm": 6.053415630022022, - "learning_rate": 2.0580972557949616e-07, - "loss": 0.9995, - "num_input_tokens_seen": 152534290, - "step": 7141 - }, - { - "epoch": 0.8587747249443877, - "flos": 46401423839760.0, - "grad_norm": 0.7995434009373428, - "learning_rate": 2.054656828459125e-07, - "loss": 0.7783, - "num_input_tokens_seen": 152598120, - "step": 7142 - }, - { - "epoch": 0.8588949678350267, - "flos": 19104136557720.0, - "grad_norm": 4.840084140771324, - "learning_rate": 2.051219123417578e-07, - "loss": 0.9919, - "num_input_tokens_seen": 152617900, - "step": 7143 - }, - { - "epoch": 0.8590152107256659, - "flos": 18579161012040.0, - "grad_norm": 4.529900290631837, - "learning_rate": 2.0477841411918196e-07, - "loss": 0.8156, - "num_input_tokens_seen": 152637145, - "step": 7144 - }, - { - "epoch": 0.859135453616305, - "flos": 18605504840160.0, - "grad_norm": 3.5332058241766915, - "learning_rate": 2.0443518823029326e-07, - "loss": 0.9558, - "num_input_tokens_seen": 152657405, - "step": 7145 - }, - { - "epoch": 0.859255696506944, - "flos": 9139320904320.0, - "grad_norm": 4.393163849760198, - "learning_rate": 2.0409223472715854e-07, - "loss": 0.9811, - "num_input_tokens_seen": 152674270, - "step": 7146 - }, - { - "epoch": 0.8593759393975832, - "flos": 13096297104960.0, - "grad_norm": 4.995516101648175, - "learning_rate": 2.0374955366180434e-07, - "loss": 0.965, - "num_input_tokens_seen": 152691630, - "step": 7147 - }, - { - "epoch": 0.8594961822882222, - "flos": 15773341243080.0, - "grad_norm": 5.52871732325668, - "learning_rate": 2.034071450862147e-07, - "loss": 0.9463, - "num_input_tokens_seen": 152708820, - "step": 7148 - }, - { - "epoch": 0.8596164251788613, - "flos": 16560007361040.0, - "grad_norm": 3.41829867269302, - "learning_rate": 2.030650090523327e-07, - "loss": 0.9913, - "num_input_tokens_seen": 152727730, - "step": 7149 - }, - { - "epoch": 0.8597366680695004, - "flos": 22564688673120.0, - "grad_norm": 10.831079649644957, - "learning_rate": 2.0272314561205995e-07, - "loss": 0.8317, - "num_input_tokens_seen": 152747845, - "step": 7150 - }, - { - "epoch": 0.8598569109601395, - "flos": 15117566403600.0, - "grad_norm": 2.914891674718256, - "learning_rate": 2.023815548172567e-07, - "loss": 0.9574, - "num_input_tokens_seen": 152767635, - "step": 7151 - }, - { - "epoch": 0.8599771538507786, - "flos": 18107425030680.0, - "grad_norm": 6.502439481284211, - "learning_rate": 2.0204023671974267e-07, - "loss": 0.8956, - "num_input_tokens_seen": 152786740, - "step": 7152 - }, - { - "epoch": 0.8600973967414177, - "flos": 11837251112880.0, - "grad_norm": 4.070079364799488, - "learning_rate": 2.0169919137129532e-07, - "loss": 1.0361, - "num_input_tokens_seen": 152804900, - "step": 7153 - }, - { - "epoch": 0.8602176396320568, - "flos": 17949270077280.0, - "grad_norm": 11.247396210180783, - "learning_rate": 2.013584188236508e-07, - "loss": 0.9212, - "num_input_tokens_seen": 152822525, - "step": 7154 - }, - { - "epoch": 0.8603378825226958, - "flos": 14488380684720.0, - "grad_norm": 
3.482428721337213, - "learning_rate": 2.0101791912850396e-07, - "loss": 1.0166, - "num_input_tokens_seen": 152841785, - "step": 7155 - }, - { - "epoch": 0.8604581254133349, - "flos": 24921712855680.0, - "grad_norm": 8.130667250537611, - "learning_rate": 2.006776923375082e-07, - "loss": 0.862, - "num_input_tokens_seen": 152863160, - "step": 7156 - }, - { - "epoch": 0.860578368303974, - "flos": 16057542948480.0, - "grad_norm": 4.210026063815735, - "learning_rate": 2.003377385022764e-07, - "loss": 0.9419, - "num_input_tokens_seen": 152881705, - "step": 7157 - }, - { - "epoch": 0.8606986111946131, - "flos": 15143879570160.0, - "grad_norm": 4.58702830870205, - "learning_rate": 1.9999805767437826e-07, - "loss": 0.9955, - "num_input_tokens_seen": 152900315, - "step": 7158 - }, - { - "epoch": 0.8608188540852522, - "flos": 20572032158040.0, - "grad_norm": 3.642882339005728, - "learning_rate": 1.9965864990534386e-07, - "loss": 0.9435, - "num_input_tokens_seen": 152920560, - "step": 7159 - }, - { - "epoch": 0.8609390969758913, - "flos": 21174966033480.0, - "grad_norm": 2.997514171678196, - "learning_rate": 1.9931951524666092e-07, - "loss": 1.0029, - "num_input_tokens_seen": 152941370, - "step": 7160 - }, - { - "epoch": 0.8610593398665304, - "flos": 15089720159040.0, - "grad_norm": 3.0505350675419587, - "learning_rate": 1.9898065374977534e-07, - "loss": 1.0344, - "num_input_tokens_seen": 152961295, - "step": 7161 - }, - { - "epoch": 0.8611795827571694, - "flos": 10476631841760.0, - "grad_norm": 5.859758125495167, - "learning_rate": 1.9864206546609342e-07, - "loss": 0.9566, - "num_input_tokens_seen": 152979855, - "step": 7162 - }, - { - "epoch": 0.8612998256478086, - "flos": 17241666105240.0, - "grad_norm": 3.738180862721138, - "learning_rate": 1.983037504469771e-07, - "loss": 1.0688, - "num_input_tokens_seen": 152998285, - "step": 7163 - }, - { - "epoch": 0.8614200685384477, - "flos": 15092541022560.0, - "grad_norm": 4.14570737612262, - "learning_rate": 1.9796570874374984e-07, - "loss": 0.8955, - "num_input_tokens_seen": 153018110, - "step": 7164 - }, - { - "epoch": 0.8615403114290867, - "flos": 14197249466760.0, - "grad_norm": 13.173109256933884, - "learning_rate": 1.976279404076917e-07, - "loss": 1.0017, - "num_input_tokens_seen": 153037230, - "step": 7165 - }, - { - "epoch": 0.8616605543197259, - "flos": 21227592366600.0, - "grad_norm": 2.6648300720776392, - "learning_rate": 1.9729044549004193e-07, - "loss": 0.9809, - "num_input_tokens_seen": 153058335, - "step": 7166 - }, - { - "epoch": 0.8617807972103649, - "flos": 20596474969440.0, - "grad_norm": 3.8486662647620005, - "learning_rate": 1.9695322404199822e-07, - "loss": 0.9329, - "num_input_tokens_seen": 153080100, - "step": 7167 - }, - { - "epoch": 0.861901040101004, - "flos": 19707254402520.0, - "grad_norm": 5.444577051135108, - "learning_rate": 1.9661627611471654e-07, - "loss": 1.0519, - "num_input_tokens_seen": 153099615, - "step": 7168 - }, - { - "epoch": 0.8620212829916432, - "flos": 35571652009560.0, - "grad_norm": 21.661160439413933, - "learning_rate": 1.9627960175931246e-07, - "loss": 0.9254, - "num_input_tokens_seen": 153124035, - "step": 7169 - }, - { - "epoch": 0.8621415258822822, - "flos": 15010014120360.0, - "grad_norm": 7.390941395348271, - "learning_rate": 1.9594320102685847e-07, - "loss": 0.9667, - "num_input_tokens_seen": 153143025, - "step": 7170 - }, - { - "epoch": 0.8622617687729213, - "flos": 15405968742720.0, - "grad_norm": 8.435932833335901, - "learning_rate": 1.956070739683864e-07, - "loss": 0.8532, - 
"num_input_tokens_seen": 153162080, - "step": 7171 - }, - { - "epoch": 0.8623820116635604, - "flos": 18684383016720.0, - "grad_norm": 2.7012960608146246, - "learning_rate": 1.9527122063488678e-07, - "loss": 0.9604, - "num_input_tokens_seen": 153182915, - "step": 7172 - }, - { - "epoch": 0.8625022545541995, - "flos": 13939023650760.0, - "grad_norm": 2.5503055426857126, - "learning_rate": 1.9493564107730755e-07, - "loss": 1.0312, - "num_input_tokens_seen": 153202635, - "step": 7173 - }, - { - "epoch": 0.8626224974448385, - "flos": 15563663772720.0, - "grad_norm": 3.713993011196876, - "learning_rate": 1.9460033534655684e-07, - "loss": 0.8442, - "num_input_tokens_seen": 153221715, - "step": 7174 - }, - { - "epoch": 0.8627427403354777, - "flos": 16586197881360.0, - "grad_norm": 2.8139606497642484, - "learning_rate": 1.9426530349349978e-07, - "loss": 1.0593, - "num_input_tokens_seen": 153241885, - "step": 7175 - }, - { - "epoch": 0.8628629832261168, - "flos": 11578258757880.0, - "grad_norm": 3.586515758156975, - "learning_rate": 1.9393054556896038e-07, - "loss": 0.8765, - "num_input_tokens_seen": 153259305, - "step": 7176 - }, - { - "epoch": 0.8629832261167558, - "flos": 20015347011240.0, - "grad_norm": 5.297825910796194, - "learning_rate": 1.9359606162372133e-07, - "loss": 0.9123, - "num_input_tokens_seen": 153280630, - "step": 7177 - }, - { - "epoch": 0.863103469007395, - "flos": 14357489406240.0, - "grad_norm": 4.226732627843331, - "learning_rate": 1.9326185170852293e-07, - "loss": 0.9354, - "num_input_tokens_seen": 153299315, - "step": 7178 - }, - { - "epoch": 0.863223711898034, - "flos": 17425122393720.0, - "grad_norm": 4.9912274650907635, - "learning_rate": 1.9292791587406598e-07, - "loss": 0.9499, - "num_input_tokens_seen": 153317895, - "step": 7179 - }, - { - "epoch": 0.8633439547886731, - "flos": 12521393443440.0, - "grad_norm": 6.40809448484944, - "learning_rate": 1.9259425417100661e-07, - "loss": 1.09, - "num_input_tokens_seen": 153333730, - "step": 7180 - }, - { - "epoch": 0.8634641976793123, - "flos": 9085774724400.0, - "grad_norm": 6.79157339706814, - "learning_rate": 1.9226086664996234e-07, - "loss": 0.9683, - "num_input_tokens_seen": 153351695, - "step": 7181 - }, - { - "epoch": 0.8635844405699513, - "flos": 16979944871400.0, - "grad_norm": 6.829542767927097, - "learning_rate": 1.9192775336150712e-07, - "loss": 0.9674, - "num_input_tokens_seen": 153371715, - "step": 7182 - }, - { - "epoch": 0.8637046834605904, - "flos": 43267397478720.0, - "grad_norm": 0.7957862447285667, - "learning_rate": 1.915949143561739e-07, - "loss": 0.8033, - "num_input_tokens_seen": 153426110, - "step": 7183 - }, - { - "epoch": 0.8638249263512295, - "flos": 14593755997200.0, - "grad_norm": 2.9263074577390595, - "learning_rate": 1.9126234968445498e-07, - "loss": 0.9894, - "num_input_tokens_seen": 153445520, - "step": 7184 - }, - { - "epoch": 0.8639451692418686, - "flos": 18946870789560.0, - "grad_norm": 3.1243293042474565, - "learning_rate": 1.9093005939679884e-07, - "loss": 0.8984, - "num_input_tokens_seen": 153467195, - "step": 7185 - }, - { - "epoch": 0.8640654121325076, - "flos": 10686799897080.0, - "grad_norm": 4.293256199856634, - "learning_rate": 1.9059804354361452e-07, - "loss": 0.9894, - "num_input_tokens_seen": 153484690, - "step": 7186 - }, - { - "epoch": 0.8641856550231467, - "flos": 22722230395320.0, - "grad_norm": 5.600006525853305, - "learning_rate": 1.902663021752684e-07, - "loss": 0.9353, - "num_input_tokens_seen": 153505840, - "step": 7187 - }, - { - "epoch": 0.8643058979137859, - "flos": 
10581945831120.0, - "grad_norm": 4.114638058584183, - "learning_rate": 1.8993483534208556e-07, - "loss": 1.045, - "num_input_tokens_seen": 153524470, - "step": 7188 - }, - { - "epoch": 0.8644261408044249, - "flos": 9244604232120.0, - "grad_norm": 5.142101319343441, - "learning_rate": 1.8960364309434884e-07, - "loss": 0.9668, - "num_input_tokens_seen": 153541685, - "step": 7189 - }, - { - "epoch": 0.864546383695064, - "flos": 14803770744720.0, - "grad_norm": 3.862387404297563, - "learning_rate": 1.8927272548229967e-07, - "loss": 1.0092, - "num_input_tokens_seen": 153561095, - "step": 7190 - }, - { - "epoch": 0.8646666265857031, - "flos": 15144094201080.0, - "grad_norm": 15.133647873980443, - "learning_rate": 1.8894208255613876e-07, - "loss": 1.0556, - "num_input_tokens_seen": 153580130, - "step": 7191 - }, - { - "epoch": 0.8647868694763422, - "flos": 14012413408080.0, - "grad_norm": 4.352973640696847, - "learning_rate": 1.8861171436602397e-07, - "loss": 0.9899, - "num_input_tokens_seen": 153596965, - "step": 7192 - }, - { - "epoch": 0.8649071123669813, - "flos": 18631204775520.0, - "grad_norm": 4.75509390496591, - "learning_rate": 1.882816209620719e-07, - "loss": 1.0288, - "num_input_tokens_seen": 153613395, - "step": 7193 - }, - { - "epoch": 0.8650273552576204, - "flos": 14409809123760.0, - "grad_norm": 7.203765509669964, - "learning_rate": 1.8795180239435738e-07, - "loss": 0.9826, - "num_input_tokens_seen": 153631970, - "step": 7194 - }, - { - "epoch": 0.8651475981482595, - "flos": 17031651357720.0, - "grad_norm": 4.1742286472106835, - "learning_rate": 1.8762225871291348e-07, - "loss": 0.9712, - "num_input_tokens_seen": 153647565, - "step": 7195 - }, - { - "epoch": 0.8652678410388985, - "flos": 15402197370840.0, - "grad_norm": 4.408636650702791, - "learning_rate": 1.8729298996773201e-07, - "loss": 1.0373, - "num_input_tokens_seen": 153666035, - "step": 7196 - }, - { - "epoch": 0.8653880839295377, - "flos": 46700220447720.0, - "grad_norm": 0.8618998803191429, - "learning_rate": 1.8696399620876301e-07, - "loss": 0.8618, - "num_input_tokens_seen": 153722785, - "step": 7197 - }, - { - "epoch": 0.8655083268201768, - "flos": 12574541023080.0, - "grad_norm": 4.0730439903605395, - "learning_rate": 1.866352774859141e-07, - "loss": 1.0148, - "num_input_tokens_seen": 153737730, - "step": 7198 - }, - { - "epoch": 0.8656285697108158, - "flos": 14698027493520.0, - "grad_norm": 5.501115165710154, - "learning_rate": 1.8630683384905188e-07, - "loss": 0.9074, - "num_input_tokens_seen": 153756780, - "step": 7199 - }, - { - "epoch": 0.865748812601455, - "flos": 13223692965600.0, - "grad_norm": 5.319677553085227, - "learning_rate": 1.8597866534800045e-07, - "loss": 1.1186, - "num_input_tokens_seen": 153771615, - "step": 7200 - }, - { - "epoch": 0.865869055492094, - "flos": 50595070779720.0, - "grad_norm": 7.829842080269124, - "learning_rate": 1.8565077203254398e-07, - "loss": 0.9706, - "num_input_tokens_seen": 153796795, - "step": 7201 - }, - { - "epoch": 0.8659892983827331, - "flos": 12312022588680.0, - "grad_norm": 7.023527388798393, - "learning_rate": 1.8532315395242203e-07, - "loss": 0.9397, - "num_input_tokens_seen": 153812965, - "step": 7202 - }, - { - "epoch": 0.8661095412733723, - "flos": 12679517735280.0, - "grad_norm": 7.903015922737903, - "learning_rate": 1.849958111573353e-07, - "loss": 0.94, - "num_input_tokens_seen": 153831290, - "step": 7203 - }, - { - "epoch": 0.8662297841640113, - "flos": 12915998957160.0, - "grad_norm": 7.010922493524307, - "learning_rate": 1.8466874369694074e-07, - "loss": 
0.8672, - "num_input_tokens_seen": 153848705, - "step": 7204 - }, - { - "epoch": 0.8663500270546504, - "flos": 11577645526680.0, - "grad_norm": 8.62129419298586, - "learning_rate": 1.843419516208542e-07, - "loss": 0.9284, - "num_input_tokens_seen": 153865350, - "step": 7205 - }, - { - "epoch": 0.8664702699452895, - "flos": 12678843180960.0, - "grad_norm": 3.556048256459761, - "learning_rate": 1.8401543497865047e-07, - "loss": 1.0046, - "num_input_tokens_seen": 153883070, - "step": 7206 - }, - { - "epoch": 0.8665905128359286, - "flos": 21907963325280.0, - "grad_norm": 5.29272063965303, - "learning_rate": 1.836891938198608e-07, - "loss": 0.8622, - "num_input_tokens_seen": 153903215, - "step": 7207 - }, - { - "epoch": 0.8667107557265676, - "flos": 13225900597920.0, - "grad_norm": 3.8940346113252793, - "learning_rate": 1.8336322819397677e-07, - "loss": 0.9283, - "num_input_tokens_seen": 153920470, - "step": 7208 - }, - { - "epoch": 0.8668309986172068, - "flos": 14252267401560.0, - "grad_norm": 6.380625617466023, - "learning_rate": 1.8303753815044654e-07, - "loss": 0.8375, - "num_input_tokens_seen": 153939495, - "step": 7209 - }, - { - "epoch": 0.8669512415078459, - "flos": 15352698516840.0, - "grad_norm": 6.247326698784661, - "learning_rate": 1.827121237386773e-07, - "loss": 0.9231, - "num_input_tokens_seen": 153956660, - "step": 7210 - }, - { - "epoch": 0.8670714843984849, - "flos": 12540562466520.0, - "grad_norm": 12.719096710953107, - "learning_rate": 1.8238698500803374e-07, - "loss": 0.9799, - "num_input_tokens_seen": 153969145, - "step": 7211 - }, - { - "epoch": 0.8671917272891241, - "flos": 43453055851440.0, - "grad_norm": 0.8238636716406166, - "learning_rate": 1.820621220078391e-07, - "loss": 0.8487, - "num_input_tokens_seen": 154032775, - "step": 7212 - }, - { - "epoch": 0.8673119701797631, - "flos": 14517300083880.0, - "grad_norm": 5.964380392427714, - "learning_rate": 1.8173753478737553e-07, - "loss": 0.8957, - "num_input_tokens_seen": 154052930, - "step": 7213 - }, - { - "epoch": 0.8674322130704022, - "flos": 13938901004520.0, - "grad_norm": 11.036631538552578, - "learning_rate": 1.8141322339588205e-07, - "loss": 1.0108, - "num_input_tokens_seen": 154069990, - "step": 7214 - }, - { - "epoch": 0.8675524559610414, - "flos": 18521352875280.0, - "grad_norm": 15.089388704956136, - "learning_rate": 1.810891878825569e-07, - "loss": 0.9267, - "num_input_tokens_seen": 154089685, - "step": 7215 - }, - { - "epoch": 0.8676726988516804, - "flos": 10634326871760.0, - "grad_norm": 3.9263635004319726, - "learning_rate": 1.8076542829655561e-07, - "loss": 0.9317, - "num_input_tokens_seen": 154108210, - "step": 7216 - }, - { - "epoch": 0.8677929417423195, - "flos": 11524283316120.0, - "grad_norm": 9.714069380074369, - "learning_rate": 1.8044194468699203e-07, - "loss": 1.0314, - "num_input_tokens_seen": 154125240, - "step": 7217 - }, - { - "epoch": 0.8679131846329585, - "flos": 13361544418200.0, - "grad_norm": 6.7165182975997455, - "learning_rate": 1.8011873710293912e-07, - "loss": 0.9544, - "num_input_tokens_seen": 154143465, - "step": 7218 - }, - { - "epoch": 0.8680334275235977, - "flos": 23981153741160.0, - "grad_norm": 6.601593125853516, - "learning_rate": 1.7979580559342677e-07, - "loss": 0.913, - "num_input_tokens_seen": 154163915, - "step": 7219 - }, - { - "epoch": 0.8681536704142367, - "flos": 17556381610920.0, - "grad_norm": 6.741768798557477, - "learning_rate": 1.7947315020744358e-07, - "loss": 0.8929, - "num_input_tokens_seen": 154184730, - "step": 7220 - }, - { - "epoch": 
0.8682739133048758, - "flos": 14200070330280.0, - "grad_norm": 11.63154991784031, - "learning_rate": 1.7915077099393594e-07, - "loss": 1.0204, - "num_input_tokens_seen": 154201050, - "step": 7221 - }, - { - "epoch": 0.868394156195515, - "flos": 11788028212920.0, - "grad_norm": 9.0512431771419, - "learning_rate": 1.788286680018083e-07, - "loss": 0.962, - "num_input_tokens_seen": 154219480, - "step": 7222 - }, - { - "epoch": 0.868514399086154, - "flos": 19942570485120.0, - "grad_norm": 3.680772884518248, - "learning_rate": 1.7850684127992443e-07, - "loss": 0.9504, - "num_input_tokens_seen": 154238945, - "step": 7223 - }, - { - "epoch": 0.8686346419767931, - "flos": 14253003279000.0, - "grad_norm": 4.048043689838803, - "learning_rate": 1.7818529087710378e-07, - "loss": 0.9268, - "num_input_tokens_seen": 154259020, - "step": 7224 - }, - { - "epoch": 0.8687548848674322, - "flos": 12915447049080.0, - "grad_norm": 3.242922338835041, - "learning_rate": 1.7786401684212637e-07, - "loss": 1.0703, - "num_input_tokens_seen": 154277570, - "step": 7225 - }, - { - "epoch": 0.8688751277580713, - "flos": 50458440241440.0, - "grad_norm": 0.7318866045363306, - "learning_rate": 1.7754301922372883e-07, - "loss": 0.8014, - "num_input_tokens_seen": 154326935, - "step": 7226 - }, - { - "epoch": 0.8689953706487104, - "flos": 19155904367160.0, - "grad_norm": 2.846696773104816, - "learning_rate": 1.7722229807060617e-07, - "loss": 1.0351, - "num_input_tokens_seen": 154345235, - "step": 7227 - }, - { - "epoch": 0.8691156135393495, - "flos": 24711146200080.0, - "grad_norm": 6.731990666492359, - "learning_rate": 1.7690185343141172e-07, - "loss": 1.0463, - "num_input_tokens_seen": 154364870, - "step": 7228 - }, - { - "epoch": 0.8692358564299886, - "flos": 13466183853240.0, - "grad_norm": 6.476709453908041, - "learning_rate": 1.7658168535475615e-07, - "loss": 0.9292, - "num_input_tokens_seen": 154382375, - "step": 7229 - }, - { - "epoch": 0.8693560993206276, - "flos": 21645168936840.0, - "grad_norm": 2.5603092412137505, - "learning_rate": 1.7626179388920948e-07, - "loss": 0.8723, - "num_input_tokens_seen": 154403375, - "step": 7230 - }, - { - "epoch": 0.8694763422112668, - "flos": 19367176238640.0, - "grad_norm": 6.605458932392172, - "learning_rate": 1.7594217908329866e-07, - "loss": 1.0307, - "num_input_tokens_seen": 154425280, - "step": 7231 - }, - { - "epoch": 0.8695965851019059, - "flos": 18604033085280.0, - "grad_norm": 3.9230895574432423, - "learning_rate": 1.7562284098550895e-07, - "loss": 0.958, - "num_input_tokens_seen": 154444710, - "step": 7232 - }, - { - "epoch": 0.8697168279925449, - "flos": 48214058161080.0, - "grad_norm": 0.8719193177256884, - "learning_rate": 1.753037796442838e-07, - "loss": 0.8883, - "num_input_tokens_seen": 154503870, - "step": 7233 - }, - { - "epoch": 0.8698370708831841, - "flos": 13989902274960.0, - "grad_norm": 6.824013037111232, - "learning_rate": 1.74984995108024e-07, - "loss": 0.9729, - "num_input_tokens_seen": 154521520, - "step": 7234 - }, - { - "epoch": 0.8699573137738231, - "flos": 9060749343360.0, - "grad_norm": 16.85402251544633, - "learning_rate": 1.7466648742508981e-07, - "loss": 1.0511, - "num_input_tokens_seen": 154537425, - "step": 7235 - }, - { - "epoch": 0.8700775566644622, - "flos": 12338489063040.0, - "grad_norm": 4.279794487341867, - "learning_rate": 1.7434825664379837e-07, - "loss": 1.0643, - "num_input_tokens_seen": 154555650, - "step": 7236 - }, - { - "epoch": 0.8701977995551013, - "flos": 9689720431320.0, - "grad_norm": 10.831157837578541, - "learning_rate": 
1.740303028124246e-07, - "loss": 1.0706, - "num_input_tokens_seen": 154571430, - "step": 7237 - }, - { - "epoch": 0.8703180424457404, - "flos": 21778359832320.0, - "grad_norm": 3.561709801557428, - "learning_rate": 1.7371262597920212e-07, - "loss": 0.9808, - "num_input_tokens_seen": 154593210, - "step": 7238 - }, - { - "epoch": 0.8704382853363795, - "flos": 13911024098400.0, - "grad_norm": 2.8190079733799074, - "learning_rate": 1.7339522619232195e-07, - "loss": 0.9829, - "num_input_tokens_seen": 154611310, - "step": 7239 - }, - { - "epoch": 0.8705585282270186, - "flos": 18945276388440.0, - "grad_norm": 6.433717079115429, - "learning_rate": 1.730781034999338e-07, - "loss": 0.9633, - "num_input_tokens_seen": 154632610, - "step": 7240 - }, - { - "epoch": 0.8706787711176577, - "flos": 24318135087480.0, - "grad_norm": 4.913697439925247, - "learning_rate": 1.7276125795014497e-07, - "loss": 0.9656, - "num_input_tokens_seen": 154650780, - "step": 7241 - }, - { - "epoch": 0.8707990140082967, - "flos": 10319580704520.0, - "grad_norm": 4.430056494359061, - "learning_rate": 1.7244468959102054e-07, - "loss": 0.8929, - "num_input_tokens_seen": 154667555, - "step": 7242 - }, - { - "epoch": 0.8709192568989359, - "flos": 14252635340280.0, - "grad_norm": 4.096334558800947, - "learning_rate": 1.7212839847058348e-07, - "loss": 1.0817, - "num_input_tokens_seen": 154682405, - "step": 7243 - }, - { - "epoch": 0.871039499789575, - "flos": 11839918668600.0, - "grad_norm": 4.1475875184871915, - "learning_rate": 1.718123846368147e-07, - "loss": 0.9649, - "num_input_tokens_seen": 154701170, - "step": 7244 - }, - { - "epoch": 0.871159742680214, - "flos": 14960208650760.0, - "grad_norm": 3.498797891445098, - "learning_rate": 1.714966481376543e-07, - "loss": 0.9487, - "num_input_tokens_seen": 154717570, - "step": 7245 - }, - { - "epoch": 0.8712799855708532, - "flos": 20125904127360.0, - "grad_norm": 3.3502139065900955, - "learning_rate": 1.7118118902099797e-07, - "loss": 1.0291, - "num_input_tokens_seen": 154735375, - "step": 7246 - }, - { - "epoch": 0.8714002284614922, - "flos": 15799593086520.0, - "grad_norm": 3.366057239428844, - "learning_rate": 1.7086600733470146e-07, - "loss": 1.0237, - "num_input_tokens_seen": 154755765, - "step": 7247 - }, - { - "epoch": 0.8715204713521313, - "flos": 15220948714680.0, - "grad_norm": 7.095784034341217, - "learning_rate": 1.7055110312657738e-07, - "loss": 0.9887, - "num_input_tokens_seen": 154774980, - "step": 7248 - }, - { - "epoch": 0.8716407142427703, - "flos": 16664677457640.0, - "grad_norm": 9.185153376046799, - "learning_rate": 1.702364764443962e-07, - "loss": 0.9726, - "num_input_tokens_seen": 154793775, - "step": 7249 - }, - { - "epoch": 0.8717609571334095, - "flos": 19911044853360.0, - "grad_norm": 3.7484063894961324, - "learning_rate": 1.6992212733588685e-07, - "loss": 0.9536, - "num_input_tokens_seen": 154813160, - "step": 7250 - }, - { - "epoch": 0.8718812000240486, - "flos": 18129537563520.0, - "grad_norm": 8.07291703419018, - "learning_rate": 1.6960805584873538e-07, - "loss": 0.9737, - "num_input_tokens_seen": 154830880, - "step": 7251 - }, - { - "epoch": 0.8720014429146876, - "flos": 16637689736760.0, - "grad_norm": 3.1389857326935577, - "learning_rate": 1.6929426203058684e-07, - "loss": 1.0096, - "num_input_tokens_seen": 154851025, - "step": 7252 - }, - { - "epoch": 0.8721216858053268, - "flos": 17320421635560.0, - "grad_norm": 6.240968816815298, - "learning_rate": 1.689807459290431e-07, - "loss": 1.0377, - "num_input_tokens_seen": 154869400, - "step": 7253 - }, - 
{ - "epoch": 0.8722419286959658, - "flos": 24159213595080.0, - "grad_norm": 9.789676631773263, - "learning_rate": 1.6866750759166437e-07, - "loss": 0.936, - "num_input_tokens_seen": 154889100, - "step": 7254 - }, - { - "epoch": 0.8723621715866049, - "flos": 13020270453480.0, - "grad_norm": 4.8646376591112475, - "learning_rate": 1.6835454706596865e-07, - "loss": 0.9929, - "num_input_tokens_seen": 154906650, - "step": 7255 - }, - { - "epoch": 0.8724824144772441, - "flos": 15638494623360.0, - "grad_norm": 4.806849940523212, - "learning_rate": 1.680418643994317e-07, - "loss": 0.9701, - "num_input_tokens_seen": 154924940, - "step": 7256 - }, - { - "epoch": 0.8726026573678831, - "flos": 47756646676320.0, - "grad_norm": 1.0179823294119574, - "learning_rate": 1.6772945963948738e-07, - "loss": 0.9143, - "num_input_tokens_seen": 154982825, - "step": 7257 - }, - { - "epoch": 0.8727229002585222, - "flos": 9427968535920.0, - "grad_norm": 5.194654563602497, - "learning_rate": 1.6741733283352733e-07, - "loss": 0.9825, - "num_input_tokens_seen": 155000150, - "step": 7258 - }, - { - "epoch": 0.8728431431491613, - "flos": 15483344502840.0, - "grad_norm": 5.7185415427982145, - "learning_rate": 1.6710548402890102e-07, - "loss": 1.0629, - "num_input_tokens_seen": 155020395, - "step": 7259 - }, - { - "epoch": 0.8729633860398004, - "flos": 25815900595320.0, - "grad_norm": 6.969089653735748, - "learning_rate": 1.6679391327291527e-07, - "loss": 0.888, - "num_input_tokens_seen": 155041320, - "step": 7260 - }, - { - "epoch": 0.8730836289304394, - "flos": 11575529879040.0, - "grad_norm": 4.518689656679125, - "learning_rate": 1.6648262061283492e-07, - "loss": 0.8894, - "num_input_tokens_seen": 155056340, - "step": 7261 - }, - { - "epoch": 0.8732038718210786, - "flos": 15065430655440.0, - "grad_norm": 9.623807362569945, - "learning_rate": 1.6617160609588353e-07, - "loss": 0.9609, - "num_input_tokens_seen": 155075235, - "step": 7262 - }, - { - "epoch": 0.8733241147117177, - "flos": 11755337441880.0, - "grad_norm": 4.807222519443054, - "learning_rate": 1.6586086976924163e-07, - "loss": 0.9369, - "num_input_tokens_seen": 155090455, - "step": 7263 - }, - { - "epoch": 0.8734443576023567, - "flos": 14331574839960.0, - "grad_norm": 8.066002373363734, - "learning_rate": 1.6555041168004747e-07, - "loss": 1.0099, - "num_input_tokens_seen": 155109495, - "step": 7264 - }, - { - "epoch": 0.8735646004929959, - "flos": 12784402462800.0, - "grad_norm": 3.4068397221464672, - "learning_rate": 1.6524023187539715e-07, - "loss": 0.9093, - "num_input_tokens_seen": 155127500, - "step": 7265 - }, - { - "epoch": 0.873684843383635, - "flos": 14380123185600.0, - "grad_norm": 3.476992325518402, - "learning_rate": 1.649303304023446e-07, - "loss": 0.9646, - "num_input_tokens_seen": 155146975, - "step": 7266 - }, - { - "epoch": 0.873805086274274, - "flos": 11998349576040.0, - "grad_norm": 5.579212072110277, - "learning_rate": 1.6462070730790246e-07, - "loss": 1.015, - "num_input_tokens_seen": 155165855, - "step": 7267 - }, - { - "epoch": 0.8739253291649132, - "flos": 12784494447480.0, - "grad_norm": 7.804314803961087, - "learning_rate": 1.6431136263903912e-07, - "loss": 0.9984, - "num_input_tokens_seen": 155184575, - "step": 7268 - }, - { - "epoch": 0.8740455720555522, - "flos": 15144523462920.0, - "grad_norm": 3.2526033685019953, - "learning_rate": 1.6400229644268282e-07, - "loss": 0.9589, - "num_input_tokens_seen": 155202650, - "step": 7269 - }, - { - "epoch": 0.8741658149461913, - "flos": 10712806448040.0, - "grad_norm": 6.934274009323323, - 
"learning_rate": 1.6369350876571852e-07, - "loss": 1.0339, - "num_input_tokens_seen": 155220525, - "step": 7270 - }, - { - "epoch": 0.8742860578368304, - "flos": 16376888349720.0, - "grad_norm": 4.551256513541154, - "learning_rate": 1.6338499965498874e-07, - "loss": 1.0205, - "num_input_tokens_seen": 155238975, - "step": 7271 - }, - { - "epoch": 0.8744063007274695, - "flos": 20045768826840.0, - "grad_norm": 2.9828453737568554, - "learning_rate": 1.630767691572943e-07, - "loss": 1.0005, - "num_input_tokens_seen": 155258715, - "step": 7272 - }, - { - "epoch": 0.8745265436181086, - "flos": 45841457906040.0, - "grad_norm": 0.7512696169356428, - "learning_rate": 1.6276881731939306e-07, - "loss": 0.7892, - "num_input_tokens_seen": 155320325, - "step": 7273 - }, - { - "epoch": 0.8746467865087477, - "flos": 20414981020800.0, - "grad_norm": 8.290004033178583, - "learning_rate": 1.6246114418800193e-07, - "loss": 0.9755, - "num_input_tokens_seen": 155340325, - "step": 7274 - }, - { - "epoch": 0.8747670293993868, - "flos": 17056400784720.0, - "grad_norm": 3.8110824200804796, - "learning_rate": 1.6215374980979423e-07, - "loss": 0.9895, - "num_input_tokens_seen": 155360455, - "step": 7275 - }, - { - "epoch": 0.8748872722900258, - "flos": 32318293778160.0, - "grad_norm": 4.194167110433744, - "learning_rate": 1.6184663423140133e-07, - "loss": 0.9027, - "num_input_tokens_seen": 155380475, - "step": 7276 - }, - { - "epoch": 0.875007515180665, - "flos": 14015356917840.0, - "grad_norm": 3.2790707572150306, - "learning_rate": 1.615397974994126e-07, - "loss": 0.8761, - "num_input_tokens_seen": 155398000, - "step": 7277 - }, - { - "epoch": 0.875127758071304, - "flos": 16140253820040.0, - "grad_norm": 2.7311747979257848, - "learning_rate": 1.6123323966037438e-07, - "loss": 1.0322, - "num_input_tokens_seen": 155416240, - "step": 7278 - }, - { - "epoch": 0.8752480009619431, - "flos": 16638364291080.0, - "grad_norm": 6.179306925208339, - "learning_rate": 1.6092696076079216e-07, - "loss": 1.0084, - "num_input_tokens_seen": 155434335, - "step": 7279 - }, - { - "epoch": 0.8753682438525822, - "flos": 18657303311160.0, - "grad_norm": 3.414775852426401, - "learning_rate": 1.6062096084712785e-07, - "loss": 0.9651, - "num_input_tokens_seen": 155455405, - "step": 7280 - }, - { - "epoch": 0.8754884867432213, - "flos": 16582395847920.0, - "grad_norm": 2.7172019321964336, - "learning_rate": 1.6031523996580098e-07, - "loss": 0.9324, - "num_input_tokens_seen": 155472685, - "step": 7281 - }, - { - "epoch": 0.8756087296338604, - "flos": 8798629509240.0, - "grad_norm": 5.2640954577942, - "learning_rate": 1.6000979816318981e-07, - "loss": 0.8859, - "num_input_tokens_seen": 155490870, - "step": 7282 - }, - { - "epoch": 0.8757289725244994, - "flos": 13439594732640.0, - "grad_norm": 5.660508718105989, - "learning_rate": 1.5970463548562886e-07, - "loss": 0.9718, - "num_input_tokens_seen": 155509745, - "step": 7283 - }, - { - "epoch": 0.8758492154151386, - "flos": 17976809706240.0, - "grad_norm": 2.6449279036908844, - "learning_rate": 1.5939975197941192e-07, - "loss": 0.9373, - "num_input_tokens_seen": 155531120, - "step": 7284 - }, - { - "epoch": 0.8759694583057777, - "flos": 46947193471200.0, - "grad_norm": 0.9385975453081106, - "learning_rate": 1.5909514769078892e-07, - "loss": 0.7884, - "num_input_tokens_seen": 155595945, - "step": 7285 - }, - { - "epoch": 0.8760897011964167, - "flos": 18106045260480.0, - "grad_norm": 22.455771824464684, - "learning_rate": 1.5879082266596867e-07, - "loss": 0.9947, - "num_input_tokens_seen": 155617005, 
- "step": 7286 - }, - { - "epoch": 0.8762099440870559, - "flos": 20649131964120.0, - "grad_norm": 6.978369398744382, - "learning_rate": 1.5848677695111645e-07, - "loss": 0.9595, - "num_input_tokens_seen": 155638325, - "step": 7287 - }, - { - "epoch": 0.8763301869776949, - "flos": 15347884651920.0, - "grad_norm": 6.763147900581983, - "learning_rate": 1.5818301059235562e-07, - "loss": 0.9246, - "num_input_tokens_seen": 155653220, - "step": 7288 - }, - { - "epoch": 0.876450429868334, - "flos": 17529945798120.0, - "grad_norm": 2.9498059225089284, - "learning_rate": 1.578795236357684e-07, - "loss": 1.041, - "num_input_tokens_seen": 155674405, - "step": 7289 - }, - { - "epoch": 0.8765706727589732, - "flos": 14378160845760.0, - "grad_norm": 4.693985788084083, - "learning_rate": 1.5757631612739218e-07, - "loss": 1.0804, - "num_input_tokens_seen": 155687670, - "step": 7290 - }, - { - "epoch": 0.8766909156496122, - "flos": 51114950958360.0, - "grad_norm": 0.8582823656437066, - "learning_rate": 1.572733881132242e-07, - "loss": 0.8989, - "num_input_tokens_seen": 155748035, - "step": 7291 - }, - { - "epoch": 0.8768111585402513, - "flos": 49788831490320.0, - "grad_norm": 0.7671288493126522, - "learning_rate": 1.5697073963921814e-07, - "loss": 0.8445, - "num_input_tokens_seen": 155806995, - "step": 7292 - }, - { - "epoch": 0.8769314014308904, - "flos": 13356761214840.0, - "grad_norm": 2.4422433135329364, - "learning_rate": 1.566683707512857e-07, - "loss": 1.0693, - "num_input_tokens_seen": 155824390, - "step": 7293 - }, - { - "epoch": 0.8770516443215295, - "flos": 10581823184880.0, - "grad_norm": 4.22015899864886, - "learning_rate": 1.5636628149529553e-07, - "loss": 1.0239, - "num_input_tokens_seen": 155841900, - "step": 7294 - }, - { - "epoch": 0.8771718872121685, - "flos": 22564474042200.0, - "grad_norm": 6.011022932745977, - "learning_rate": 1.560644719170743e-07, - "loss": 1.0257, - "num_input_tokens_seen": 155862490, - "step": 7295 - }, - { - "epoch": 0.8772921301028077, - "flos": 25758858997560.0, - "grad_norm": 3.665575443072673, - "learning_rate": 1.5576294206240692e-07, - "loss": 0.9435, - "num_input_tokens_seen": 155881735, - "step": 7296 - }, - { - "epoch": 0.8774123729934468, - "flos": 40867380240960.0, - "grad_norm": 2.672030171132638, - "learning_rate": 1.5546169197703507e-07, - "loss": 0.9108, - "num_input_tokens_seen": 155907730, - "step": 7297 - }, - { - "epoch": 0.8775326158840858, - "flos": 17005338191160.0, - "grad_norm": 5.7995322992579945, - "learning_rate": 1.5516072170665774e-07, - "loss": 0.9949, - "num_input_tokens_seen": 155925420, - "step": 7298 - }, - { - "epoch": 0.877652858774725, - "flos": 12123967066200.0, - "grad_norm": 3.017042232198877, - "learning_rate": 1.5486003129693214e-07, - "loss": 1.086, - "num_input_tokens_seen": 155942505, - "step": 7299 - }, - { - "epoch": 0.877773101665364, - "flos": 11683266131640.0, - "grad_norm": 5.612948753581057, - "learning_rate": 1.545596207934725e-07, - "loss": 0.9975, - "num_input_tokens_seen": 155960710, - "step": 7300 - }, - { - "epoch": 0.8778933445560031, - "flos": 15667597991880.0, - "grad_norm": 3.5647727007853054, - "learning_rate": 1.5425949024185147e-07, - "loss": 0.9956, - "num_input_tokens_seen": 155980455, - "step": 7301 - }, - { - "epoch": 0.8780135874466423, - "flos": 16035338430960.0, - "grad_norm": 4.622840583430761, - "learning_rate": 1.5395963968759818e-07, - "loss": 0.8954, - "num_input_tokens_seen": 156000450, - "step": 7302 - }, - { - "epoch": 0.8781338303372813, - "flos": 44040419202480.0, - "grad_norm": 
8.60458700586518, - "learning_rate": 1.536600691761998e-07, - "loss": 0.8698, - "num_input_tokens_seen": 156026000, - "step": 7303 - }, - { - "epoch": 0.8782540732279204, - "flos": 16114461900000.0, - "grad_norm": 7.804113662900904, - "learning_rate": 1.5336077875310084e-07, - "loss": 0.9362, - "num_input_tokens_seen": 156044945, - "step": 7304 - }, - { - "epoch": 0.8783743161185595, - "flos": 11367937394760.0, - "grad_norm": 10.00014800915857, - "learning_rate": 1.5306176846370321e-07, - "loss": 0.9641, - "num_input_tokens_seen": 156062810, - "step": 7305 - }, - { - "epoch": 0.8784945590091986, - "flos": 18553031814840.0, - "grad_norm": 24.694561144058753, - "learning_rate": 1.5276303835336712e-07, - "loss": 0.9562, - "num_input_tokens_seen": 156083070, - "step": 7306 - }, - { - "epoch": 0.8786148018998376, - "flos": 44897526019920.0, - "grad_norm": 0.7763470341207939, - "learning_rate": 1.524645884674094e-07, - "loss": 0.7867, - "num_input_tokens_seen": 156139720, - "step": 7307 - }, - { - "epoch": 0.8787350447904768, - "flos": 15379900868640.0, - "grad_norm": 3.5541005750431394, - "learning_rate": 1.521664188511047e-07, - "loss": 1.0172, - "num_input_tokens_seen": 156159465, - "step": 7308 - }, - { - "epoch": 0.8788552876811159, - "flos": 18128679039840.0, - "grad_norm": 7.821642843497792, - "learning_rate": 1.518685295496851e-07, - "loss": 1.028, - "num_input_tokens_seen": 156177045, - "step": 7309 - }, - { - "epoch": 0.8789755305717549, - "flos": 15852526035240.0, - "grad_norm": 2.6926583753387168, - "learning_rate": 1.5157092060833975e-07, - "loss": 1.0728, - "num_input_tokens_seen": 156196415, - "step": 7310 - }, - { - "epoch": 0.879095773462394, - "flos": 20883221584320.0, - "grad_norm": 3.481441751400703, - "learning_rate": 1.5127359207221658e-07, - "loss": 0.8743, - "num_input_tokens_seen": 156215615, - "step": 7311 - }, - { - "epoch": 0.8792160163530331, - "flos": 11813206901760.0, - "grad_norm": 4.452959238280001, - "learning_rate": 1.5097654398641923e-07, - "loss": 0.949, - "num_input_tokens_seen": 156233240, - "step": 7312 - }, - { - "epoch": 0.8793362592436722, - "flos": 17425766286480.0, - "grad_norm": 3.1072380352006284, - "learning_rate": 1.5067977639601014e-07, - "loss": 0.9525, - "num_input_tokens_seen": 156255720, - "step": 7313 - }, - { - "epoch": 0.8794565021343113, - "flos": 10267015694520.0, - "grad_norm": 10.122786668527764, - "learning_rate": 1.5038328934600864e-07, - "loss": 0.9293, - "num_input_tokens_seen": 156272075, - "step": 7314 - }, - { - "epoch": 0.8795767450249504, - "flos": 28228096020480.0, - "grad_norm": 8.252163689991022, - "learning_rate": 1.5008708288139161e-07, - "loss": 0.9236, - "num_input_tokens_seen": 156294155, - "step": 7315 - }, - { - "epoch": 0.8796969879155895, - "flos": 16319141536080.0, - "grad_norm": 7.072299066994946, - "learning_rate": 1.497911570470931e-07, - "loss": 0.9579, - "num_input_tokens_seen": 156313880, - "step": 7316 - }, - { - "epoch": 0.8798172308062285, - "flos": 20201532178560.0, - "grad_norm": 3.9997790990868105, - "learning_rate": 1.494955118880048e-07, - "loss": 1.0834, - "num_input_tokens_seen": 156334585, - "step": 7317 - }, - { - "epoch": 0.8799374736968677, - "flos": 17059068340440.0, - "grad_norm": 6.184690511942827, - "learning_rate": 1.4920014744897634e-07, - "loss": 0.9556, - "num_input_tokens_seen": 156353720, - "step": 7318 - }, - { - "epoch": 0.8800577165875068, - "flos": 18238561601640.0, - "grad_norm": 3.808755755769132, - "learning_rate": 1.4890506377481392e-07, - "loss": 1.0833, - 
"num_input_tokens_seen": 156372530, - "step": 7319 - }, - { - "epoch": 0.8801779594781458, - "flos": 16664585472960.0, - "grad_norm": 2.218978655141292, - "learning_rate": 1.486102609102815e-07, - "loss": 0.8582, - "num_input_tokens_seen": 156392800, - "step": 7320 - }, - { - "epoch": 0.880298202368785, - "flos": 8089339151400.0, - "grad_norm": 16.268879882669204, - "learning_rate": 1.483157389001004e-07, - "loss": 1.0686, - "num_input_tokens_seen": 156410080, - "step": 7321 - }, - { - "epoch": 0.880418445259424, - "flos": 16111641036480.0, - "grad_norm": 9.49625724548168, - "learning_rate": 1.4802149778894933e-07, - "loss": 1.0036, - "num_input_tokens_seen": 156428590, - "step": 7322 - }, - { - "epoch": 0.8805386881500631, - "flos": 14567442830640.0, - "grad_norm": 4.280050736242454, - "learning_rate": 1.4772753762146484e-07, - "loss": 1.1016, - "num_input_tokens_seen": 156447565, - "step": 7323 - }, - { - "epoch": 0.8806589310407023, - "flos": 26078296383480.0, - "grad_norm": 2.8401798852177493, - "learning_rate": 1.474338584422401e-07, - "loss": 0.9206, - "num_input_tokens_seen": 156472495, - "step": 7324 - }, - { - "epoch": 0.8807791739313413, - "flos": 16664462826720.0, - "grad_norm": 3.6181906839438986, - "learning_rate": 1.4714046029582595e-07, - "loss": 0.9743, - "num_input_tokens_seen": 156491280, - "step": 7325 - }, - { - "epoch": 0.8808994168219804, - "flos": 18473019160560.0, - "grad_norm": 3.2199269337390506, - "learning_rate": 1.46847343226731e-07, - "loss": 0.9944, - "num_input_tokens_seen": 156512040, - "step": 7326 - }, - { - "epoch": 0.8810196597126195, - "flos": 12102253133640.0, - "grad_norm": 4.498173872976491, - "learning_rate": 1.465545072794203e-07, - "loss": 0.917, - "num_input_tokens_seen": 156529635, - "step": 7327 - }, - { - "epoch": 0.8811399026032586, - "flos": 16350207244440.0, - "grad_norm": 3.3347282987580993, - "learning_rate": 1.4626195249831774e-07, - "loss": 0.9846, - "num_input_tokens_seen": 156550255, - "step": 7328 - }, - { - "epoch": 0.8812601454938976, - "flos": 10213193560560.0, - "grad_norm": 8.251184551856118, - "learning_rate": 1.4596967892780244e-07, - "loss": 0.9502, - "num_input_tokens_seen": 156566305, - "step": 7329 - }, - { - "epoch": 0.8813803883845368, - "flos": 15984122529600.0, - "grad_norm": 3.094423933510993, - "learning_rate": 1.4567768661221314e-07, - "loss": 0.9724, - "num_input_tokens_seen": 156586595, - "step": 7330 - }, - { - "epoch": 0.8815006312751759, - "flos": 15275108125800.0, - "grad_norm": 4.480150951499042, - "learning_rate": 1.4538597559584442e-07, - "loss": 0.9581, - "num_input_tokens_seen": 156604105, - "step": 7331 - }, - { - "epoch": 0.8816208741658149, - "flos": 16193432061240.0, - "grad_norm": 3.7389698570242365, - "learning_rate": 1.4509454592294823e-07, - "loss": 0.9995, - "num_input_tokens_seen": 156624310, - "step": 7332 - }, - { - "epoch": 0.8817411170564541, - "flos": 12595427093520.0, - "grad_norm": 5.061769786023384, - "learning_rate": 1.448033976377354e-07, - "loss": 1.0146, - "num_input_tokens_seen": 156639015, - "step": 7333 - }, - { - "epoch": 0.8818613599470931, - "flos": 13151376362880.0, - "grad_norm": 7.892425392143827, - "learning_rate": 1.445125307843713e-07, - "loss": 0.9602, - "num_input_tokens_seen": 156656960, - "step": 7334 - }, - { - "epoch": 0.8819816028377322, - "flos": 19653922853520.0, - "grad_norm": 5.130371070514003, - "learning_rate": 1.442219454069813e-07, - "loss": 0.9659, - "num_input_tokens_seen": 156677705, - "step": 7335 - }, - { - "epoch": 0.8821018457283714, - "flos": 
16639008183840.0, - "grad_norm": 4.05962540086961, - "learning_rate": 1.4393164154964676e-07, - "loss": 0.8896, - "num_input_tokens_seen": 156696955, - "step": 7336 - }, - { - "epoch": 0.8822220886190104, - "flos": 20755457784960.0, - "grad_norm": 2.9430355103943096, - "learning_rate": 1.4364161925640649e-07, - "loss": 1.164, - "num_input_tokens_seen": 156718075, - "step": 7337 - }, - { - "epoch": 0.8823423315096495, - "flos": 14541007017840.0, - "grad_norm": 4.199806917384482, - "learning_rate": 1.4335187857125663e-07, - "loss": 1.0738, - "num_input_tokens_seen": 156736495, - "step": 7338 - }, - { - "epoch": 0.8824625744002886, - "flos": 17215107646200.0, - "grad_norm": 5.7158082159928805, - "learning_rate": 1.4306241953815023e-07, - "loss": 0.9632, - "num_input_tokens_seen": 156757275, - "step": 7339 - }, - { - "epoch": 0.8825828172909277, - "flos": 17554971179160.0, - "grad_norm": 3.1807937384814338, - "learning_rate": 1.4277324220099862e-07, - "loss": 0.9254, - "num_input_tokens_seen": 156778905, - "step": 7340 - }, - { - "epoch": 0.8827030601815667, - "flos": 15957410762760.0, - "grad_norm": 4.441138266981261, - "learning_rate": 1.4248434660366938e-07, - "loss": 0.9643, - "num_input_tokens_seen": 156798100, - "step": 7341 - }, - { - "epoch": 0.8828233030722058, - "flos": 14095645526160.0, - "grad_norm": 2.739710893066619, - "learning_rate": 1.4219573278998808e-07, - "loss": 0.9281, - "num_input_tokens_seen": 156816280, - "step": 7342 - }, - { - "epoch": 0.882943545962845, - "flos": 28096438203000.0, - "grad_norm": 3.3047253040649456, - "learning_rate": 1.4190740080373685e-07, - "loss": 0.8852, - "num_input_tokens_seen": 156836280, - "step": 7343 - }, - { - "epoch": 0.883063788853484, - "flos": 13511574058200.0, - "grad_norm": 4.281621986434206, - "learning_rate": 1.4161935068865538e-07, - "loss": 1.0747, - "num_input_tokens_seen": 156851145, - "step": 7344 - }, - { - "epoch": 0.8831840317441231, - "flos": 13281593087040.0, - "grad_norm": 3.8332435486824505, - "learning_rate": 1.4133158248844113e-07, - "loss": 0.9822, - "num_input_tokens_seen": 156869770, - "step": 7345 - }, - { - "epoch": 0.8833042746347622, - "flos": 19098249538200.0, - "grad_norm": 3.476657686109151, - "learning_rate": 1.4104409624674785e-07, - "loss": 0.9557, - "num_input_tokens_seen": 156889275, - "step": 7346 - }, - { - "epoch": 0.8834245175254013, - "flos": 18579007704240.0, - "grad_norm": 22.128214284746083, - "learning_rate": 1.407568920071873e-07, - "loss": 1.0075, - "num_input_tokens_seen": 156907860, - "step": 7347 - }, - { - "epoch": 0.8835447604160404, - "flos": 21831108811680.0, - "grad_norm": 12.2541640074899, - "learning_rate": 1.4046996981332782e-07, - "loss": 0.892, - "num_input_tokens_seen": 156927465, - "step": 7348 - }, - { - "epoch": 0.8836650033066795, - "flos": 17583216024000.0, - "grad_norm": 4.026381754195394, - "learning_rate": 1.4018332970869516e-07, - "loss": 1.0021, - "num_input_tokens_seen": 156945125, - "step": 7349 - }, - { - "epoch": 0.8837852461973186, - "flos": 18082951557720.0, - "grad_norm": 3.1715256071035705, - "learning_rate": 1.398969717367733e-07, - "loss": 1.0619, - "num_input_tokens_seen": 156966170, - "step": 7350 - }, - { - "epoch": 0.8839054890879576, - "flos": 12627351325560.0, - "grad_norm": 3.2196993122330304, - "learning_rate": 1.396108959410014e-07, - "loss": 0.9868, - "num_input_tokens_seen": 156985105, - "step": 7351 - }, - { - "epoch": 0.8840257319785968, - "flos": 16900852063920.0, - "grad_norm": 5.548427482731294, - "learning_rate": 1.3932510236477745e-07, - 
"loss": 1.0232, - "num_input_tokens_seen": 157005495, - "step": 7352 - }, - { - "epoch": 0.8841459748692359, - "flos": 20703260713680.0, - "grad_norm": 4.8368506732609395, - "learning_rate": 1.3903959105145636e-07, - "loss": 0.7765, - "num_input_tokens_seen": 157025705, - "step": 7353 - }, - { - "epoch": 0.8842662177598749, - "flos": 17290091804640.0, - "grad_norm": 4.186266589355403, - "learning_rate": 1.387543620443492e-07, - "loss": 1.0557, - "num_input_tokens_seen": 157042270, - "step": 7354 - }, - { - "epoch": 0.8843864606505141, - "flos": 17790961816080.0, - "grad_norm": 2.8975042192693343, - "learning_rate": 1.3846941538672606e-07, - "loss": 1.0631, - "num_input_tokens_seen": 157060695, - "step": 7355 - }, - { - "epoch": 0.8845067035411531, - "flos": 20073768379200.0, - "grad_norm": 4.845930629987835, - "learning_rate": 1.3818475112181193e-07, - "loss": 1.0288, - "num_input_tokens_seen": 157079210, - "step": 7356 - }, - { - "epoch": 0.8846269464317922, - "flos": 9054984970080.0, - "grad_norm": 4.327360560805833, - "learning_rate": 1.3790036929279091e-07, - "loss": 1.0122, - "num_input_tokens_seen": 157096085, - "step": 7357 - }, - { - "epoch": 0.8847471893224313, - "flos": 13203450787920.0, - "grad_norm": 3.854741082903383, - "learning_rate": 1.3761626994280363e-07, - "loss": 0.8178, - "num_input_tokens_seen": 157113275, - "step": 7358 - }, - { - "epoch": 0.8848674322130704, - "flos": 25524861362040.0, - "grad_norm": 6.884731826158172, - "learning_rate": 1.3733245311494735e-07, - "loss": 0.9587, - "num_input_tokens_seen": 157135650, - "step": 7359 - }, - { - "epoch": 0.8849876751037095, - "flos": 17242095367080.0, - "grad_norm": 11.031580933531984, - "learning_rate": 1.3704891885227676e-07, - "loss": 0.939, - "num_input_tokens_seen": 157155415, - "step": 7360 - }, - { - "epoch": 0.8851079179943486, - "flos": 15269711691240.0, - "grad_norm": 3.3647790779043767, - "learning_rate": 1.367656671978037e-07, - "loss": 1.0009, - "num_input_tokens_seen": 157172600, - "step": 7361 - }, - { - "epoch": 0.8852281608849877, - "flos": 10814349065520.0, - "grad_norm": 4.063689541890607, - "learning_rate": 1.36482698194498e-07, - "loss": 0.9677, - "num_input_tokens_seen": 157188865, - "step": 7362 - }, - { - "epoch": 0.8853484037756267, - "flos": 16560835223160.0, - "grad_norm": 3.718866170142543, - "learning_rate": 1.3620001188528506e-07, - "loss": 0.9374, - "num_input_tokens_seen": 157209305, - "step": 7363 - }, - { - "epoch": 0.8854686466662659, - "flos": 17867448390960.0, - "grad_norm": 4.656467714276206, - "learning_rate": 1.3591760831304865e-07, - "loss": 0.9453, - "num_input_tokens_seen": 157226715, - "step": 7364 - }, - { - "epoch": 0.885588889556905, - "flos": 15190833514680.0, - "grad_norm": 6.699111412678731, - "learning_rate": 1.356354875206287e-07, - "loss": 1.0272, - "num_input_tokens_seen": 157244270, - "step": 7365 - }, - { - "epoch": 0.885709132447544, - "flos": 19155873705600.0, - "grad_norm": 4.005620583829263, - "learning_rate": 1.3535364955082296e-07, - "loss": 0.9202, - "num_input_tokens_seen": 157263840, - "step": 7366 - }, - { - "epoch": 0.8858293753381832, - "flos": 18578425134600.0, - "grad_norm": 3.233903933511953, - "learning_rate": 1.3507209444638613e-07, - "loss": 0.8691, - "num_input_tokens_seen": 157285560, - "step": 7367 - }, - { - "epoch": 0.8859496182288222, - "flos": 16558351636800.0, - "grad_norm": 4.1699088388994126, - "learning_rate": 1.347908222500298e-07, - "loss": 0.9634, - "num_input_tokens_seen": 157305355, - "step": 7368 - }, - { - "epoch": 
0.8860698611194613, - "flos": 11944711411440.0, - "grad_norm": 4.442847669711372, - "learning_rate": 1.3450983300442276e-07, - "loss": 0.9143, - "num_input_tokens_seen": 157324305, - "step": 7369 - }, - { - "epoch": 0.8861901040101005, - "flos": 17556596241840.0, - "grad_norm": 4.421452262809662, - "learning_rate": 1.3422912675219068e-07, - "loss": 0.9537, - "num_input_tokens_seen": 157343780, - "step": 7370 - }, - { - "epoch": 0.8863103469007395, - "flos": 17370503059200.0, - "grad_norm": 2.9096669444981624, - "learning_rate": 1.339487035359166e-07, - "loss": 1.0055, - "num_input_tokens_seen": 157363870, - "step": 7371 - }, - { - "epoch": 0.8864305897913786, - "flos": 15668241884640.0, - "grad_norm": 3.7868069256967516, - "learning_rate": 1.336685633981409e-07, - "loss": 1.0778, - "num_input_tokens_seen": 157384675, - "step": 7372 - }, - { - "epoch": 0.8865508326820177, - "flos": 13545000706680.0, - "grad_norm": 51.20538025243905, - "learning_rate": 1.333887063813597e-07, - "loss": 0.9687, - "num_input_tokens_seen": 157402500, - "step": 7373 - }, - { - "epoch": 0.8866710755726568, - "flos": 10896446705880.0, - "grad_norm": 3.450402200446874, - "learning_rate": 1.331091325280278e-07, - "loss": 0.8846, - "num_input_tokens_seen": 157421190, - "step": 7374 - }, - { - "epoch": 0.8867913184632958, - "flos": 14252236740000.0, - "grad_norm": 2.8715017846215023, - "learning_rate": 1.3282984188055625e-07, - "loss": 1.0076, - "num_input_tokens_seen": 157440700, - "step": 7375 - }, - { - "epoch": 0.8869115613539349, - "flos": 16610855323680.0, - "grad_norm": 3.571419642301024, - "learning_rate": 1.3255083448131288e-07, - "loss": 1.0156, - "num_input_tokens_seen": 157459465, - "step": 7376 - }, - { - "epoch": 0.8870318042445741, - "flos": 15116799864600.0, - "grad_norm": 5.593112333809619, - "learning_rate": 1.3227211037262365e-07, - "loss": 1.0075, - "num_input_tokens_seen": 157476425, - "step": 7377 - }, - { - "epoch": 0.8871520471352131, - "flos": 14199917022480.0, - "grad_norm": 5.2046174895846695, - "learning_rate": 1.319936695967696e-07, - "loss": 1.0844, - "num_input_tokens_seen": 157493970, - "step": 7378 - }, - { - "epoch": 0.8872722900258522, - "flos": 16061712920640.0, - "grad_norm": 4.830932590968986, - "learning_rate": 1.3171551219599097e-07, - "loss": 1.0425, - "num_input_tokens_seen": 157512215, - "step": 7379 - }, - { - "epoch": 0.8873925329164913, - "flos": 15746782784040.0, - "grad_norm": 5.358062672441098, - "learning_rate": 1.3143763821248377e-07, - "loss": 0.9939, - "num_input_tokens_seen": 157529020, - "step": 7380 - }, - { - "epoch": 0.8875127758071304, - "flos": 13623081682680.0, - "grad_norm": 3.8113774382347088, - "learning_rate": 1.3116004768840118e-07, - "loss": 0.9503, - "num_input_tokens_seen": 157547115, - "step": 7381 - }, - { - "epoch": 0.8876330186977694, - "flos": 12834085286160.0, - "grad_norm": 3.669289445615536, - "learning_rate": 1.3088274066585348e-07, - "loss": 0.9573, - "num_input_tokens_seen": 157564445, - "step": 7382 - }, - { - "epoch": 0.8877532615884086, - "flos": 15635152513320.0, - "grad_norm": 6.58586451805648, - "learning_rate": 1.3060571718690749e-07, - "loss": 1.1317, - "num_input_tokens_seen": 157581660, - "step": 7383 - }, - { - "epoch": 0.8878735044790477, - "flos": 51665227839120.0, - "grad_norm": 0.7826019034286736, - "learning_rate": 1.3032897729358805e-07, - "loss": 0.8289, - "num_input_tokens_seen": 157642335, - "step": 7384 - }, - { - "epoch": 0.8879937473696867, - "flos": 19601081889480.0, - "grad_norm": 4.9279853796184705, - 
"learning_rate": 1.3005252102787645e-07, - "loss": 1.0231, - "num_input_tokens_seen": 157660995, - "step": 7385 - }, - { - "epoch": 0.8881139902603259, - "flos": 15797385454200.0, - "grad_norm": 2.9293811688806857, - "learning_rate": 1.297763484317105e-07, - "loss": 0.9586, - "num_input_tokens_seen": 157679010, - "step": 7386 - }, - { - "epoch": 0.888234233150965, - "flos": 14406957598680.0, - "grad_norm": 7.232481212202579, - "learning_rate": 1.2950045954698551e-07, - "loss": 0.9197, - "num_input_tokens_seen": 157696565, - "step": 7387 - }, - { - "epoch": 0.888354476041604, - "flos": 12860827714560.0, - "grad_norm": 3.405809006448148, - "learning_rate": 1.2922485441555343e-07, - "loss": 0.9865, - "num_input_tokens_seen": 157715365, - "step": 7388 - }, - { - "epoch": 0.8884747189322432, - "flos": 15639291823920.0, - "grad_norm": 3.4919643800192524, - "learning_rate": 1.2894953307922363e-07, - "loss": 1.0469, - "num_input_tokens_seen": 157734045, - "step": 7389 - }, - { - "epoch": 0.8885949618228822, - "flos": 14038174666560.0, - "grad_norm": 6.77287626306253, - "learning_rate": 1.2867449557976208e-07, - "loss": 1.0699, - "num_input_tokens_seen": 157751865, - "step": 7390 - }, - { - "epoch": 0.8887152047135213, - "flos": 14226352835280.0, - "grad_norm": 4.4083896068497905, - "learning_rate": 1.283997419588916e-07, - "loss": 0.9798, - "num_input_tokens_seen": 157771055, - "step": 7391 - }, - { - "epoch": 0.8888354476041604, - "flos": 13177444236960.0, - "grad_norm": 6.119967202990048, - "learning_rate": 1.2812527225829216e-07, - "loss": 0.847, - "num_input_tokens_seen": 157789000, - "step": 7392 - }, - { - "epoch": 0.8889556904947995, - "flos": 15406612635480.0, - "grad_norm": 148.04404433309702, - "learning_rate": 1.2785108651960052e-07, - "loss": 0.9831, - "num_input_tokens_seen": 157810355, - "step": 7393 - }, - { - "epoch": 0.8890759333854386, - "flos": 19496871716280.0, - "grad_norm": 3.2060091757354505, - "learning_rate": 1.2757718478441094e-07, - "loss": 1.0242, - "num_input_tokens_seen": 157830820, - "step": 7394 - }, - { - "epoch": 0.8891961762760777, - "flos": 17425367686200.0, - "grad_norm": 3.648504154486824, - "learning_rate": 1.2730356709427302e-07, - "loss": 0.992, - "num_input_tokens_seen": 157849220, - "step": 7395 - }, - { - "epoch": 0.8893164191667168, - "flos": 29643426610800.0, - "grad_norm": 3.552391836840521, - "learning_rate": 1.2703023349069542e-07, - "loss": 0.8173, - "num_input_tokens_seen": 157873790, - "step": 7396 - }, - { - "epoch": 0.8894366620573558, - "flos": 23954411312760.0, - "grad_norm": 5.212631750843086, - "learning_rate": 1.2675718401514223e-07, - "loss": 0.8354, - "num_input_tokens_seen": 157897690, - "step": 7397 - }, - { - "epoch": 0.889556904947995, - "flos": 11971300532040.0, - "grad_norm": 4.113738669440987, - "learning_rate": 1.264844187090346e-07, - "loss": 0.9653, - "num_input_tokens_seen": 157914535, - "step": 7398 - }, - { - "epoch": 0.889677147838634, - "flos": 18526350709560.0, - "grad_norm": 4.414223240989185, - "learning_rate": 1.262119376137516e-07, - "loss": 0.9752, - "num_input_tokens_seen": 157935315, - "step": 7399 - }, - { - "epoch": 0.8897973907292731, - "flos": 18840667614960.0, - "grad_norm": 5.6358894906618024, - "learning_rate": 1.2593974077062707e-07, - "loss": 1.0669, - "num_input_tokens_seen": 157956655, - "step": 7400 - }, - { - "epoch": 0.8899176336199123, - "flos": 18683616477720.0, - "grad_norm": 4.61797066644775, - "learning_rate": 1.2566782822095423e-07, - "loss": 0.8614, - "num_input_tokens_seen": 157976630, - 
"step": 7401 - }, - { - "epoch": 0.8900378765105513, - "flos": 14775249945840.0, - "grad_norm": 6.496255823293883, - "learning_rate": 1.2539620000598162e-07, - "loss": 0.9359, - "num_input_tokens_seen": 157995685, - "step": 7402 - }, - { - "epoch": 0.8901581194011904, - "flos": 11656922303520.0, - "grad_norm": 2.8612684640751107, - "learning_rate": 1.2512485616691492e-07, - "loss": 1.019, - "num_input_tokens_seen": 158012460, - "step": 7403 - }, - { - "epoch": 0.8902783622918296, - "flos": 25084283073720.0, - "grad_norm": 2.551012668364325, - "learning_rate": 1.2485379674491681e-07, - "loss": 1.0343, - "num_input_tokens_seen": 158038375, - "step": 7404 - }, - { - "epoch": 0.8903986051824686, - "flos": 12180518079000.0, - "grad_norm": 11.116743131162035, - "learning_rate": 1.2458302178110657e-07, - "loss": 1.0017, - "num_input_tokens_seen": 158056460, - "step": 7405 - }, - { - "epoch": 0.8905188480731077, - "flos": 18133278273840.0, - "grad_norm": 4.5832551198123515, - "learning_rate": 1.2431253131656118e-07, - "loss": 1.0434, - "num_input_tokens_seen": 158075655, - "step": 7406 - }, - { - "epoch": 0.8906390909637467, - "flos": 16610885985240.0, - "grad_norm": 3.5702978087943094, - "learning_rate": 1.240423253923133e-07, - "loss": 0.9942, - "num_input_tokens_seen": 158094980, - "step": 7407 - }, - { - "epoch": 0.8907593338543859, - "flos": 14960208650760.0, - "grad_norm": 4.904887016153925, - "learning_rate": 1.237724040493533e-07, - "loss": 0.9157, - "num_input_tokens_seen": 158113325, - "step": 7408 - }, - { - "epoch": 0.8908795767450249, - "flos": 15535449589440.0, - "grad_norm": 2.9679707357706735, - "learning_rate": 1.2350276732862773e-07, - "loss": 0.936, - "num_input_tokens_seen": 158134070, - "step": 7409 - }, - { - "epoch": 0.890999819635664, - "flos": 47477166851160.0, - "grad_norm": 0.9006517077264239, - "learning_rate": 1.2323341527103993e-07, - "loss": 0.8484, - "num_input_tokens_seen": 158188990, - "step": 7410 - }, - { - "epoch": 0.8911200625263032, - "flos": 19129345908120.0, - "grad_norm": 5.2000754644346685, - "learning_rate": 1.2296434791745135e-07, - "loss": 1.0654, - "num_input_tokens_seen": 158207160, - "step": 7411 - }, - { - "epoch": 0.8912403054169422, - "flos": 14828642817960.0, - "grad_norm": 3.2612707869779984, - "learning_rate": 1.2269556530867875e-07, - "loss": 0.9896, - "num_input_tokens_seen": 158225435, - "step": 7412 - }, - { - "epoch": 0.8913605483075813, - "flos": 19234843866840.0, - "grad_norm": 4.722363837618164, - "learning_rate": 1.2242706748549614e-07, - "loss": 1.0457, - "num_input_tokens_seen": 158243150, - "step": 7413 - }, - { - "epoch": 0.8914807911982204, - "flos": 16794618227760.0, - "grad_norm": 2.9407514068644254, - "learning_rate": 1.2215885448863473e-07, - "loss": 1.0332, - "num_input_tokens_seen": 158263745, - "step": 7414 - }, - { - "epoch": 0.8916010340888595, - "flos": 17399085181200.0, - "grad_norm": 3.8491371674173616, - "learning_rate": 1.2189092635878152e-07, - "loss": 1.0207, - "num_input_tokens_seen": 158284915, - "step": 7415 - }, - { - "epoch": 0.8917212769794985, - "flos": 15065982563520.0, - "grad_norm": 4.153927720728083, - "learning_rate": 1.216232831365822e-07, - "loss": 1.0028, - "num_input_tokens_seen": 158303580, - "step": 7416 - }, - { - "epoch": 0.8918415198701377, - "flos": 18154072359600.0, - "grad_norm": 3.220650417435603, - "learning_rate": 1.2135592486263678e-07, - "loss": 1.0336, - "num_input_tokens_seen": 158322550, - "step": 7417 - }, - { - "epoch": 0.8919617627607768, - "flos": 27022810839240.0, - 
"grad_norm": 3.020041122914428, - "learning_rate": 1.2108885157750415e-07, - "loss": 0.8322, - "num_input_tokens_seen": 158344630, - "step": 7418 - }, - { - "epoch": 0.8920820056514158, - "flos": 18657793896120.0, - "grad_norm": 9.02378415189115, - "learning_rate": 1.2082206332169897e-07, - "loss": 1.024, - "num_input_tokens_seen": 158364445, - "step": 7419 - }, - { - "epoch": 0.892202248542055, - "flos": 12311317372800.0, - "grad_norm": 12.043158802830565, - "learning_rate": 1.2055556013569225e-07, - "loss": 0.9595, - "num_input_tokens_seen": 158379675, - "step": 7420 - }, - { - "epoch": 0.892322491432694, - "flos": 15143787585480.0, - "grad_norm": 4.781712727812925, - "learning_rate": 1.2028934205991315e-07, - "loss": 1.0411, - "num_input_tokens_seen": 158398715, - "step": 7421 - }, - { - "epoch": 0.8924427343233331, - "flos": 17088631632360.0, - "grad_norm": 2.7447190296230155, - "learning_rate": 1.2002340913474607e-07, - "loss": 0.9936, - "num_input_tokens_seen": 158422070, - "step": 7422 - }, - { - "epoch": 0.8925629772139723, - "flos": 21384735488520.0, - "grad_norm": 4.705446196427147, - "learning_rate": 1.1975776140053317e-07, - "loss": 0.9698, - "num_input_tokens_seen": 158441760, - "step": 7423 - }, - { - "epoch": 0.8926832201046113, - "flos": 16061958213120.0, - "grad_norm": 5.211823395284773, - "learning_rate": 1.194923988975729e-07, - "loss": 0.9471, - "num_input_tokens_seen": 158461080, - "step": 7424 - }, - { - "epoch": 0.8928034629952504, - "flos": 9375342202800.0, - "grad_norm": 13.511372050336604, - "learning_rate": 1.192273216661206e-07, - "loss": 0.9496, - "num_input_tokens_seen": 158478890, - "step": 7425 - }, - { - "epoch": 0.8929237058858895, - "flos": 39246321973440.0, - "grad_norm": 0.7496462623541179, - "learning_rate": 1.189625297463881e-07, - "loss": 0.8365, - "num_input_tokens_seen": 158540300, - "step": 7426 - }, - { - "epoch": 0.8930439487765286, - "flos": 20577704546640.0, - "grad_norm": 3.3581908512101952, - "learning_rate": 1.1869802317854394e-07, - "loss": 1.0156, - "num_input_tokens_seen": 158563805, - "step": 7427 - }, - { - "epoch": 0.8931641916671677, - "flos": 15930852303720.0, - "grad_norm": 3.89081631232225, - "learning_rate": 1.1843380200271425e-07, - "loss": 0.9512, - "num_input_tokens_seen": 158582725, - "step": 7428 - }, - { - "epoch": 0.8932844345578068, - "flos": 18390737550840.0, - "grad_norm": 3.3235290026925117, - "learning_rate": 1.181698662589805e-07, - "loss": 1.0259, - "num_input_tokens_seen": 158602030, - "step": 7429 - }, - { - "epoch": 0.8934046774484459, - "flos": 16294300124400.0, - "grad_norm": 4.675856306832839, - "learning_rate": 1.1790621598738249e-07, - "loss": 0.9826, - "num_input_tokens_seen": 158620065, - "step": 7430 - }, - { - "epoch": 0.8935249203390849, - "flos": 17398931873400.0, - "grad_norm": 4.702272661099236, - "learning_rate": 1.1764285122791461e-07, - "loss": 0.9753, - "num_input_tokens_seen": 158640505, - "step": 7431 - }, - { - "epoch": 0.8936451632297241, - "flos": 11132590650600.0, - "grad_norm": 3.578335818281112, - "learning_rate": 1.173797720205294e-07, - "loss": 0.9919, - "num_input_tokens_seen": 158658260, - "step": 7432 - }, - { - "epoch": 0.8937654061203631, - "flos": 25055271689880.0, - "grad_norm": 6.105375029271083, - "learning_rate": 1.1711697840513602e-07, - "loss": 0.9482, - "num_input_tokens_seen": 158677415, - "step": 7433 - }, - { - "epoch": 0.8938856490110022, - "flos": 11394679823160.0, - "grad_norm": 4.248794384663081, - "learning_rate": 1.1685447042160012e-07, - "loss": 0.9308, - 
"num_input_tokens_seen": 158695170, - "step": 7434 - }, - { - "epoch": 0.8940058919016414, - "flos": 14698211462880.0, - "grad_norm": 4.213043875066732, - "learning_rate": 1.1659224810974367e-07, - "loss": 0.9362, - "num_input_tokens_seen": 158714850, - "step": 7435 - }, - { - "epoch": 0.8941261347922804, - "flos": 17950220585640.0, - "grad_norm": 2.697739544998539, - "learning_rate": 1.1633031150934591e-07, - "loss": 0.9065, - "num_input_tokens_seen": 158737600, - "step": 7436 - }, - { - "epoch": 0.8942463776829195, - "flos": 13859624227680.0, - "grad_norm": 4.243815016775171, - "learning_rate": 1.1606866066014176e-07, - "loss": 1.0128, - "num_input_tokens_seen": 158756370, - "step": 7437 - }, - { - "epoch": 0.8943666205735585, - "flos": 15845136599280.0, - "grad_norm": 5.259795179259244, - "learning_rate": 1.1580729560182434e-07, - "loss": 0.9764, - "num_input_tokens_seen": 158771945, - "step": 7438 - }, - { - "epoch": 0.8944868634641977, - "flos": 13410368717880.0, - "grad_norm": 3.490395719943416, - "learning_rate": 1.1554621637404171e-07, - "loss": 0.9429, - "num_input_tokens_seen": 158789755, - "step": 7439 - }, - { - "epoch": 0.8946071063548368, - "flos": 10210709974200.0, - "grad_norm": 4.700462018294032, - "learning_rate": 1.1528542301639999e-07, - "loss": 0.8275, - "num_input_tokens_seen": 158806265, - "step": 7440 - }, - { - "epoch": 0.8947273492454758, - "flos": 14252574017160.0, - "grad_norm": 4.922238484842049, - "learning_rate": 1.1502491556846105e-07, - "loss": 1.0394, - "num_input_tokens_seen": 158824480, - "step": 7441 - }, - { - "epoch": 0.894847592136115, - "flos": 13150303208280.0, - "grad_norm": 4.195332731343793, - "learning_rate": 1.1476469406974331e-07, - "loss": 1.0393, - "num_input_tokens_seen": 158839800, - "step": 7442 - }, - { - "epoch": 0.894967835026754, - "flos": 16692217086600.0, - "grad_norm": 24.112796206951376, - "learning_rate": 1.1450475855972341e-07, - "loss": 0.9972, - "num_input_tokens_seen": 158860310, - "step": 7443 - }, - { - "epoch": 0.8950880779173931, - "flos": 10733171271960.0, - "grad_norm": 4.052842148416401, - "learning_rate": 1.1424510907783158e-07, - "loss": 0.9268, - "num_input_tokens_seen": 158877310, - "step": 7444 - }, - { - "epoch": 0.8952083208080323, - "flos": 15695750852040.0, - "grad_norm": 4.21263934620927, - "learning_rate": 1.1398574566345787e-07, - "loss": 1.0504, - "num_input_tokens_seen": 158897665, - "step": 7445 - }, - { - "epoch": 0.8953285636986713, - "flos": 16530168115080.0, - "grad_norm": 6.105226635174628, - "learning_rate": 1.1372666835594702e-07, - "loss": 1.0429, - "num_input_tokens_seen": 158915710, - "step": 7446 - }, - { - "epoch": 0.8954488065893104, - "flos": 11578074788520.0, - "grad_norm": 3.9918338148657155, - "learning_rate": 1.1346787719460071e-07, - "loss": 0.9354, - "num_input_tokens_seen": 158934315, - "step": 7447 - }, - { - "epoch": 0.8955690494799495, - "flos": 12939859198920.0, - "grad_norm": 5.07422401245878, - "learning_rate": 1.1320937221867732e-07, - "loss": 0.9604, - "num_input_tokens_seen": 158951615, - "step": 7448 - }, - { - "epoch": 0.8956892923705886, - "flos": 18107149076640.0, - "grad_norm": 4.678003482603658, - "learning_rate": 1.1295115346739192e-07, - "loss": 1.0201, - "num_input_tokens_seen": 158971335, - "step": 7449 - }, - { - "epoch": 0.8958095352612276, - "flos": 37665084603120.0, - "grad_norm": 5.5581752809913825, - "learning_rate": 1.1269322097991629e-07, - "loss": 0.958, - "num_input_tokens_seen": 158994340, - "step": 7450 - }, - { - "epoch": 0.8959297781518668, - "flos": 
16482233000640.0, - "grad_norm": 10.823493087931997, - "learning_rate": 1.1243557479537846e-07, - "loss": 0.9114, - "num_input_tokens_seen": 159013950, - "step": 7451 - }, - { - "epoch": 0.8960500210425059, - "flos": 14431707025680.0, - "grad_norm": 4.554447750108863, - "learning_rate": 1.121782149528634e-07, - "loss": 0.9229, - "num_input_tokens_seen": 159030770, - "step": 7452 - }, - { - "epoch": 0.8961702639331449, - "flos": 14121498769320.0, - "grad_norm": 12.164387389286837, - "learning_rate": 1.1192114149141208e-07, - "loss": 1.0052, - "num_input_tokens_seen": 159050125, - "step": 7453 - }, - { - "epoch": 0.8962905068237841, - "flos": 9086878540560.0, - "grad_norm": 4.438674986110632, - "learning_rate": 1.1166435445002197e-07, - "loss": 0.8788, - "num_input_tokens_seen": 159067515, - "step": 7454 - }, - { - "epoch": 0.8964107497144231, - "flos": 16664585472960.0, - "grad_norm": 4.277166493544643, - "learning_rate": 1.1140785386764818e-07, - "loss": 0.9115, - "num_input_tokens_seen": 159085935, - "step": 7455 - }, - { - "epoch": 0.8965309926050622, - "flos": 13832912460840.0, - "grad_norm": 3.0715411884788346, - "learning_rate": 1.1115163978320153e-07, - "loss": 0.9184, - "num_input_tokens_seen": 159104385, - "step": 7456 - }, - { - "epoch": 0.8966512354957014, - "flos": 20414459774280.0, - "grad_norm": 3.160860980357408, - "learning_rate": 1.1089571223554917e-07, - "loss": 1.0395, - "num_input_tokens_seen": 159124990, - "step": 7457 - }, - { - "epoch": 0.8967714783863404, - "flos": 16615178603640.0, - "grad_norm": 5.561550616565502, - "learning_rate": 1.1064007126351537e-07, - "loss": 1.0757, - "num_input_tokens_seen": 159145425, - "step": 7458 - }, - { - "epoch": 0.8968917212769795, - "flos": 17451466221840.0, - "grad_norm": 22.639488681370572, - "learning_rate": 1.1038471690588003e-07, - "loss": 0.9768, - "num_input_tokens_seen": 159164290, - "step": 7459 - }, - { - "epoch": 0.8970119641676186, - "flos": 16689610854000.0, - "grad_norm": 3.125909749168829, - "learning_rate": 1.1012964920138145e-07, - "loss": 1.0199, - "num_input_tokens_seen": 159183595, - "step": 7460 - }, - { - "epoch": 0.8971322070582577, - "flos": 17454041792880.0, - "grad_norm": 2.837261132760542, - "learning_rate": 1.0987486818871205e-07, - "loss": 0.9803, - "num_input_tokens_seen": 159206905, - "step": 7461 - }, - { - "epoch": 0.8972524499488967, - "flos": 15483743103120.0, - "grad_norm": 6.793829013958066, - "learning_rate": 1.0962037390652245e-07, - "loss": 0.9393, - "num_input_tokens_seen": 159225645, - "step": 7462 - }, - { - "epoch": 0.8973726928395359, - "flos": 15432803155800.0, - "grad_norm": 3.7187828254853765, - "learning_rate": 1.0936616639341911e-07, - "loss": 0.9387, - "num_input_tokens_seen": 159245655, - "step": 7463 - }, - { - "epoch": 0.897492935730175, - "flos": 38515654960200.0, - "grad_norm": 0.7396263101117121, - "learning_rate": 1.0911224568796473e-07, - "loss": 0.793, - "num_input_tokens_seen": 159303570, - "step": 7464 - }, - { - "epoch": 0.897613178620814, - "flos": 12962646286080.0, - "grad_norm": 7.8665813678492, - "learning_rate": 1.0885861182867984e-07, - "loss": 0.9265, - "num_input_tokens_seen": 159321395, - "step": 7465 - }, - { - "epoch": 0.8977334215114532, - "flos": 23529629275920.0, - "grad_norm": 6.275337739039523, - "learning_rate": 1.0860526485403942e-07, - "loss": 0.9292, - "num_input_tokens_seen": 159342390, - "step": 7466 - }, - { - "epoch": 0.8978536644020922, - "flos": 10739426230200.0, - "grad_norm": 3.0293201713723814, - "learning_rate": 1.0835220480247675e-07, - 
"loss": 0.9983, - "num_input_tokens_seen": 159360605, - "step": 7467 - }, - { - "epoch": 0.8979739072927313, - "flos": 12757935988440.0, - "grad_norm": 4.617225298266921, - "learning_rate": 1.0809943171238067e-07, - "loss": 1.0602, - "num_input_tokens_seen": 159378250, - "step": 7468 - }, - { - "epoch": 0.8980941501833704, - "flos": 15824434498200.0, - "grad_norm": 6.0593313214605145, - "learning_rate": 1.078469456220965e-07, - "loss": 0.857, - "num_input_tokens_seen": 159398125, - "step": 7469 - }, - { - "epoch": 0.8982143930740095, - "flos": 26812704107040.0, - "grad_norm": 3.068712589323344, - "learning_rate": 1.0759474656992606e-07, - "loss": 0.9099, - "num_input_tokens_seen": 159420615, - "step": 7470 - }, - { - "epoch": 0.8983346359646486, - "flos": 12809979751920.0, - "grad_norm": 4.593026863323566, - "learning_rate": 1.0734283459412785e-07, - "loss": 1.0084, - "num_input_tokens_seen": 159437185, - "step": 7471 - }, - { - "epoch": 0.8984548788552876, - "flos": 14593878643440.0, - "grad_norm": 2.704186556311426, - "learning_rate": 1.0709120973291707e-07, - "loss": 1.0187, - "num_input_tokens_seen": 159456685, - "step": 7472 - }, - { - "epoch": 0.8985751217459268, - "flos": 12600578235600.0, - "grad_norm": 4.684300666000443, - "learning_rate": 1.0683987202446475e-07, - "loss": 0.9896, - "num_input_tokens_seen": 159474590, - "step": 7473 - }, - { - "epoch": 0.8986953646365659, - "flos": 15354323579520.0, - "grad_norm": 3.451217193392325, - "learning_rate": 1.0658882150689862e-07, - "loss": 0.9346, - "num_input_tokens_seen": 159493170, - "step": 7474 - }, - { - "epoch": 0.8988156075272049, - "flos": 9899704517280.0, - "grad_norm": 13.80965066234536, - "learning_rate": 1.0633805821830288e-07, - "loss": 1.001, - "num_input_tokens_seen": 159509575, - "step": 7475 - }, - { - "epoch": 0.8989358504178441, - "flos": 20702770128720.0, - "grad_norm": 5.145738464387018, - "learning_rate": 1.0608758219671753e-07, - "loss": 1.0582, - "num_input_tokens_seen": 159528335, - "step": 7476 - }, - { - "epoch": 0.8990560933084831, - "flos": 14357796021840.0, - "grad_norm": 5.8805882275169825, - "learning_rate": 1.0583739348014065e-07, - "loss": 0.9263, - "num_input_tokens_seen": 159549140, - "step": 7477 - }, - { - "epoch": 0.8991763361991222, - "flos": 18159775409760.0, - "grad_norm": 4.784269688280053, - "learning_rate": 1.0558749210652518e-07, - "loss": 1.0753, - "num_input_tokens_seen": 159568790, - "step": 7478 - }, - { - "epoch": 0.8992965790897613, - "flos": 17871219762840.0, - "grad_norm": 3.0304622609756673, - "learning_rate": 1.053378781137808e-07, - "loss": 1.0749, - "num_input_tokens_seen": 159589430, - "step": 7479 - }, - { - "epoch": 0.8994168219804004, - "flos": 11368335995040.0, - "grad_norm": 6.011364029698966, - "learning_rate": 1.0508855153977392e-07, - "loss": 1.0038, - "num_input_tokens_seen": 159605615, - "step": 7480 - }, - { - "epoch": 0.8995370648710395, - "flos": 17660131860720.0, - "grad_norm": 6.037145707691544, - "learning_rate": 1.0483951242232669e-07, - "loss": 0.8858, - "num_input_tokens_seen": 159625810, - "step": 7481 - }, - { - "epoch": 0.8996573077616786, - "flos": 45185984134080.0, - "grad_norm": 1.012883403383073, - "learning_rate": 1.0459076079921936e-07, - "loss": 0.8363, - "num_input_tokens_seen": 159678190, - "step": 7482 - }, - { - "epoch": 0.8997775506523177, - "flos": 12912166262160.0, - "grad_norm": 4.127845014250163, - "learning_rate": 1.0434229670818618e-07, - "loss": 1.0697, - "num_input_tokens_seen": 159694585, - "step": 7483 - }, - { - "epoch": 
0.8998977935429567, - "flos": 17186218908600.0, - "grad_norm": 3.790096810344969, - "learning_rate": 1.0409412018691944e-07, - "loss": 1.0277, - "num_input_tokens_seen": 159714770, - "step": 7484 - }, - { - "epoch": 0.9000180364335959, - "flos": 14747127747240.0, - "grad_norm": 3.345440873770239, - "learning_rate": 1.0384623127306724e-07, - "loss": 0.9842, - "num_input_tokens_seen": 159731835, - "step": 7485 - }, - { - "epoch": 0.900138279324235, - "flos": 13620966035040.0, - "grad_norm": 4.891224004850422, - "learning_rate": 1.0359863000423397e-07, - "loss": 1.0144, - "num_input_tokens_seen": 159749690, - "step": 7486 - }, - { - "epoch": 0.900258522214874, - "flos": 20467055445840.0, - "grad_norm": 3.3374298831275064, - "learning_rate": 1.0335131641798112e-07, - "loss": 0.9375, - "num_input_tokens_seen": 159771370, - "step": 7487 - }, - { - "epoch": 0.9003787651055132, - "flos": 41708138898840.0, - "grad_norm": 0.8646801622394424, - "learning_rate": 1.0310429055182512e-07, - "loss": 0.8544, - "num_input_tokens_seen": 159825410, - "step": 7488 - }, - { - "epoch": 0.9004990079961522, - "flos": 18185414022000.0, - "grad_norm": 3.315063923813495, - "learning_rate": 1.0285755244324024e-07, - "loss": 0.96, - "num_input_tokens_seen": 159845875, - "step": 7489 - }, - { - "epoch": 0.9006192508867913, - "flos": 16589601314520.0, - "grad_norm": 3.4257233461469037, - "learning_rate": 1.0261110212965629e-07, - "loss": 0.9175, - "num_input_tokens_seen": 159867390, - "step": 7490 - }, - { - "epoch": 0.9007394937774305, - "flos": 12783666585360.0, - "grad_norm": 3.6481266910404626, - "learning_rate": 1.023649396484596e-07, - "loss": 1.0051, - "num_input_tokens_seen": 159886165, - "step": 7491 - }, - { - "epoch": 0.9008597366680695, - "flos": 30770784123840.0, - "grad_norm": 20.165896734667697, - "learning_rate": 1.0211906503699275e-07, - "loss": 0.9006, - "num_input_tokens_seen": 159908860, - "step": 7492 - }, - { - "epoch": 0.9009799795587086, - "flos": 10554866125560.0, - "grad_norm": 4.338174438810391, - "learning_rate": 1.0187347833255455e-07, - "loss": 1.0423, - "num_input_tokens_seen": 159924485, - "step": 7493 - }, - { - "epoch": 0.9011002224493477, - "flos": 15327305197080.0, - "grad_norm": 2.9224484130901014, - "learning_rate": 1.0162817957240056e-07, - "loss": 1.02, - "num_input_tokens_seen": 159944100, - "step": 7494 - }, - { - "epoch": 0.9012204653399868, - "flos": 50969673860160.0, - "grad_norm": 0.9744674181709941, - "learning_rate": 1.0138316879374253e-07, - "loss": 0.9111, - "num_input_tokens_seen": 160013110, - "step": 7495 - }, - { - "epoch": 0.9013407082306258, - "flos": 11026142183520.0, - "grad_norm": 5.846981358745851, - "learning_rate": 1.0113844603374833e-07, - "loss": 0.9634, - "num_input_tokens_seen": 160029355, - "step": 7496 - }, - { - "epoch": 0.901460951121265, - "flos": 10634081579280.0, - "grad_norm": 6.3798731709373415, - "learning_rate": 1.0089401132954178e-07, - "loss": 0.9521, - "num_input_tokens_seen": 160047055, - "step": 7497 - }, - { - "epoch": 0.9015811940119041, - "flos": 15799746394320.0, - "grad_norm": 3.578535564472407, - "learning_rate": 1.006498647182037e-07, - "loss": 0.9411, - "num_input_tokens_seen": 160065430, - "step": 7498 - }, - { - "epoch": 0.9017014369025431, - "flos": 17766825620280.0, - "grad_norm": 4.060140540786427, - "learning_rate": 1.004060062367713e-07, - "loss": 0.9502, - "num_input_tokens_seen": 160086245, - "step": 7499 - }, - { - "epoch": 0.9018216797931822, - "flos": 12836875488120.0, - "grad_norm": 3.419600614907904, - "learning_rate": 
1.0016243592223728e-07, - "loss": 0.9162, - "num_input_tokens_seen": 160106365, - "step": 7500 - }, - { - "epoch": 0.9019419226838213, - "flos": 26602597374840.0, - "grad_norm": 3.1756549730333528, - "learning_rate": 9.991915381155114e-08, - "loss": 0.885, - "num_input_tokens_seen": 160129065, - "step": 7501 - }, - { - "epoch": 0.9020621655744604, - "flos": 16665260027280.0, - "grad_norm": 6.684791658246363, - "learning_rate": 9.967615994161871e-08, - "loss": 0.9742, - "num_input_tokens_seen": 160148445, - "step": 7502 - }, - { - "epoch": 0.9021824084650995, - "flos": 16245813101880.0, - "grad_norm": 3.9218820518643027, - "learning_rate": 9.943345434930161e-08, - "loss": 1.0043, - "num_input_tokens_seen": 160168415, - "step": 7503 - }, - { - "epoch": 0.9023026513557386, - "flos": 15721236156480.0, - "grad_norm": 4.547730460427293, - "learning_rate": 9.919103707141885e-08, - "loss": 0.9073, - "num_input_tokens_seen": 160187015, - "step": 7504 - }, - { - "epoch": 0.9024228942463777, - "flos": 17208883349520.0, - "grad_norm": 4.416814813588428, - "learning_rate": 9.89489081447441e-08, - "loss": 0.9872, - "num_input_tokens_seen": 160203935, - "step": 7505 - }, - { - "epoch": 0.9025431371370167, - "flos": 17791299093240.0, - "grad_norm": 10.89511668156348, - "learning_rate": 9.870706760600844e-08, - "loss": 1.0463, - "num_input_tokens_seen": 160223605, - "step": 7506 - }, - { - "epoch": 0.9026633800276559, - "flos": 13439564071080.0, - "grad_norm": 2.8850621488808996, - "learning_rate": 9.846551549189918e-08, - "loss": 0.9433, - "num_input_tokens_seen": 160242930, - "step": 7507 - }, - { - "epoch": 0.902783622918295, - "flos": 23115302831040.0, - "grad_norm": 2.963971599015501, - "learning_rate": 9.822425183905902e-08, - "loss": 0.894, - "num_input_tokens_seen": 160263175, - "step": 7508 - }, - { - "epoch": 0.902903865808934, - "flos": 45615493599240.0, - "grad_norm": 1.1585442746608725, - "learning_rate": 9.798327668408823e-08, - "loss": 0.9963, - "num_input_tokens_seen": 160324530, - "step": 7509 - }, - { - "epoch": 0.9030241086995732, - "flos": 16925816121840.0, - "grad_norm": 4.334596938740428, - "learning_rate": 9.774259006354158e-08, - "loss": 0.9064, - "num_input_tokens_seen": 160344320, - "step": 7510 - }, - { - "epoch": 0.9031443515902122, - "flos": 18788715836160.0, - "grad_norm": 3.5101343299831655, - "learning_rate": 9.750219201393184e-08, - "loss": 0.9815, - "num_input_tokens_seen": 160364005, - "step": 7511 - }, - { - "epoch": 0.9032645944808513, - "flos": 17742321485760.0, - "grad_norm": 11.983465883868647, - "learning_rate": 9.726208257172697e-08, - "loss": 1.0082, - "num_input_tokens_seen": 160385420, - "step": 7512 - }, - { - "epoch": 0.9033848373714904, - "flos": 15039362781360.0, - "grad_norm": 5.24892487189434, - "learning_rate": 9.702226177335115e-08, - "loss": 0.9746, - "num_input_tokens_seen": 160403635, - "step": 7513 - }, - { - "epoch": 0.9035050802621295, - "flos": 18709990967400.0, - "grad_norm": 2.368768145408681, - "learning_rate": 9.67827296551853e-08, - "loss": 0.9532, - "num_input_tokens_seen": 160424640, - "step": 7514 - }, - { - "epoch": 0.9036253231527686, - "flos": 17213543906640.0, - "grad_norm": 9.240569797798633, - "learning_rate": 9.65434862535659e-08, - "loss": 0.8937, - "num_input_tokens_seen": 160443730, - "step": 7515 - }, - { - "epoch": 0.9037455660434077, - "flos": 12806361687840.0, - "grad_norm": 6.630031762140525, - "learning_rate": 9.630453160478635e-08, - "loss": 0.8723, - "num_input_tokens_seen": 160458805, - "step": 7516 - }, - { - "epoch": 
0.9038658089340468, - "flos": 17110529534280.0, - "grad_norm": 5.1366938717315795, - "learning_rate": 9.60658657450959e-08, - "loss": 1.0355, - "num_input_tokens_seen": 160478825, - "step": 7517 - }, - { - "epoch": 0.9039860518246858, - "flos": 15510270900600.0, - "grad_norm": 2.43202308190209, - "learning_rate": 9.582748871069979e-08, - "loss": 1.0159, - "num_input_tokens_seen": 160497985, - "step": 7518 - }, - { - "epoch": 0.904106294715325, - "flos": 18946104250560.0, - "grad_norm": 7.290266356658403, - "learning_rate": 9.558940053775954e-08, - "loss": 1.0526, - "num_input_tokens_seen": 160516345, - "step": 7519 - }, - { - "epoch": 0.904226537605964, - "flos": 12600792866520.0, - "grad_norm": 3.7350800299592235, - "learning_rate": 9.535160126239294e-08, - "loss": 0.9146, - "num_input_tokens_seen": 160532690, - "step": 7520 - }, - { - "epoch": 0.9043467804966031, - "flos": 17634891848760.0, - "grad_norm": 6.042392695858719, - "learning_rate": 9.511409092067424e-08, - "loss": 0.9363, - "num_input_tokens_seen": 160552765, - "step": 7521 - }, - { - "epoch": 0.9044670233872423, - "flos": 16082997591360.0, - "grad_norm": 5.041050434546611, - "learning_rate": 9.487686954863327e-08, - "loss": 0.9026, - "num_input_tokens_seen": 160572205, - "step": 7522 - }, - { - "epoch": 0.9045872662778813, - "flos": 16902814403760.0, - "grad_norm": 4.613612822811272, - "learning_rate": 9.46399371822566e-08, - "loss": 0.9906, - "num_input_tokens_seen": 160591700, - "step": 7523 - }, - { - "epoch": 0.9047075091685204, - "flos": 10735501550520.0, - "grad_norm": 3.6539278399645454, - "learning_rate": 9.440329385748657e-08, - "loss": 0.9347, - "num_input_tokens_seen": 160608490, - "step": 7524 - }, - { - "epoch": 0.9048277520591596, - "flos": 12757353418800.0, - "grad_norm": 3.6873585302080203, - "learning_rate": 9.416693961022137e-08, - "loss": 0.9353, - "num_input_tokens_seen": 160626460, - "step": 7525 - }, - { - "epoch": 0.9049479949497986, - "flos": 15537933175800.0, - "grad_norm": 3.60102466200011, - "learning_rate": 9.393087447631654e-08, - "loss": 0.9994, - "num_input_tokens_seen": 160644460, - "step": 7526 - }, - { - "epoch": 0.9050682378404377, - "flos": 14747894286240.0, - "grad_norm": 11.057377033140696, - "learning_rate": 9.36950984915823e-08, - "loss": 0.9468, - "num_input_tokens_seen": 160662535, - "step": 7527 - }, - { - "epoch": 0.9051884807310768, - "flos": 15327673135800.0, - "grad_norm": 5.303613028608569, - "learning_rate": 9.345961169178607e-08, - "loss": 0.9231, - "num_input_tokens_seen": 160681940, - "step": 7528 - }, - { - "epoch": 0.9053087236217159, - "flos": 15563387818680.0, - "grad_norm": 5.300855193328681, - "learning_rate": 9.322441411265081e-08, - "loss": 0.9511, - "num_input_tokens_seen": 160702645, - "step": 7529 - }, - { - "epoch": 0.9054289665123549, - "flos": 12076215921120.0, - "grad_norm": 3.8731863638285926, - "learning_rate": 9.298950578985554e-08, - "loss": 0.954, - "num_input_tokens_seen": 160719440, - "step": 7530 - }, - { - "epoch": 0.905549209402994, - "flos": 14750745811320.0, - "grad_norm": 15.279088029259448, - "learning_rate": 9.275488675903665e-08, - "loss": 0.9339, - "num_input_tokens_seen": 160738105, - "step": 7531 - }, - { - "epoch": 0.9056694522936332, - "flos": 15404067726000.0, - "grad_norm": 3.5407027933811572, - "learning_rate": 9.252055705578454e-08, - "loss": 0.9545, - "num_input_tokens_seen": 160757325, - "step": 7532 - }, - { - "epoch": 0.9057896951842722, - "flos": 21069130797600.0, - "grad_norm": 3.7792628087127436, - "learning_rate": 
9.228651671564747e-08, - "loss": 0.9394, - "num_input_tokens_seen": 160779075, - "step": 7533 - }, - { - "epoch": 0.9059099380749113, - "flos": 19864182893520.0, - "grad_norm": 3.4095413684900238, - "learning_rate": 9.205276577412901e-08, - "loss": 1.0119, - "num_input_tokens_seen": 160801575, - "step": 7534 - }, - { - "epoch": 0.9060301809655504, - "flos": 12574387715280.0, - "grad_norm": 4.573078590023065, - "learning_rate": 9.181930426668905e-08, - "loss": 0.9928, - "num_input_tokens_seen": 160818090, - "step": 7535 - }, - { - "epoch": 0.9061504238561895, - "flos": 22643413541880.0, - "grad_norm": 5.634102979071662, - "learning_rate": 9.158613222874346e-08, - "loss": 0.9202, - "num_input_tokens_seen": 160839435, - "step": 7536 - }, - { - "epoch": 0.9062706667468285, - "flos": 14226751435560.0, - "grad_norm": 7.07965219624005, - "learning_rate": 9.135324969566394e-08, - "loss": 1.0463, - "num_input_tokens_seen": 160858655, - "step": 7537 - }, - { - "epoch": 0.9063909096374677, - "flos": 13068389537280.0, - "grad_norm": 5.080297386896425, - "learning_rate": 9.112065670277913e-08, - "loss": 0.9736, - "num_input_tokens_seen": 160874740, - "step": 7538 - }, - { - "epoch": 0.9065111525281068, - "flos": 23928374100240.0, - "grad_norm": 4.155163156769539, - "learning_rate": 9.088835328537303e-08, - "loss": 0.9489, - "num_input_tokens_seen": 160896050, - "step": 7539 - }, - { - "epoch": 0.9066313954187458, - "flos": 16612572371040.0, - "grad_norm": 4.882776171739859, - "learning_rate": 9.065633947868568e-08, - "loss": 0.9302, - "num_input_tokens_seen": 160915375, - "step": 7540 - }, - { - "epoch": 0.906751638309385, - "flos": 18683555154600.0, - "grad_norm": 5.254488706387307, - "learning_rate": 9.042461531791379e-08, - "loss": 1.0172, - "num_input_tokens_seen": 160933515, - "step": 7541 - }, - { - "epoch": 0.906871881200024, - "flos": 11708659451400.0, - "grad_norm": 3.358016622169346, - "learning_rate": 9.019318083820903e-08, - "loss": 0.9873, - "num_input_tokens_seen": 160951815, - "step": 7542 - }, - { - "epoch": 0.9069921240906631, - "flos": 17501333014560.0, - "grad_norm": 2.6009067018136784, - "learning_rate": 8.996203607468045e-08, - "loss": 1.0691, - "num_input_tokens_seen": 160970535, - "step": 7543 - }, - { - "epoch": 0.9071123669813023, - "flos": 18054890682240.0, - "grad_norm": 2.4654359423505086, - "learning_rate": 8.973118106239241e-08, - "loss": 0.9859, - "num_input_tokens_seen": 160992860, - "step": 7544 - }, - { - "epoch": 0.9072326098719413, - "flos": 19026024920160.0, - "grad_norm": 6.107147981646307, - "learning_rate": 8.95006158363656e-08, - "loss": 1.1591, - "num_input_tokens_seen": 161012765, - "step": 7545 - }, - { - "epoch": 0.9073528527625804, - "flos": 16979791563600.0, - "grad_norm": 5.1525543840806, - "learning_rate": 8.9270340431576e-08, - "loss": 0.9992, - "num_input_tokens_seen": 161031575, - "step": 7546 - }, - { - "epoch": 0.9074730956532195, - "flos": 27022074961800.0, - "grad_norm": 4.22386368137085, - "learning_rate": 8.904035488295658e-08, - "loss": 0.9544, - "num_input_tokens_seen": 161050795, - "step": 7547 - }, - { - "epoch": 0.9075933385438586, - "flos": 46661305380000.0, - "grad_norm": 0.6891177391003245, - "learning_rate": 8.881065922539632e-08, - "loss": 0.7938, - "num_input_tokens_seen": 161110955, - "step": 7548 - }, - { - "epoch": 0.9077135814344977, - "flos": 14142078224160.0, - "grad_norm": 3.7519299944790654, - "learning_rate": 8.85812534937389e-08, - "loss": 0.9683, - "num_input_tokens_seen": 161128775, - "step": 7549 - }, - { - "epoch": 
0.9078338243251368, - "flos": 12416692685280.0, - "grad_norm": 5.610378675005202, - "learning_rate": 8.835213772278583e-08, - "loss": 0.8855, - "num_input_tokens_seen": 161145350, - "step": 7550 - }, - { - "epoch": 0.9079540672157759, - "flos": 20518884578400.0, - "grad_norm": 3.338640819551072, - "learning_rate": 8.812331194729373e-08, - "loss": 1.0121, - "num_input_tokens_seen": 161164715, - "step": 7551 - }, - { - "epoch": 0.9080743101064149, - "flos": 16717487760120.0, - "grad_norm": 4.913314883759503, - "learning_rate": 8.789477620197461e-08, - "loss": 0.9432, - "num_input_tokens_seen": 161183960, - "step": 7552 - }, - { - "epoch": 0.9081945529970541, - "flos": 16187912980440.0, - "grad_norm": 4.645009240017048, - "learning_rate": 8.766653052149831e-08, - "loss": 1.0136, - "num_input_tokens_seen": 161198865, - "step": 7553 - }, - { - "epoch": 0.9083147958876931, - "flos": 13381847919000.0, - "grad_norm": 4.020256725862877, - "learning_rate": 8.743857494048823e-08, - "loss": 0.9753, - "num_input_tokens_seen": 161215400, - "step": 7554 - }, - { - "epoch": 0.9084350387783322, - "flos": 13407363885000.0, - "grad_norm": 5.373903029526301, - "learning_rate": 8.721090949352605e-08, - "loss": 0.8653, - "num_input_tokens_seen": 161231360, - "step": 7555 - }, - { - "epoch": 0.9085552816689714, - "flos": 14620191810000.0, - "grad_norm": 6.1890679116479435, - "learning_rate": 8.698353421514793e-08, - "loss": 0.9449, - "num_input_tokens_seen": 161249455, - "step": 7556 - }, - { - "epoch": 0.9086755245596104, - "flos": 12836016964440.0, - "grad_norm": 6.253284354326489, - "learning_rate": 8.67564491398467e-08, - "loss": 1.0235, - "num_input_tokens_seen": 161266180, - "step": 7557 - }, - { - "epoch": 0.9087957674502495, - "flos": 13565978761800.0, - "grad_norm": 3.5326949682513624, - "learning_rate": 8.652965430207104e-08, - "loss": 0.9697, - "num_input_tokens_seen": 161283805, - "step": 7558 - }, - { - "epoch": 0.9089160103408886, - "flos": 12833257424040.0, - "grad_norm": 8.437702091763816, - "learning_rate": 8.630314973622521e-08, - "loss": 0.8901, - "num_input_tokens_seen": 161301070, - "step": 7559 - }, - { - "epoch": 0.9090362532315277, - "flos": 23797206867720.0, - "grad_norm": 3.3745854441035665, - "learning_rate": 8.607693547666995e-08, - "loss": 0.9385, - "num_input_tokens_seen": 161323330, - "step": 7560 - }, - { - "epoch": 0.9091564961221668, - "flos": 51193583842440.0, - "grad_norm": 0.9312829184634505, - "learning_rate": 8.585101155772201e-08, - "loss": 0.854, - "num_input_tokens_seen": 161385170, - "step": 7561 - }, - { - "epoch": 0.9092767390128058, - "flos": 17578279512840.0, - "grad_norm": 4.129148997638878, - "learning_rate": 8.562537801365377e-08, - "loss": 0.9135, - "num_input_tokens_seen": 161404625, - "step": 7562 - }, - { - "epoch": 0.909396981903445, - "flos": 16769654169840.0, - "grad_norm": 3.7115528118575276, - "learning_rate": 8.540003487869362e-08, - "loss": 0.9068, - "num_input_tokens_seen": 161426015, - "step": 7563 - }, - { - "epoch": 0.909517224794084, - "flos": 16638640245120.0, - "grad_norm": 4.000482768489504, - "learning_rate": 8.517498218702557e-08, - "loss": 1.021, - "num_input_tokens_seen": 161443665, - "step": 7564 - }, - { - "epoch": 0.9096374676847231, - "flos": 13623418959840.0, - "grad_norm": 6.427692534902567, - "learning_rate": 8.49502199727905e-08, - "loss": 0.9293, - "num_input_tokens_seen": 161461410, - "step": 7565 - }, - { - "epoch": 0.9097577105753623, - "flos": 23745285750480.0, - "grad_norm": 6.7144733391145195, - "learning_rate": 
8.472574827008428e-08, - "loss": 0.8644, - "num_input_tokens_seen": 161482015, - "step": 7566 - }, - { - "epoch": 0.9098779534660013, - "flos": 15562008048480.0, - "grad_norm": 3.9741361819059478, - "learning_rate": 8.450156711295942e-08, - "loss": 1.0627, - "num_input_tokens_seen": 161501905, - "step": 7567 - }, - { - "epoch": 0.9099981963566404, - "flos": 18309222480120.0, - "grad_norm": 5.108575694239961, - "learning_rate": 8.427767653542383e-08, - "loss": 1.0827, - "num_input_tokens_seen": 161516795, - "step": 7568 - }, - { - "epoch": 0.9101184392472795, - "flos": 14960760558840.0, - "grad_norm": 4.002129464758174, - "learning_rate": 8.405407657144125e-08, - "loss": 0.9261, - "num_input_tokens_seen": 161535675, - "step": 7569 - }, - { - "epoch": 0.9102386821379186, - "flos": 17607536189160.0, - "grad_norm": 4.1238232870489995, - "learning_rate": 8.383076725493232e-08, - "loss": 0.9378, - "num_input_tokens_seen": 161552715, - "step": 7570 - }, - { - "epoch": 0.9103589250285576, - "flos": 16033682706720.0, - "grad_norm": 3.354780737225734, - "learning_rate": 8.360774861977216e-08, - "loss": 0.9095, - "num_input_tokens_seen": 161571555, - "step": 7571 - }, - { - "epoch": 0.9104791679191968, - "flos": 18053817527640.0, - "grad_norm": 3.9971012102168237, - "learning_rate": 8.338502069979281e-08, - "loss": 0.9657, - "num_input_tokens_seen": 161591585, - "step": 7572 - }, - { - "epoch": 0.9105994108098359, - "flos": 10186420470600.0, - "grad_norm": 4.071568622905107, - "learning_rate": 8.316258352878214e-08, - "loss": 1.0109, - "num_input_tokens_seen": 161607725, - "step": 7573 - }, - { - "epoch": 0.9107196537004749, - "flos": 19019616654120.0, - "grad_norm": 4.630678351124935, - "learning_rate": 8.294043714048338e-08, - "loss": 0.9354, - "num_input_tokens_seen": 161626525, - "step": 7574 - }, - { - "epoch": 0.9108398965911141, - "flos": 47639491776720.0, - "grad_norm": 0.786024079000093, - "learning_rate": 8.271858156859624e-08, - "loss": 0.8721, - "num_input_tokens_seen": 161691615, - "step": 7575 - }, - { - "epoch": 0.9109601394817531, - "flos": 18080989217880.0, - "grad_norm": 4.565128144286301, - "learning_rate": 8.249701684677557e-08, - "loss": 0.9619, - "num_input_tokens_seen": 161712660, - "step": 7576 - }, - { - "epoch": 0.9110803823723922, - "flos": 16166965586880.0, - "grad_norm": 2.893034389656702, - "learning_rate": 8.227574300863294e-08, - "loss": 1.0303, - "num_input_tokens_seen": 161732550, - "step": 7577 - }, - { - "epoch": 0.9112006252630314, - "flos": 34652929473840.0, - "grad_norm": 2.767251350101297, - "learning_rate": 8.205476008773548e-08, - "loss": 0.9232, - "num_input_tokens_seen": 161756270, - "step": 7578 - }, - { - "epoch": 0.9113208681536704, - "flos": 21386084597160.0, - "grad_norm": 3.3696590124293566, - "learning_rate": 8.183406811760596e-08, - "loss": 1.0535, - "num_input_tokens_seen": 161775720, - "step": 7579 - }, - { - "epoch": 0.9114411110443095, - "flos": 18213352251240.0, - "grad_norm": 2.2373411324645103, - "learning_rate": 8.161366713172313e-08, - "loss": 0.9677, - "num_input_tokens_seen": 161797830, - "step": 7580 - }, - { - "epoch": 0.9115613539349486, - "flos": 13041248508600.0, - "grad_norm": 8.643419141636475, - "learning_rate": 8.139355716352137e-08, - "loss": 1.0745, - "num_input_tokens_seen": 161812390, - "step": 7581 - }, - { - "epoch": 0.9116815968255877, - "flos": 15432741832680.0, - "grad_norm": 8.75652876198512, - "learning_rate": 8.117373824639196e-08, - "loss": 0.926, - "num_input_tokens_seen": 161832375, - "step": 7582 - }, - { - 
"epoch": 0.9118018397162267, - "flos": 46799064847920.0, - "grad_norm": 0.7338624342341458, - "learning_rate": 8.095421041368067e-08, - "loss": 0.839, - "num_input_tokens_seen": 161891510, - "step": 7583 - }, - { - "epoch": 0.9119220826068659, - "flos": 14855354584800.0, - "grad_norm": 3.7169039976926985, - "learning_rate": 8.073497369868999e-08, - "loss": 0.9315, - "num_input_tokens_seen": 161909690, - "step": 7584 - }, - { - "epoch": 0.912042325497505, - "flos": 20204966273280.0, - "grad_norm": 3.8861223386236197, - "learning_rate": 8.051602813467772e-08, - "loss": 0.9748, - "num_input_tokens_seen": 161931265, - "step": 7585 - }, - { - "epoch": 0.912162568388144, - "flos": 12443465775240.0, - "grad_norm": 2.7801550949523923, - "learning_rate": 8.029737375485756e-08, - "loss": 0.9358, - "num_input_tokens_seen": 161950215, - "step": 7586 - }, - { - "epoch": 0.9122828112787832, - "flos": 14068351189680.0, - "grad_norm": 2.9260897505600405, - "learning_rate": 8.007901059239986e-08, - "loss": 0.9605, - "num_input_tokens_seen": 161969215, - "step": 7587 - }, - { - "epoch": 0.9124030541694222, - "flos": 14776997654760.0, - "grad_norm": 2.835621041120053, - "learning_rate": 7.986093868042964e-08, - "loss": 1.0289, - "num_input_tokens_seen": 161989180, - "step": 7588 - }, - { - "epoch": 0.9125232970600613, - "flos": 17923784772840.0, - "grad_norm": 3.4220381745751336, - "learning_rate": 7.964315805202826e-08, - "loss": 0.9056, - "num_input_tokens_seen": 162009480, - "step": 7589 - }, - { - "epoch": 0.9126435399507005, - "flos": 13990668813960.0, - "grad_norm": 5.702408920691263, - "learning_rate": 7.942566874023304e-08, - "loss": 0.9606, - "num_input_tokens_seen": 162028385, - "step": 7590 - }, - { - "epoch": 0.9127637828413395, - "flos": 13885753424880.0, - "grad_norm": 7.777693950867402, - "learning_rate": 7.920847077803649e-08, - "loss": 0.9177, - "num_input_tokens_seen": 162045895, - "step": 7591 - }, - { - "epoch": 0.9128840257319786, - "flos": 14357979991200.0, - "grad_norm": 4.640174509910363, - "learning_rate": 7.899156419838826e-08, - "loss": 1.0277, - "num_input_tokens_seen": 162064585, - "step": 7592 - }, - { - "epoch": 0.9130042686226177, - "flos": 17683286886600.0, - "grad_norm": 3.8707685439837074, - "learning_rate": 7.87749490341918e-08, - "loss": 0.8875, - "num_input_tokens_seen": 162084580, - "step": 7593 - }, - { - "epoch": 0.9131245115132568, - "flos": 16765606843920.0, - "grad_norm": 2.737170289272183, - "learning_rate": 7.855862531830836e-08, - "loss": 1.0714, - "num_input_tokens_seen": 162100410, - "step": 7594 - }, - { - "epoch": 0.9132447544038959, - "flos": 14142108885720.0, - "grad_norm": 2.6543011865307147, - "learning_rate": 7.834259308355373e-08, - "loss": 0.9536, - "num_input_tokens_seen": 162118895, - "step": 7595 - }, - { - "epoch": 0.9133649972945349, - "flos": 15616443413640.0, - "grad_norm": 4.266616650129252, - "learning_rate": 7.812685236269989e-08, - "loss": 0.9674, - "num_input_tokens_seen": 162137275, - "step": 7596 - }, - { - "epoch": 0.9134852401851741, - "flos": 51023128055400.0, - "grad_norm": 0.8590507589226842, - "learning_rate": 7.791140318847445e-08, - "loss": 0.8453, - "num_input_tokens_seen": 162195130, - "step": 7597 - }, - { - "epoch": 0.9136054830758131, - "flos": 16799248123320.0, - "grad_norm": 3.1303395240106937, - "learning_rate": 7.769624559356081e-08, - "loss": 1.0251, - "num_input_tokens_seen": 162218245, - "step": 7598 - }, - { - "epoch": 0.9137257259664522, - "flos": 16663328349000.0, - "grad_norm": 3.618243003735657, - 
"learning_rate": 7.748137961059842e-08, - "loss": 0.939, - "num_input_tokens_seen": 162231945, - "step": 7599 - }, - { - "epoch": 0.9138459688570914, - "flos": 13564323037560.0, - "grad_norm": 5.698266538749381, - "learning_rate": 7.726680527218211e-08, - "loss": 0.883, - "num_input_tokens_seen": 162248705, - "step": 7600 - }, - { - "epoch": 0.9139662117477304, - "flos": 33080026499760.0, - "grad_norm": 3.5755217965002726, - "learning_rate": 7.70525226108627e-08, - "loss": 0.972, - "num_input_tokens_seen": 162272095, - "step": 7601 - }, - { - "epoch": 0.9140864546383695, - "flos": 15825844929960.0, - "grad_norm": 4.131214431401819, - "learning_rate": 7.683853165914666e-08, - "loss": 1.025, - "num_input_tokens_seen": 162289585, - "step": 7602 - }, - { - "epoch": 0.9142066975290086, - "flos": 12416999300880.0, - "grad_norm": 4.23457656887337, - "learning_rate": 7.662483244949602e-08, - "loss": 0.9926, - "num_input_tokens_seen": 162306565, - "step": 7603 - }, - { - "epoch": 0.9143269404196477, - "flos": 12547859917800.0, - "grad_norm": 6.310673515539095, - "learning_rate": 7.641142501432951e-08, - "loss": 1.0373, - "num_input_tokens_seen": 162322480, - "step": 7604 - }, - { - "epoch": 0.9144471833102867, - "flos": 23766754390560.0, - "grad_norm": 4.950207546006042, - "learning_rate": 7.619830938602013e-08, - "loss": 0.9688, - "num_input_tokens_seen": 162343425, - "step": 7605 - }, - { - "epoch": 0.9145674262009259, - "flos": 14960361958560.0, - "grad_norm": 2.9806127350951397, - "learning_rate": 7.598548559689777e-08, - "loss": 1.0371, - "num_input_tokens_seen": 162361545, - "step": 7606 - }, - { - "epoch": 0.914687669091565, - "flos": 11892545001720.0, - "grad_norm": 4.835937894411333, - "learning_rate": 7.577295367924751e-08, - "loss": 1.0258, - "num_input_tokens_seen": 162377665, - "step": 7607 - }, - { - "epoch": 0.914807911982204, - "flos": 18341238696840.0, - "grad_norm": 2.777725300823009, - "learning_rate": 7.556071366531002e-08, - "loss": 1.0588, - "num_input_tokens_seen": 162398355, - "step": 7608 - }, - { - "epoch": 0.9149281548728432, - "flos": 13622928374880.0, - "grad_norm": 5.907200973891718, - "learning_rate": 7.53487655872822e-08, - "loss": 1.0018, - "num_input_tokens_seen": 162417245, - "step": 7609 - }, - { - "epoch": 0.9150483977634822, - "flos": 19129897816200.0, - "grad_norm": 4.639246263779936, - "learning_rate": 7.513710947731656e-08, - "loss": 0.972, - "num_input_tokens_seen": 162438175, - "step": 7610 - }, - { - "epoch": 0.9151686406541213, - "flos": 15564154357680.0, - "grad_norm": 3.7830047319076594, - "learning_rate": 7.492574536752095e-08, - "loss": 1.0742, - "num_input_tokens_seen": 162457885, - "step": 7611 - }, - { - "epoch": 0.9152888835447605, - "flos": 19444705306560.0, - "grad_norm": 5.563191745907505, - "learning_rate": 7.471467328995907e-08, - "loss": 1.0103, - "num_input_tokens_seen": 162476415, - "step": 7612 - }, - { - "epoch": 0.9154091264353995, - "flos": 9427815228120.0, - "grad_norm": 4.733280470826013, - "learning_rate": 7.450389327665018e-08, - "loss": 0.824, - "num_input_tokens_seen": 162493970, - "step": 7613 - }, - { - "epoch": 0.9155293693260386, - "flos": 14330808300960.0, - "grad_norm": 3.5843075307733696, - "learning_rate": 7.429340535957029e-08, - "loss": 0.883, - "num_input_tokens_seen": 162508885, - "step": 7614 - }, - { - "epoch": 0.9156496122166777, - "flos": 13728671626080.0, - "grad_norm": 6.953874937612066, - "learning_rate": 7.40832095706494e-08, - "loss": 0.9338, - "num_input_tokens_seen": 162525300, - "step": 7615 - }, - { - 
"epoch": 0.9157698551073168, - "flos": 14988146880000.0, - "grad_norm": 4.814626050710347, - "learning_rate": 7.387330594177443e-08, - "loss": 1.0269, - "num_input_tokens_seen": 162547095, - "step": 7616 - }, - { - "epoch": 0.9158900979979558, - "flos": 17924520650280.0, - "grad_norm": 3.304389525342445, - "learning_rate": 7.366369450478749e-08, - "loss": 1.018, - "num_input_tokens_seen": 162567925, - "step": 7617 - }, - { - "epoch": 0.916010340888595, - "flos": 21483334596240.0, - "grad_norm": 3.4181188583109594, - "learning_rate": 7.345437529148646e-08, - "loss": 0.885, - "num_input_tokens_seen": 162586655, - "step": 7618 - }, - { - "epoch": 0.9161305837792341, - "flos": 12102038502720.0, - "grad_norm": 5.137024705717156, - "learning_rate": 7.324534833362483e-08, - "loss": 0.9621, - "num_input_tokens_seen": 162603950, - "step": 7619 - }, - { - "epoch": 0.9162508266698731, - "flos": 16271819652840.0, - "grad_norm": 3.1310841651601136, - "learning_rate": 7.303661366291192e-08, - "loss": 0.9065, - "num_input_tokens_seen": 162624340, - "step": 7620 - }, - { - "epoch": 0.9163710695605123, - "flos": 14174063779320.0, - "grad_norm": 2.944642332212971, - "learning_rate": 7.28281713110126e-08, - "loss": 1.048, - "num_input_tokens_seen": 162642135, - "step": 7621 - }, - { - "epoch": 0.9164913124511513, - "flos": 16192788168480.0, - "grad_norm": 3.8456901057978583, - "learning_rate": 7.262002130954759e-08, - "loss": 0.9935, - "num_input_tokens_seen": 162660310, - "step": 7622 - }, - { - "epoch": 0.9166115553417904, - "flos": 17714383256520.0, - "grad_norm": 2.819979830324806, - "learning_rate": 7.241216369009296e-08, - "loss": 1.0117, - "num_input_tokens_seen": 162680215, - "step": 7623 - }, - { - "epoch": 0.9167317982324296, - "flos": 18290728011360.0, - "grad_norm": 2.982548339400854, - "learning_rate": 7.220459848418037e-08, - "loss": 0.8961, - "num_input_tokens_seen": 162700010, - "step": 7624 - }, - { - "epoch": 0.9168520411230686, - "flos": 11053528504680.0, - "grad_norm": 2.7830917603816268, - "learning_rate": 7.199732572329708e-08, - "loss": 1.0162, - "num_input_tokens_seen": 162717630, - "step": 7625 - }, - { - "epoch": 0.9169722840137077, - "flos": 21563745850800.0, - "grad_norm": 7.088253900169737, - "learning_rate": 7.179034543888684e-08, - "loss": 0.9767, - "num_input_tokens_seen": 162736855, - "step": 7626 - }, - { - "epoch": 0.9170925269043467, - "flos": 15982773420960.0, - "grad_norm": 21.780715476090098, - "learning_rate": 7.158365766234808e-08, - "loss": 0.9985, - "num_input_tokens_seen": 162755425, - "step": 7627 - }, - { - "epoch": 0.9172127697949859, - "flos": 16272003622200.0, - "grad_norm": 4.435887433260299, - "learning_rate": 7.137726242503527e-08, - "loss": 0.9468, - "num_input_tokens_seen": 162774065, - "step": 7628 - }, - { - "epoch": 0.917333012685625, - "flos": 12360141672480.0, - "grad_norm": 7.985035728990931, - "learning_rate": 7.11711597582585e-08, - "loss": 1.0018, - "num_input_tokens_seen": 162791145, - "step": 7629 - }, - { - "epoch": 0.917453255576264, - "flos": 10109228679840.0, - "grad_norm": 4.716573453235994, - "learning_rate": 7.096534969328271e-08, - "loss": 1.0263, - "num_input_tokens_seen": 162808310, - "step": 7630 - }, - { - "epoch": 0.9175734984669032, - "flos": 14724861906600.0, - "grad_norm": 3.2388300045354046, - "learning_rate": 7.075983226132987e-08, - "loss": 1.0607, - "num_input_tokens_seen": 162826960, - "step": 7631 - }, - { - "epoch": 0.9176937413575422, - "flos": 10476969118920.0, - "grad_norm": 4.302902820976507, - "learning_rate": 
7.055460749357656e-08, - "loss": 1.0068, - "num_input_tokens_seen": 162842960, - "step": 7632 - }, - { - "epoch": 0.9178139842481813, - "flos": 13095315935040.0, - "grad_norm": 4.890378318147226, - "learning_rate": 7.034967542115521e-08, - "loss": 0.9304, - "num_input_tokens_seen": 162860945, - "step": 7633 - }, - { - "epoch": 0.9179342271388204, - "flos": 14226230189040.0, - "grad_norm": 9.599977938308797, - "learning_rate": 7.014503607515388e-08, - "loss": 0.9762, - "num_input_tokens_seen": 162879970, - "step": 7634 - }, - { - "epoch": 0.9180544700294595, - "flos": 17552180977200.0, - "grad_norm": 10.98759176760597, - "learning_rate": 6.994068948661592e-08, - "loss": 0.8999, - "num_input_tokens_seen": 162897845, - "step": 7635 - }, - { - "epoch": 0.9181747129200986, - "flos": 11840255945760.0, - "grad_norm": 3.9640093747594936, - "learning_rate": 6.973663568654142e-08, - "loss": 0.9871, - "num_input_tokens_seen": 162915270, - "step": 7636 - }, - { - "epoch": 0.9182949558107377, - "flos": 17260957774560.0, - "grad_norm": 3.6920658874501346, - "learning_rate": 6.953287470588386e-08, - "loss": 0.8767, - "num_input_tokens_seen": 162932945, - "step": 7637 - }, - { - "epoch": 0.9184151987013768, - "flos": 15687349584600.0, - "grad_norm": 4.387415462885835, - "learning_rate": 6.932940657555452e-08, - "loss": 1.0746, - "num_input_tokens_seen": 162948795, - "step": 7638 - }, - { - "epoch": 0.9185354415920158, - "flos": 22936936361520.0, - "grad_norm": 2.3198635296310126, - "learning_rate": 6.912623132641938e-08, - "loss": 0.986, - "num_input_tokens_seen": 162973605, - "step": 7639 - }, - { - "epoch": 0.918655684482655, - "flos": 14909483334360.0, - "grad_norm": 3.890259016969977, - "learning_rate": 6.892334898929952e-08, - "loss": 0.9811, - "num_input_tokens_seen": 162993570, - "step": 7640 - }, - { - "epoch": 0.918775927373294, - "flos": 11001822018360.0, - "grad_norm": 4.205760030564319, - "learning_rate": 6.872075959497236e-08, - "loss": 1.0637, - "num_input_tokens_seen": 163012065, - "step": 7641 - }, - { - "epoch": 0.9188961702639331, - "flos": 21331403939520.0, - "grad_norm": 4.408451477275238, - "learning_rate": 6.85184631741702e-08, - "loss": 1.0482, - "num_input_tokens_seen": 163032350, - "step": 7642 - }, - { - "epoch": 0.9190164131545723, - "flos": 14695697214960.0, - "grad_norm": 8.02898971872055, - "learning_rate": 6.831645975758161e-08, - "loss": 1.0033, - "num_input_tokens_seen": 163050010, - "step": 7643 - }, - { - "epoch": 0.9191366560452113, - "flos": 18238254986040.0, - "grad_norm": 3.9556435263301295, - "learning_rate": 6.811474937585026e-08, - "loss": 0.9043, - "num_input_tokens_seen": 163069520, - "step": 7644 - }, - { - "epoch": 0.9192568989358504, - "flos": 15223003039200.0, - "grad_norm": 4.651250420898156, - "learning_rate": 6.79133320595755e-08, - "loss": 1.0077, - "num_input_tokens_seen": 163089160, - "step": 7645 - }, - { - "epoch": 0.9193771418264896, - "flos": 16481159846040.0, - "grad_norm": 20.64236218796722, - "learning_rate": 6.771220783931198e-08, - "loss": 0.9612, - "num_input_tokens_seen": 163109040, - "step": 7646 - }, - { - "epoch": 0.9194973847171286, - "flos": 50550441565680.0, - "grad_norm": 0.8683004025483069, - "learning_rate": 6.751137674556994e-08, - "loss": 0.9036, - "num_input_tokens_seen": 163169145, - "step": 7647 - }, - { - "epoch": 0.9196176276077677, - "flos": 10397998957680.0, - "grad_norm": 5.422108581964158, - "learning_rate": 6.731083880881572e-08, - "loss": 1.0069, - "num_input_tokens_seen": 163185085, - "step": 7648 - }, - { - "epoch": 
0.9197378704984068, - "flos": 16560130007280.0, - "grad_norm": 4.643487837156812, - "learning_rate": 6.711059405947072e-08, - "loss": 1.0322, - "num_input_tokens_seen": 163202995, - "step": 7649 - }, - { - "epoch": 0.9198581133890459, - "flos": 14409533169720.0, - "grad_norm": 3.124956512148581, - "learning_rate": 6.691064252791156e-08, - "loss": 0.9944, - "num_input_tokens_seen": 163222190, - "step": 7650 - }, - { - "epoch": 0.9199783562796849, - "flos": 12521638735920.0, - "grad_norm": 7.165533848618215, - "learning_rate": 6.67109842444713e-08, - "loss": 1.0028, - "num_input_tokens_seen": 163240840, - "step": 7651 - }, - { - "epoch": 0.9200985991703241, - "flos": 12521945351520.0, - "grad_norm": 7.249683767854066, - "learning_rate": 6.651161923943704e-08, - "loss": 0.9896, - "num_input_tokens_seen": 163258465, - "step": 7652 - }, - { - "epoch": 0.9202188420609632, - "flos": 14908410179760.0, - "grad_norm": 6.157613070863478, - "learning_rate": 6.631254754305326e-08, - "loss": 0.9949, - "num_input_tokens_seen": 163277645, - "step": 7653 - }, - { - "epoch": 0.9203390849516022, - "flos": 9821316925680.0, - "grad_norm": 4.281856744471466, - "learning_rate": 6.611376918551848e-08, - "loss": 1.0087, - "num_input_tokens_seen": 163296150, - "step": 7654 - }, - { - "epoch": 0.9204593278422414, - "flos": 15036879195000.0, - "grad_norm": 5.571057712522361, - "learning_rate": 6.591528419698744e-08, - "loss": 1.0192, - "num_input_tokens_seen": 163315655, - "step": 7655 - }, - { - "epoch": 0.9205795707328804, - "flos": 10240334589240.0, - "grad_norm": 7.147693299291207, - "learning_rate": 6.571709260756986e-08, - "loss": 1.0439, - "num_input_tokens_seen": 163332020, - "step": 7656 - }, - { - "epoch": 0.9206998136235195, - "flos": 15929380548840.0, - "grad_norm": 18.24387873584247, - "learning_rate": 6.551919444733122e-08, - "loss": 0.9657, - "num_input_tokens_seen": 163349555, - "step": 7657 - }, - { - "epoch": 0.9208200565141585, - "flos": 38166107922360.0, - "grad_norm": 5.317310802858371, - "learning_rate": 6.53215897462931e-08, - "loss": 0.884, - "num_input_tokens_seen": 163373030, - "step": 7658 - }, - { - "epoch": 0.9209402994047977, - "flos": 21801974781600.0, - "grad_norm": 5.675639098895225, - "learning_rate": 6.512427853443103e-08, - "loss": 0.9793, - "num_input_tokens_seen": 163394830, - "step": 7659 - }, - { - "epoch": 0.9210605422954368, - "flos": 20755672415880.0, - "grad_norm": 3.253808797450866, - "learning_rate": 6.492726084167799e-08, - "loss": 0.9859, - "num_input_tokens_seen": 163416665, - "step": 7660 - }, - { - "epoch": 0.9211807851860758, - "flos": 39245708742240.0, - "grad_norm": 0.7931513082892074, - "learning_rate": 6.473053669792072e-08, - "loss": 0.8101, - "num_input_tokens_seen": 163471075, - "step": 7661 - }, - { - "epoch": 0.921301028076715, - "flos": 13617746571240.0, - "grad_norm": 4.889401803901986, - "learning_rate": 6.453410613300248e-08, - "loss": 0.9467, - "num_input_tokens_seen": 163488725, - "step": 7662 - }, - { - "epoch": 0.921421270967354, - "flos": 19601541812880.0, - "grad_norm": 3.04895042745966, - "learning_rate": 6.43379691767214e-08, - "loss": 0.8051, - "num_input_tokens_seen": 163507650, - "step": 7663 - }, - { - "epoch": 0.9215415138579931, - "flos": 50280717664680.0, - "grad_norm": 0.7569744549400751, - "learning_rate": 6.414212585883105e-08, - "loss": 0.8359, - "num_input_tokens_seen": 163570000, - "step": 7664 - }, - { - "epoch": 0.9216617567486323, - "flos": 25369741903080.0, - "grad_norm": 4.4707931436164445, - "learning_rate": 
6.394657620904143e-08, - "loss": 0.9177, - "num_input_tokens_seen": 163592830, - "step": 7665 - }, - { - "epoch": 0.9217819996392713, - "flos": 21043492185360.0, - "grad_norm": 4.8753212064904075, - "learning_rate": 6.375132025701657e-08, - "loss": 0.9435, - "num_input_tokens_seen": 163614850, - "step": 7666 - }, - { - "epoch": 0.9219022425299104, - "flos": 10293052907040.0, - "grad_norm": 4.86095086454236, - "learning_rate": 6.355635803237724e-08, - "loss": 0.9115, - "num_input_tokens_seen": 163630270, - "step": 7667 - }, - { - "epoch": 0.9220224854205495, - "flos": 12810500998440.0, - "grad_norm": 5.109959373536578, - "learning_rate": 6.336168956469867e-08, - "loss": 1.0204, - "num_input_tokens_seen": 163648465, - "step": 7668 - }, - { - "epoch": 0.9221427283111886, - "flos": 17634922510320.0, - "grad_norm": 3.060135531945759, - "learning_rate": 6.316731488351168e-08, - "loss": 0.9469, - "num_input_tokens_seen": 163669375, - "step": 7669 - }, - { - "epoch": 0.9222629712018277, - "flos": 9768690592560.0, - "grad_norm": 4.547510686005153, - "learning_rate": 6.297323401830334e-08, - "loss": 0.863, - "num_input_tokens_seen": 163687880, - "step": 7670 - }, - { - "epoch": 0.9223832140924668, - "flos": 15353618363640.0, - "grad_norm": 3.38223564744586, - "learning_rate": 6.277944699851523e-08, - "loss": 0.9035, - "num_input_tokens_seen": 163707120, - "step": 7671 - }, - { - "epoch": 0.9225034569831059, - "flos": 15012926968560.0, - "grad_norm": 8.174065903426195, - "learning_rate": 6.25859538535447e-08, - "loss": 0.9612, - "num_input_tokens_seen": 163727635, - "step": 7672 - }, - { - "epoch": 0.9226236998737449, - "flos": 8798476201440.0, - "grad_norm": 6.746661172846994, - "learning_rate": 6.239275461274474e-08, - "loss": 1.0011, - "num_input_tokens_seen": 163743730, - "step": 7673 - }, - { - "epoch": 0.9227439427643841, - "flos": 18658131173280.0, - "grad_norm": 6.6784612589725825, - "learning_rate": 6.219984930542299e-08, - "loss": 1.09, - "num_input_tokens_seen": 163764190, - "step": 7674 - }, - { - "epoch": 0.9228641856550232, - "flos": 12731438852520.0, - "grad_norm": 2.9905730590774215, - "learning_rate": 6.200723796084383e-08, - "loss": 0.9668, - "num_input_tokens_seen": 163782005, - "step": 7675 - }, - { - "epoch": 0.9229844285456622, - "flos": 50431544505240.0, - "grad_norm": 0.7445199555283879, - "learning_rate": 6.181492060822546e-08, - "loss": 0.8731, - "num_input_tokens_seen": 163841900, - "step": 7676 - }, - { - "epoch": 0.9231046714363014, - "flos": 12731653483440.0, - "grad_norm": 5.902105906028505, - "learning_rate": 6.162289727674274e-08, - "loss": 1.035, - "num_input_tokens_seen": 163859300, - "step": 7677 - }, - { - "epoch": 0.9232249143269404, - "flos": 12652867291560.0, - "grad_norm": 4.798577717825738, - "learning_rate": 6.143116799552527e-08, - "loss": 1.1088, - "num_input_tokens_seen": 163875265, - "step": 7678 - }, - { - "epoch": 0.9233451572175795, - "flos": 16638824214480.0, - "grad_norm": 4.052835559730835, - "learning_rate": 6.123973279365802e-08, - "loss": 0.778, - "num_input_tokens_seen": 163893960, - "step": 7679 - }, - { - "epoch": 0.9234654001082186, - "flos": 12753612708480.0, - "grad_norm": 5.831591028052038, - "learning_rate": 6.10485917001824e-08, - "loss": 1.0015, - "num_input_tokens_seen": 163911535, - "step": 7680 - }, - { - "epoch": 0.9235856429988577, - "flos": 17605941788040.0, - "grad_norm": 2.9179589451391497, - "learning_rate": 6.085774474409322e-08, - "loss": 1.0365, - "num_input_tokens_seen": 163931815, - "step": 7681 - }, - { - "epoch": 
0.9237058858894968, - "flos": 9951778942320.0, - "grad_norm": 2.8672638345062493, - "learning_rate": 6.066719195434267e-08, - "loss": 0.9329, - "num_input_tokens_seen": 163949335, - "step": 7682 - }, - { - "epoch": 0.9238261287801359, - "flos": 20440711617720.0, - "grad_norm": 3.398464158929202, - "learning_rate": 6.047693335983717e-08, - "loss": 0.8831, - "num_input_tokens_seen": 163971400, - "step": 7683 - }, - { - "epoch": 0.923946371670775, - "flos": 16428502851360.0, - "grad_norm": 7.244394173997035, - "learning_rate": 6.028696898943853e-08, - "loss": 1.0287, - "num_input_tokens_seen": 163990180, - "step": 7684 - }, - { - "epoch": 0.924066614561414, - "flos": 15533364603360.0, - "grad_norm": 6.042289789447226, - "learning_rate": 6.00972988719648e-08, - "loss": 0.9295, - "num_input_tokens_seen": 164008135, - "step": 7685 - }, - { - "epoch": 0.9241868574520532, - "flos": 20309421738960.0, - "grad_norm": 3.6994483175615636, - "learning_rate": 5.990792303618807e-08, - "loss": 0.93, - "num_input_tokens_seen": 164027435, - "step": 7686 - }, - { - "epoch": 0.9243071003426923, - "flos": 21751372111440.0, - "grad_norm": 3.087599526473028, - "learning_rate": 5.971884151083695e-08, - "loss": 0.914, - "num_input_tokens_seen": 164049565, - "step": 7687 - }, - { - "epoch": 0.9244273432333313, - "flos": 20414735728320.0, - "grad_norm": 4.29050842260146, - "learning_rate": 5.9530054324595124e-08, - "loss": 0.9719, - "num_input_tokens_seen": 164069400, - "step": 7688 - }, - { - "epoch": 0.9245475861239704, - "flos": 47421045100200.0, - "grad_norm": 0.7445072662557424, - "learning_rate": 5.934156150610103e-08, - "loss": 0.8254, - "num_input_tokens_seen": 164126485, - "step": 7689 - }, - { - "epoch": 0.9246678290146095, - "flos": 17238814580160.0, - "grad_norm": 4.004915554951805, - "learning_rate": 5.915336308394914e-08, - "loss": 0.998, - "num_input_tokens_seen": 164145040, - "step": 7690 - }, - { - "epoch": 0.9247880719052486, - "flos": 13465325329560.0, - "grad_norm": 3.9738602065757482, - "learning_rate": 5.89654590866886e-08, - "loss": 0.998, - "num_input_tokens_seen": 164164260, - "step": 7691 - }, - { - "epoch": 0.9249083147958876, - "flos": 17084124383040.0, - "grad_norm": 3.6305210557145, - "learning_rate": 5.877784954282483e-08, - "loss": 1.1079, - "num_input_tokens_seen": 164183320, - "step": 7692 - }, - { - "epoch": 0.9250285576865268, - "flos": 21935135015520.0, - "grad_norm": 3.989462084542103, - "learning_rate": 5.8590534480817963e-08, - "loss": 0.952, - "num_input_tokens_seen": 164204765, - "step": 7693 - }, - { - "epoch": 0.9251488005771659, - "flos": 7460582694360.0, - "grad_norm": 5.1204463919116865, - "learning_rate": 5.840351392908349e-08, - "loss": 0.9472, - "num_input_tokens_seen": 164220205, - "step": 7694 - }, - { - "epoch": 0.9252690434678049, - "flos": 16769592846720.0, - "grad_norm": 3.900761036101366, - "learning_rate": 5.821678791599205e-08, - "loss": 0.9336, - "num_input_tokens_seen": 164239370, - "step": 7695 - }, - { - "epoch": 0.9253892863584441, - "flos": 15247813789320.0, - "grad_norm": 3.2459029268990855, - "learning_rate": 5.803035646986965e-08, - "loss": 1.0295, - "num_input_tokens_seen": 164258425, - "step": 7696 - }, - { - "epoch": 0.9255095292490831, - "flos": 12364250321520.0, - "grad_norm": 5.426031712462923, - "learning_rate": 5.7844219618998766e-08, - "loss": 0.8935, - "num_input_tokens_seen": 164272470, - "step": 7697 - }, - { - "epoch": 0.9256297721397222, - "flos": 17605880464920.0, - "grad_norm": 4.008848179184999, - "learning_rate": 
5.765837739161505e-08, - "loss": 0.9242, - "num_input_tokens_seen": 164291310, - "step": 7698 - }, - { - "epoch": 0.9257500150303614, - "flos": 16428809466960.0, - "grad_norm": 3.4945500039668356, - "learning_rate": 5.7472829815911504e-08, - "loss": 0.9773, - "num_input_tokens_seen": 164309855, - "step": 7699 - }, - { - "epoch": 0.9258702579210004, - "flos": 16035277107840.0, - "grad_norm": 3.3074335182987893, - "learning_rate": 5.7287576920035164e-08, - "loss": 1.0356, - "num_input_tokens_seen": 164328590, - "step": 7700 - }, - { - "epoch": 0.9259905008116395, - "flos": 21381516024720.0, - "grad_norm": 3.8116901975994306, - "learning_rate": 5.7102618732088435e-08, - "loss": 0.9991, - "num_input_tokens_seen": 164347640, - "step": 7701 - }, - { - "epoch": 0.9261107437022786, - "flos": 17478086004000.0, - "grad_norm": 3.618746131275593, - "learning_rate": 5.6917955280130216e-08, - "loss": 0.9709, - "num_input_tokens_seen": 164368840, - "step": 7702 - }, - { - "epoch": 0.9262309865929177, - "flos": 15642848564880.0, - "grad_norm": 3.777087986994614, - "learning_rate": 5.6733586592172755e-08, - "loss": 0.9374, - "num_input_tokens_seen": 164388055, - "step": 7703 - }, - { - "epoch": 0.9263512294835567, - "flos": 14436122290320.0, - "grad_norm": 2.9297755520622095, - "learning_rate": 5.6549512696185244e-08, - "loss": 1.0346, - "num_input_tokens_seen": 164406275, - "step": 7704 - }, - { - "epoch": 0.9264714723741959, - "flos": 15065645286360.0, - "grad_norm": 3.5027396515046827, - "learning_rate": 5.636573362009156e-08, - "loss": 0.8966, - "num_input_tokens_seen": 164426055, - "step": 7705 - }, - { - "epoch": 0.926591715264835, - "flos": 12758426573400.0, - "grad_norm": 3.3824562783809475, - "learning_rate": 5.618224939177074e-08, - "loss": 0.9849, - "num_input_tokens_seen": 164443680, - "step": 7706 - }, - { - "epoch": 0.926711958155474, - "flos": 25810902761040.0, - "grad_norm": 2.990486797140979, - "learning_rate": 5.599906003905719e-08, - "loss": 0.9282, - "num_input_tokens_seen": 164465945, - "step": 7707 - }, - { - "epoch": 0.9268322010461132, - "flos": 14934355407600.0, - "grad_norm": 3.3424591710201224, - "learning_rate": 5.581616558974023e-08, - "loss": 1.0382, - "num_input_tokens_seen": 164484230, - "step": 7708 - }, - { - "epoch": 0.9269524439367522, - "flos": 16322698277040.0, - "grad_norm": 3.864716204529478, - "learning_rate": 5.5633566071565444e-08, - "loss": 1.0153, - "num_input_tokens_seen": 164503190, - "step": 7709 - }, - { - "epoch": 0.9270726868273913, - "flos": 29985559099200.0, - "grad_norm": 4.799522837127804, - "learning_rate": 5.5451261512232896e-08, - "loss": 0.9193, - "num_input_tokens_seen": 164525590, - "step": 7710 - }, - { - "epoch": 0.9271929297180305, - "flos": 14042436623400.0, - "grad_norm": 3.1295055715520763, - "learning_rate": 5.5269251939397576e-08, - "loss": 0.8525, - "num_input_tokens_seen": 164544825, - "step": 7711 - }, - { - "epoch": 0.9273131726086695, - "flos": 14168544698520.0, - "grad_norm": 4.7937792999685165, - "learning_rate": 5.508753738067073e-08, - "loss": 0.9878, - "num_input_tokens_seen": 164564085, - "step": 7712 - }, - { - "epoch": 0.9274334154993086, - "flos": 16534062133200.0, - "grad_norm": 3.279551765937142, - "learning_rate": 5.4906117863617875e-08, - "loss": 1.0151, - "num_input_tokens_seen": 164583190, - "step": 7713 - }, - { - "epoch": 0.9275536583899477, - "flos": 22669144138800.0, - "grad_norm": 3.80936540660323, - "learning_rate": 5.4724993415760533e-08, - "loss": 1.0081, - "num_input_tokens_seen": 164601265, - "step": 7714 - 
}, - { - "epoch": 0.9276739012805868, - "flos": 13255617197640.0, - "grad_norm": 7.44122503265237, - "learning_rate": 5.454416406457496e-08, - "loss": 0.9646, - "num_input_tokens_seen": 164620080, - "step": 7715 - }, - { - "epoch": 0.9277941441712259, - "flos": 9792826788360.0, - "grad_norm": 4.670741527425343, - "learning_rate": 5.436362983749299e-08, - "loss": 0.9604, - "num_input_tokens_seen": 164634970, - "step": 7716 - }, - { - "epoch": 0.927914387061865, - "flos": 16534123456320.0, - "grad_norm": 3.713386707184853, - "learning_rate": 5.418339076190137e-08, - "loss": 0.8631, - "num_input_tokens_seen": 164654200, - "step": 7717 - }, - { - "epoch": 0.9280346299525041, - "flos": 12807281534640.0, - "grad_norm": 2.6677340715669398, - "learning_rate": 5.400344686514202e-08, - "loss": 1.1115, - "num_input_tokens_seen": 164671505, - "step": 7718 - }, - { - "epoch": 0.9281548728431431, - "flos": 15875497091760.0, - "grad_norm": 4.502269596387436, - "learning_rate": 5.38237981745131e-08, - "loss": 0.8925, - "num_input_tokens_seen": 164689340, - "step": 7719 - }, - { - "epoch": 0.9282751157337822, - "flos": 13361207141040.0, - "grad_norm": 3.1396614988064244, - "learning_rate": 5.364444471726592e-08, - "loss": 1.0292, - "num_input_tokens_seen": 164708265, - "step": 7720 - }, - { - "epoch": 0.9283953586244214, - "flos": 18185720637600.0, - "grad_norm": 4.001309180592198, - "learning_rate": 5.346538652060939e-08, - "loss": 1.0179, - "num_input_tokens_seen": 164729340, - "step": 7721 - }, - { - "epoch": 0.9285156015150604, - "flos": 12915416387520.0, - "grad_norm": 3.3026035673201797, - "learning_rate": 5.3286623611705994e-08, - "loss": 0.9363, - "num_input_tokens_seen": 164747105, - "step": 7722 - }, - { - "epoch": 0.9286358444056995, - "flos": 47545982487960.0, - "grad_norm": 0.8888565039364615, - "learning_rate": 5.3108156017673824e-08, - "loss": 0.8799, - "num_input_tokens_seen": 164808585, - "step": 7723 - }, - { - "epoch": 0.9287560872963386, - "flos": 15877827370320.0, - "grad_norm": 8.194510450654079, - "learning_rate": 5.2929983765586775e-08, - "loss": 0.9464, - "num_input_tokens_seen": 164827085, - "step": 7724 - }, - { - "epoch": 0.9288763301869777, - "flos": 18288428394360.0, - "grad_norm": 5.135579795903077, - "learning_rate": 5.275210688247278e-08, - "loss": 0.8518, - "num_input_tokens_seen": 164847130, - "step": 7725 - }, - { - "epoch": 0.9289965730776167, - "flos": 8666879707080.0, - "grad_norm": 5.601480438184645, - "learning_rate": 5.257452539531604e-08, - "loss": 1.0629, - "num_input_tokens_seen": 164863920, - "step": 7726 - }, - { - "epoch": 0.9291168159682559, - "flos": 18996952213200.0, - "grad_norm": 3.625484039612418, - "learning_rate": 5.2397239331055445e-08, - "loss": 0.9163, - "num_input_tokens_seen": 164885640, - "step": 7727 - }, - { - "epoch": 0.929237058858895, - "flos": 10266893048280.0, - "grad_norm": 5.112717214880551, - "learning_rate": 5.2220248716585036e-08, - "loss": 1.0241, - "num_input_tokens_seen": 164903040, - "step": 7728 - }, - { - "epoch": 0.929357301749534, - "flos": 16947714023760.0, - "grad_norm": 4.167971953343427, - "learning_rate": 5.204355357875445e-08, - "loss": 0.9743, - "num_input_tokens_seen": 164921105, - "step": 7729 - }, - { - "epoch": 0.9294775446401732, - "flos": 9139320904320.0, - "grad_norm": 6.2581755000638655, - "learning_rate": 5.1867153944367584e-08, - "loss": 0.9339, - "num_input_tokens_seen": 164937215, - "step": 7730 - }, - { - "epoch": 0.9295977875308122, - "flos": 18656199495000.0, - "grad_norm": 4.901716531091111, - 
"learning_rate": 5.16910498401848e-08, - "loss": 0.9673, - "num_input_tokens_seen": 164956385, - "step": 7731 - }, - { - "epoch": 0.9297180304214513, - "flos": 11656891641960.0, - "grad_norm": 6.024455141622184, - "learning_rate": 5.151524129292073e-08, - "loss": 1.0412, - "num_input_tokens_seen": 164974000, - "step": 7732 - }, - { - "epoch": 0.9298382733120905, - "flos": 17110222918680.0, - "grad_norm": 3.3279890292335295, - "learning_rate": 5.1339728329245155e-08, - "loss": 0.8992, - "num_input_tokens_seen": 164994285, - "step": 7733 - }, - { - "epoch": 0.9299585162027295, - "flos": 15721297479600.0, - "grad_norm": 4.794837742706403, - "learning_rate": 5.116451097578367e-08, - "loss": 0.9999, - "num_input_tokens_seen": 165013045, - "step": 7734 - }, - { - "epoch": 0.9300787590933686, - "flos": 15250052083200.0, - "grad_norm": 4.027808324471466, - "learning_rate": 5.0989589259115895e-08, - "loss": 0.9683, - "num_input_tokens_seen": 165033650, - "step": 7735 - }, - { - "epoch": 0.9301990019840077, - "flos": 12595580401320.0, - "grad_norm": 3.7190541375433774, - "learning_rate": 5.081496320577816e-08, - "loss": 0.9232, - "num_input_tokens_seen": 165050490, - "step": 7736 - }, - { - "epoch": 0.9303192448746468, - "flos": 42149790341760.0, - "grad_norm": 0.9312663736120042, - "learning_rate": 5.0640632842260835e-08, - "loss": 0.8876, - "num_input_tokens_seen": 165110470, - "step": 7737 - }, - { - "epoch": 0.9304394877652858, - "flos": 41260084737960.0, - "grad_norm": 2.894124485114515, - "learning_rate": 5.0466598195009426e-08, - "loss": 0.952, - "num_input_tokens_seen": 165137060, - "step": 7738 - }, - { - "epoch": 0.930559730655925, - "flos": 14908042241040.0, - "grad_norm": 5.296518499479826, - "learning_rate": 5.0292859290425036e-08, - "loss": 0.9213, - "num_input_tokens_seen": 165154650, - "step": 7739 - }, - { - "epoch": 0.9306799735465641, - "flos": 16533786179160.0, - "grad_norm": 3.744751627163744, - "learning_rate": 5.011941615486348e-08, - "loss": 0.9994, - "num_input_tokens_seen": 165173485, - "step": 7740 - }, - { - "epoch": 0.9308002164372031, - "flos": 10765156827120.0, - "grad_norm": 4.442837151660722, - "learning_rate": 4.994626881463659e-08, - "loss": 1.0792, - "num_input_tokens_seen": 165189460, - "step": 7741 - }, - { - "epoch": 0.9309204593278423, - "flos": 21988037302680.0, - "grad_norm": 4.33723169206008, - "learning_rate": 4.9773417296009814e-08, - "loss": 0.9372, - "num_input_tokens_seen": 165210700, - "step": 7742 - }, - { - "epoch": 0.9310407022184813, - "flos": 16374680717400.0, - "grad_norm": 3.5540124273131197, - "learning_rate": 4.960086162520527e-08, - "loss": 0.8769, - "num_input_tokens_seen": 165230510, - "step": 7743 - }, - { - "epoch": 0.9311609451091204, - "flos": 15721573433640.0, - "grad_norm": 8.97698235256164, - "learning_rate": 4.942860182839936e-08, - "loss": 1.0555, - "num_input_tokens_seen": 165248575, - "step": 7744 - }, - { - "epoch": 0.9312811879997596, - "flos": 14981248029000.0, - "grad_norm": 3.2729885184893464, - "learning_rate": 4.925663793172341e-08, - "loss": 1.0176, - "num_input_tokens_seen": 165266255, - "step": 7745 - }, - { - "epoch": 0.9314014308903986, - "flos": 48081235204320.0, - "grad_norm": 0.8464885350694168, - "learning_rate": 4.908496996126477e-08, - "loss": 0.8417, - "num_input_tokens_seen": 165329435, - "step": 7746 - }, - { - "epoch": 0.9315216737810377, - "flos": 16036043646840.0, - "grad_norm": 14.188560412601106, - "learning_rate": 4.89135979430646e-08, - "loss": 0.9869, - "num_input_tokens_seen": 165349200, - "step": 
7747 - }, - { - "epoch": 0.9316419166716768, - "flos": 17054867706720.0, - "grad_norm": 4.077740998294266, - "learning_rate": 4.874252190312078e-08, - "loss": 1.0651, - "num_input_tokens_seen": 165369305, - "step": 7748 - }, - { - "epoch": 0.9317621595623159, - "flos": 21545036751120.0, - "grad_norm": 6.072500887509906, - "learning_rate": 4.857174186738477e-08, - "loss": 0.8749, - "num_input_tokens_seen": 165392375, - "step": 7749 - }, - { - "epoch": 0.931882402452955, - "flos": 11132590650600.0, - "grad_norm": 5.819983474340933, - "learning_rate": 4.840125786176408e-08, - "loss": 0.9665, - "num_input_tokens_seen": 165408300, - "step": 7750 - }, - { - "epoch": 0.932002645343594, - "flos": 20205027596400.0, - "grad_norm": 2.9019554977212167, - "learning_rate": 4.823106991212067e-08, - "loss": 0.9997, - "num_input_tokens_seen": 165427260, - "step": 7751 - }, - { - "epoch": 0.9321228882342332, - "flos": 10843943019000.0, - "grad_norm": 9.896752073237012, - "learning_rate": 4.806117804427212e-08, - "loss": 1.0653, - "num_input_tokens_seen": 165444915, - "step": 7752 - }, - { - "epoch": 0.9322431311248722, - "flos": 12679640381520.0, - "grad_norm": 5.314278159158144, - "learning_rate": 4.7891582283990926e-08, - "loss": 0.8765, - "num_input_tokens_seen": 165463360, - "step": 7753 - }, - { - "epoch": 0.9323633740155113, - "flos": 17188733156520.0, - "grad_norm": 2.4170095485487595, - "learning_rate": 4.772228265700473e-08, - "loss": 0.9473, - "num_input_tokens_seen": 165483940, - "step": 7754 - }, - { - "epoch": 0.9324836169061504, - "flos": 10629237052800.0, - "grad_norm": 3.648665557856513, - "learning_rate": 4.75532791889961e-08, - "loss": 0.9783, - "num_input_tokens_seen": 165500360, - "step": 7755 - }, - { - "epoch": 0.9326038597967895, - "flos": 13203910711320.0, - "grad_norm": 2.9783196656849746, - "learning_rate": 4.738457190560252e-08, - "loss": 0.8749, - "num_input_tokens_seen": 165519190, - "step": 7756 - }, - { - "epoch": 0.9327241026874286, - "flos": 13439380101720.0, - "grad_norm": 4.158950043371143, - "learning_rate": 4.721616083241664e-08, - "loss": 1.0113, - "num_input_tokens_seen": 165537165, - "step": 7757 - }, - { - "epoch": 0.9328443455780677, - "flos": 21070111967520.0, - "grad_norm": 3.838423473068853, - "learning_rate": 4.7048045994986684e-08, - "loss": 0.9962, - "num_input_tokens_seen": 165557745, - "step": 7758 - }, - { - "epoch": 0.9329645884687068, - "flos": 21437760421920.0, - "grad_norm": 4.0419543186873685, - "learning_rate": 4.688022741881559e-08, - "loss": 1.1275, - "num_input_tokens_seen": 165577990, - "step": 7759 - }, - { - "epoch": 0.9330848313593458, - "flos": 15534100480800.0, - "grad_norm": 3.5159549812585507, - "learning_rate": 4.671270512936076e-08, - "loss": 0.9849, - "num_input_tokens_seen": 165596870, - "step": 7760 - }, - { - "epoch": 0.933205074249985, - "flos": 15720929540880.0, - "grad_norm": 7.233704241312417, - "learning_rate": 4.6545479152035884e-08, - "loss": 1.0507, - "num_input_tokens_seen": 165615760, - "step": 7761 - }, - { - "epoch": 0.9333253171406241, - "flos": 10844035003680.0, - "grad_norm": 4.059951217433462, - "learning_rate": 4.637854951220821e-08, - "loss": 1.0006, - "num_input_tokens_seen": 165632265, - "step": 7762 - }, - { - "epoch": 0.9334455600312631, - "flos": 11106277484040.0, - "grad_norm": 3.080926230483798, - "learning_rate": 4.621191623520171e-08, - "loss": 0.974, - "num_input_tokens_seen": 165650415, - "step": 7763 - }, - { - "epoch": 0.9335658029219023, - "flos": 15747028076520.0, - "grad_norm": 4.856507856870281, - 
"learning_rate": 4.604557934629372e-08, - "loss": 1.0726, - "num_input_tokens_seen": 165669210, - "step": 7764 - }, - { - "epoch": 0.9336860458125413, - "flos": 14383955880600.0, - "grad_norm": 2.980280438994987, - "learning_rate": 4.587953887071805e-08, - "loss": 1.0417, - "num_input_tokens_seen": 165688750, - "step": 7765 - }, - { - "epoch": 0.9338062887031804, - "flos": 14852625705960.0, - "grad_norm": 2.9838676620571514, - "learning_rate": 4.5713794833662554e-08, - "loss": 1.0883, - "num_input_tokens_seen": 165707685, - "step": 7766 - }, - { - "epoch": 0.9339265315938196, - "flos": 16507442351040.0, - "grad_norm": 7.948932254511697, - "learning_rate": 4.5548347260270236e-08, - "loss": 0.8566, - "num_input_tokens_seen": 165726695, - "step": 7767 - }, - { - "epoch": 0.9340467744844586, - "flos": 16218978688800.0, - "grad_norm": 6.675906390540082, - "learning_rate": 4.538319617564012e-08, - "loss": 0.917, - "num_input_tokens_seen": 165745435, - "step": 7768 - }, - { - "epoch": 0.9341670173750977, - "flos": 16822924395720.0, - "grad_norm": 4.4914851692818605, - "learning_rate": 4.521834160482485e-08, - "loss": 0.9785, - "num_input_tokens_seen": 165763895, - "step": 7769 - }, - { - "epoch": 0.9342872602657368, - "flos": 17658844075200.0, - "grad_norm": 3.327584092863855, - "learning_rate": 4.5053783572832846e-08, - "loss": 1.0479, - "num_input_tokens_seen": 165783795, - "step": 7770 - }, - { - "epoch": 0.9344075031563759, - "flos": 18339705618840.0, - "grad_norm": 3.9473232470822004, - "learning_rate": 4.488952210462771e-08, - "loss": 0.9894, - "num_input_tokens_seen": 165803720, - "step": 7771 - }, - { - "epoch": 0.9345277460470149, - "flos": 18180998757360.0, - "grad_norm": 3.5457782141902325, - "learning_rate": 4.4725557225127495e-08, - "loss": 1.0776, - "num_input_tokens_seen": 165821780, - "step": 7772 - }, - { - "epoch": 0.9346479889376541, - "flos": 24478650981000.0, - "grad_norm": 2.92128433540351, - "learning_rate": 4.456188895920565e-08, - "loss": 1.0249, - "num_input_tokens_seen": 165843255, - "step": 7773 - }, - { - "epoch": 0.9347682318282932, - "flos": 13540125518640.0, - "grad_norm": 4.199499445560353, - "learning_rate": 4.439851733169031e-08, - "loss": 1.067, - "num_input_tokens_seen": 165860765, - "step": 7774 - }, - { - "epoch": 0.9348884747189322, - "flos": 18683524493040.0, - "grad_norm": 3.6322540838457678, - "learning_rate": 4.4235442367365204e-08, - "loss": 0.9075, - "num_input_tokens_seen": 165880795, - "step": 7775 - }, - { - "epoch": 0.9350087176095714, - "flos": 12994202579400.0, - "grad_norm": 3.2961937078385923, - "learning_rate": 4.4072664090968545e-08, - "loss": 1.0051, - "num_input_tokens_seen": 165898900, - "step": 7776 - }, - { - "epoch": 0.9351289605002104, - "flos": 13702235813280.0, - "grad_norm": 4.08135087033176, - "learning_rate": 4.391018252719347e-08, - "loss": 1.0601, - "num_input_tokens_seen": 165918415, - "step": 7777 - }, - { - "epoch": 0.9352492033908495, - "flos": 13328761662480.0, - "grad_norm": 5.528360247741512, - "learning_rate": 4.374799770068849e-08, - "loss": 0.9106, - "num_input_tokens_seen": 165934810, - "step": 7778 - }, - { - "epoch": 0.9353694462814887, - "flos": 21040763306520.0, - "grad_norm": 4.397485768965812, - "learning_rate": 4.358610963605658e-08, - "loss": 0.9769, - "num_input_tokens_seen": 165954980, - "step": 7779 - }, - { - "epoch": 0.9354896891721277, - "flos": 21857268670440.0, - "grad_norm": 4.882986715642049, - "learning_rate": 4.342451835785677e-08, - "loss": 0.9032, - "num_input_tokens_seen": 165975610, - "step": 
7780 - }, - { - "epoch": 0.9356099320627668, - "flos": 13806875248320.0, - "grad_norm": 2.7323532041328424, - "learning_rate": 4.3263223890601665e-08, - "loss": 0.9787, - "num_input_tokens_seen": 165994040, - "step": 7781 - }, - { - "epoch": 0.9357301749534058, - "flos": 13543896890520.0, - "grad_norm": 5.14877647013388, - "learning_rate": 4.31022262587597e-08, - "loss": 1.0159, - "num_input_tokens_seen": 166012435, - "step": 7782 - }, - { - "epoch": 0.935850417844045, - "flos": 16743739603560.0, - "grad_norm": 3.6431142785710846, - "learning_rate": 4.2941525486754225e-08, - "loss": 0.8854, - "num_input_tokens_seen": 166032475, - "step": 7783 - }, - { - "epoch": 0.935970660734684, - "flos": 13177628206320.0, - "grad_norm": 5.0758705181805, - "learning_rate": 4.278112159896286e-08, - "loss": 1.0159, - "num_input_tokens_seen": 166050035, - "step": 7784 - }, - { - "epoch": 0.9360909036253231, - "flos": 14645799760680.0, - "grad_norm": 3.6181303241510667, - "learning_rate": 4.2621014619719896e-08, - "loss": 0.9058, - "num_input_tokens_seen": 166067520, - "step": 7785 - }, - { - "epoch": 0.9362111465159623, - "flos": 44232332533440.0, - "grad_norm": 0.7749709523817796, - "learning_rate": 4.246120457331215e-08, - "loss": 0.8584, - "num_input_tokens_seen": 166129415, - "step": 7786 - }, - { - "epoch": 0.9363313894066013, - "flos": 17189315726160.0, - "grad_norm": 5.3664294378002095, - "learning_rate": 4.2301691483983325e-08, - "loss": 0.9453, - "num_input_tokens_seen": 166149255, - "step": 7787 - }, - { - "epoch": 0.9364516322972404, - "flos": 14278611229680.0, - "grad_norm": 2.9628684156839005, - "learning_rate": 4.214247537593163e-08, - "loss": 0.9806, - "num_input_tokens_seen": 166168225, - "step": 7788 - }, - { - "epoch": 0.9365718751878795, - "flos": 14697843524160.0, - "grad_norm": 3.643415045024052, - "learning_rate": 4.1983556273309293e-08, - "loss": 1.037, - "num_input_tokens_seen": 166186695, - "step": 7789 - }, - { - "epoch": 0.9366921180785186, - "flos": 13224919428000.0, - "grad_norm": 13.781091667528669, - "learning_rate": 4.182493420022526e-08, - "loss": 0.9208, - "num_input_tokens_seen": 166202085, - "step": 7790 - }, - { - "epoch": 0.9368123609691577, - "flos": 18342373174560.0, - "grad_norm": 7.319960220312207, - "learning_rate": 4.166660918074139e-08, - "loss": 1.011, - "num_input_tokens_seen": 166221710, - "step": 7791 - }, - { - "epoch": 0.9369326038597968, - "flos": 18182777127840.0, - "grad_norm": 3.1922564453805604, - "learning_rate": 4.15085812388758e-08, - "loss": 0.9601, - "num_input_tokens_seen": 166243650, - "step": 7792 - }, - { - "epoch": 0.9370528467504359, - "flos": 16506062580840.0, - "grad_norm": 6.277633916656238, - "learning_rate": 4.135085039860153e-08, - "loss": 1.0184, - "num_input_tokens_seen": 166262770, - "step": 7793 - }, - { - "epoch": 0.9371730896410749, - "flos": 17761398524160.0, - "grad_norm": 7.90593299682542, - "learning_rate": 4.1193416683845906e-08, - "loss": 1.0105, - "num_input_tokens_seen": 166281420, - "step": 7794 - }, - { - "epoch": 0.9372933325317141, - "flos": 10995321767640.0, - "grad_norm": 7.544643790985849, - "learning_rate": 4.103628011849136e-08, - "loss": 1.056, - "num_input_tokens_seen": 166296500, - "step": 7795 - }, - { - "epoch": 0.9374135754223532, - "flos": 15537411929280.0, - "grad_norm": 3.1266340942421365, - "learning_rate": 4.0879440726375506e-08, - "loss": 0.9718, - "num_input_tokens_seen": 166314005, - "step": 7796 - }, - { - "epoch": 0.9375338183129922, - "flos": 16082077744560.0, - "grad_norm": 5.993918197547137, - 
"learning_rate": 4.0722898531291074e-08, - "loss": 0.7768, - "num_input_tokens_seen": 166330965, - "step": 7797 - }, - { - "epoch": 0.9376540612036314, - "flos": 18579191673600.0, - "grad_norm": 3.241738750241434, - "learning_rate": 4.0566653556985295e-08, - "loss": 0.9905, - "num_input_tokens_seen": 166351230, - "step": 7798 - }, - { - "epoch": 0.9377743040942704, - "flos": 13988951766600.0, - "grad_norm": 4.415201603918792, - "learning_rate": 4.0410705827159886e-08, - "loss": 1.0173, - "num_input_tokens_seen": 166368245, - "step": 7799 - }, - { - "epoch": 0.9378945469849095, - "flos": 10790948747160.0, - "grad_norm": 3.852442890315771, - "learning_rate": 4.0255055365472356e-08, - "loss": 0.9355, - "num_input_tokens_seen": 166386060, - "step": 7800 - }, - { - "epoch": 0.9380147898755486, - "flos": 14616696392160.0, - "grad_norm": 5.807464715634512, - "learning_rate": 4.009970219553471e-08, - "loss": 0.966, - "num_input_tokens_seen": 166402730, - "step": 7801 - }, - { - "epoch": 0.9381350327661877, - "flos": 18705514379640.0, - "grad_norm": 3.8383880681121574, - "learning_rate": 3.99446463409141e-08, - "loss": 0.9754, - "num_input_tokens_seen": 166420305, - "step": 7802 - }, - { - "epoch": 0.9382552756568268, - "flos": 16769531523600.0, - "grad_norm": 6.698008329516722, - "learning_rate": 3.978988782513215e-08, - "loss": 0.9036, - "num_input_tokens_seen": 166437520, - "step": 7803 - }, - { - "epoch": 0.9383755185474659, - "flos": 20256887390520.0, - "grad_norm": 2.760030400343723, - "learning_rate": 3.963542667166586e-08, - "loss": 0.9862, - "num_input_tokens_seen": 166457345, - "step": 7804 - }, - { - "epoch": 0.938495761438105, - "flos": 14515337744040.0, - "grad_norm": 3.0227439538335052, - "learning_rate": 3.9481262903946486e-08, - "loss": 0.9038, - "num_input_tokens_seen": 166476510, - "step": 7805 - }, - { - "epoch": 0.938616004328744, - "flos": 49629909997920.0, - "grad_norm": 0.7803028468771234, - "learning_rate": 3.932739654536066e-08, - "loss": 0.8009, - "num_input_tokens_seen": 166538930, - "step": 7806 - }, - { - "epoch": 0.9387362472193832, - "flos": 13409295563280.0, - "grad_norm": 5.421681513343338, - "learning_rate": 3.917382761925014e-08, - "loss": 0.9743, - "num_input_tokens_seen": 166554485, - "step": 7807 - }, - { - "epoch": 0.9388564901100223, - "flos": 18864497195160.0, - "grad_norm": 3.447736721971143, - "learning_rate": 3.9020556148910754e-08, - "loss": 1.0088, - "num_input_tokens_seen": 166573560, - "step": 7808 - }, - { - "epoch": 0.9389767330006613, - "flos": 42183247651800.0, - "grad_norm": 0.727304141628684, - "learning_rate": 3.8867582157593895e-08, - "loss": 0.8163, - "num_input_tokens_seen": 166627485, - "step": 7809 - }, - { - "epoch": 0.9390969758913005, - "flos": 22171309621800.0, - "grad_norm": 2.9162259086728928, - "learning_rate": 3.871490566850544e-08, - "loss": 0.9827, - "num_input_tokens_seen": 166651415, - "step": 7810 - }, - { - "epoch": 0.9392172187819395, - "flos": 15930944288400.0, - "grad_norm": 3.361938492075593, - "learning_rate": 3.856252670480642e-08, - "loss": 0.9293, - "num_input_tokens_seen": 166669795, - "step": 7811 - }, - { - "epoch": 0.9393374616725786, - "flos": 13990576829280.0, - "grad_norm": 3.565230945741407, - "learning_rate": 3.841044528961279e-08, - "loss": 1.0362, - "num_input_tokens_seen": 166687310, - "step": 7812 - }, - { - "epoch": 0.9394577045632178, - "flos": 17189162418360.0, - "grad_norm": 3.21941598465103, - "learning_rate": 3.825866144599477e-08, - "loss": 1.0102, - "num_input_tokens_seen": 166706085, - "step": 
7813 - }, - { - "epoch": 0.9395779474538568, - "flos": 13490381372160.0, - "grad_norm": 3.4544084330128526, - "learning_rate": 3.8107175196978145e-08, - "loss": 0.9833, - "num_input_tokens_seen": 166722110, - "step": 7814 - }, - { - "epoch": 0.9396981903444959, - "flos": 10109381987640.0, - "grad_norm": 3.577464480007159, - "learning_rate": 3.7955986565542996e-08, - "loss": 0.9748, - "num_input_tokens_seen": 166739910, - "step": 7815 - }, - { - "epoch": 0.9398184332351349, - "flos": 24819648991680.0, - "grad_norm": 3.4708097373889997, - "learning_rate": 3.780509557462497e-08, - "loss": 0.9137, - "num_input_tokens_seen": 166759830, - "step": 7816 - }, - { - "epoch": 0.9399386761257741, - "flos": 18052775034600.0, - "grad_norm": 10.029667144150944, - "learning_rate": 3.765450224711375e-08, - "loss": 0.9773, - "num_input_tokens_seen": 166780055, - "step": 7817 - }, - { - "epoch": 0.9400589190164131, - "flos": 19441639150560.0, - "grad_norm": 3.6362059136344436, - "learning_rate": 3.750420660585396e-08, - "loss": 1.0176, - "num_input_tokens_seen": 166801715, - "step": 7818 - }, - { - "epoch": 0.9401791619070522, - "flos": 16634960857920.0, - "grad_norm": 2.823102506219021, - "learning_rate": 3.735420867364603e-08, - "loss": 1.0278, - "num_input_tokens_seen": 166822415, - "step": 7819 - }, - { - "epoch": 0.9402994047976914, - "flos": 25605671216880.0, - "grad_norm": 3.202111209374932, - "learning_rate": 3.7204508473244186e-08, - "loss": 0.8517, - "num_input_tokens_seen": 166845760, - "step": 7820 - }, - { - "epoch": 0.9404196476883304, - "flos": 15799715732760.0, - "grad_norm": 3.645389042985568, - "learning_rate": 3.7055106027357395e-08, - "loss": 0.9205, - "num_input_tokens_seen": 166865345, - "step": 7821 - }, - { - "epoch": 0.9405398905789695, - "flos": 13412852304240.0, - "grad_norm": 45.45327270945405, - "learning_rate": 3.690600135865063e-08, - "loss": 0.9434, - "num_input_tokens_seen": 166881990, - "step": 7822 - }, - { - "epoch": 0.9406601334696086, - "flos": 50326199854320.0, - "grad_norm": 0.7734593860342522, - "learning_rate": 3.675719448974246e-08, - "loss": 0.8295, - "num_input_tokens_seen": 166946800, - "step": 7823 - }, - { - "epoch": 0.9407803763602477, - "flos": 15748315862040.0, - "grad_norm": 6.099724134867443, - "learning_rate": 3.6608685443207054e-08, - "loss": 0.8159, - "num_input_tokens_seen": 166965670, - "step": 7824 - }, - { - "epoch": 0.9409006192508867, - "flos": 13386784430160.0, - "grad_norm": 4.748209565677345, - "learning_rate": 3.646047424157306e-08, - "loss": 0.8953, - "num_input_tokens_seen": 166982365, - "step": 7825 - }, - { - "epoch": 0.9410208621415259, - "flos": 16612909648200.0, - "grad_norm": 5.446767691924718, - "learning_rate": 3.631256090732382e-08, - "loss": 0.8895, - "num_input_tokens_seen": 167002545, - "step": 7826 - }, - { - "epoch": 0.941141105032165, - "flos": 16162213045080.0, - "grad_norm": 7.690491877228705, - "learning_rate": 3.6164945462897833e-08, - "loss": 1.0547, - "num_input_tokens_seen": 167021555, - "step": 7827 - }, - { - "epoch": 0.941261347922804, - "flos": 14698824694080.0, - "grad_norm": 5.55208907804019, - "learning_rate": 3.6017627930687856e-08, - "loss": 0.975, - "num_input_tokens_seen": 167041100, - "step": 7828 - }, - { - "epoch": 0.9413815908134432, - "flos": 13776024170880.0, - "grad_norm": 7.599370438450215, - "learning_rate": 3.587060833304267e-08, - "loss": 0.9857, - "num_input_tokens_seen": 167059010, - "step": 7829 - }, - { - "epoch": 0.9415018337040822, - "flos": 12390716795880.0, - "grad_norm": 8.29656326656603, 
- "learning_rate": 3.5723886692264225e-08, - "loss": 0.8624, - "num_input_tokens_seen": 167076270, - "step": 7830 - }, - { - "epoch": 0.9416220765947213, - "flos": 22695181351320.0, - "grad_norm": 3.9097513604682836, - "learning_rate": 3.557746303061071e-08, - "loss": 0.8375, - "num_input_tokens_seen": 167097745, - "step": 7831 - }, - { - "epoch": 0.9417423194853605, - "flos": 16715893359000.0, - "grad_norm": 3.4365393943665485, - "learning_rate": 3.543133737029391e-08, - "loss": 0.9521, - "num_input_tokens_seen": 167117975, - "step": 7832 - }, - { - "epoch": 0.9418625623759995, - "flos": 17005859437680.0, - "grad_norm": 9.283573977115399, - "learning_rate": 3.5285509733481214e-08, - "loss": 0.9063, - "num_input_tokens_seen": 167137420, - "step": 7833 - }, - { - "epoch": 0.9419828052666386, - "flos": 12810041075040.0, - "grad_norm": 6.730644449607426, - "learning_rate": 3.513998014229469e-08, - "loss": 0.9841, - "num_input_tokens_seen": 167156090, - "step": 7834 - }, - { - "epoch": 0.9421030481572777, - "flos": 12548013225600.0, - "grad_norm": 6.577620751995365, - "learning_rate": 3.499474861881069e-08, - "loss": 1.0887, - "num_input_tokens_seen": 167173035, - "step": 7835 - }, - { - "epoch": 0.9422232910479168, - "flos": 14331728147760.0, - "grad_norm": 9.695962864090209, - "learning_rate": 3.4849815185061136e-08, - "loss": 0.896, - "num_input_tokens_seen": 167192645, - "step": 7836 - }, - { - "epoch": 0.9423435339385559, - "flos": 13072896786600.0, - "grad_norm": 28.038535711824917, - "learning_rate": 3.470517986303223e-08, - "loss": 0.9945, - "num_input_tokens_seen": 167211350, - "step": 7837 - }, - { - "epoch": 0.942463776829195, - "flos": 14249630507400.0, - "grad_norm": 4.236439557014742, - "learning_rate": 3.4560842674664856e-08, - "loss": 1.0155, - "num_input_tokens_seen": 167229585, - "step": 7838 - }, - { - "epoch": 0.9425840197198341, - "flos": 16086523670760.0, - "grad_norm": 4.863038672100272, - "learning_rate": 3.441680364185506e-08, - "loss": 0.9816, - "num_input_tokens_seen": 167249175, - "step": 7839 - }, - { - "epoch": 0.9427042626104731, - "flos": 14147474658720.0, - "grad_norm": 5.228475132337597, - "learning_rate": 3.427306278645314e-08, - "loss": 0.9773, - "num_input_tokens_seen": 167267350, - "step": 7840 - }, - { - "epoch": 0.9428245055011123, - "flos": 16245414501600.0, - "grad_norm": 4.209284130494798, - "learning_rate": 3.4129620130264767e-08, - "loss": 0.9419, - "num_input_tokens_seen": 167285430, - "step": 7841 - }, - { - "epoch": 0.9429447483917514, - "flos": 14875075515960.0, - "grad_norm": 4.566995574541414, - "learning_rate": 3.398647569505009e-08, - "loss": 1.0108, - "num_input_tokens_seen": 167302575, - "step": 7842 - }, - { - "epoch": 0.9430649912823904, - "flos": 13361115156360.0, - "grad_norm": 5.770889483934952, - "learning_rate": 3.384362950252373e-08, - "loss": 0.959, - "num_input_tokens_seen": 167319265, - "step": 7843 - }, - { - "epoch": 0.9431852341730296, - "flos": 23215097739600.0, - "grad_norm": 4.663103014530249, - "learning_rate": 3.3701081574355473e-08, - "loss": 0.7942, - "num_input_tokens_seen": 167340945, - "step": 7844 - }, - { - "epoch": 0.9433054770636686, - "flos": 47907682599720.0, - "grad_norm": 0.6931845861058467, - "learning_rate": 3.3558831932169796e-08, - "loss": 0.7643, - "num_input_tokens_seen": 167409335, - "step": 7845 - }, - { - "epoch": 0.9434257199543077, - "flos": 18605044916760.0, - "grad_norm": 18.153515082850518, - "learning_rate": 3.341688059754588e-08, - "loss": 1.1092, - "num_input_tokens_seen": 167424710, - 
"step": 7846 - }, - { - "epoch": 0.9435459628449467, - "flos": 17787221105760.0, - "grad_norm": 4.869502562510757, - "learning_rate": 3.327522759201762e-08, - "loss": 0.9926, - "num_input_tokens_seen": 167444300, - "step": 7847 - }, - { - "epoch": 0.9436662057355859, - "flos": 15746752122480.0, - "grad_norm": 7.295403995949185, - "learning_rate": 3.313387293707359e-08, - "loss": 0.8797, - "num_input_tokens_seen": 167462725, - "step": 7848 - }, - { - "epoch": 0.943786448626225, - "flos": 14277047490120.0, - "grad_norm": 3.3988809931976744, - "learning_rate": 3.29928166541571e-08, - "loss": 0.9018, - "num_input_tokens_seen": 167481400, - "step": 7849 - }, - { - "epoch": 0.943906691516864, - "flos": 15694769682120.0, - "grad_norm": 9.066181145552125, - "learning_rate": 3.2852058764666346e-08, - "loss": 1.0201, - "num_input_tokens_seen": 167500220, - "step": 7850 - }, - { - "epoch": 0.9440269344075032, - "flos": 25186868184240.0, - "grad_norm": 2.4723050559617206, - "learning_rate": 3.2711599289954264e-08, - "loss": 0.9105, - "num_input_tokens_seen": 167523975, - "step": 7851 - }, - { - "epoch": 0.9441471772981422, - "flos": 13643691799080.0, - "grad_norm": 6.967503183813774, - "learning_rate": 3.257143825132847e-08, - "loss": 0.9936, - "num_input_tokens_seen": 167541865, - "step": 7852 - }, - { - "epoch": 0.9442674201887813, - "flos": 18317286470400.0, - "grad_norm": 4.310620990456509, - "learning_rate": 3.243157567005106e-08, - "loss": 0.9788, - "num_input_tokens_seen": 167559765, - "step": 7853 - }, - { - "epoch": 0.9443876630794205, - "flos": 10974834297480.0, - "grad_norm": 4.44407703682424, - "learning_rate": 3.2292011567339296e-08, - "loss": 0.8607, - "num_input_tokens_seen": 167577290, - "step": 7854 - }, - { - "epoch": 0.9445079059700595, - "flos": 9448578652320.0, - "grad_norm": 19.415253201665067, - "learning_rate": 3.21527459643649e-08, - "loss": 0.7591, - "num_input_tokens_seen": 167593895, - "step": 7855 - }, - { - "epoch": 0.9446281488606986, - "flos": 16821912564240.0, - "grad_norm": 3.426121972081281, - "learning_rate": 3.2013778882254536e-08, - "loss": 0.9635, - "num_input_tokens_seen": 167612410, - "step": 7856 - }, - { - "epoch": 0.9447483917513377, - "flos": 18186149899440.0, - "grad_norm": 3.301907282968779, - "learning_rate": 3.1875110342088676e-08, - "loss": 0.9798, - "num_input_tokens_seen": 167633580, - "step": 7857 - }, - { - "epoch": 0.9448686346419768, - "flos": 17451711514320.0, - "grad_norm": 2.8252418954636966, - "learning_rate": 3.1736740364904035e-08, - "loss": 0.8768, - "num_input_tokens_seen": 167653830, - "step": 7858 - }, - { - "epoch": 0.9449888775326158, - "flos": 10398489542640.0, - "grad_norm": 3.448338293628496, - "learning_rate": 3.159866897169094e-08, - "loss": 0.9756, - "num_input_tokens_seen": 167671750, - "step": 7859 - }, - { - "epoch": 0.945109120423255, - "flos": 10920276286080.0, - "grad_norm": 3.700027780170798, - "learning_rate": 3.146089618339487e-08, - "loss": 0.9855, - "num_input_tokens_seen": 167688325, - "step": 7860 - }, - { - "epoch": 0.9452293633138941, - "flos": 18107670323160.0, - "grad_norm": 3.7942014472741903, - "learning_rate": 3.132342202091554e-08, - "loss": 0.8986, - "num_input_tokens_seen": 167708270, - "step": 7861 - }, - { - "epoch": 0.9453496062045331, - "flos": 15065614624800.0, - "grad_norm": 4.5984286402501295, - "learning_rate": 3.1186246505107595e-08, - "loss": 0.8963, - "num_input_tokens_seen": 167727130, - "step": 7862 - }, - { - "epoch": 0.9454698490951723, - "flos": 14199947684040.0, - "grad_norm": 
2.9879759788415767, - "learning_rate": 3.104936965678084e-08, - "loss": 1.0583, - "num_input_tokens_seen": 167745180, - "step": 7863 - }, - { - "epoch": 0.9455900919858113, - "flos": 14960944528200.0, - "grad_norm": 4.351570389636025, - "learning_rate": 3.091279149669956e-08, - "loss": 1.0259, - "num_input_tokens_seen": 167763690, - "step": 7864 - }, - { - "epoch": 0.9457103348764504, - "flos": 14724585952560.0, - "grad_norm": 2.8142399809774976, - "learning_rate": 3.0776512045581624e-08, - "loss": 0.9606, - "num_input_tokens_seen": 167782200, - "step": 7865 - }, - { - "epoch": 0.9458305777670896, - "flos": 15217913220240.0, - "grad_norm": 6.450016926587983, - "learning_rate": 3.0640531324101384e-08, - "loss": 0.9923, - "num_input_tokens_seen": 167799685, - "step": 7866 - }, - { - "epoch": 0.9459508206577286, - "flos": 14200192976520.0, - "grad_norm": 4.307295381420644, - "learning_rate": 3.0504849352886554e-08, - "loss": 0.9795, - "num_input_tokens_seen": 167817550, - "step": 7867 - }, - { - "epoch": 0.9460710635483677, - "flos": 8562270933600.0, - "grad_norm": 9.896151139359528, - "learning_rate": 3.036946615252023e-08, - "loss": 0.9398, - "num_input_tokens_seen": 167832800, - "step": 7868 - }, - { - "epoch": 0.9461913064390068, - "flos": 24452061860400.0, - "grad_norm": 2.790961807618383, - "learning_rate": 3.0234381743539984e-08, - "loss": 0.892, - "num_input_tokens_seen": 167850135, - "step": 7869 - }, - { - "epoch": 0.9463115493296459, - "flos": 13806752602080.0, - "grad_norm": 3.881707938141252, - "learning_rate": 3.0099596146437863e-08, - "loss": 1.0234, - "num_input_tokens_seen": 167866960, - "step": 7870 - }, - { - "epoch": 0.946431792220285, - "flos": 50540077958400.0, - "grad_norm": 0.7835003296073021, - "learning_rate": 2.996510938166086e-08, - "loss": 0.8571, - "num_input_tokens_seen": 167929655, - "step": 7871 - }, - { - "epoch": 0.9465520351109241, - "flos": 13435332775800.0, - "grad_norm": 6.609778425741845, - "learning_rate": 2.983092146960997e-08, - "loss": 0.9626, - "num_input_tokens_seen": 167946720, - "step": 7872 - }, - { - "epoch": 0.9466722780015632, - "flos": 13570976596080.0, - "grad_norm": 3.38114708481173, - "learning_rate": 2.9697032430642256e-08, - "loss": 1.0137, - "num_input_tokens_seen": 167964655, - "step": 7873 - }, - { - "epoch": 0.9467925208922022, - "flos": 12207045876480.0, - "grad_norm": 6.169606856768901, - "learning_rate": 2.9563442285067906e-08, - "loss": 0.9456, - "num_input_tokens_seen": 167981420, - "step": 7874 - }, - { - "epoch": 0.9469127637828414, - "flos": 20781740289960.0, - "grad_norm": 3.729158596832807, - "learning_rate": 2.943015105315294e-08, - "loss": 1.0235, - "num_input_tokens_seen": 168001335, - "step": 7875 - }, - { - "epoch": 0.9470330066734804, - "flos": 18526013432400.0, - "grad_norm": 6.845642385607859, - "learning_rate": 2.929715875511718e-08, - "loss": 0.8971, - "num_input_tokens_seen": 168020090, - "step": 7876 - }, - { - "epoch": 0.9471532495641195, - "flos": 16665076057920.0, - "grad_norm": 6.192121533011917, - "learning_rate": 2.9164465411135375e-08, - "loss": 0.9228, - "num_input_tokens_seen": 168039580, - "step": 7877 - }, - { - "epoch": 0.9472734924547586, - "flos": 11185155660600.0, - "grad_norm": 3.190237776884206, - "learning_rate": 2.9032071041337426e-08, - "loss": 1.025, - "num_input_tokens_seen": 168057535, - "step": 7878 - }, - { - "epoch": 0.9473937353453977, - "flos": 7854697623120.0, - "grad_norm": 2.5675488487581455, - "learning_rate": 2.889997566580704e-08, - "loss": 0.9507, - "num_input_tokens_seen": 
168075410, - "step": 7879 - }, - { - "epoch": 0.9475139782360368, - "flos": 18342863759520.0, - "grad_norm": 3.7360576844836793, - "learning_rate": 2.8768179304583086e-08, - "loss": 0.9292, - "num_input_tokens_seen": 168097185, - "step": 7880 - }, - { - "epoch": 0.9476342211266758, - "flos": 16218794719440.0, - "grad_norm": 5.300245085033878, - "learning_rate": 2.8636681977659117e-08, - "loss": 0.9664, - "num_input_tokens_seen": 168116555, - "step": 7881 - }, - { - "epoch": 0.947754464017315, - "flos": 14331298885920.0, - "grad_norm": 4.07036732583637, - "learning_rate": 2.850548370498318e-08, - "loss": 1.0066, - "num_input_tokens_seen": 168134115, - "step": 7882 - }, - { - "epoch": 0.9478747069079541, - "flos": 17582848085280.0, - "grad_norm": 3.3567757420273376, - "learning_rate": 2.8374584506457798e-08, - "loss": 0.9281, - "num_input_tokens_seen": 168155110, - "step": 7883 - }, - { - "epoch": 0.9479949497985931, - "flos": 15170560675440.0, - "grad_norm": 4.012397864637243, - "learning_rate": 2.824398440193998e-08, - "loss": 0.8821, - "num_input_tokens_seen": 168173630, - "step": 7884 - }, - { - "epoch": 0.9481151926892323, - "flos": 12861348961080.0, - "grad_norm": 6.578597896730369, - "learning_rate": 2.811368341124232e-08, - "loss": 0.927, - "num_input_tokens_seen": 168192420, - "step": 7885 - }, - { - "epoch": 0.9482354355798713, - "flos": 15721604095200.0, - "grad_norm": 4.418714492417781, - "learning_rate": 2.7983681554131222e-08, - "loss": 0.9072, - "num_input_tokens_seen": 168212400, - "step": 7886 - }, - { - "epoch": 0.9483556784705104, - "flos": 13518871509480.0, - "grad_norm": 3.3625372618698783, - "learning_rate": 2.7853978850327365e-08, - "loss": 0.907, - "num_input_tokens_seen": 168231290, - "step": 7887 - }, - { - "epoch": 0.9484759213611496, - "flos": 18344212868160.0, - "grad_norm": 3.491938571463914, - "learning_rate": 2.7724575319507225e-08, - "loss": 1.0974, - "num_input_tokens_seen": 168250720, - "step": 7888 - }, - { - "epoch": 0.9485961642517886, - "flos": 14672020942560.0, - "grad_norm": 2.964173815481513, - "learning_rate": 2.759547098130044e-08, - "loss": 1.0069, - "num_input_tokens_seen": 168269170, - "step": 7889 - }, - { - "epoch": 0.9487164071424277, - "flos": 16113879330360.0, - "grad_norm": 3.414472542238291, - "learning_rate": 2.746666585529267e-08, - "loss": 0.989, - "num_input_tokens_seen": 168289165, - "step": 7890 - }, - { - "epoch": 0.9488366500330668, - "flos": 27650248849200.0, - "grad_norm": 5.480493633841851, - "learning_rate": 2.73381599610234e-08, - "loss": 0.9557, - "num_input_tokens_seen": 168309285, - "step": 7891 - }, - { - "epoch": 0.9489568929237059, - "flos": 19863048415800.0, - "grad_norm": 3.8640443300898997, - "learning_rate": 2.7209953317987033e-08, - "loss": 0.9364, - "num_input_tokens_seen": 168330045, - "step": 7892 - }, - { - "epoch": 0.9490771358143449, - "flos": 23951866403280.0, - "grad_norm": 24.78968788606023, - "learning_rate": 2.7082045945631793e-08, - "loss": 0.9971, - "num_input_tokens_seen": 168351980, - "step": 7893 - }, - { - "epoch": 0.9491973787049841, - "flos": 10450563967680.0, - "grad_norm": 4.059827424313743, - "learning_rate": 2.6954437863361712e-08, - "loss": 0.913, - "num_input_tokens_seen": 168369615, - "step": 7894 - }, - { - "epoch": 0.9493176215956232, - "flos": 18023794312320.0, - "grad_norm": 4.409187149599167, - "learning_rate": 2.6827129090534862e-08, - "loss": 0.936, - "num_input_tokens_seen": 168389635, - "step": 7895 - }, - { - "epoch": 0.9494378644862622, - "flos": 14931994467480.0, - "grad_norm": 
2.927804570695765, - "learning_rate": 2.670011964646335e-08, - "loss": 1.009, - "num_input_tokens_seen": 168408035, - "step": 7896 - }, - { - "epoch": 0.9495581073769014, - "flos": 10738996968360.0, - "grad_norm": 4.839170198162812, - "learning_rate": 2.657340955041487e-08, - "loss": 0.9024, - "num_input_tokens_seen": 168426530, - "step": 7897 - }, - { - "epoch": 0.9496783502675404, - "flos": 20384007297120.0, - "grad_norm": 5.535656741498012, - "learning_rate": 2.6446998821611167e-08, - "loss": 0.9338, - "num_input_tokens_seen": 168446445, - "step": 7898 - }, - { - "epoch": 0.9497985931581795, - "flos": 10503098316120.0, - "grad_norm": 4.650120329581789, - "learning_rate": 2.6320887479228228e-08, - "loss": 0.9344, - "num_input_tokens_seen": 168462765, - "step": 7899 - }, - { - "epoch": 0.9499188360488187, - "flos": 19361105249760.0, - "grad_norm": 5.339577992655266, - "learning_rate": 2.619507554239786e-08, - "loss": 0.9361, - "num_input_tokens_seen": 168481045, - "step": 7900 - }, - { - "epoch": 0.9500390789394577, - "flos": 17294323099920.0, - "grad_norm": 15.99706622883357, - "learning_rate": 2.606956303020502e-08, - "loss": 0.9358, - "num_input_tokens_seen": 168501570, - "step": 7901 - }, - { - "epoch": 0.9501593218300968, - "flos": 10134867292080.0, - "grad_norm": 5.746841890240741, - "learning_rate": 2.5944349961690036e-08, - "loss": 1.0651, - "num_input_tokens_seen": 168518310, - "step": 7902 - }, - { - "epoch": 0.9502795647207359, - "flos": 27651659280960.0, - "grad_norm": 6.305695747389227, - "learning_rate": 2.581943635584749e-08, - "loss": 0.95, - "num_input_tokens_seen": 168540860, - "step": 7903 - }, - { - "epoch": 0.950399807611375, - "flos": 29146481279040.0, - "grad_norm": 3.4509578480872385, - "learning_rate": 2.569482223162689e-08, - "loss": 0.8799, - "num_input_tokens_seen": 168564555, - "step": 7904 - }, - { - "epoch": 0.950520050502014, - "flos": 16664646796080.0, - "grad_norm": 3.8794291624968555, - "learning_rate": 2.5570507607932e-08, - "loss": 0.9544, - "num_input_tokens_seen": 168584190, - "step": 7905 - }, - { - "epoch": 0.9506402933926532, - "flos": 12599106480720.0, - "grad_norm": 4.305262809290005, - "learning_rate": 2.54464925036213e-08, - "loss": 0.8341, - "num_input_tokens_seen": 168601200, - "step": 7906 - }, - { - "epoch": 0.9507605362832923, - "flos": 23220126235440.0, - "grad_norm": 5.571692254390646, - "learning_rate": 2.532277693750773e-08, - "loss": 0.8237, - "num_input_tokens_seen": 168621845, - "step": 7907 - }, - { - "epoch": 0.9508807791739313, - "flos": 13905106417320.0, - "grad_norm": 5.466043903575103, - "learning_rate": 2.5199360928358948e-08, - "loss": 0.9772, - "num_input_tokens_seen": 168638800, - "step": 7908 - }, - { - "epoch": 0.9510010220645704, - "flos": 15250113406320.0, - "grad_norm": 3.8020840300816285, - "learning_rate": 2.507624449489665e-08, - "loss": 1.095, - "num_input_tokens_seen": 168657150, - "step": 7909 - }, - { - "epoch": 0.9511212649552095, - "flos": 13387213692000.0, - "grad_norm": 3.939832934792686, - "learning_rate": 2.495342765579811e-08, - "loss": 0.876, - "num_input_tokens_seen": 168675530, - "step": 7910 - }, - { - "epoch": 0.9512415078458486, - "flos": 14774207452800.0, - "grad_norm": 3.3250483229718117, - "learning_rate": 2.4830910429693984e-08, - "loss": 0.9337, - "num_input_tokens_seen": 168695210, - "step": 7911 - }, - { - "epoch": 0.9513617507364877, - "flos": 13020423761280.0, - "grad_norm": 2.927935999874374, - "learning_rate": 2.470869283517052e-08, - "loss": 1.0193, - "num_input_tokens_seen": 
168712965, - "step": 7912 - }, - { - "epoch": 0.9514819936271268, - "flos": 17792924155920.0, - "grad_norm": 2.8144989539392005, - "learning_rate": 2.458677489076777e-08, - "loss": 0.996, - "num_input_tokens_seen": 168733695, - "step": 7913 - }, - { - "epoch": 0.9516022365177659, - "flos": 13230775785960.0, - "grad_norm": 6.21783440525414, - "learning_rate": 2.446515661498072e-08, - "loss": 1.0572, - "num_input_tokens_seen": 168752745, - "step": 7914 - }, - { - "epoch": 0.9517224794084049, - "flos": 18052652388360.0, - "grad_norm": 3.519833093500815, - "learning_rate": 2.434383802625861e-08, - "loss": 0.9586, - "num_input_tokens_seen": 168771420, - "step": 7915 - }, - { - "epoch": 0.9518427222990441, - "flos": 15249316205760.0, - "grad_norm": 5.639208671301016, - "learning_rate": 2.4222819143005168e-08, - "loss": 0.9752, - "num_input_tokens_seen": 168790735, - "step": 7916 - }, - { - "epoch": 0.9519629651896832, - "flos": 14935121946600.0, - "grad_norm": 10.264404967575027, - "learning_rate": 2.4102099983579706e-08, - "loss": 1.0361, - "num_input_tokens_seen": 168809605, - "step": 7917 - }, - { - "epoch": 0.9520832080803222, - "flos": 15406122050520.0, - "grad_norm": 4.730580234867654, - "learning_rate": 2.3981680566294236e-08, - "loss": 0.9905, - "num_input_tokens_seen": 168828925, - "step": 7918 - }, - { - "epoch": 0.9522034509709614, - "flos": 16452117800640.0, - "grad_norm": 3.3193030404831294, - "learning_rate": 2.3861560909416822e-08, - "loss": 0.9742, - "num_input_tokens_seen": 168848195, - "step": 7919 - }, - { - "epoch": 0.9523236938616004, - "flos": 17554909856040.0, - "grad_norm": 3.3052034448747203, - "learning_rate": 2.3741741031169325e-08, - "loss": 1.0577, - "num_input_tokens_seen": 168867485, - "step": 7920 - }, - { - "epoch": 0.9524439367522395, - "flos": 16112376913920.0, - "grad_norm": 3.49421104159761, - "learning_rate": 2.3622220949728544e-08, - "loss": 0.92, - "num_input_tokens_seen": 168886090, - "step": 7921 - }, - { - "epoch": 0.9525641796428787, - "flos": 24293232352680.0, - "grad_norm": 5.302704700365864, - "learning_rate": 2.3503000683225526e-08, - "loss": 0.8346, - "num_input_tokens_seen": 168903525, - "step": 7922 - }, - { - "epoch": 0.9526844225335177, - "flos": 11839918668600.0, - "grad_norm": 7.521365188292235, - "learning_rate": 2.3384080249745585e-08, - "loss": 1.0585, - "num_input_tokens_seen": 168921135, - "step": 7923 - }, - { - "epoch": 0.9528046654241568, - "flos": 26365993506720.0, - "grad_norm": 9.565731524796846, - "learning_rate": 2.3265459667329178e-08, - "loss": 1.0479, - "num_input_tokens_seen": 168940345, - "step": 7924 - }, - { - "epoch": 0.9529249083147959, - "flos": 12937406274120.0, - "grad_norm": 4.56427928771452, - "learning_rate": 2.31471389539708e-08, - "loss": 1.0801, - "num_input_tokens_seen": 168957190, - "step": 7925 - }, - { - "epoch": 0.953045151205435, - "flos": 20414827713000.0, - "grad_norm": 7.439901525194392, - "learning_rate": 2.3029118127619872e-08, - "loss": 0.9419, - "num_input_tokens_seen": 168976625, - "step": 7926 - }, - { - "epoch": 0.953165394096074, - "flos": 15511068101160.0, - "grad_norm": 5.187789173568969, - "learning_rate": 2.2911397206179628e-08, - "loss": 1.09, - "num_input_tokens_seen": 168993095, - "step": 7927 - }, - { - "epoch": 0.9532856369867132, - "flos": 14174278410240.0, - "grad_norm": 4.462701719196734, - "learning_rate": 2.279397620750845e-08, - "loss": 0.8477, - "num_input_tokens_seen": 169011860, - "step": 7928 - }, - { - "epoch": 0.9534058798773523, - "flos": 10634449518000.0, - "grad_norm": 
5.760009346530537, - "learning_rate": 2.2676855149419195e-08, - "loss": 0.9995, - "num_input_tokens_seen": 169028750, - "step": 7929 - }, - { - "epoch": 0.9535261227679913, - "flos": 12469288356840.0, - "grad_norm": 6.605318885515347, - "learning_rate": 2.2560034049678988e-08, - "loss": 0.9698, - "num_input_tokens_seen": 169042820, - "step": 7930 - }, - { - "epoch": 0.9536463656586305, - "flos": 16450124799240.0, - "grad_norm": 4.948900027350814, - "learning_rate": 2.2443512926008988e-08, - "loss": 0.9794, - "num_input_tokens_seen": 169061870, - "step": 7931 - }, - { - "epoch": 0.9537666085492695, - "flos": 13204033357560.0, - "grad_norm": 6.3123724896233675, - "learning_rate": 2.2327291796085946e-08, - "loss": 0.9231, - "num_input_tokens_seen": 169079950, - "step": 7932 - }, - { - "epoch": 0.9538868514399086, - "flos": 13465785252960.0, - "grad_norm": 5.321486826620774, - "learning_rate": 2.2211370677540197e-08, - "loss": 0.9995, - "num_input_tokens_seen": 169096195, - "step": 7933 - }, - { - "epoch": 0.9540070943305478, - "flos": 11892790294200.0, - "grad_norm": 5.105502746515541, - "learning_rate": 2.2095749587957012e-08, - "loss": 0.9891, - "num_input_tokens_seen": 169113820, - "step": 7934 - }, - { - "epoch": 0.9541273372211868, - "flos": 14304893734680.0, - "grad_norm": 2.642624318240017, - "learning_rate": 2.1980428544876138e-08, - "loss": 0.9105, - "num_input_tokens_seen": 169132180, - "step": 7935 - }, - { - "epoch": 0.9542475801118259, - "flos": 19025779627680.0, - "grad_norm": 3.860442349876339, - "learning_rate": 2.1865407565791584e-08, - "loss": 0.9629, - "num_input_tokens_seen": 169153470, - "step": 7936 - }, - { - "epoch": 0.954367823002465, - "flos": 16585952588880.0, - "grad_norm": 5.913476791505305, - "learning_rate": 2.175068666815183e-08, - "loss": 0.9944, - "num_input_tokens_seen": 169174030, - "step": 7937 - }, - { - "epoch": 0.9544880658931041, - "flos": 10528767589920.0, - "grad_norm": 4.308979532465437, - "learning_rate": 2.163626586935985e-08, - "loss": 1.0086, - "num_input_tokens_seen": 169190290, - "step": 7938 - }, - { - "epoch": 0.9546083087837431, - "flos": 20729175279960.0, - "grad_norm": 3.3339464736330564, - "learning_rate": 2.1522145186773755e-08, - "loss": 0.8441, - "num_input_tokens_seen": 169208930, - "step": 7939 - }, - { - "epoch": 0.9547285516743822, - "flos": 15013141599480.0, - "grad_norm": 4.759162899107798, - "learning_rate": 2.140832463770481e-08, - "loss": 1.0776, - "num_input_tokens_seen": 169227845, - "step": 7940 - }, - { - "epoch": 0.9548487945650214, - "flos": 19575106000080.0, - "grad_norm": 5.393640178035349, - "learning_rate": 2.129480423941987e-08, - "loss": 0.9766, - "num_input_tokens_seen": 169244235, - "step": 7941 - }, - { - "epoch": 0.9549690374556604, - "flos": 15825844929960.0, - "grad_norm": 10.230537358971928, - "learning_rate": 2.1181584009140052e-08, - "loss": 1.0355, - "num_input_tokens_seen": 169263495, - "step": 7942 - }, - { - "epoch": 0.9550892803462995, - "flos": 12462880090800.0, - "grad_norm": 4.047465748915914, - "learning_rate": 2.10686639640405e-08, - "loss": 1.0528, - "num_input_tokens_seen": 169277305, - "step": 7943 - }, - { - "epoch": 0.9552095232369386, - "flos": 17320697589600.0, - "grad_norm": 2.5967327570145793, - "learning_rate": 2.0956044121251294e-08, - "loss": 1.0403, - "num_input_tokens_seen": 169295810, - "step": 7944 - }, - { - "epoch": 0.9553297661275777, - "flos": 16166168386320.0, - "grad_norm": 3.7923931781206512, - "learning_rate": 2.084372449785654e-08, - "loss": 1.0334, - 
"num_input_tokens_seen": 169315365, - "step": 7945 - }, - { - "epoch": 0.9554500090182168, - "flos": 10896170751840.0, - "grad_norm": 2.919996994226032, - "learning_rate": 2.0731705110895282e-08, - "loss": 0.9075, - "num_input_tokens_seen": 169332575, - "step": 7946 - }, - { - "epoch": 0.9555702519088559, - "flos": 16717211806080.0, - "grad_norm": 3.1383467424551585, - "learning_rate": 2.0619985977360587e-08, - "loss": 1.0867, - "num_input_tokens_seen": 169350615, - "step": 7947 - }, - { - "epoch": 0.955690494799495, - "flos": 16320337336920.0, - "grad_norm": 3.7314042920491284, - "learning_rate": 2.0508567114200237e-08, - "loss": 1.0055, - "num_input_tokens_seen": 169370250, - "step": 7948 - }, - { - "epoch": 0.955810737690134, - "flos": 18526687986720.0, - "grad_norm": 4.257575968200685, - "learning_rate": 2.0397448538316485e-08, - "loss": 1.0054, - "num_input_tokens_seen": 169391010, - "step": 7949 - }, - { - "epoch": 0.9559309805807732, - "flos": 14802942882600.0, - "grad_norm": 4.352762436949613, - "learning_rate": 2.028663026656563e-08, - "loss": 0.8866, - "num_input_tokens_seen": 169409585, - "step": 7950 - }, - { - "epoch": 0.9560512234714122, - "flos": 15326140057800.0, - "grad_norm": 4.310157692628792, - "learning_rate": 2.0176112315758885e-08, - "loss": 0.9499, - "num_input_tokens_seen": 169427095, - "step": 7951 - }, - { - "epoch": 0.9561714663620513, - "flos": 12358669917600.0, - "grad_norm": 4.029197229363481, - "learning_rate": 2.0065894702661957e-08, - "loss": 0.9189, - "num_input_tokens_seen": 169443490, - "step": 7952 - }, - { - "epoch": 0.9562917092526905, - "flos": 18574255162440.0, - "grad_norm": 6.518188557718671, - "learning_rate": 1.9955977443994577e-08, - "loss": 1.0053, - "num_input_tokens_seen": 169463200, - "step": 7953 - }, - { - "epoch": 0.9564119521433295, - "flos": 17136290792760.0, - "grad_norm": 4.9549355070996235, - "learning_rate": 1.9846360556430965e-08, - "loss": 0.85, - "num_input_tokens_seen": 169481220, - "step": 7954 - }, - { - "epoch": 0.9565321950339686, - "flos": 22821902657640.0, - "grad_norm": 5.298697099144876, - "learning_rate": 1.973704405660004e-08, - "loss": 0.8243, - "num_input_tokens_seen": 169502055, - "step": 7955 - }, - { - "epoch": 0.9566524379246077, - "flos": 16771493863440.0, - "grad_norm": 2.706375813367828, - "learning_rate": 1.9628027961085203e-08, - "loss": 0.9969, - "num_input_tokens_seen": 169525005, - "step": 7956 - }, - { - "epoch": 0.9567726808152468, - "flos": 27176550528000.0, - "grad_norm": 2.630356002036365, - "learning_rate": 1.9519312286423894e-08, - "loss": 1.0616, - "num_input_tokens_seen": 169547920, - "step": 7957 - }, - { - "epoch": 0.9568929237058859, - "flos": 16163776784640.0, - "grad_norm": 3.547079340892439, - "learning_rate": 1.9410897049108255e-08, - "loss": 1.0015, - "num_input_tokens_seen": 169566920, - "step": 7958 - }, - { - "epoch": 0.957013166596525, - "flos": 16952987812080.0, - "grad_norm": 9.829953749771983, - "learning_rate": 1.9302782265584905e-08, - "loss": 1.1265, - "num_input_tokens_seen": 169587305, - "step": 7959 - }, - { - "epoch": 0.9571334094871641, - "flos": 12495509538720.0, - "grad_norm": 8.537949164040178, - "learning_rate": 1.9194967952254282e-08, - "loss": 1.0853, - "num_input_tokens_seen": 169605600, - "step": 7960 - }, - { - "epoch": 0.9572536523778031, - "flos": 10863326673000.0, - "grad_norm": 6.160407352107203, - "learning_rate": 1.9087454125472635e-08, - "loss": 1.0202, - "num_input_tokens_seen": 169619795, - "step": 7961 - }, - { - "epoch": 0.9573738952684423, - "flos": 
17763422187120.0, - "grad_norm": 8.176304741661458, - "learning_rate": 1.8980240801548696e-08, - "loss": 1.003, - "num_input_tokens_seen": 169638705, - "step": 7962 - }, - { - "epoch": 0.9574941381590814, - "flos": 18337681955880.0, - "grad_norm": 7.350987540614229, - "learning_rate": 1.8873327996747458e-08, - "loss": 0.9595, - "num_input_tokens_seen": 169656925, - "step": 7963 - }, - { - "epoch": 0.9576143810497204, - "flos": 23037129870360.0, - "grad_norm": 27.603034076607866, - "learning_rate": 1.8766715727287053e-08, - "loss": 0.8849, - "num_input_tokens_seen": 169678350, - "step": 7964 - }, - { - "epoch": 0.9577346239403596, - "flos": 19758807581040.0, - "grad_norm": 2.9948503482984945, - "learning_rate": 1.8660404009340546e-08, - "loss": 1.0149, - "num_input_tokens_seen": 169698520, - "step": 7965 - }, - { - "epoch": 0.9578548668309986, - "flos": 41122994276280.0, - "grad_norm": 0.9061837665426491, - "learning_rate": 1.8554392859035485e-08, - "loss": 0.8955, - "num_input_tokens_seen": 169755990, - "step": 7966 - }, - { - "epoch": 0.9579751097216377, - "flos": 14016859334280.0, - "grad_norm": 3.3126979264921, - "learning_rate": 1.8448682292453444e-08, - "loss": 1.0163, - "num_input_tokens_seen": 169774785, - "step": 7967 - }, - { - "epoch": 0.9580953526122769, - "flos": 12784494447480.0, - "grad_norm": 4.6830382411388385, - "learning_rate": 1.8343272325631154e-08, - "loss": 0.8898, - "num_input_tokens_seen": 169793450, - "step": 7968 - }, - { - "epoch": 0.9582155955029159, - "flos": 17259700650600.0, - "grad_norm": 5.048962424168845, - "learning_rate": 1.8238162974558492e-08, - "loss": 1.0103, - "num_input_tokens_seen": 169807100, - "step": 7969 - }, - { - "epoch": 0.958335838393555, - "flos": 16192358906640.0, - "grad_norm": 3.6765596414888084, - "learning_rate": 1.8133354255181144e-08, - "loss": 0.9697, - "num_input_tokens_seen": 169827135, - "step": 7970 - }, - { - "epoch": 0.958456081284194, - "flos": 11972710963800.0, - "grad_norm": 3.7659835961258596, - "learning_rate": 1.802884618339795e-08, - "loss": 0.9698, - "num_input_tokens_seen": 169845660, - "step": 7971 - }, - { - "epoch": 0.9585763241748332, - "flos": 14174186425560.0, - "grad_norm": 4.620261574478393, - "learning_rate": 1.7924638775062894e-08, - "loss": 1.0235, - "num_input_tokens_seen": 169864500, - "step": 7972 - }, - { - "epoch": 0.9586965670654722, - "flos": 15194911502160.0, - "grad_norm": 4.632360220795654, - "learning_rate": 1.7820732045984444e-08, - "loss": 1.039, - "num_input_tokens_seen": 169884365, - "step": 7973 - }, - { - "epoch": 0.9588168099561113, - "flos": 15223677593520.0, - "grad_norm": 3.4258056082995916, - "learning_rate": 1.7717126011924655e-08, - "loss": 0.9634, - "num_input_tokens_seen": 169905670, - "step": 7974 - }, - { - "epoch": 0.9589370528467505, - "flos": 8272703455200.0, - "grad_norm": 7.933696646612394, - "learning_rate": 1.7613820688600957e-08, - "loss": 0.9824, - "num_input_tokens_seen": 169921295, - "step": 7975 - }, - { - "epoch": 0.9590572957373895, - "flos": 16665597304440.0, - "grad_norm": 3.734995770432862, - "learning_rate": 1.7510816091684588e-08, - "loss": 1.018, - "num_input_tokens_seen": 169940940, - "step": 7976 - }, - { - "epoch": 0.9591775386280286, - "flos": 16009117249080.0, - "grad_norm": 4.220710581393807, - "learning_rate": 1.740811223680083e-08, - "loss": 1.0015, - "num_input_tokens_seen": 169957515, - "step": 7977 - }, - { - "epoch": 0.9592977815186677, - "flos": 12889164544080.0, - "grad_norm": 5.995766417673618, - "learning_rate": 1.7305709139530334e-08, - 
"loss": 0.9589, - "num_input_tokens_seen": 169976015, - "step": 7978 - }, - { - "epoch": 0.9594180244093068, - "flos": 11702956401240.0, - "grad_norm": 8.044450294581488, - "learning_rate": 1.7203606815407334e-08, - "loss": 0.9693, - "num_input_tokens_seen": 169990330, - "step": 7979 - }, - { - "epoch": 0.9595382672999458, - "flos": 14589984625320.0, - "grad_norm": 2.9404399039282, - "learning_rate": 1.7101805279920557e-08, - "loss": 1.0191, - "num_input_tokens_seen": 170008210, - "step": 7980 - }, - { - "epoch": 0.959658510190585, - "flos": 16088240718120.0, - "grad_norm": 5.56799507889585, - "learning_rate": 1.7000304548513643e-08, - "loss": 1.0366, - "num_input_tokens_seen": 170028035, - "step": 7981 - }, - { - "epoch": 0.9597787530812241, - "flos": 13570639318920.0, - "grad_norm": 4.233140828919565, - "learning_rate": 1.6899104636583394e-08, - "loss": 1.0438, - "num_input_tokens_seen": 170045805, - "step": 7982 - }, - { - "epoch": 0.9598989959718631, - "flos": 43013372296440.0, - "grad_norm": 0.7218241256687962, - "learning_rate": 1.6798205559482638e-08, - "loss": 0.8786, - "num_input_tokens_seen": 170107905, - "step": 7983 - }, - { - "epoch": 0.9600192388625023, - "flos": 14829470680080.0, - "grad_norm": 4.136659040011113, - "learning_rate": 1.669760733251713e-08, - "loss": 0.989, - "num_input_tokens_seen": 170126500, - "step": 7984 - }, - { - "epoch": 0.9601394817531413, - "flos": 14512056957120.0, - "grad_norm": 3.687952692725564, - "learning_rate": 1.659730997094755e-08, - "loss": 1.059, - "num_input_tokens_seen": 170144710, - "step": 7985 - }, - { - "epoch": 0.9602597246437804, - "flos": 15275322756720.0, - "grad_norm": 2.957921890517123, - "learning_rate": 1.6497313489989283e-08, - "loss": 0.8508, - "num_input_tokens_seen": 170164255, - "step": 7986 - }, - { - "epoch": 0.9603799675344196, - "flos": 21332293124760.0, - "grad_norm": 4.127098705578283, - "learning_rate": 1.639761790481131e-08, - "loss": 0.9085, - "num_input_tokens_seen": 170184855, - "step": 7987 - }, - { - "epoch": 0.9605002104250586, - "flos": 19942693131360.0, - "grad_norm": 9.77577324269931, - "learning_rate": 1.6298223230537754e-08, - "loss": 1.0154, - "num_input_tokens_seen": 170202375, - "step": 7988 - }, - { - "epoch": 0.9606204533156977, - "flos": 25396729623960.0, - "grad_norm": 4.904432992933814, - "learning_rate": 1.619912948224611e-08, - "loss": 0.9129, - "num_input_tokens_seen": 170223300, - "step": 7989 - }, - { - "epoch": 0.9607406962063368, - "flos": 18916663604880.0, - "grad_norm": 5.113957487870245, - "learning_rate": 1.6100336674969682e-08, - "loss": 0.8186, - "num_input_tokens_seen": 170241860, - "step": 7990 - }, - { - "epoch": 0.9608609390969759, - "flos": 18023150419560.0, - "grad_norm": 8.225765352671266, - "learning_rate": 1.600184482369449e-08, - "loss": 0.9909, - "num_input_tokens_seen": 170261495, - "step": 7991 - }, - { - "epoch": 0.960981181987615, - "flos": 14960484604800.0, - "grad_norm": 4.810450922713377, - "learning_rate": 1.5903653943362126e-08, - "loss": 1.1141, - "num_input_tokens_seen": 170280210, - "step": 7992 - }, - { - "epoch": 0.9611014248782541, - "flos": 12627933895200.0, - "grad_norm": 4.952655565417224, - "learning_rate": 1.580576404886802e-08, - "loss": 0.9839, - "num_input_tokens_seen": 170298460, - "step": 7993 - }, - { - "epoch": 0.9612216677688932, - "flos": 13728365010480.0, - "grad_norm": 3.5041578663924513, - "learning_rate": 1.570817515506162e-08, - "loss": 1.0256, - "num_input_tokens_seen": 170316870, - "step": 7994 - }, - { - "epoch": 0.9613419106595322, - 
"flos": 11179084671720.0, - "grad_norm": 11.817623258609231, - "learning_rate": 1.561088727674753e-08, - "loss": 1.0377, - "num_input_tokens_seen": 170330800, - "step": 7995 - }, - { - "epoch": 0.9614621535501714, - "flos": 18290360072640.0, - "grad_norm": 7.309650615895731, - "learning_rate": 1.551390042868417e-08, - "loss": 0.9223, - "num_input_tokens_seen": 170352290, - "step": 7996 - }, - { - "epoch": 0.9615823964408104, - "flos": 12624346492680.0, - "grad_norm": 2.965490700320769, - "learning_rate": 1.5417214625584207e-08, - "loss": 0.9433, - "num_input_tokens_seen": 170369665, - "step": 7997 - }, - { - "epoch": 0.9617026393314495, - "flos": 14328232729920.0, - "grad_norm": 3.0196342105263114, - "learning_rate": 1.5320829882114806e-08, - "loss": 1.076, - "num_input_tokens_seen": 170387460, - "step": 7998 - }, - { - "epoch": 0.9618228822220887, - "flos": 14384109188400.0, - "grad_norm": 5.408136468643941, - "learning_rate": 1.5224746212897378e-08, - "loss": 1.0024, - "num_input_tokens_seen": 170406475, - "step": 7999 - }, - { - "epoch": 0.9619431251127277, - "flos": 14933128945200.0, - "grad_norm": 2.0989056778606474, - "learning_rate": 1.512896363250804e-08, - "loss": 0.9963, - "num_input_tokens_seen": 170426305, - "step": 8000 - }, - { - "epoch": 0.9620633680033668, - "flos": 15904508475600.0, - "grad_norm": 4.8332002994162515, - "learning_rate": 1.503348215547673e-08, - "loss": 0.9874, - "num_input_tokens_seen": 170447115, - "step": 8001 - }, - { - "epoch": 0.962183610894006, - "flos": 13092863010240.0, - "grad_norm": 4.8289073322605365, - "learning_rate": 1.4938301796288078e-08, - "loss": 1.0298, - "num_input_tokens_seen": 170463405, - "step": 8002 - }, - { - "epoch": 0.962303853784645, - "flos": 13066641828360.0, - "grad_norm": 4.827674820044409, - "learning_rate": 1.4843422569380537e-08, - "loss": 1.039, - "num_input_tokens_seen": 170479880, - "step": 8003 - }, - { - "epoch": 0.9624240966752841, - "flos": 18784607187120.0, - "grad_norm": 2.6197687429912873, - "learning_rate": 1.4748844489147483e-08, - "loss": 1.0522, - "num_input_tokens_seen": 170496590, - "step": 8004 - }, - { - "epoch": 0.9625443395659231, - "flos": 10345525932360.0, - "grad_norm": 3.7894100698351667, - "learning_rate": 1.4654567569936326e-08, - "loss": 0.9323, - "num_input_tokens_seen": 170513885, - "step": 8005 - }, - { - "epoch": 0.9626645824565623, - "flos": 13018768037040.0, - "grad_norm": 2.5728210452042233, - "learning_rate": 1.456059182604874e-08, - "loss": 1.0497, - "num_input_tokens_seen": 170532410, - "step": 8006 - }, - { - "epoch": 0.9627848253472013, - "flos": 11734420709880.0, - "grad_norm": 4.000011205657522, - "learning_rate": 1.4466917271740653e-08, - "loss": 0.9969, - "num_input_tokens_seen": 170550330, - "step": 8007 - }, - { - "epoch": 0.9629050682378404, - "flos": 14829532003200.0, - "grad_norm": 6.5878379315265905, - "learning_rate": 1.4373543921222697e-08, - "loss": 0.8968, - "num_input_tokens_seen": 170569635, - "step": 8008 - }, - { - "epoch": 0.9630253111284796, - "flos": 12049933416120.0, - "grad_norm": 7.354932995457932, - "learning_rate": 1.428047178865932e-08, - "loss": 0.9925, - "num_input_tokens_seen": 170586145, - "step": 8009 - }, - { - "epoch": 0.9631455540191186, - "flos": 14435662366920.0, - "grad_norm": 2.965977548721339, - "learning_rate": 1.4187700888169451e-08, - "loss": 0.9674, - "num_input_tokens_seen": 170605040, - "step": 8010 - }, - { - "epoch": 0.9632657969097577, - "flos": 47226637086720.0, - "grad_norm": 0.824839606732648, - "learning_rate": 
1.40952312338265e-08, - "loss": 0.8762, - "num_input_tokens_seen": 170669405, - "step": 8011 - }, - { - "epoch": 0.9633860398003968, - "flos": 31741918361760.0, - "grad_norm": 3.4081222568307146, - "learning_rate": 1.4003062839657909e-08, - "loss": 0.9068, - "num_input_tokens_seen": 170691605, - "step": 8012 - }, - { - "epoch": 0.9635062826910359, - "flos": 17661787584960.0, - "grad_norm": 2.4496728211527716, - "learning_rate": 1.391119571964583e-08, - "loss": 1.0235, - "num_input_tokens_seen": 170712265, - "step": 8013 - }, - { - "epoch": 0.9636265255816749, - "flos": 11289672449400.0, - "grad_norm": 7.2203877230277325, - "learning_rate": 1.3819629887726225e-08, - "loss": 0.9574, - "num_input_tokens_seen": 170730075, - "step": 8014 - }, - { - "epoch": 0.9637467684723141, - "flos": 16062019536240.0, - "grad_norm": 6.361926867259279, - "learning_rate": 1.3728365357789317e-08, - "loss": 0.9868, - "num_input_tokens_seen": 170749160, - "step": 8015 - }, - { - "epoch": 0.9638670113629532, - "flos": 12442637913120.0, - "grad_norm": 6.53489590597686, - "learning_rate": 1.3637402143680254e-08, - "loss": 0.987, - "num_input_tokens_seen": 170763780, - "step": 8016 - }, - { - "epoch": 0.9639872542535922, - "flos": 39402514587000.0, - "grad_norm": 0.7754303660165679, - "learning_rate": 1.3546740259197998e-08, - "loss": 0.8004, - "num_input_tokens_seen": 170816310, - "step": 8017 - }, - { - "epoch": 0.9641074971442314, - "flos": 17163585129240.0, - "grad_norm": 5.322137326435269, - "learning_rate": 1.3456379718095989e-08, - "loss": 0.9117, - "num_input_tokens_seen": 170836445, - "step": 8018 - }, - { - "epoch": 0.9642277400348704, - "flos": 47793292788600.0, - "grad_norm": 0.8670218713349032, - "learning_rate": 1.3366320534081487e-08, - "loss": 0.8778, - "num_input_tokens_seen": 170898845, - "step": 8019 - }, - { - "epoch": 0.9643479829255095, - "flos": 22040326358640.0, - "grad_norm": 4.608419442134003, - "learning_rate": 1.3276562720816675e-08, - "loss": 0.9733, - "num_input_tokens_seen": 170920075, - "step": 8020 - }, - { - "epoch": 0.9644682258161487, - "flos": 14226904743360.0, - "grad_norm": 3.647325889610021, - "learning_rate": 1.3187106291917549e-08, - "loss": 1.0521, - "num_input_tokens_seen": 170936785, - "step": 8021 - }, - { - "epoch": 0.9645884687067877, - "flos": 15039270796680.0, - "grad_norm": 3.7391819362159984, - "learning_rate": 1.309795126095503e-08, - "loss": 0.9345, - "num_input_tokens_seen": 170954805, - "step": 8022 - }, - { - "epoch": 0.9647087115974268, - "flos": 13433677051560.0, - "grad_norm": 3.920319632519543, - "learning_rate": 1.3009097641453192e-08, - "loss": 1.033, - "num_input_tokens_seen": 170972375, - "step": 8023 - }, - { - "epoch": 0.9648289544880659, - "flos": 11709180697920.0, - "grad_norm": 4.08723782039757, - "learning_rate": 1.2920545446891474e-08, - "loss": 0.9854, - "num_input_tokens_seen": 170988815, - "step": 8024 - }, - { - "epoch": 0.964949197378705, - "flos": 17107739332320.0, - "grad_norm": 4.84830973951977, - "learning_rate": 1.2832294690703127e-08, - "loss": 0.9302, - "num_input_tokens_seen": 171007510, - "step": 8025 - }, - { - "epoch": 0.965069440269344, - "flos": 16610610031200.0, - "grad_norm": 4.696479030423017, - "learning_rate": 1.2744345386275668e-08, - "loss": 0.9943, - "num_input_tokens_seen": 171026770, - "step": 8026 - }, - { - "epoch": 0.9651896831599832, - "flos": 18055013328480.0, - "grad_norm": 3.465379056467688, - "learning_rate": 1.265669754695109e-08, - "loss": 0.9994, - "num_input_tokens_seen": 171046060, - "step": 8027 - }, - { 
- "epoch": 0.9653099260506223, - "flos": 15825568975920.0, - "grad_norm": 3.3490316443282397, - "learning_rate": 1.2569351186025201e-08, - "loss": 1.0483, - "num_input_tokens_seen": 171064235, - "step": 8028 - }, - { - "epoch": 0.9654301689412613, - "flos": 19051019639640.0, - "grad_norm": 7.950819719387749, - "learning_rate": 1.2482306316748737e-08, - "loss": 0.9814, - "num_input_tokens_seen": 171084400, - "step": 8029 - }, - { - "epoch": 0.9655504118319005, - "flos": 12331620873600.0, - "grad_norm": 6.125146980857632, - "learning_rate": 1.2395562952326021e-08, - "loss": 1.0043, - "num_input_tokens_seen": 171101280, - "step": 8030 - }, - { - "epoch": 0.9656706547225395, - "flos": 15720714909960.0, - "grad_norm": 5.435057519013515, - "learning_rate": 1.2309121105916309e-08, - "loss": 1.0315, - "num_input_tokens_seen": 171119290, - "step": 8031 - }, - { - "epoch": 0.9657908976131786, - "flos": 26445055652640.0, - "grad_norm": 2.972573959675309, - "learning_rate": 1.222298079063222e-08, - "loss": 0.916, - "num_input_tokens_seen": 171140150, - "step": 8032 - }, - { - "epoch": 0.9659111405038178, - "flos": 17347072079280.0, - "grad_norm": 22.177398531280318, - "learning_rate": 1.2137142019541524e-08, - "loss": 0.9502, - "num_input_tokens_seen": 171158425, - "step": 8033 - }, - { - "epoch": 0.9660313833944568, - "flos": 17792280263160.0, - "grad_norm": 5.710989266929366, - "learning_rate": 1.2051604805666027e-08, - "loss": 0.9617, - "num_input_tokens_seen": 171175270, - "step": 8034 - }, - { - "epoch": 0.9661516262850959, - "flos": 8300059114800.0, - "grad_norm": 3.852280493738924, - "learning_rate": 1.196636916198135e-08, - "loss": 1.0026, - "num_input_tokens_seen": 171192530, - "step": 8035 - }, - { - "epoch": 0.9662718691757349, - "flos": 14226506143080.0, - "grad_norm": 4.057246170800894, - "learning_rate": 1.1881435101418036e-08, - "loss": 1.0021, - "num_input_tokens_seen": 171211665, - "step": 8036 - }, - { - "epoch": 0.9663921120663741, - "flos": 48713855017920.0, - "grad_norm": 0.9673094652521057, - "learning_rate": 1.1796802636860003e-08, - "loss": 0.9323, - "num_input_tokens_seen": 171279915, - "step": 8037 - }, - { - "epoch": 0.9665123549570132, - "flos": 18736273472400.0, - "grad_norm": 6.107368159141778, - "learning_rate": 1.1712471781146316e-08, - "loss": 0.9453, - "num_input_tokens_seen": 171298970, - "step": 8038 - }, - { - "epoch": 0.9666325978476522, - "flos": 31397547579480.0, - "grad_norm": 6.55474016898163, - "learning_rate": 1.1628442547069628e-08, - "loss": 0.899, - "num_input_tokens_seen": 171320890, - "step": 8039 - }, - { - "epoch": 0.9667528407382914, - "flos": 15301359969240.0, - "grad_norm": 6.209121073813696, - "learning_rate": 1.1544714947377521e-08, - "loss": 0.9938, - "num_input_tokens_seen": 171338295, - "step": 8040 - }, - { - "epoch": 0.9668730836289304, - "flos": 16979607594240.0, - "grad_norm": 4.654842906558847, - "learning_rate": 1.1461288994770945e-08, - "loss": 0.9263, - "num_input_tokens_seen": 171357090, - "step": 8041 - }, - { - "epoch": 0.9669933265195695, - "flos": 20152125309240.0, - "grad_norm": 3.751553912869897, - "learning_rate": 1.1378164701906002e-08, - "loss": 0.9914, - "num_input_tokens_seen": 171378575, - "step": 8042 - }, - { - "epoch": 0.9671135694102087, - "flos": 15956214961920.0, - "grad_norm": 15.265714959432993, - "learning_rate": 1.1295342081392156e-08, - "loss": 0.8874, - "num_input_tokens_seen": 171397655, - "step": 8043 - }, - { - "epoch": 0.9672338123008477, - "flos": 14304065872560.0, - "grad_norm": 3.8107201846239627, - 
"learning_rate": 1.1212821145793804e-08, - "loss": 0.9149, - "num_input_tokens_seen": 171416990, - "step": 8044 - }, - { - "epoch": 0.9673540551914868, - "flos": 12019204984920.0, - "grad_norm": 7.564114989295778, - "learning_rate": 1.1130601907629156e-08, - "loss": 1.007, - "num_input_tokens_seen": 171434440, - "step": 8045 - }, - { - "epoch": 0.9674742980821259, - "flos": 44304189212760.0, - "grad_norm": 0.8250778204887818, - "learning_rate": 1.1048684379370899e-08, - "loss": 0.8962, - "num_input_tokens_seen": 171494845, - "step": 8046 - }, - { - "epoch": 0.967594540972765, - "flos": 13256322413520.0, - "grad_norm": 6.102417713752886, - "learning_rate": 1.0967068573445759e-08, - "loss": 0.9639, - "num_input_tokens_seen": 171512050, - "step": 8047 - }, - { - "epoch": 0.967714783863404, - "flos": 14750868457560.0, - "grad_norm": 6.166570851724314, - "learning_rate": 1.0885754502234945e-08, - "loss": 0.865, - "num_input_tokens_seen": 171531430, - "step": 8048 - }, - { - "epoch": 0.9678350267540432, - "flos": 16481497123200.0, - "grad_norm": 4.548923516000126, - "learning_rate": 1.08047421780737e-08, - "loss": 1.0076, - "num_input_tokens_seen": 171550340, - "step": 8049 - }, - { - "epoch": 0.9679552696446823, - "flos": 15432649848000.0, - "grad_norm": 8.148775965975034, - "learning_rate": 1.0724031613251305e-08, - "loss": 0.9582, - "num_input_tokens_seen": 171567960, - "step": 8050 - }, - { - "epoch": 0.9680755125353213, - "flos": 19128610030680.0, - "grad_norm": 3.9261344485083125, - "learning_rate": 1.0643622820011744e-08, - "loss": 0.8786, - "num_input_tokens_seen": 171588735, - "step": 8051 - }, - { - "epoch": 0.9681957554259605, - "flos": 20174513796120.0, - "grad_norm": 33.56035403986749, - "learning_rate": 1.0563515810552814e-08, - "loss": 0.9061, - "num_input_tokens_seen": 171605425, - "step": 8052 - }, - { - "epoch": 0.9683159983165995, - "flos": 14592836150400.0, - "grad_norm": 11.571791426202058, - "learning_rate": 1.0483710597026795e-08, - "loss": 0.9622, - "num_input_tokens_seen": 171625005, - "step": 8053 - }, - { - "epoch": 0.9684362412072386, - "flos": 17216456754840.0, - "grad_norm": 3.8880827537489475, - "learning_rate": 1.0404207191540227e-08, - "loss": 0.9583, - "num_input_tokens_seen": 171645180, - "step": 8054 - }, - { - "epoch": 0.9685564840978778, - "flos": 15878348616840.0, - "grad_norm": 4.368902917022105, - "learning_rate": 1.0325005606153236e-08, - "loss": 0.9682, - "num_input_tokens_seen": 171664360, - "step": 8055 - }, - { - "epoch": 0.9686767269885168, - "flos": 10156949163360.0, - "grad_norm": 5.569305200440124, - "learning_rate": 1.0246105852881104e-08, - "loss": 1.0075, - "num_input_tokens_seen": 171679180, - "step": 8056 - }, - { - "epoch": 0.9687969698791559, - "flos": 15117474418920.0, - "grad_norm": 3.337998193711786, - "learning_rate": 1.0167507943692476e-08, - "loss": 1.0041, - "num_input_tokens_seen": 171697985, - "step": 8057 - }, - { - "epoch": 0.968917212769795, - "flos": 14068719128400.0, - "grad_norm": 5.1816905019626445, - "learning_rate": 1.008921189051093e-08, - "loss": 0.9386, - "num_input_tokens_seen": 171715050, - "step": 8058 - }, - { - "epoch": 0.9690374556604341, - "flos": 15399744446040.0, - "grad_norm": 7.666574422309011, - "learning_rate": 1.0011217705213848e-08, - "loss": 0.9916, - "num_input_tokens_seen": 171732645, - "step": 8059 - }, - { - "epoch": 0.9691576985510731, - "flos": 23273028522600.0, - "grad_norm": 3.0222236503180078, - "learning_rate": 9.933525399632658e-09, - "loss": 0.981, - "num_input_tokens_seen": 171750600, - 
"step": 8060 - }, - { - "epoch": 0.9692779414417123, - "flos": 25448926695240.0, - "grad_norm": 14.734335129684922, - "learning_rate": 9.856134985553488e-09, - "loss": 0.8803, - "num_input_tokens_seen": 171770045, - "step": 8061 - }, - { - "epoch": 0.9693981843323514, - "flos": 20205395535120.0, - "grad_norm": 4.280334374744471, - "learning_rate": 9.77904647471628e-09, - "loss": 0.9539, - "num_input_tokens_seen": 171792945, - "step": 8062 - }, - { - "epoch": 0.9695184272229904, - "flos": 16795783367040.0, - "grad_norm": 3.2761059356999906, - "learning_rate": 9.702259878815454e-09, - "loss": 0.9737, - "num_input_tokens_seen": 171812990, - "step": 8063 - }, - { - "epoch": 0.9696386701136296, - "flos": 16559946037920.0, - "grad_norm": 5.390520443109645, - "learning_rate": 9.625775209499254e-09, - "loss": 0.9667, - "num_input_tokens_seen": 171832715, - "step": 8064 - }, - { - "epoch": 0.9697589130042686, - "flos": 10686309312120.0, - "grad_norm": 3.7791430695850967, - "learning_rate": 9.549592478370172e-09, - "loss": 0.9587, - "num_input_tokens_seen": 171850615, - "step": 8065 - }, - { - "epoch": 0.9698791558949077, - "flos": 13386815091720.0, - "grad_norm": 4.155710974136859, - "learning_rate": 9.473711696985632e-09, - "loss": 1.0235, - "num_input_tokens_seen": 171869665, - "step": 8066 - }, - { - "epoch": 0.9699993987855468, - "flos": 12705524286240.0, - "grad_norm": 3.5269700842016674, - "learning_rate": 9.398132876856201e-09, - "loss": 0.9747, - "num_input_tokens_seen": 171888350, - "step": 8067 - }, - { - "epoch": 0.9701196416761859, - "flos": 48105340738560.0, - "grad_norm": 0.7818769609531905, - "learning_rate": 9.322856029447379e-09, - "loss": 0.873, - "num_input_tokens_seen": 171949255, - "step": 8068 - }, - { - "epoch": 0.970239884566825, - "flos": 17266231562880.0, - "grad_norm": 5.47960035495736, - "learning_rate": 9.247881166178695e-09, - "loss": 1.026, - "num_input_tokens_seen": 171967685, - "step": 8069 - }, - { - "epoch": 0.970360127457464, - "flos": 18002386995360.0, - "grad_norm": 7.815702956709623, - "learning_rate": 9.173208298423274e-09, - "loss": 0.993, - "num_input_tokens_seen": 171988610, - "step": 8070 - }, - { - "epoch": 0.9704803703481032, - "flos": 21071982322680.0, - "grad_norm": 4.894529691240773, - "learning_rate": 9.09883743750961e-09, - "loss": 0.9875, - "num_input_tokens_seen": 172011220, - "step": 8071 - }, - { - "epoch": 0.9706006132387422, - "flos": 12308680478640.0, - "grad_norm": 3.3357769751062385, - "learning_rate": 9.024768594719124e-09, - "loss": 1.0558, - "num_input_tokens_seen": 172029320, - "step": 8072 - }, - { - "epoch": 0.9707208561293813, - "flos": 12888704620680.0, - "grad_norm": 4.505408533605748, - "learning_rate": 8.95100178128816e-09, - "loss": 0.9421, - "num_input_tokens_seen": 172048180, - "step": 8073 - }, - { - "epoch": 0.9708410990200205, - "flos": 22276102364640.0, - "grad_norm": 8.89977963635525, - "learning_rate": 8.877537008407321e-09, - "loss": 0.9362, - "num_input_tokens_seen": 172067950, - "step": 8074 - }, - { - "epoch": 0.9709613419106595, - "flos": 21777286677720.0, - "grad_norm": 3.2053703067993466, - "learning_rate": 8.804374287221028e-09, - "loss": 0.9058, - "num_input_tokens_seen": 172088905, - "step": 8075 - }, - { - "epoch": 0.9710815848012986, - "flos": 16872607219080.0, - "grad_norm": 5.586749080575093, - "learning_rate": 8.731513628827958e-09, - "loss": 1.0721, - "num_input_tokens_seen": 172107990, - "step": 8076 - }, - { - "epoch": 0.9712018276919377, - "flos": 16894750413480.0, - "grad_norm": 4.169559593454582, - 
"learning_rate": 8.658955044280825e-09, - "loss": 1.0529, - "num_input_tokens_seen": 172126635, - "step": 8077 - }, - { - "epoch": 0.9713220705825768, - "flos": 16586013912000.0, - "grad_norm": 2.466587809857288, - "learning_rate": 8.586698544587268e-09, - "loss": 0.9905, - "num_input_tokens_seen": 172147965, - "step": 8078 - }, - { - "epoch": 0.9714423134732159, - "flos": 15774015797400.0, - "grad_norm": 6.263873457155394, - "learning_rate": 8.514744140707853e-09, - "loss": 0.9728, - "num_input_tokens_seen": 172166825, - "step": 8079 - }, - { - "epoch": 0.971562556363855, - "flos": 14357366760000.0, - "grad_norm": 5.286739949459675, - "learning_rate": 8.443091843558515e-09, - "loss": 0.9859, - "num_input_tokens_seen": 172185630, - "step": 8080 - }, - { - "epoch": 0.9716827992544941, - "flos": 17763728802720.0, - "grad_norm": 4.674401254102025, - "learning_rate": 8.37174166400878e-09, - "loss": 0.8692, - "num_input_tokens_seen": 172200925, - "step": 8081 - }, - { - "epoch": 0.9718030421451331, - "flos": 17556657564960.0, - "grad_norm": 3.0920208038380315, - "learning_rate": 8.300693612881992e-09, - "loss": 1.0675, - "num_input_tokens_seen": 172220710, - "step": 8082 - }, - { - "epoch": 0.9719232850357723, - "flos": 15694677697440.0, - "grad_norm": 6.281348991562926, - "learning_rate": 8.22994770095664e-09, - "loss": 1.0246, - "num_input_tokens_seen": 172239005, - "step": 8083 - }, - { - "epoch": 0.9720435279264114, - "flos": 16819919562840.0, - "grad_norm": 3.98183644525307, - "learning_rate": 8.159503938964585e-09, - "loss": 0.979, - "num_input_tokens_seen": 172256045, - "step": 8084 - }, - { - "epoch": 0.9721637708170504, - "flos": 20204506349880.0, - "grad_norm": 5.423615541780588, - "learning_rate": 8.089362337592164e-09, - "loss": 0.9374, - "num_input_tokens_seen": 172279390, - "step": 8085 - }, - { - "epoch": 0.9722840137076896, - "flos": 20754047353200.0, - "grad_norm": 3.3949395780285037, - "learning_rate": 8.019522907479536e-09, - "loss": 0.9467, - "num_input_tokens_seen": 172299470, - "step": 8086 - }, - { - "epoch": 0.9724042565983286, - "flos": 13648658971800.0, - "grad_norm": 4.788959744768348, - "learning_rate": 7.949985659221558e-09, - "loss": 0.9846, - "num_input_tokens_seen": 172316455, - "step": 8087 - }, - { - "epoch": 0.9725244994889677, - "flos": 16376735041920.0, - "grad_norm": 4.460215922718373, - "learning_rate": 7.880750603366904e-09, - "loss": 1.0095, - "num_input_tokens_seen": 172335045, - "step": 8088 - }, - { - "epoch": 0.9726447423796069, - "flos": 16612235093880.0, - "grad_norm": 4.0758953210867706, - "learning_rate": 7.811817750418282e-09, - "loss": 1.0119, - "num_input_tokens_seen": 172353525, - "step": 8089 - }, - { - "epoch": 0.9727649852702459, - "flos": 18893447255880.0, - "grad_norm": 15.231518161621727, - "learning_rate": 7.743187110833105e-09, - "loss": 1.0245, - "num_input_tokens_seen": 172376005, - "step": 8090 - }, - { - "epoch": 0.972885228160885, - "flos": 14567596138440.0, - "grad_norm": 2.7716190878002855, - "learning_rate": 7.674858695022602e-09, - "loss": 1.0347, - "num_input_tokens_seen": 172394080, - "step": 8091 - }, - { - "epoch": 0.9730054710515241, - "flos": 12442852544040.0, - "grad_norm": 4.702462691504339, - "learning_rate": 7.606832513351591e-09, - "loss": 0.9805, - "num_input_tokens_seen": 172411750, - "step": 8092 - }, - { - "epoch": 0.9731257139421632, - "flos": 49391926359600.0, - "grad_norm": 0.8233123061613467, - "learning_rate": 7.539108576140264e-09, - "loss": 0.9044, - "num_input_tokens_seen": 172475580, - "step": 8093 - 
}, - { - "epoch": 0.9732459568328022, - "flos": 13098933999120.0, - "grad_norm": 4.038678562355427, - "learning_rate": 7.471686893661732e-09, - "loss": 0.9176, - "num_input_tokens_seen": 172493595, - "step": 8094 - }, - { - "epoch": 0.9733661997234414, - "flos": 14826987093720.0, - "grad_norm": 3.316255563788091, - "learning_rate": 7.4045674761442636e-09, - "loss": 0.8696, - "num_input_tokens_seen": 172510645, - "step": 8095 - }, - { - "epoch": 0.9734864426140805, - "flos": 16898951047200.0, - "grad_norm": 3.893173424907533, - "learning_rate": 7.337750333769488e-09, - "loss": 0.9604, - "num_input_tokens_seen": 172530170, - "step": 8096 - }, - { - "epoch": 0.9736066855047195, - "flos": 25002737341440.0, - "grad_norm": 4.66134262651729, - "learning_rate": 7.2712354766737425e-09, - "loss": 0.9565, - "num_input_tokens_seen": 172550220, - "step": 8097 - }, - { - "epoch": 0.9737269283953586, - "flos": 14487000914520.0, - "grad_norm": 2.498050215467616, - "learning_rate": 7.2050229149469565e-09, - "loss": 1.0341, - "num_input_tokens_seen": 172569950, - "step": 8098 - }, - { - "epoch": 0.9738471712859977, - "flos": 20596536292560.0, - "grad_norm": 4.436403595777104, - "learning_rate": 7.139112658633984e-09, - "loss": 0.8537, - "num_input_tokens_seen": 172589820, - "step": 8099 - }, - { - "epoch": 0.9739674141766368, - "flos": 19785120747600.0, - "grad_norm": 67.4813523142227, - "learning_rate": 7.073504717733048e-09, - "loss": 0.9199, - "num_input_tokens_seen": 172609105, - "step": 8100 - }, - { - "epoch": 0.9740876570672758, - "flos": 49313692075800.0, - "grad_norm": 0.7317194304054774, - "learning_rate": 7.008199102196855e-09, - "loss": 0.8198, - "num_input_tokens_seen": 172670250, - "step": 8101 - }, - { - "epoch": 0.974207899957915, - "flos": 41675448127800.0, - "grad_norm": 0.8216446731466208, - "learning_rate": 6.9431958219321464e-09, - "loss": 0.8484, - "num_input_tokens_seen": 172726135, - "step": 8102 - }, - { - "epoch": 0.9743281428485541, - "flos": 16060547781360.0, - "grad_norm": 3.674871901790302, - "learning_rate": 6.878494886800146e-09, - "loss": 1.011, - "num_input_tokens_seen": 172746630, - "step": 8103 - }, - { - "epoch": 0.9744483857391931, - "flos": 14198169313560.0, - "grad_norm": 5.777174054091631, - "learning_rate": 6.814096306615669e-09, - "loss": 0.9816, - "num_input_tokens_seen": 172764490, - "step": 8104 - }, - { - "epoch": 0.9745686286298323, - "flos": 12521301458760.0, - "grad_norm": 4.522473695424404, - "learning_rate": 6.750000091148011e-09, - "loss": 0.8682, - "num_input_tokens_seen": 172781505, - "step": 8105 - }, - { - "epoch": 0.9746888715204713, - "flos": 20990957836920.0, - "grad_norm": 6.64885375964449, - "learning_rate": 6.686206250120729e-09, - "loss": 0.9503, - "num_input_tokens_seen": 172802720, - "step": 8106 - }, - { - "epoch": 0.9748091144111104, - "flos": 13099332599400.0, - "grad_norm": 3.1540998934720643, - "learning_rate": 6.622714793210749e-09, - "loss": 0.9661, - "num_input_tokens_seen": 172821360, - "step": 8107 - }, - { - "epoch": 0.9749293573017496, - "flos": 14669813310240.0, - "grad_norm": 5.349134264624645, - "learning_rate": 6.559525730050364e-09, - "loss": 1.0102, - "num_input_tokens_seen": 172841180, - "step": 8108 - }, - { - "epoch": 0.9750496001923886, - "flos": 13098719368200.0, - "grad_norm": 4.330901099129872, - "learning_rate": 6.496639070224574e-09, - "loss": 0.9865, - "num_input_tokens_seen": 172859385, - "step": 8109 - }, - { - "epoch": 0.9751698430830277, - "flos": 13885201516800.0, - "grad_norm": 21.801961632060987, - 
"learning_rate": 6.4340548232739714e-09, - "loss": 1.0668, - "num_input_tokens_seen": 172875305, - "step": 8110 - }, - { - "epoch": 0.9752900859736668, - "flos": 16743708942000.0, - "grad_norm": 3.337238567271272, - "learning_rate": 6.371772998692071e-09, - "loss": 1.0146, - "num_input_tokens_seen": 172894280, - "step": 8111 - }, - { - "epoch": 0.9754103288643059, - "flos": 14410085077800.0, - "grad_norm": 6.4475247742146236, - "learning_rate": 6.309793605927094e-09, - "loss": 0.8705, - "num_input_tokens_seen": 172912320, - "step": 8112 - }, - { - "epoch": 0.975530571754945, - "flos": 13724992238880.0, - "grad_norm": 3.3986904707841123, - "learning_rate": 6.248116654381297e-09, - "loss": 1.0157, - "num_input_tokens_seen": 172930510, - "step": 8113 - }, - { - "epoch": 0.9756508146455841, - "flos": 16767017275680.0, - "grad_norm": 3.6379185386626394, - "learning_rate": 6.186742153410751e-09, - "loss": 0.9459, - "num_input_tokens_seen": 172949725, - "step": 8114 - }, - { - "epoch": 0.9757710575362232, - "flos": 16324476647520.0, - "grad_norm": 6.5115907469985, - "learning_rate": 6.125670112326453e-09, - "loss": 1.0888, - "num_input_tokens_seen": 172968705, - "step": 8115 - }, - { - "epoch": 0.9758913004268622, - "flos": 19917299811600.0, - "grad_norm": 3.882584194786, - "learning_rate": 6.064900540392548e-09, - "loss": 0.9262, - "num_input_tokens_seen": 172990520, - "step": 8116 - }, - { - "epoch": 0.9760115433175014, - "flos": 15773341243080.0, - "grad_norm": 3.6740574620653534, - "learning_rate": 6.0044334468278835e-09, - "loss": 1.0117, - "num_input_tokens_seen": 173009585, - "step": 8117 - }, - { - "epoch": 0.9761317862081405, - "flos": 18684505662960.0, - "grad_norm": 4.917317729882442, - "learning_rate": 5.944268840805345e-09, - "loss": 0.9491, - "num_input_tokens_seen": 173030050, - "step": 8118 - }, - { - "epoch": 0.9762520290987795, - "flos": 18917368820760.0, - "grad_norm": 4.237356339891738, - "learning_rate": 5.88440673145163e-09, - "loss": 0.8646, - "num_input_tokens_seen": 173050820, - "step": 8119 - }, - { - "epoch": 0.9763722719894187, - "flos": 12758487896520.0, - "grad_norm": 4.580479143558053, - "learning_rate": 5.824847127848142e-09, - "loss": 1.0492, - "num_input_tokens_seen": 173069065, - "step": 8120 - }, - { - "epoch": 0.9764925148800577, - "flos": 15956981500920.0, - "grad_norm": 9.654266647547763, - "learning_rate": 5.765590039029433e-09, - "loss": 1.0038, - "num_input_tokens_seen": 173088105, - "step": 8121 - }, - { - "epoch": 0.9766127577706968, - "flos": 26235010243560.0, - "grad_norm": 4.791527367377661, - "learning_rate": 5.706635473985422e-09, - "loss": 0.9443, - "num_input_tokens_seen": 173111695, - "step": 8122 - }, - { - "epoch": 0.976733000661336, - "flos": 15852311404320.0, - "grad_norm": 3.6254949560649834, - "learning_rate": 5.6479834416591764e-09, - "loss": 1.0792, - "num_input_tokens_seen": 173130775, - "step": 8123 - }, - { - "epoch": 0.976853243551975, - "flos": 18368165094600.0, - "grad_norm": 4.159465951018311, - "learning_rate": 5.589633950947803e-09, - "loss": 0.9083, - "num_input_tokens_seen": 173147995, - "step": 8124 - }, - { - "epoch": 0.9769734864426141, - "flos": 15196444580160.0, - "grad_norm": 4.945042703750792, - "learning_rate": 5.5315870107035535e-09, - "loss": 0.9183, - "num_input_tokens_seen": 173165765, - "step": 8125 - }, - { - "epoch": 0.9770937293332532, - "flos": 9873575320080.0, - "grad_norm": 6.88684670453309, - "learning_rate": 5.473842629731607e-09, - "loss": 1.0065, - "num_input_tokens_seen": 173183985, - "step": 8126 - }, 
- { - "epoch": 0.9772139722238923, - "flos": 12705984209640.0, - "grad_norm": 4.967313546846088, - "learning_rate": 5.416400816792066e-09, - "loss": 1.0029, - "num_input_tokens_seen": 173201220, - "step": 8127 - }, - { - "epoch": 0.9773342151145313, - "flos": 14512424895840.0, - "grad_norm": 5.146889252634239, - "learning_rate": 5.359261580598407e-09, - "loss": 0.9866, - "num_input_tokens_seen": 173216780, - "step": 8128 - }, - { - "epoch": 0.9774544580051704, - "flos": 8325636403920.0, - "grad_norm": 4.915011994976879, - "learning_rate": 5.302424929819027e-09, - "loss": 0.9952, - "num_input_tokens_seen": 173230510, - "step": 8129 - }, - { - "epoch": 0.9775747008958096, - "flos": 9506264142840.0, - "grad_norm": 4.262945766017192, - "learning_rate": 5.24589087307592e-09, - "loss": 0.9507, - "num_input_tokens_seen": 173247850, - "step": 8130 - }, - { - "epoch": 0.9776949437864486, - "flos": 42604233203280.0, - "grad_norm": 4.605272025193017, - "learning_rate": 5.189659418944891e-09, - "loss": 0.8783, - "num_input_tokens_seen": 173277745, - "step": 8131 - }, - { - "epoch": 0.9778151866770877, - "flos": 15039178812000.0, - "grad_norm": 3.8767233677197317, - "learning_rate": 5.133730575956674e-09, - "loss": 0.9988, - "num_input_tokens_seen": 173297135, - "step": 8132 - }, - { - "epoch": 0.9779354295677268, - "flos": 14829470680080.0, - "grad_norm": 3.7353913907190384, - "learning_rate": 5.0781043525953696e-09, - "loss": 0.9409, - "num_input_tokens_seen": 173314920, - "step": 8133 - }, - { - "epoch": 0.9780556724583659, - "flos": 16664830765440.0, - "grad_norm": 2.9031793923923512, - "learning_rate": 5.0227807572995605e-09, - "loss": 0.9551, - "num_input_tokens_seen": 173336615, - "step": 8134 - }, - { - "epoch": 0.9781759153490049, - "flos": 14855661200400.0, - "grad_norm": 4.156676765272594, - "learning_rate": 4.967759798461646e-09, - "loss": 0.9004, - "num_input_tokens_seen": 173354680, - "step": 8135 - }, - { - "epoch": 0.9782961582396441, - "flos": 20152278617040.0, - "grad_norm": 17.007604356787915, - "learning_rate": 4.913041484428282e-09, - "loss": 0.9704, - "num_input_tokens_seen": 173374875, - "step": 8136 - }, - { - "epoch": 0.9784164011302832, - "flos": 18182409189120.0, - "grad_norm": 7.252305552135349, - "learning_rate": 4.858625823500384e-09, - "loss": 0.9804, - "num_input_tokens_seen": 173392295, - "step": 8137 - }, - { - "epoch": 0.9785366440209222, - "flos": 21353700441720.0, - "grad_norm": 4.159814897600589, - "learning_rate": 4.80451282393246e-09, - "loss": 0.9687, - "num_input_tokens_seen": 173412000, - "step": 8138 - }, - { - "epoch": 0.9786568869115614, - "flos": 23063289729120.0, - "grad_norm": 3.2891525292477297, - "learning_rate": 4.750702493933722e-09, - "loss": 0.9026, - "num_input_tokens_seen": 173431605, - "step": 8139 - }, - { - "epoch": 0.9787771298022004, - "flos": 16586381850720.0, - "grad_norm": 4.096247719143328, - "learning_rate": 4.697194841666974e-09, - "loss": 1.0749, - "num_input_tokens_seen": 173450250, - "step": 8140 - }, - { - "epoch": 0.9788973726928395, - "flos": 15247292542800.0, - "grad_norm": 3.8641272564500935, - "learning_rate": 4.6439898752492764e-09, - "loss": 1.0396, - "num_input_tokens_seen": 173470110, - "step": 8141 - }, - { - "epoch": 0.9790176155834787, - "flos": 49338134887200.0, - "grad_norm": 0.7461577383033648, - "learning_rate": 4.591087602751731e-09, - "loss": 0.8671, - "num_input_tokens_seen": 173531690, - "step": 8142 - }, - { - "epoch": 0.9791378584741177, - "flos": 15220059529440.0, - "grad_norm": 3.1302680815433526, - 
"learning_rate": 4.538488032199916e-09, - "loss": 0.9446, - "num_input_tokens_seen": 173549510, - "step": 8143 - }, - { - "epoch": 0.9792581013647568, - "flos": 14301582286200.0, - "grad_norm": 5.494628016730188, - "learning_rate": 4.486191171572784e-09, - "loss": 0.9025, - "num_input_tokens_seen": 173566500, - "step": 8144 - }, - { - "epoch": 0.9793783442553959, - "flos": 16870920833280.0, - "grad_norm": 4.428107193377328, - "learning_rate": 4.434197028803766e-09, - "loss": 1.0076, - "num_input_tokens_seen": 173585445, - "step": 8145 - }, - { - "epoch": 0.979498587146035, - "flos": 16375937841360.0, - "grad_norm": 6.1073778405232195, - "learning_rate": 4.3825056117805514e-09, - "loss": 1.0331, - "num_input_tokens_seen": 173601050, - "step": 8146 - }, - { - "epoch": 0.979618830036674, - "flos": 10109136695160.0, - "grad_norm": 9.132556033877595, - "learning_rate": 4.331116928344425e-09, - "loss": 1.0182, - "num_input_tokens_seen": 173617085, - "step": 8147 - }, - { - "epoch": 0.9797390729273132, - "flos": 11840286607320.0, - "grad_norm": 4.766806934311377, - "learning_rate": 4.28003098629115e-09, - "loss": 0.852, - "num_input_tokens_seen": 173632940, - "step": 8148 - }, - { - "epoch": 0.9798593158179523, - "flos": 17449319912640.0, - "grad_norm": 4.052254066041154, - "learning_rate": 4.229247793370305e-09, - "loss": 1.0147, - "num_input_tokens_seen": 173651785, - "step": 8149 - }, - { - "epoch": 0.9799795587085913, - "flos": 19444919937480.0, - "grad_norm": 2.8455521459069564, - "learning_rate": 4.178767357285951e-09, - "loss": 0.9247, - "num_input_tokens_seen": 173673135, - "step": 8150 - }, - { - "epoch": 0.9800998015992305, - "flos": 18710174936760.0, - "grad_norm": 3.807912724568688, - "learning_rate": 4.128589685695516e-09, - "loss": 0.9187, - "num_input_tokens_seen": 173693280, - "step": 8151 - }, - { - "epoch": 0.9802200444898695, - "flos": 11836975158840.0, - "grad_norm": 11.102935856400668, - "learning_rate": 4.078714786211135e-09, - "loss": 1.0764, - "num_input_tokens_seen": 173708850, - "step": 8152 - }, - { - "epoch": 0.9803402873805086, - "flos": 17714045979360.0, - "grad_norm": 3.438722982463413, - "learning_rate": 4.029142666398977e-09, - "loss": 0.9918, - "num_input_tokens_seen": 173728735, - "step": 8153 - }, - { - "epoch": 0.9804605302711478, - "flos": 16035491738760.0, - "grad_norm": 3.6376208570400452, - "learning_rate": 3.979873333778805e-09, - "loss": 1.025, - "num_input_tokens_seen": 173746630, - "step": 8154 - }, - { - "epoch": 0.9805807731617868, - "flos": 27778349925720.0, - "grad_norm": 17.957180378568275, - "learning_rate": 3.930906795824862e-09, - "loss": 0.9586, - "num_input_tokens_seen": 173767025, - "step": 8155 - }, - { - "epoch": 0.9807010160524259, - "flos": 12626860740600.0, - "grad_norm": 23.314366651976897, - "learning_rate": 3.882243059965207e-09, - "loss": 0.9986, - "num_input_tokens_seen": 173784460, - "step": 8156 - }, - { - "epoch": 0.980821258943065, - "flos": 9558767829720.0, - "grad_norm": 4.506103826472076, - "learning_rate": 3.833882133582156e-09, - "loss": 0.8851, - "num_input_tokens_seen": 173799840, - "step": 8157 - }, - { - "epoch": 0.9809415018337041, - "flos": 15406612635480.0, - "grad_norm": 3.5046195470486756, - "learning_rate": 3.785824024012285e-09, - "loss": 0.9995, - "num_input_tokens_seen": 173818560, - "step": 8158 - }, - { - "epoch": 0.9810617447243432, - "flos": 16560099345720.0, - "grad_norm": 3.151711099097259, - "learning_rate": 3.738068738545541e-09, - "loss": 1.0082, - "num_input_tokens_seen": 173837365, - "step": 8159 - 
}, - { - "epoch": 0.9811819876149822, - "flos": 12993313394160.0, - "grad_norm": 13.010296192125525, - "learning_rate": 3.6906162844265733e-09, - "loss": 1.0024, - "num_input_tokens_seen": 173854170, - "step": 8160 - }, - { - "epoch": 0.9813022305056214, - "flos": 16062172844040.0, - "grad_norm": 3.822736661382182, - "learning_rate": 3.643466668853845e-09, - "loss": 0.9426, - "num_input_tokens_seen": 173871915, - "step": 8161 - }, - { - "epoch": 0.9814224733962604, - "flos": 18082920896160.0, - "grad_norm": 2.824194015035831, - "learning_rate": 3.59661989898008e-09, - "loss": 0.9679, - "num_input_tokens_seen": 173892690, - "step": 8162 - }, - { - "epoch": 0.9815427162868995, - "flos": 17790379246440.0, - "grad_norm": 13.385798293238459, - "learning_rate": 3.5500759819115934e-09, - "loss": 0.9944, - "num_input_tokens_seen": 173912775, - "step": 8163 - }, - { - "epoch": 0.9816629591775387, - "flos": 14667636339480.0, - "grad_norm": 3.8461135172563576, - "learning_rate": 3.5038349247094034e-09, - "loss": 1.044, - "num_input_tokens_seen": 173929755, - "step": 8164 - }, - { - "epoch": 0.9817832020681777, - "flos": 12547921240920.0, - "grad_norm": 3.6164593550746202, - "learning_rate": 3.4578967343878994e-09, - "loss": 0.9985, - "num_input_tokens_seen": 173945680, - "step": 8165 - }, - { - "epoch": 0.9819034449588168, - "flos": 16010619665520.0, - "grad_norm": 3.9867960437037406, - "learning_rate": 3.4122614179161733e-09, - "loss": 1.0337, - "num_input_tokens_seen": 173965360, - "step": 8166 - }, - { - "epoch": 0.9820236878494559, - "flos": 14200223638080.0, - "grad_norm": 3.1875964879410446, - "learning_rate": 3.36692898221691e-09, - "loss": 0.9922, - "num_input_tokens_seen": 173983445, - "step": 8167 - }, - { - "epoch": 0.982143930740095, - "flos": 13335016620720.0, - "grad_norm": 5.662063898467769, - "learning_rate": 3.3218994341668305e-09, - "loss": 0.955, - "num_input_tokens_seen": 174002095, - "step": 8168 - }, - { - "epoch": 0.982264173630734, - "flos": 18919576453080.0, - "grad_norm": 3.881591482082685, - "learning_rate": 3.2771727805971373e-09, - "loss": 0.9779, - "num_input_tokens_seen": 174023200, - "step": 8169 - }, - { - "epoch": 0.9823844165213732, - "flos": 15639445131720.0, - "grad_norm": 4.240225828382681, - "learning_rate": 3.232749028292847e-09, - "loss": 0.9945, - "num_input_tokens_seen": 174039885, - "step": 8170 - }, - { - "epoch": 0.9825046594120123, - "flos": 15563479803360.0, - "grad_norm": 3.8451226737465305, - "learning_rate": 3.188628183992792e-09, - "loss": 1.1056, - "num_input_tokens_seen": 174059870, - "step": 8171 - }, - { - "epoch": 0.9826249023026513, - "flos": 42581900491440.0, - "grad_norm": 0.8675882942603335, - "learning_rate": 3.1448102543902844e-09, - "loss": 0.886, - "num_input_tokens_seen": 174123505, - "step": 8172 - }, - { - "epoch": 0.9827451451932905, - "flos": 11368519964400.0, - "grad_norm": 3.8860354642403223, - "learning_rate": 3.1012952461324515e-09, - "loss": 0.8914, - "num_input_tokens_seen": 174142200, - "step": 8173 - }, - { - "epoch": 0.9828653880839295, - "flos": 14380491124320.0, - "grad_norm": 3.506486875858983, - "learning_rate": 3.0580831658204575e-09, - "loss": 0.9682, - "num_input_tokens_seen": 174159500, - "step": 8174 - }, - { - "epoch": 0.9829856309745686, - "flos": 15353587702080.0, - "grad_norm": 3.7289715223915465, - "learning_rate": 3.015174020009281e-09, - "loss": 1.0109, - "num_input_tokens_seen": 174178545, - "step": 8175 - }, - { - "epoch": 0.9831058738652078, - "flos": 16896743414880.0, - "grad_norm": 3.5014457441454576, 
- "learning_rate": 2.9725678152086043e-09, - "loss": 0.9769, - "num_input_tokens_seen": 174196835, - "step": 8176 - }, - { - "epoch": 0.9832261167558468, - "flos": 7953756654240.0, - "grad_norm": 22.073215378784376, - "learning_rate": 2.930264557881257e-09, - "loss": 1.0396, - "num_input_tokens_seen": 174211740, - "step": 8177 - }, - { - "epoch": 0.9833463596464859, - "flos": 42944643096240.0, - "grad_norm": 0.813699936623235, - "learning_rate": 2.8882642544452163e-09, - "loss": 0.8575, - "num_input_tokens_seen": 174276185, - "step": 8178 - }, - { - "epoch": 0.983466602537125, - "flos": 9611854086240.0, - "grad_norm": 22.386228333378103, - "learning_rate": 2.8465669112716083e-09, - "loss": 0.9711, - "num_input_tokens_seen": 174293430, - "step": 8179 - }, - { - "epoch": 0.9835868454277641, - "flos": 16295189309640.0, - "grad_norm": 4.286786585584437, - "learning_rate": 2.8051725346858177e-09, - "loss": 0.9694, - "num_input_tokens_seen": 174313410, - "step": 8180 - }, - { - "epoch": 0.9837070883184031, - "flos": 19706917125360.0, - "grad_norm": 4.503478930698805, - "learning_rate": 2.7640811309674883e-09, - "loss": 0.9215, - "num_input_tokens_seen": 174332630, - "step": 8181 - }, - { - "epoch": 0.9838273312090423, - "flos": 20834151992160.0, - "grad_norm": 3.1813525589934635, - "learning_rate": 2.7232927063498557e-09, - "loss": 1.0361, - "num_input_tokens_seen": 174352725, - "step": 8182 - }, - { - "epoch": 0.9839475740996814, - "flos": 28644354143640.0, - "grad_norm": 5.184567093466462, - "learning_rate": 2.682807267020859e-09, - "loss": 0.9075, - "num_input_tokens_seen": 174375205, - "step": 8183 - }, - { - "epoch": 0.9840678169903204, - "flos": 17188457202480.0, - "grad_norm": 2.6590621423536143, - "learning_rate": 2.642624819121808e-09, - "loss": 0.8524, - "num_input_tokens_seen": 174395075, - "step": 8184 - }, - { - "epoch": 0.9841880598809596, - "flos": 10372023068280.0, - "grad_norm": 3.240735861961285, - "learning_rate": 2.6027453687487154e-09, - "loss": 0.839, - "num_input_tokens_seen": 174411885, - "step": 8185 - }, - { - "epoch": 0.9843083027715986, - "flos": 15877091492880.0, - "grad_norm": 11.645788494062456, - "learning_rate": 2.5631689219509643e-09, - "loss": 0.7305, - "num_input_tokens_seen": 174430285, - "step": 8186 - }, - { - "epoch": 0.9844285456622377, - "flos": 15486012058560.0, - "grad_norm": 5.8059819910057255, - "learning_rate": 2.523895484732197e-09, - "loss": 1.0644, - "num_input_tokens_seen": 174449460, - "step": 8187 - }, - { - "epoch": 0.9845487885528769, - "flos": 12779465951640.0, - "grad_norm": 8.365738487685109, - "learning_rate": 2.4849250630505357e-09, - "loss": 0.9773, - "num_input_tokens_seen": 174467425, - "step": 8188 - }, - { - "epoch": 0.9846690314435159, - "flos": 17949852646920.0, - "grad_norm": 3.0744307758741436, - "learning_rate": 2.4462576628172528e-09, - "loss": 0.9546, - "num_input_tokens_seen": 174485775, - "step": 8189 - }, - { - "epoch": 0.984789274334155, - "flos": 13151682978480.0, - "grad_norm": 4.032448050914629, - "learning_rate": 2.407893289898766e-09, - "loss": 0.9691, - "num_input_tokens_seen": 174504525, - "step": 8190 - }, - { - "epoch": 0.984909517224794, - "flos": 19471263765600.0, - "grad_norm": 3.564950735813792, - "learning_rate": 2.3698319501144202e-09, - "loss": 1.0647, - "num_input_tokens_seen": 174525230, - "step": 8191 - }, - { - "epoch": 0.9850297601154332, - "flos": 13282328964480.0, - "grad_norm": 4.559493445988139, - "learning_rate": 2.3320736492382644e-09, - "loss": 0.9519, - "num_input_tokens_seen": 174543785, - 
"step": 8192 - }, - { - "epoch": 0.9851500030060723, - "flos": 15850839649440.0, - "grad_norm": 12.01956108320152, - "learning_rate": 2.29461839299816e-09, - "loss": 0.9121, - "num_input_tokens_seen": 174563220, - "step": 8193 - }, - { - "epoch": 0.9852702458967113, - "flos": 18757619466240.0, - "grad_norm": 5.43963633696165, - "learning_rate": 2.257466187076229e-09, - "loss": 1.0272, - "num_input_tokens_seen": 174582145, - "step": 8194 - }, - { - "epoch": 0.9853904887873505, - "flos": 14826741801240.0, - "grad_norm": 3.3340817723855563, - "learning_rate": 2.2206170371081854e-09, - "loss": 0.9222, - "num_input_tokens_seen": 174600450, - "step": 8195 - }, - { - "epoch": 0.9855107316779895, - "flos": 17974663397040.0, - "grad_norm": 5.702127615055534, - "learning_rate": 2.1840709486842247e-09, - "loss": 1.0745, - "num_input_tokens_seen": 174619790, - "step": 8196 - }, - { - "epoch": 0.9856309745686286, - "flos": 13518043647360.0, - "grad_norm": 4.498183837607581, - "learning_rate": 2.1478279273481335e-09, - "loss": 1.0074, - "num_input_tokens_seen": 174637995, - "step": 8197 - }, - { - "epoch": 0.9857512174592677, - "flos": 24347606394720.0, - "grad_norm": 3.617898628084162, - "learning_rate": 2.1118879785981815e-09, - "loss": 1.0138, - "num_input_tokens_seen": 174657855, - "step": 8198 - }, - { - "epoch": 0.9858714603499068, - "flos": 18500405481720.0, - "grad_norm": 3.4698268834005312, - "learning_rate": 2.0762511078862288e-09, - "loss": 1.0216, - "num_input_tokens_seen": 174677920, - "step": 8199 - }, - { - "epoch": 0.9859917032405459, - "flos": 16848563007960.0, - "grad_norm": 5.536219546556637, - "learning_rate": 2.0409173206186183e-09, - "loss": 0.8833, - "num_input_tokens_seen": 174696880, - "step": 8200 - }, - { - "epoch": 0.986111946131185, - "flos": 14147811935880.0, - "grad_norm": 3.3409917275064602, - "learning_rate": 2.0058866221550617e-09, - "loss": 1.0921, - "num_input_tokens_seen": 174714840, - "step": 8201 - }, - { - "epoch": 0.9862321890218241, - "flos": 14069148390240.0, - "grad_norm": 3.6389264904794096, - "learning_rate": 1.971159017809976e-09, - "loss": 0.9763, - "num_input_tokens_seen": 174732850, - "step": 8202 - }, - { - "epoch": 0.9863524319124631, - "flos": 15379318299000.0, - "grad_norm": 4.08147845015888, - "learning_rate": 1.93673451285159e-09, - "loss": 0.996, - "num_input_tokens_seen": 174751620, - "step": 8203 - }, - { - "epoch": 0.9864726748031023, - "flos": 37747513972560.0, - "grad_norm": 0.7450608298410416, - "learning_rate": 1.9026131125019495e-09, - "loss": 0.8269, - "num_input_tokens_seen": 174808710, - "step": 8204 - }, - { - "epoch": 0.9865929176937414, - "flos": 16609812830640.0, - "grad_norm": 2.7358456389791157, - "learning_rate": 1.8687948219371363e-09, - "loss": 1.0876, - "num_input_tokens_seen": 174827655, - "step": 8205 - }, - { - "epoch": 0.9867131605843804, - "flos": 15354047625480.0, - "grad_norm": 3.576132940291877, - "learning_rate": 1.835279646287491e-09, - "loss": 1.1093, - "num_input_tokens_seen": 174845385, - "step": 8206 - }, - { - "epoch": 0.9868334034750196, - "flos": 15824311851960.0, - "grad_norm": 2.717674919752527, - "learning_rate": 1.8020675906371685e-09, - "loss": 0.9999, - "num_input_tokens_seen": 174864500, - "step": 8207 - }, - { - "epoch": 0.9869536463656586, - "flos": 18366999955320.0, - "grad_norm": 3.3910015969392195, - "learning_rate": 1.7691586600243612e-09, - "loss": 0.9717, - "num_input_tokens_seen": 174883120, - "step": 8208 - }, - { - "epoch": 0.9870738892562977, - "flos": 11603744062320.0, - "grad_norm": 
6.98985945662634, - "learning_rate": 1.7365528594415202e-09, - "loss": 1.0891, - "num_input_tokens_seen": 174896910, - "step": 8209 - }, - { - "epoch": 0.9871941321469369, - "flos": 25318342032360.0, - "grad_norm": 7.6053215369195035, - "learning_rate": 1.7042501938346888e-09, - "loss": 0.9029, - "num_input_tokens_seen": 174919360, - "step": 8210 - }, - { - "epoch": 0.9873143750375759, - "flos": 15222911054520.0, - "grad_norm": 16.711763836803982, - "learning_rate": 1.6722506681043913e-09, - "loss": 0.997, - "num_input_tokens_seen": 174938040, - "step": 8211 - }, - { - "epoch": 0.987434617928215, - "flos": 11552129560680.0, - "grad_norm": 4.817136636441353, - "learning_rate": 1.640554287104745e-09, - "loss": 0.9078, - "num_input_tokens_seen": 174956035, - "step": 8212 - }, - { - "epoch": 0.9875548608188541, - "flos": 12647532180120.0, - "grad_norm": 3.174283170504076, - "learning_rate": 1.609161055644348e-09, - "loss": 1.0038, - "num_input_tokens_seen": 174971680, - "step": 8213 - }, - { - "epoch": 0.9876751037094932, - "flos": 18598667312280.0, - "grad_norm": 4.2433037457229945, - "learning_rate": 1.5780709784849467e-09, - "loss": 0.8998, - "num_input_tokens_seen": 174988420, - "step": 8214 - }, - { - "epoch": 0.9877953466001322, - "flos": 11310343888920.0, - "grad_norm": 5.222532991311487, - "learning_rate": 1.5472840603436565e-09, - "loss": 1.044, - "num_input_tokens_seen": 175005370, - "step": 8215 - }, - { - "epoch": 0.9879155894907714, - "flos": 13334556697320.0, - "grad_norm": 18.806435250624652, - "learning_rate": 1.5168003058900757e-09, - "loss": 1.0204, - "num_input_tokens_seen": 175023090, - "step": 8216 - }, - { - "epoch": 0.9880358323814105, - "flos": 15903987229080.0, - "grad_norm": 5.162099952992253, - "learning_rate": 1.4866197197491715e-09, - "loss": 1.1455, - "num_input_tokens_seen": 175042170, - "step": 8217 - }, - { - "epoch": 0.9881560752720495, - "flos": 11079351086280.0, - "grad_norm": 10.221393021754706, - "learning_rate": 1.4567423064988371e-09, - "loss": 0.982, - "num_input_tokens_seen": 175059240, - "step": 8218 - }, - { - "epoch": 0.9882763181626887, - "flos": 15269313090960.0, - "grad_norm": 5.136675120121269, - "learning_rate": 1.4271680706718913e-09, - "loss": 1.0022, - "num_input_tokens_seen": 175076635, - "step": 8219 - }, - { - "epoch": 0.9883965610533277, - "flos": 19966001465040.0, - "grad_norm": 3.8288228547191663, - "learning_rate": 1.3978970167543013e-09, - "loss": 1.0464, - "num_input_tokens_seen": 175096535, - "step": 8220 - }, - { - "epoch": 0.9885168039439668, - "flos": 9950675126160.0, - "grad_norm": 7.22407236447003, - "learning_rate": 1.3689291491867372e-09, - "loss": 0.987, - "num_input_tokens_seen": 175114570, - "step": 8221 - }, - { - "epoch": 0.988637046834606, - "flos": 18814630402440.0, - "grad_norm": 5.959963097682101, - "learning_rate": 1.3402644723636836e-09, - "loss": 0.9611, - "num_input_tokens_seen": 175136320, - "step": 8222 - }, - { - "epoch": 0.988757289725245, - "flos": 17950281908760.0, - "grad_norm": 3.304476539605882, - "learning_rate": 1.311902990633218e-09, - "loss": 1.0475, - "num_input_tokens_seen": 175155005, - "step": 8223 - }, - { - "epoch": 0.9888775326158841, - "flos": 18762188038680.0, - "grad_norm": 3.036165950737858, - "learning_rate": 1.2838447082978987e-09, - "loss": 0.9359, - "num_input_tokens_seen": 175175880, - "step": 8224 - }, - { - "epoch": 0.9889977755065231, - "flos": 17294016484320.0, - "grad_norm": 5.138859686324625, - "learning_rate": 1.2560896296143208e-09, - "loss": 1.0409, - 
"num_input_tokens_seen": 175194065, - "step": 8225 - }, - { - "epoch": 0.9891180183971623, - "flos": 13438030993080.0, - "grad_norm": 3.8256476658032965, - "learning_rate": 1.2286377587926722e-09, - "loss": 1.0297, - "num_input_tokens_seen": 175210575, - "step": 8226 - }, - { - "epoch": 0.9892382612878013, - "flos": 18631051467720.0, - "grad_norm": 6.647510504282112, - "learning_rate": 1.2014890999973992e-09, - "loss": 0.9823, - "num_input_tokens_seen": 175227215, - "step": 8227 - }, - { - "epoch": 0.9893585041784404, - "flos": 18108958108680.0, - "grad_norm": 3.87230896569076, - "learning_rate": 1.1746436573472073e-09, - "loss": 1.001, - "num_input_tokens_seen": 175248670, - "step": 8228 - }, - { - "epoch": 0.9894787470690796, - "flos": 14327619498720.0, - "grad_norm": 4.565617789407058, - "learning_rate": 1.1481014349141726e-09, - "loss": 0.9122, - "num_input_tokens_seen": 175265610, - "step": 8229 - }, - { - "epoch": 0.9895989899597186, - "flos": 17529853813440.0, - "grad_norm": 3.5650606823821924, - "learning_rate": 1.121862436724852e-09, - "loss": 1.062, - "num_input_tokens_seen": 175284170, - "step": 8230 - }, - { - "epoch": 0.9897192328503577, - "flos": 15485092211760.0, - "grad_norm": 3.8720414957944094, - "learning_rate": 1.0959266667598388e-09, - "loss": 0.9391, - "num_input_tokens_seen": 175302705, - "step": 8231 - }, - { - "epoch": 0.9898394757409968, - "flos": 15144646109160.0, - "grad_norm": 3.8332947991687236, - "learning_rate": 1.0702941289533196e-09, - "loss": 0.9633, - "num_input_tokens_seen": 175321100, - "step": 8232 - }, - { - "epoch": 0.9899597186316359, - "flos": 13145581328040.0, - "grad_norm": 4.8529367266683865, - "learning_rate": 1.0449648271939615e-09, - "loss": 1.1037, - "num_input_tokens_seen": 175337165, - "step": 8233 - }, - { - "epoch": 0.990079961522275, - "flos": 16898889724080.0, - "grad_norm": 2.506569432476232, - "learning_rate": 1.0199387653240243e-09, - "loss": 0.9628, - "num_input_tokens_seen": 175356575, - "step": 8234 - }, - { - "epoch": 0.9902002044129141, - "flos": 11604326631960.0, - "grad_norm": 3.8543147840477183, - "learning_rate": 9.952159471400267e-10, - "loss": 0.9275, - "num_input_tokens_seen": 175373335, - "step": 8235 - }, - { - "epoch": 0.9903204473035532, - "flos": 16030984489440.0, - "grad_norm": 6.467662185387168, - "learning_rate": 9.707963763923022e-10, - "loss": 1.061, - "num_input_tokens_seen": 175392105, - "step": 8236 - }, - { - "epoch": 0.9904406901941922, - "flos": 11420962328160.0, - "grad_norm": 3.946265741896263, - "learning_rate": 9.466800567854427e-10, - "loss": 1.015, - "num_input_tokens_seen": 175410425, - "step": 8237 - }, - { - "epoch": 0.9905609330848314, - "flos": 18972448078680.0, - "grad_norm": 5.30576930300673, - "learning_rate": 9.228669919778553e-10, - "loss": 0.9147, - "num_input_tokens_seen": 175429070, - "step": 8238 - }, - { - "epoch": 0.9906811759754705, - "flos": 16428472189800.0, - "grad_norm": 3.972554703486458, - "learning_rate": 8.993571855817617e-10, - "loss": 1.0212, - "num_input_tokens_seen": 175447620, - "step": 8239 - }, - { - "epoch": 0.9908014188661095, - "flos": 15694585712760.0, - "grad_norm": 3.1487535644607996, - "learning_rate": 8.761506411638642e-10, - "loss": 0.9624, - "num_input_tokens_seen": 175466805, - "step": 8240 - }, - { - "epoch": 0.9909216617567487, - "flos": 13647800448120.0, - "grad_norm": 4.557506389100769, - "learning_rate": 8.53247362244236e-10, - "loss": 0.9609, - "num_input_tokens_seen": 175485335, - "step": 8241 - }, - { - "epoch": 0.9910419046473877, - "flos": 
16794372935280.0, - "grad_norm": 4.192782130885209, - "learning_rate": 8.306473522976532e-10, - "loss": 0.9148, - "num_input_tokens_seen": 175504460, - "step": 8242 - }, - { - "epoch": 0.9911621475380268, - "flos": 16140805728120.0, - "grad_norm": 6.792765460227245, - "learning_rate": 8.083506147522623e-10, - "loss": 0.9451, - "num_input_tokens_seen": 175523575, - "step": 8243 - }, - { - "epoch": 0.991282390428666, - "flos": 9532945248120.0, - "grad_norm": 3.5860873587882436, - "learning_rate": 7.863571529906909e-10, - "loss": 1.0795, - "num_input_tokens_seen": 175538880, - "step": 8244 - }, - { - "epoch": 0.991402633319305, - "flos": 44260761347640.0, - "grad_norm": 0.7891050648303506, - "learning_rate": 7.646669703489372e-10, - "loss": 0.8801, - "num_input_tokens_seen": 175602910, - "step": 8245 - }, - { - "epoch": 0.9915228762099441, - "flos": 13308212869200.0, - "grad_norm": 5.594531468195778, - "learning_rate": 7.432800701177023e-10, - "loss": 0.8064, - "num_input_tokens_seen": 175620630, - "step": 8246 - }, - { - "epoch": 0.9916431191005832, - "flos": 47209527936240.0, - "grad_norm": 0.8039691273944197, - "learning_rate": 7.221964555415017e-10, - "loss": 0.8309, - "num_input_tokens_seen": 175680010, - "step": 8247 - }, - { - "epoch": 0.9917633619912223, - "flos": 11735401879800.0, - "grad_norm": 4.133137161973047, - "learning_rate": 7.01416129818222e-10, - "loss": 0.9718, - "num_input_tokens_seen": 175697350, - "step": 8248 - }, - { - "epoch": 0.9918836048818613, - "flos": 18081019879440.0, - "grad_norm": 4.057668541905768, - "learning_rate": 6.809390961006745e-10, - "loss": 0.8076, - "num_input_tokens_seen": 175717200, - "step": 8249 - }, - { - "epoch": 0.9920038477725005, - "flos": 17819329307160.0, - "grad_norm": 2.6924442675816342, - "learning_rate": 6.607653574948191e-10, - "loss": 0.9156, - "num_input_tokens_seen": 175737700, - "step": 8250 - }, - { - "epoch": 0.9921240906631396, - "flos": 15506407544040.0, - "grad_norm": 9.469351469268377, - "learning_rate": 6.408949170613187e-10, - "loss": 1.0447, - "num_input_tokens_seen": 175756685, - "step": 8251 - }, - { - "epoch": 0.9922443335537786, - "flos": 17688039428400.0, - "grad_norm": 3.672422847054233, - "learning_rate": 6.213277778144288e-10, - "loss": 1.0444, - "num_input_tokens_seen": 175778050, - "step": 8252 - }, - { - "epoch": 0.9923645764444178, - "flos": 15350981469480.0, - "grad_norm": 4.397638007726087, - "learning_rate": 6.020639427224416e-10, - "loss": 0.8999, - "num_input_tokens_seen": 175795415, - "step": 8253 - }, - { - "epoch": 0.9924848193350568, - "flos": 17785780012440.0, - "grad_norm": 5.043906457830744, - "learning_rate": 5.831034147076864e-10, - "loss": 0.9413, - "num_input_tokens_seen": 175812385, - "step": 8254 - }, - { - "epoch": 0.9926050622256959, - "flos": 49351012742400.0, - "grad_norm": 0.7042260767536733, - "learning_rate": 5.644461966463065e-10, - "loss": 0.804, - "num_input_tokens_seen": 175879715, - "step": 8255 - }, - { - "epoch": 0.9927253051163349, - "flos": 14856029139120.0, - "grad_norm": 5.663376170702323, - "learning_rate": 5.460922913687049e-10, - "loss": 0.9779, - "num_input_tokens_seen": 175898525, - "step": 8256 - }, - { - "epoch": 0.9928455480069741, - "flos": 15850808987880.0, - "grad_norm": 4.733393098337457, - "learning_rate": 5.280417016593208e-10, - "loss": 0.9689, - "num_input_tokens_seen": 175918035, - "step": 8257 - }, - { - "epoch": 0.9929657908976132, - "flos": 12311961265560.0, - "grad_norm": 5.375677199175137, - "learning_rate": 5.102944302559642e-10, - "loss": 0.9753, - 
"num_input_tokens_seen": 175935250, - "step": 8258 - }, - { - "epoch": 0.9930860337882522, - "flos": 16114400576880.0, - "grad_norm": 3.711910337121585, - "learning_rate": 4.9285047985137e-10, - "loss": 1.0066, - "num_input_tokens_seen": 175954390, - "step": 8259 - }, - { - "epoch": 0.9932062766788914, - "flos": 20047853812920.0, - "grad_norm": 2.9790919012189274, - "learning_rate": 4.757098530916436e-10, - "loss": 0.9782, - "num_input_tokens_seen": 175974555, - "step": 8260 - }, - { - "epoch": 0.9933265195695304, - "flos": 14304679103760.0, - "grad_norm": 8.042164314433743, - "learning_rate": 4.5887255257670563e-10, - "loss": 1.0095, - "num_input_tokens_seen": 175991315, - "step": 8261 - }, - { - "epoch": 0.9934467624601695, - "flos": 15171143245080.0, - "grad_norm": 5.45985657304455, - "learning_rate": 4.4233858086117906e-10, - "loss": 0.9922, - "num_input_tokens_seen": 176009560, - "step": 8262 - }, - { - "epoch": 0.9935670053508087, - "flos": 14016859334280.0, - "grad_norm": 5.767112808756717, - "learning_rate": 4.261079404528356e-10, - "loss": 0.89, - "num_input_tokens_seen": 176028760, - "step": 8263 - }, - { - "epoch": 0.9936872482414477, - "flos": 15613959827280.0, - "grad_norm": 3.3825814606527453, - "learning_rate": 4.1018063381437205e-10, - "loss": 0.9082, - "num_input_tokens_seen": 176048865, - "step": 8264 - }, - { - "epoch": 0.9938074911320868, - "flos": 49994584281000.0, - "grad_norm": 0.9197175594787576, - "learning_rate": 3.9455666336141167e-10, - "loss": 0.889, - "num_input_tokens_seen": 176112365, - "step": 8265 - }, - { - "epoch": 0.9939277340227259, - "flos": 10659045637200.0, - "grad_norm": 7.054350886124148, - "learning_rate": 3.7923603146450267e-10, - "loss": 1.0365, - "num_input_tokens_seen": 176128145, - "step": 8266 - }, - { - "epoch": 0.994047976913365, - "flos": 12548043887160.0, - "grad_norm": 5.654709748527142, - "learning_rate": 3.642187404473418e-10, - "loss": 1.0161, - "num_input_tokens_seen": 176146025, - "step": 8267 - }, - { - "epoch": 0.994168219804004, - "flos": 13596707193000.0, - "grad_norm": 8.262858225202988, - "learning_rate": 3.495047925885508e-10, - "loss": 1.0741, - "num_input_tokens_seen": 176164080, - "step": 8268 - }, - { - "epoch": 0.9942884626946432, - "flos": 12647194902960.0, - "grad_norm": 5.4850355657991, - "learning_rate": 3.350941901199e-10, - "loss": 1.0612, - "num_input_tokens_seen": 176180720, - "step": 8269 - }, - { - "epoch": 0.9944087055852823, - "flos": 13325848814280.0, - "grad_norm": 6.784156563157833, - "learning_rate": 3.2098693522764066e-10, - "loss": 1.059, - "num_input_tokens_seen": 176193640, - "step": 8270 - }, - { - "epoch": 0.9945289484759213, - "flos": 14908747456920.0, - "grad_norm": 3.897364405633984, - "learning_rate": 3.071830300516165e-10, - "loss": 1.0449, - "num_input_tokens_seen": 176211190, - "step": 8271 - }, - { - "epoch": 0.9946491913665605, - "flos": 10424036170200.0, - "grad_norm": 3.8902646552817925, - "learning_rate": 2.9368247668615234e-10, - "loss": 0.9218, - "num_input_tokens_seen": 176229500, - "step": 8272 - }, - { - "epoch": 0.9947694342571995, - "flos": 8923143183240.0, - "grad_norm": 7.405507754703959, - "learning_rate": 2.804852771789434e-10, - "loss": 0.8307, - "num_input_tokens_seen": 176242520, - "step": 8273 - }, - { - "epoch": 0.9948896771478386, - "flos": 13360931187000.0, - "grad_norm": 3.908585727461535, - "learning_rate": 2.675914335321661e-10, - "loss": 0.7851, - "num_input_tokens_seen": 176260995, - "step": 8274 - }, - { - "epoch": 0.9950099200384778, - "flos": 17713831348440.0, 
- "grad_norm": 5.994323906655395, - "learning_rate": 2.550009477018111e-10, - "loss": 1.0045, - "num_input_tokens_seen": 176279485, - "step": 8275 - }, - { - "epoch": 0.9951301629291168, - "flos": 16875550728840.0, - "grad_norm": 4.298331384933245, - "learning_rate": 2.4271382159790634e-10, - "loss": 0.8576, - "num_input_tokens_seen": 176296635, - "step": 8276 - }, - { - "epoch": 0.9952504058197559, - "flos": 15799777055880.0, - "grad_norm": 2.7030890137145787, - "learning_rate": 2.3073005708429406e-10, - "loss": 1.0812, - "num_input_tokens_seen": 176316000, - "step": 8277 - }, - { - "epoch": 0.995370648710395, - "flos": 15061935237600.0, - "grad_norm": 4.144907913565569, - "learning_rate": 2.190496559788535e-10, - "loss": 0.9487, - "num_input_tokens_seen": 176334005, - "step": 8278 - }, - { - "epoch": 0.9954908916010341, - "flos": 10502975669880.0, - "grad_norm": 4.6054339613826025, - "learning_rate": 2.0767262005372265e-10, - "loss": 0.9819, - "num_input_tokens_seen": 176351240, - "step": 8279 - }, - { - "epoch": 0.9956111344916732, - "flos": 13623112344240.0, - "grad_norm": 4.444848436961993, - "learning_rate": 1.965989510346322e-10, - "loss": 0.9749, - "num_input_tokens_seen": 176370080, - "step": 8280 - }, - { - "epoch": 0.9957313773823123, - "flos": 14225892911880.0, - "grad_norm": 22.830187652792493, - "learning_rate": 1.8582865060134955e-10, - "loss": 0.928, - "num_input_tokens_seen": 176387990, - "step": 8281 - }, - { - "epoch": 0.9958516202729514, - "flos": 41135749485240.0, - "grad_norm": 0.7829577663353872, - "learning_rate": 1.7536172038790098e-10, - "loss": 0.8175, - "num_input_tokens_seen": 176448020, - "step": 8282 - }, - { - "epoch": 0.9959718631635904, - "flos": 19785611332560.0, - "grad_norm": 8.731615207163939, - "learning_rate": 1.651981619819054e-10, - "loss": 0.9125, - "num_input_tokens_seen": 176464890, - "step": 8283 - }, - { - "epoch": 0.9960921060542296, - "flos": 17084124383040.0, - "grad_norm": 3.8278571833134953, - "learning_rate": 1.5533797692546257e-10, - "loss": 0.9162, - "num_input_tokens_seen": 176483345, - "step": 8284 - }, - { - "epoch": 0.9962123489448687, - "flos": 13255954474800.0, - "grad_norm": 6.672603665658187, - "learning_rate": 1.4578116671404296e-10, - "loss": 1.0512, - "num_input_tokens_seen": 176501345, - "step": 8285 - }, - { - "epoch": 0.9963325918355077, - "flos": 14199825037800.0, - "grad_norm": 4.750096370321738, - "learning_rate": 1.3652773279759777e-10, - "loss": 0.9436, - "num_input_tokens_seen": 176517715, - "step": 8286 - }, - { - "epoch": 0.9964528347261468, - "flos": 23612830732440.0, - "grad_norm": 4.838603971618781, - "learning_rate": 1.2757767657989305e-10, - "loss": 0.852, - "num_input_tokens_seen": 176541225, - "step": 8287 - }, - { - "epoch": 0.9965730776167859, - "flos": 16426356542160.0, - "grad_norm": 5.27152616548598, - "learning_rate": 1.1893099941850948e-10, - "loss": 1.092, - "num_input_tokens_seen": 176559840, - "step": 8288 - }, - { - "epoch": 0.996693320507425, - "flos": 16323556800720.0, - "grad_norm": 3.8025425185134405, - "learning_rate": 1.105877026252866e-10, - "loss": 0.9918, - "num_input_tokens_seen": 176577890, - "step": 8289 - }, - { - "epoch": 0.996813563398064, - "flos": 9321182791680.0, - "grad_norm": 3.6986800287917987, - "learning_rate": 1.0254778746565663e-10, - "loss": 0.9475, - "num_input_tokens_seen": 176592885, - "step": 8290 - }, - { - "epoch": 0.9969338062887032, - "flos": 10345679240160.0, - "grad_norm": 3.096712331801289, - "learning_rate": 9.481125515953259e-11, - "loss": 0.958, - 
"num_input_tokens_seen": 176610665, - "step": 8291 - }, - { - "epoch": 0.9970540491793423, - "flos": 18313821714120.0, - "grad_norm": 10.04786518254313, - "learning_rate": 8.737810688064228e-11, - "loss": 1.0303, - "num_input_tokens_seen": 176630220, - "step": 8292 - }, - { - "epoch": 0.9971742920699813, - "flos": 15248733636120.0, - "grad_norm": 3.4689265025829337, - "learning_rate": 8.024834375608414e-11, - "loss": 1.0147, - "num_input_tokens_seen": 176648530, - "step": 8293 - }, - { - "epoch": 0.9972945349606205, - "flos": 51718804680600.0, - "grad_norm": 0.8248555765028567, - "learning_rate": 7.342196686788149e-11, - "loss": 0.891, - "num_input_tokens_seen": 176701415, - "step": 8294 - }, - { - "epoch": 0.9974147778512595, - "flos": 13960338983040.0, - "grad_norm": 4.777289252231573, - "learning_rate": 6.689897725142834e-11, - "loss": 0.9036, - "num_input_tokens_seen": 176720610, - "step": 8295 - }, - { - "epoch": 0.9975350207418986, - "flos": 11289212526000.0, - "grad_norm": 4.661327691263178, - "learning_rate": 6.067937589615545e-11, - "loss": 1.0972, - "num_input_tokens_seen": 176738405, - "step": 8296 - }, - { - "epoch": 0.9976552636325378, - "flos": 42916766190120.0, - "grad_norm": 0.763684909250779, - "learning_rate": 5.476316374575241e-11, - "loss": 0.8111, - "num_input_tokens_seen": 176801610, - "step": 8297 - }, - { - "epoch": 0.9977755065231768, - "flos": 15979553957160.0, - "grad_norm": 3.7649817966056585, - "learning_rate": 4.9150341697723476e-11, - "loss": 0.9545, - "num_input_tokens_seen": 176821220, - "step": 8298 - }, - { - "epoch": 0.9978957494138159, - "flos": 18525860124600.0, - "grad_norm": 3.7359412831056362, - "learning_rate": 4.384091060338768e-11, - "loss": 0.8831, - "num_input_tokens_seen": 176841410, - "step": 8299 - }, - { - "epoch": 0.998015992304455, - "flos": 16087872779400.0, - "grad_norm": 3.4961581261531074, - "learning_rate": 3.883487126810081e-11, - "loss": 0.9531, - "num_input_tokens_seen": 176860390, - "step": 8300 - }, - { - "epoch": 0.9981362351950941, - "flos": 12915385725960.0, - "grad_norm": 3.0755939437268784, - "learning_rate": 3.41322244516995e-11, - "loss": 1.0279, - "num_input_tokens_seen": 176878055, - "step": 8301 - }, - { - "epoch": 0.9982564780857331, - "flos": 23875686444000.0, - "grad_norm": 2.463591195402347, - "learning_rate": 2.9732970866946925e-11, - "loss": 0.8524, - "num_input_tokens_seen": 176897655, - "step": 8302 - }, - { - "epoch": 0.9983767209763723, - "flos": 10974956943720.0, - "grad_norm": 5.332439208580629, - "learning_rate": 2.563711118175327e-11, - "loss": 1.0041, - "num_input_tokens_seen": 176914260, - "step": 8303 - }, - { - "epoch": 0.9984969638670114, - "flos": 14173695840600.0, - "grad_norm": 3.6041369721764163, - "learning_rate": 2.184464601717728e-11, - "loss": 1.0556, - "num_input_tokens_seen": 176932295, - "step": 8304 - }, - { - "epoch": 0.9986172067576504, - "flos": 14462159502840.0, - "grad_norm": 6.60929448504815, - "learning_rate": 1.8355575948758585e-11, - "loss": 0.9953, - "num_input_tokens_seen": 176950000, - "step": 8305 - }, - { - "epoch": 0.9987374496482896, - "flos": 16870675540800.0, - "grad_norm": 5.840691893364423, - "learning_rate": 1.5169901505407424e-11, - "loss": 0.9595, - "num_input_tokens_seen": 176966785, - "step": 8306 - }, - { - "epoch": 0.9988576925389286, - "flos": 17816907043920.0, - "grad_norm": 3.622337810710755, - "learning_rate": 1.228762317073695e-11, - "loss": 0.9667, - "num_input_tokens_seen": 176985335, - "step": 8307 - }, - { - "epoch": 0.9989779354295677, - "flos": 
22302967439280.0, - "grad_norm": 5.327469860977996, - "learning_rate": 9.70874138195299e-12, - "loss": 1.0077, - "num_input_tokens_seen": 177006965, - "step": 8308 - }, - { - "epoch": 0.9990981783202069, - "flos": 13885569455520.0, - "grad_norm": 6.034658467031974, - "learning_rate": 7.433256530076093e-12, - "loss": 0.9724, - "num_input_tokens_seen": 177026640, - "step": 8309 - }, - { - "epoch": 0.9992184212108459, - "flos": 12179322278160.0, - "grad_norm": 14.394504748957965, - "learning_rate": 5.46116896038562e-12, - "loss": 0.9794, - "num_input_tokens_seen": 177040770, - "step": 8310 - }, - { - "epoch": 0.999338664101485, - "flos": 33235391251200.0, - "grad_norm": 3.9221609550880476, - "learning_rate": 3.792478972197699e-12, - "loss": 0.8502, - "num_input_tokens_seen": 177061075, - "step": 8311 - }, - { - "epoch": 0.9994589069921241, - "flos": 10712438509320.0, - "grad_norm": 4.945047910822709, - "learning_rate": 2.4271868181990895e-12, - "loss": 0.9045, - "num_input_tokens_seen": 177077960, - "step": 8312 - }, - { - "epoch": 0.9995791498827632, - "flos": 8824636060200.0, - "grad_norm": 4.5428092056034695, - "learning_rate": 1.3652927060014973e-12, - "loss": 1.029, - "num_input_tokens_seen": 177093275, - "step": 8313 - }, - { - "epoch": 0.9996993927734023, - "flos": 13649701464840.0, - "grad_norm": 16.49611826232491, - "learning_rate": 6.067967965872612e-13, - "loss": 0.8703, - "num_input_tokens_seen": 177112605, - "step": 8314 - }, - { - "epoch": 0.9998196356640414, - "flos": 45061236263760.0, - "grad_norm": 3.420838360118696, - "learning_rate": 1.5169920497548615e-13, - "loss": 0.9997, - "num_input_tokens_seen": 177136945, - "step": 8315 - }, - { - "epoch": 0.9999398785546805, - "flos": 36421455827640.0, - "grad_norm": 3.9955062658103033, - "learning_rate": 0.0, - "loss": 0.7991, - "num_input_tokens_seen": 177185545, - "step": 8316 - } - ], - "logging_steps": 1.0, - "max_steps": 8316, - "num_input_tokens_seen": 177185545, - "num_train_epochs": 1, - "save_steps": 832, - "stateful_callbacks": { - "TrainerControl": { - "args": { - "should_epoch_stop": false, - "should_evaluate": false, - "should_log": false, - "should_save": true, - "should_training_stop": true - }, - "attributes": {} - } - }, - "total_flos": 4.96810663484588e+17, - "train_batch_size": 5, - "trial_name": null, - "trial_params": null -} diff --git a/sft/smoe_perturbed/training_args.bin b/sft/smoe_perturbed/training_args.bin deleted file mode 100644 index af8b6f0399c2d482558722fe832902363aef3b81..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/training_args.bin +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:eb10e88f97b57f0886947860356d58f6ab1a3a01d6bcb9b6ffd0e74941ee5b4b -size 8120 diff --git a/sft/smoe_perturbed/zero_to_fp32.py b/sft/smoe_perturbed/zero_to_fp32.py deleted file mode 100644 index 24cc342e78d1a006c782b3a4cd68d9ce786d8fd8..0000000000000000000000000000000000000000 --- a/sft/smoe_perturbed/zero_to_fp32.py +++ /dev/null @@ -1,604 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) Microsoft Corporation. -# SPDX-License-Identifier: Apache-2.0 - -# DeepSpeed Team - -# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets -# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in -# the future. Once extracted, the weights don't require DeepSpeed and can be used in any -# application. -# -# example: python zero_to_fp32.py . 
pytorch_model.bin - -import argparse -import torch -import glob -import math -import os -import re -from collections import OrderedDict -from dataclasses import dataclass - -# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with -# DeepSpeed data structures it has to be available in the current python environment. -from deepspeed.utils import logger -from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS, - FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES, - FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS) - - -@dataclass -class zero_model_state: - buffers: dict() - param_shapes: dict() - shared_params: list - ds_version: int - frozen_param_shapes: dict() - frozen_param_fragments: dict() - - -debug = 0 - -# load to cpu -device = torch.device('cpu') - - -def atoi(text): - return int(text) if text.isdigit() else text - - -def natural_keys(text): - ''' - alist.sort(key=natural_keys) sorts in human order - http://nedbatchelder.com/blog/200712/human_sorting.html - (See Toothy's implementation in the comments) - ''' - return [atoi(c) for c in re.split(r'(\d+)', text)] - - -def get_model_state_file(checkpoint_dir, zero_stage): - if not os.path.isdir(checkpoint_dir): - raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist") - - # there should be only one file - if zero_stage <= 2: - file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt") - elif zero_stage == 3: - file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt") - - if not os.path.exists(file): - raise FileNotFoundError(f"can't find model states file at '{file}'") - - return file - - -def get_checkpoint_files(checkpoint_dir, glob_pattern): - # XXX: need to test that this simple glob rule works for multi-node setup too - ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys) - - if len(ckpt_files) == 0: - raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'") - - return ckpt_files - - -def get_optim_files(checkpoint_dir): - return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt") - - -def get_model_state_files(checkpoint_dir): - return get_checkpoint_files(checkpoint_dir, "*_model_states.pt") - - -def parse_model_states(files): - zero_model_states = [] - for file in files: - state_dict = torch.load(file, map_location=device) - - if BUFFER_NAMES not in state_dict: - raise ValueError(f"{file} is not a model state checkpoint") - buffer_names = state_dict[BUFFER_NAMES] - if debug: - print("Found buffers:", buffer_names) - - # recover just the buffers while restoring them to fp32 if they were saved in fp16 - buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names} - param_shapes = state_dict[PARAM_SHAPES] - - # collect parameters that are included in param_shapes - param_names = [] - for s in param_shapes: - for name in s.keys(): - param_names.append(name) - - # update with frozen parameters - frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None) - if frozen_param_shapes is not None: - if debug: - print(f"Found frozen_param_shapes: {frozen_param_shapes}") - param_names += list(frozen_param_shapes.keys()) - - # handle shared params - shared_params = [[k, v] for k, v in state_dict["shared_params"].items()] - - ds_version = state_dict.get(DS_VERSION, None) - - frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None) - - z_model_state = 
zero_model_state(buffers=buffers, - param_shapes=param_shapes, - shared_params=shared_params, - ds_version=ds_version, - frozen_param_shapes=frozen_param_shapes, - frozen_param_fragments=frozen_param_fragments) - zero_model_states.append(z_model_state) - - return zero_model_states - - -def parse_optim_states(files, ds_checkpoint_dir): - - total_files = len(files) - state_dicts = [] - for f in files: - state_dict = torch.load(f, map_location=device) - # immediately discard the two potentially huge optimizer states, as we only care about the fp32 master weights - # and also handle the case where it was already removed by another helper script - state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None) - state_dicts.append(state_dict) - - if ZERO_STAGE not in state_dicts[0][OPTIMIZER_STATE_DICT]: - raise ValueError(f"{files[0]} is not a zero checkpoint") - zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE] - world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT] - - # For ZeRO-2, each param group can have a different partition_count, as data parallelism for expert - # parameters can differ from data parallelism for non-expert parameters. So we can just - # use the max of the partition_count to get the dp world_size. - - if type(world_size) is list: - world_size = max(world_size) - - if world_size != total_files: - raise ValueError( - f"Expected {world_size} '*_optim_states.pt' files under '{ds_checkpoint_dir}' but found {total_files} files. " - "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes." - ) - - # the groups are named differently in each stage - if zero_stage <= 2: - fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS - elif zero_stage == 3: - fp32_groups_key = FP32_FLAT_GROUPS - else: - raise ValueError(f"unknown zero stage {zero_stage}") - - if zero_stage <= 2: - fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))] - elif zero_stage == 3: - # if there is more than one param group, there will be multiple flattened tensors - one - # flattened tensor per group - for simplicity merge them into a single tensor - # - # XXX: could make the script more memory efficient for when there are multiple groups - it - # will require matching the sub-lists of param_shapes for each param group flattened tensor - - fp32_flat_groups = [ - torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts)) - ] - - return zero_stage, world_size, fp32_flat_groups - - -def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters): - """ - Returns the fp32 state_dict reconstructed from a ds checkpoint - - Args: - - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are) - - """ - print(f"Processing zero checkpoint '{ds_checkpoint_dir}'") - - optim_files = get_optim_files(ds_checkpoint_dir) - zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir) - print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}") - - model_files = get_model_state_files(ds_checkpoint_dir) - - zero_model_states = parse_model_states(model_files) - print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}') - - if zero_stage <= 2: - return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters) - elif zero_stage == 3: - return
_get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters) - - -def _zero2_merge_frozen_params(state_dict, zero_model_states): - if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: - return - - frozen_param_shapes = zero_model_states[0].frozen_param_shapes - frozen_param_fragments = zero_model_states[0].frozen_param_fragments - - if debug: - num_elem = sum(s.numel() for s in frozen_param_shapes.values()) - print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') - - wanted_params = len(frozen_param_shapes) - wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) - avail_numel = sum([p.numel() for p in frozen_param_fragments.values()]) - print(f'Frozen params: Have {avail_numel} numels to process.') - print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') - - total_params = 0 - total_numel = 0 - for name, shape in frozen_param_shapes.items(): - total_params += 1 - unpartitioned_numel = shape.numel() - total_numel += unpartitioned_numel - - state_dict[name] = frozen_param_fragments[name] - - if debug: - print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ") - - print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") - - -def _has_callable(obj, fn): - attr = getattr(obj, fn, None) - return callable(attr) - - -def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): - param_shapes = zero_model_states[0].param_shapes - - # Reconstruction protocol: - # - # XXX: document this - - if debug: - for i in range(world_size): - for j in range(len(fp32_flat_groups[0])): - print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}") - - # XXX: memory usage doubles here (zero2) - num_param_groups = len(fp32_flat_groups[0]) - merged_single_partition_of_fp32_groups = [] - for i in range(num_param_groups): - merged_partitions = [sd[i] for sd in fp32_flat_groups] - full_single_fp32_vector = torch.cat(merged_partitions, 0) - merged_single_partition_of_fp32_groups.append(full_single_fp32_vector) - avail_numel = sum( - [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups]) - - if debug: - wanted_params = sum([len(shapes) for shapes in param_shapes]) - wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes]) - # not asserting if there is a mismatch due to possible padding - print(f"Have {avail_numel} numels to process.") - print(f"Need {wanted_numel} numels in {wanted_params} params.") - - # params - # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support - # out-of-core computing solution - total_numel = 0 - total_params = 0 - for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups): - offset = 0 - avail_numel = full_single_fp32_vector.numel() - for name, shape in shapes.items(): - - unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape) - total_numel += unpartitioned_numel - total_params += 1 - - if debug: - print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ") - state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape) - offset += unpartitioned_numel - - # Z2 started to align to 2*world_size to improve nccl performance. 
Therefore both offset and - # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex - # paddings performed in the code it's almost impossible to predict the exact numbers w/o the - # live optimizer object, so we are checking that the numbers are within the right range - align_to = 2 * world_size - - def zero2_align(x): - return align_to * math.ceil(x / align_to) - - if debug: - print(f"original offset={offset}, avail_numel={avail_numel}") - - offset = zero2_align(offset) - avail_numel = zero2_align(avail_numel) - - if debug: - print(f"aligned offset={offset}, avail_numel={avail_numel}") - - # Sanity check - if offset != avail_numel: - raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong") - - print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements") - - -def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters): - state_dict = OrderedDict() - - # buffers - buffers = zero_model_states[0].buffers - state_dict.update(buffers) - if debug: - print(f"added {len(buffers)} buffers") - - if not exclude_frozen_parameters: - _zero2_merge_frozen_params(state_dict, zero_model_states) - - _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states) - - # recover shared parameters - for pair in zero_model_states[0].shared_params: - if pair[1] in state_dict: - state_dict[pair[0]] = state_dict[pair[1]] - - return state_dict - - -def zero3_partitioned_param_info(unpartitioned_numel, world_size): - remainder = unpartitioned_numel % world_size - padding_numel = (world_size - remainder) if remainder else 0 - partitioned_numel = math.ceil(unpartitioned_numel / world_size) - return partitioned_numel, padding_numel - - -def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states): - if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0: - return - - if debug: - for i in range(world_size): - num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values()) - print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}') - - frozen_param_shapes = zero_model_states[0].frozen_param_shapes - wanted_params = len(frozen_param_shapes) - wanted_numel = sum(s.numel() for s in frozen_param_shapes.values()) - avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size - print(f'Frozen params: Have {avail_numel} numels to process.') - print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params') - - total_params = 0 - total_numel = 0 - for name, shape in zero_model_states[0].frozen_param_shapes.items(): - total_params += 1 - unpartitioned_numel = shape.numel() - total_numel += unpartitioned_numel - - param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states) - state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape) - - partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size) - - if debug: - print( - f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}" - ) - - print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements") - - -def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states): - param_shapes 
= zero_model_states[0].param_shapes - avail_numel = fp32_flat_groups[0].numel() * world_size - # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each - # param, re-consolidating each param, while dealing with padding if any - - # merge list of dicts, preserving order - param_shapes = {k: v for d in param_shapes for k, v in d.items()} - - if debug: - for i in range(world_size): - print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}") - - wanted_params = len(param_shapes) - wanted_numel = sum(shape.numel() for shape in param_shapes.values()) - # not asserting if there is a mismatch due to possible padding - avail_numel = fp32_flat_groups[0].numel() * world_size - print(f"Trainable params: Have {avail_numel} numels to process.") - print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.") - - # params - # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support - # out-of-core computing solution - offset = 0 - total_numel = 0 - total_params = 0 - for name, shape in param_shapes.items(): - - unpartitioned_numel = shape.numel() - total_numel += unpartitioned_numel - total_params += 1 - - partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size) - - if debug: - print( - f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}" - ) - - # XXX: memory usage doubles here - state_dict[name] = torch.cat( - tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)), - 0).narrow(0, 0, unpartitioned_numel).view(shape) - offset += partitioned_numel - - offset *= world_size - - # Sanity check - if offset != avail_numel: - raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong") - - print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements") - - -def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states, - exclude_frozen_parameters): - state_dict = OrderedDict() - - # buffers - buffers = zero_model_states[0].buffers - state_dict.update(buffers) - if debug: - print(f"added {len(buffers)} buffers") - - if not exclude_frozen_parameters: - _zero3_merge_frozen_params(state_dict, world_size, zero_model_states) - - _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states) - - # recover shared parameters - for pair in zero_model_states[0].shared_params: - if pair[1] in state_dict: - state_dict[pair[0]] = state_dict[pair[1]] - - return state_dict - - -def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False): - """ - Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with - ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example - via a model hub. - - Args: - - ``checkpoint_dir``: path to the desired checkpoint folder - - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. 
e.g., ``global_step14`` - - ``exclude_frozen_parameters``: exclude frozen parameters - - Returns: - - pytorch ``state_dict`` - - Note: this approach may not work if your application doesn't have sufficient free CPU memory and - you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with - the checkpoint. - - A typical usage might be :: - - from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint - # do the training and checkpoint saving - state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu - model = model.cpu() # move to cpu - model.load_state_dict(state_dict) - # submit to model hub or save the model to share with others - - In this example, the ``model`` will no longer be usable in the deepspeed context of the same - application; i.e., you will need to re-initialize the deepspeed engine, since - ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it. - - If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead. - - """ - if tag is None: - latest_path = os.path.join(checkpoint_dir, 'latest') - if os.path.isfile(latest_path): - with open(latest_path, 'r') as fd: - tag = fd.read().strip() - else: - raise ValueError(f"Unable to find 'latest' file at {latest_path}") - - ds_checkpoint_dir = os.path.join(checkpoint_dir, tag) - - if not os.path.isdir(ds_checkpoint_dir): - raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist") - - return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters) - - -def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False): - """ - Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be - loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed. - - Args: - - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``) - - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin) - - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14`` - - ``exclude_frozen_parameters``: exclude frozen parameters - """ - - state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters) - print(f"Saving fp32 state dict to {output_file}") - torch.save(state_dict, output_file) - - -def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None): - """ - 1. Put the provided model on cpu - 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` - 3. Load it into the provided model - - Args: - - ``model``: the model object to update - - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``) - - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14`` - - Returns: - - ``model``: modified model - - Make sure you have plenty of CPU memory available before you call this function. If you don't - have enough, use the ``zero_to_fp32.py`` utility to do the conversion. You will find it - conveniently placed for you in the checkpoint folder.
- - A typical usage might be :: - - from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint - model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir) - # submit to model hub or save the model to share with others - - Note that once this has been run, the ``model`` will no longer be usable in the deepspeed context - of the same application; i.e., you will need to re-initialize the deepspeed engine, since - ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it. - - """ - logger.info("Extracting fp32 weights") - state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag) - - logger.info("Overwriting model with fp32 weights") - model = model.cpu() - model.load_state_dict(state_dict, strict=False) - - return model - - -if __name__ == "__main__": - - parser = argparse.ArgumentParser() - parser.add_argument("checkpoint_dir", - type=str, - help="path to the desired checkpoint folder, e.g., path/checkpoint-12") - parser.add_argument( - "output_file", - type=str, - help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)") - parser.add_argument("-t", - "--tag", - type=str, - default=None, - help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1") - parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters") - parser.add_argument("-d", "--debug", action='store_true', help="enable debug") - args = parser.parse_args() - - debug = args.debug - - convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir, - args.output_file, - tag=args.tag, - exclude_frozen_parameters=args.exclude_frozen_parameters)
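For reference, here is a minimal sketch of how the deleted `zero_to_fp32.py` was meant to be driven against a checkpoint folder like the ones removed in this change. The `sft/smoe_perturbed` path and the output filename are illustrative placeholders, not paths the script hard-codes; `convert_zero_checkpoint_to_fp32_state_dict` and the CLI form come directly from the script above.

```python
# Sketch only: consolidate a DeepSpeed ZeRO checkpoint into a plain fp32
# state_dict that loads without DeepSpeed. Assumes zero_to_fp32.py is importable
# (per its header comment, it is copied into the top-level checkpoint dir) and
# that a 'latest' tag file is present, so tag=None resolves to the most recent
# global_step folder.
from zero_to_fp32 import convert_zero_checkpoint_to_fp32_state_dict

convert_zero_checkpoint_to_fp32_state_dict(
    checkpoint_dir="sft/smoe_perturbed",                      # illustrative path
    output_file="sft/smoe_perturbed/pytorch_model_fp32.bin",  # illustrative path
)
```

The equivalent command-line call, matching the argparse block at the bottom of the script, would be `python zero_to_fp32.py sft/smoe_perturbed sft/smoe_perturbed/pytorch_model_fp32.bin`.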