sedrickkeh committed on
Commit 48db821
1 Parent(s): 4747153

Update config.json

Files changed (1)
  1. config.json +2 -4
config.json CHANGED
@@ -4,7 +4,6 @@
   ],
   "model_type": "openlm",
   "params": null,
-  "params_args_dict": {
   "apply_qk_norm": true,
   "attn_activation": null,
   "attn_name": "auto",
@@ -13,7 +12,7 @@
   "dim": 2048,
   "ffn_type": "swiglu_torch",
   "model": "open_lm_1b_swiglutorch",
-  "model_norm": "gain_only_lp_layer_norm",
+  "norm_type": "gain_only_lp_layer_norm",
   "moe_capacity_factor": 1.25,
   "moe_expert_model_parallelism": false,
   "moe_freq": 0,
@@ -29,8 +28,7 @@
   "qk_norm": true,
   "seq_len": 2048,
   "vocab_size": 50432,
-  "weight_tying": false
-  },
+  "weight_tying": false,
   "torch_dtype": "float32",
   "transformers_version": "4.40.2"
 }
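
The net effect of the commit is that the keys previously nested under "params_args_dict" now sit at the top level of config.json, and "model_norm" is renamed to "norm_type". A minimal before/after sketch of the affected structure follows; fields outside the diff context are elided and the exact whitespace may differ from the file itself.

Before:
{
  "model_type": "openlm",
  "params": null,
  "params_args_dict": {
    "apply_qk_norm": true,
    "model_norm": "gain_only_lp_layer_norm",
    "weight_tying": false
  },
  "torch_dtype": "float32",
  "transformers_version": "4.40.2"
}

After:
{
  "model_type": "openlm",
  "params": null,
  "apply_qk_norm": true,
  "norm_type": "gain_only_lp_layer_norm",
  "weight_tying": false,
  "torch_dtype": "float32",
  "transformers_version": "4.40.2"
}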