{
  "attn_cfg": {
    "causal": true,
    "head_dim": 64,
    "num_heads": 48,
    "out_proj_bias": true,
    "qkv_proj_bias": true,
    "sliding_window_length": 2048
  },
  "attn_layer_idx": [
    6,
    18,
    30,
    42
  ],
  "d_model": 2048,
  "eos_token_id": 50279,
  "mlp_cfg": {},
  "mlp_layer_idx": [
    2,
    5,
    8,
    11,
    14,
    17,
    20,
    23,
    26,
    29,
    32,
    35,
    38,
    41,
    44,
    47
  ],
  "model_type": "rene",
  "n_layer": 48,
  "pad_token_id": 1,
  "pad_vocab_size_multiple": 16,
  "residual_in_fp32": true,
  "rms_norm": true,
  "ssm_cfg": {
    "norm_before_gate": true
  },
  "tie_word_embeddings": true,
  "vocab_size": 50280
}
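
A minimal sketch of how this layer layout might be read, assuming (as is common in hybrid SSM architectures) that any layer index not listed in attn_layer_idx or mlp_layer_idx is an SSM block; the file path, helper name, and that assumption are illustrative, not part of any published Rene API:

import json

# Load the config shown above (path is illustrative).
with open("config.json") as f:
    cfg = json.load(f)

attn_layers = set(cfg["attn_layer_idx"])  # {6, 18, 30, 42}
mlp_layers = set(cfg["mlp_layer_idx"])    # every third layer, starting at 2

# Assumption: any layer not claimed by attention or MLP is an SSM block.
def block_type(i: int) -> str:
    if i in attn_layers:
        return "attention"
    if i in mlp_layers:
        return "mlp"
    return "ssm"

schedule = [block_type(i) for i in range(cfg["n_layer"])]
print(schedule)
# Under that assumption, the 48 layers split into 28 ssm, 16 mlp, and 4 attention blocks.
print({t: schedule.count(t) for t in ("ssm", "mlp", "attention")})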