system (HF staff) committed
Commit 2f69d1e
Parent: 0147cd6

Update config.json

Files changed (1):
  config.json (+10, -1)
config.json CHANGED
@@ -3,15 +3,24 @@
     "AlbertForMaskedLM"
   ],
   "attention_probs_dropout_prob": 0.1,
+  "bos_token_id": 2,
+  "classifier_dropout_prob": 0.1,
   "directionality": "bidi",
+  "embedding_size": 128,
+  "eos_token_id": 3,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "initializer_range": 0.02,
+  "inner_group_num": 1,
   "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
+  "model_type": "albert",
   "num_attention_heads": 12,
+  "num_hidden_groups": 1,
   "num_hidden_layers": 12,
+  "pad_token_id": 0,
   "pooler_fc_size": 768,
   "pooler_num_attention_heads": 12,
   "pooler_num_fc_layers": 3,
@@ -19,4 +28,4 @@
   "pooler_type": "first_token_transform",
   "type_vocab_size": 2,
   "vocab_size": 128000
-}
+}
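
The keys added in this commit are standard ALBERT configuration fields: "model_type" lets the transformers Auto classes dispatch to the ALBERT implementation, "embedding_size": 128 records ALBERT's factorized embedding parameterization (token embeddings smaller than "hidden_size": 768), "num_hidden_groups" and "inner_group_num" of 1 describe full cross-layer parameter sharing, and the token-id fields pin down the special tokens. Below is a minimal sketch of how the updated config.json is consumed by the transformers library; "path/to/this-checkpoint" is a placeholder, not this repository's actual model id.

from transformers import AlbertConfig, AlbertForMaskedLM

# Load config.json; "path/to/this-checkpoint" is a hypothetical placeholder.
config = AlbertConfig.from_pretrained("path/to/this-checkpoint")

# The keys added in this commit surface as ordinary config attributes.
print(config.model_type)       # "albert" -- enables AutoConfig/AutoModel dispatch
print(config.embedding_size)   # 128 -- factorized embeddings, smaller than hidden_size (768)
print(config.num_hidden_groups, config.inner_group_num)  # 1 1 -- full layer sharing
print(config.pad_token_id, config.bos_token_id, config.eos_token_id)  # 0 2 3

# Build the architecture from the config (randomly initialized weights).
model = AlbertForMaskedLM(config)

A likely motivation for the change: without a "model_type" entry, AutoConfig cannot infer the model class from config.json alone, so adding it makes the checkpoint loadable through the Auto* interfaces.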