Hezar: Upload model and config
Browse files- model_config.yaml +46 -0
model_config.yaml
ADDED
@@ -0,0 +1,46 @@
name: bert_sequence_labeling
config_type: model
task: sequence_labeling
num_labels: 23
id2label:
  0: P
  1: Ne
  2: PRO
  3: CONJ
  4: 'N'
  5: PUNC
  6: Pe
  7: ADV
  8: V
  9: AJ
  10: AJe
  11: DET
  12: POSTP
  13: NUM
  14: DETe
  15: NUMe
  16: PROe
  17: ADVe
  18: RES
  19: CL
  20: INT
  21: CONJe
  22: RESe
vocab_size: 42000
hidden_size: 768
num_hidden_layers: 12
num_attention_heads: 12
intermediate_size: 3072
hidden_act: gelu
hidden_dropout_prob: 0.1
attention_probs_dropout_prob: 0.1
max_position_embeddings: 512
type_vocab_size: 2
initializer_range: 0.02
layer_norm_eps: 1.0e-12
pad_token_id: 0
position_embedding_type: absolute
use_cache: true
prediction_skip_tokens:
- '[SEP]'
- '[CLS]'