Hiveurban committed on
Commit
eddcad9
1 Parent(s): b40e921

Upload config.json with huggingface_hub

Files changed (1)
config.json +93 -0
config.json ADDED
@@ -0,0 +1,93 @@
+{
+  "architectures": [
+    "BertForJointParsing"
+  ],
+  "auto_map": {
+    "AutoModel": "BertForJointParsing.BertForJointParsing"
+  },
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "do_lex": true,
+  "do_morph": true,
+  "do_ner": true,
+  "do_prefix": true,
+  "do_syntax": true,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "id2label": {
+    "0": "B-ANG",
+    "1": "B-DUC",
+    "2": "B-EVE",
+    "3": "B-FAC",
+    "4": "B-GPE",
+    "5": "B-LOC",
+    "6": "B-ORG",
+    "7": "B-PER",
+    "8": "B-WOA",
+    "9": "B-INFORMAL",
+    "10": "B-MISC",
+    "11": "B-TIMEX",
+    "12": "B-TTL",
+    "13": "I-DUC",
+    "14": "I-EVE",
+    "15": "I-FAC",
+    "16": "I-GPE",
+    "17": "I-LOC",
+    "18": "I-ORG",
+    "19": "I-PER",
+    "20": "I-WOA",
+    "21": "I-ANG",
+    "22": "I-INFORMAL",
+    "23": "I-MISC",
+    "24": "I-TIMEX",
+    "25": "I-TTL",
+    "26": "O"
+  },
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "label2id": {
+    "B-ANG": 0,
+    "B-DUC": 1,
+    "B-EVE": 2,
+    "B-FAC": 3,
+    "B-GPE": 4,
+    "B-INFORMAL": 9,
+    "B-LOC": 5,
+    "B-MISC": 10,
+    "B-ORG": 6,
+    "B-PER": 7,
+    "B-TIMEX": 11,
+    "B-TTL": 12,
+    "B-WOA": 8,
+    "I-ANG": 21,
+    "I-DUC": 13,
+    "I-EVE": 14,
+    "I-FAC": 15,
+    "I-GPE": 16,
+    "I-INFORMAL": 22,
+    "I-LOC": 17,
+    "I-MISC": 23,
+    "I-ORG": 18,
+    "I-PER": 19,
+    "I-TIMEX": 24,
+    "I-TTL": 25,
+    "I-WOA": 20,
+    "O": 26
+  },
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "newmodern": true,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "syntax_head_size": 128,
+  "torch_dtype": "float32",
+  "transformers_version": "4.36.2",
+  "type_vocab_size": 2,
+  "use_cache": true,
+  "vocab_size": 128000
+}
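
Because "auto_map" routes AutoModel to the repository-local BertForJointParsing class, loading this checkpoint requires trust_remote_code=True. The following is a minimal loading sketch, not part of the commit: the repository id is a placeholder assumption (the commit page does not show the full model path), and only standard transformers calls are used.

from transformers import AutoModel, AutoTokenizer

# Placeholder repository id -- substitute the actual model repo on the Hub.
repo_id = "Hiveurban/<model-repo>"

tokenizer = AutoTokenizer.from_pretrained(repo_id)

# trust_remote_code=True is required because "auto_map" resolves AutoModel to
# the repo-local BertForJointParsing.BertForJointParsing class rather than a
# stock transformers architecture.
model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)
model.eval()

# The do_* flags in config.json (do_lex, do_morph, do_ner, do_prefix, do_syntax)
# indicate which prediction heads of the joint parsing model are enabled; the
# id2label / label2id maps list the 27 BIO tags used by the NER head.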