{
  "_name_or_path": "hfl/chinese-roberta-wwm-ext",
  "architectures": [
    "GPLinker"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "directionality": "bidi",
  "eos_token_id": 2,
  "head_size": 64,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-12,
  "max_position_embeddings": 512,
  "model_type": "bert",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "num_predicates": 48,
  "output_past": true,
  "pad_token_id": 0,
  "pooler_fc_size": 768,
  "pooler_num_attention_heads": 12,
  "pooler_num_fc_layers": 3,
  "pooler_size_per_head": 128,
  "pooler_type": "first_token_transform",
  "position_embedding_type": "absolute",
  "predicate2id": {
    "\u4e08\u592b": 0,
    "\u4e0a\u6620\u65f6\u95f4": 1,
    "\u4e3b\u6301\u4eba": 2,
    "\u4e3b\u6f14": 3,
    "\u4e3b\u89d2": 4,
    "\u4eba\u53e3\u6570\u91cf": 5,
    "\u4f5c\u66f2": 6,
    "\u4f5c\u8005": 7,
    "\u4f5c\u8bcd": 8,
    "\u4fee\u4e1a\u5e74\u9650": 9,
    "\u51fa\u54c1\u516c\u53f8": 10,
    "\u51fa\u7248\u793e": 11,
    "\u51fa\u751f\u5730": 12,
    "\u51fa\u751f\u65e5\u671f": 13,
    "\u521b\u59cb\u4eba": 14,
    "\u5236\u7247\u4eba": 15,
    "\u5360\u5730\u9762\u79ef": 16,
    "\u53f7": 17,
    "\u5609\u5bbe": 18,
    "\u56fd\u7c4d": 19,
    "\u59bb\u5b50": 20,
    "\u5b57": 21,
    "\u5b98\u65b9\u8bed\u8a00": 22,
    "\u5bfc\u6f14": 23,
    "\u603b\u90e8\u5730\u70b9": 24,
    "\u6210\u7acb\u65e5\u671f": 25,
    "\u6240\u5728\u57ce\u5e02": 26,
    "\u6240\u5c5e\u4e13\u8f91": 27,
    "\u6539\u7f16\u81ea": 28,
    "\u671d\u4ee3": 29,
    "\u6b4c\u624b": 30,
    "\u6bcd\u4eb2": 31,
    "\u6bd5\u4e1a\u9662\u6821": 32,
    "\u6c11\u65cf": 33,
    "\u6c14\u5019": 34,
    "\u6ce8\u518c\u8d44\u672c": 35,
    "\u6d77\u62d4": 36,
    "\u7236\u4eb2": 37,
    "\u76ee": 38,
    "\u7956\u7c4d": 39,
    "\u7b80\u79f0": 40,
    "\u7f16\u5267": 41,
    "\u8463\u4e8b\u957f": 42,
    "\u8eab\u9ad8": 43,
    "\u8fde\u8f7d\u7f51\u7ad9": 44,
    "\u90ae\u653f\u7f16\u7801": 45,
    "\u9762\u79ef": 46,
    "\u9996\u90fd": 47
  },
  "torch_dtype": "float32",
  "transformers_version": "4.30.2",
  "type_vocab_size": 2,
  "use_cache": true,
  "vocab_size": 21128
}
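
The predicate2id table above maps 48 Chinese relation labels (stored as \uXXXX escapes, e.g. \u4e08\u592b is 丈夫, "husband") to integer ids, while head_size and num_predicates are the GPLinker-specific hyperparameters layered on top of the base chinese-roberta-wwm-ext settings. Below is a minimal sketch, not the repository's own code, of how this config could be inspected with the transformers library; the local path is a placeholder, and loading the actual weights would additionally require the repository's custom GPLinker model class, since that architecture is not part of standard transformers. Because model_type is "bert", AutoConfig resolves to BertConfig and keeps the extra keys as plain attributes.

# Minimal sketch, assuming the config above is saved as config.json in a
# local directory; "./gplinker-checkpoint" is a placeholder path.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./gplinker-checkpoint")

print(config.model_type)      # "bert"
print(config.num_predicates)  # 48
print(config.head_size)       # 64

# Invert the predicate2id table to map predicted relation ids back to labels.
id2predicate = {idx: label for label, idx in config.predicate2id.items()}
print(id2predicate[0])        # 丈夫 ("husband")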