{
  "activation_dropout": 0.1,
  "activation_fn": "gelu",
  "apply_graphormer_init": true,
  "architectures": [
    "GraphormerForGraphClassification"
  ],
  "attention_dropout": 0.1,
  "bias": true,
  "bos_token_id": 1,
  "dropout": 0.0,
  "edge_type": "multi_hop",
  "embed_scale": null,
  "embedding_dim": 768,
  "encoder_normalize_before": true,
  "eos_token_id": 2,
  "export": false,
  "ffn_embedding_dim": 768,
  "freeze_embeddings": false,
  "hidden_size": 768,
  "id2label": {
    "0": "LABEL_0"
  },
  "init_fn": null,
  "kdim": null,
  "label2id": {
    "LABEL_0": 0
  },
  "layerdrop": 0.0,
  "max_nodes": 512,
  "model_type": "graphormer",
  "multi_hop_max_dist": 5,
  "no_token_positional_embeddings": false,
  "num_atoms": 4608,
  "num_attention_heads": 32,
  "num_edge_dis": 128,
  "num_edges": 1536,
  "num_in_degree": 512,
  "num_layers": 12,
  "num_out_degree": 512,
  "num_spatial": 512,
  "num_trans_layers_to_freeze": 0,
  "pad_token_id": 0,
  "pre_layernorm": false,
  "q_noise": 0.0,
  "qn_block_size": 8,
  "self_attention": true,
  "share_input_output_embed": false,
  "spatial_pos_max": 1024,
  "torch_dtype": "float32",
  "traceable": false,
  "transformers_version": "4.26.0.dev0",
  "vdim": null
}
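
A minimal sketch of how a config like this can be consumed, assuming transformers >= 4.26 (the version that introduced Graphormer, matching `transformers_version` above) and a hypothetical local directory `./graphormer-checkpoint` containing this file as `config.json`:

```python
# Sketch only: ./graphormer-checkpoint is a hypothetical local directory
# holding the JSON above as config.json; requires transformers >= 4.26.
from transformers import GraphormerConfig, GraphormerForGraphClassification

# Read the JSON config back into a config object.
config = GraphormerConfig.from_pretrained("./graphormer-checkpoint")

# Build a freshly initialized model with this architecture:
# 12 encoder layers, 32 attention heads, 768-dim embeddings, one label.
model = GraphormerForGraphClassification(config)

print(config.num_layers, config.num_attention_heads, config.embedding_dim)
```

With pretrained weights alongside the config, `GraphormerForGraphClassification.from_pretrained("./graphormer-checkpoint")` would load the trained model instead of a random initialization.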