{
"module": "keras_nlp.models.bert.bert_classifier",
"class_name": "BertClassifier",
"config": {
"backbone": {
"module": "keras_nlp.models.bert.bert_backbone",
"class_name": "BertBackbone",
"config": {
"name": "bert_backbone",
"trainable": true,
"vocabulary_size": 30522,
"num_layers": 2,
"num_heads": 2,
"hidden_dim": 128,
"intermediate_dim": 512,
"dropout": 0.1,
"max_sequence_length": 512,
"num_segments": 2
},
"registered_name": "keras_nlp>BertBackbone"
},
"preprocessor": {
"module": "keras_nlp.models.bert.bert_preprocessor",
"class_name": "BertPreprocessor",
"config": {
"name": "bert_preprocessor",
"trainable": true,
"dtype": "float32",
"tokenizer": {
"module": "keras_nlp.models.bert.bert_tokenizer",
"class_name": "BertTokenizer",
"config": {
"name": "bert_tokenizer",
"trainable": true,
"dtype": "int32",
"vocabulary": null,
"sequence_length": null,
"lowercase": true,
"strip_accents": false,
"split": true,
"suffix_indicator": "##",
"oov_token": "[UNK]"
},
"registered_name": "keras_nlp>BertTokenizer"
},
"sequence_length": 512,
"truncate": "round_robin"
},
"registered_name": "keras_nlp>BertPreprocessor"
},
"name": "bert_classifier",
"num_classes": 4,
"activation": "linear",
"dropout": 0.1
},
"registered_name": "keras_nlp>BertClassifier"
}