{
  "module": "keras_hub.src.models.sam.sam_image_segmenter",
  "class_name": "SAMImageSegmenter",
  "config": {
    "backbone": {
      "module": "keras_hub.src.models.sam.sam_backbone",
      "class_name": "SAMBackbone",
      "config": {
        "name": "sam_backbone",
        "trainable": true,
        "image_encoder": {
          "module": "keras_hub.src.models.vit_det.vit_det_backbone",
          "class_name": "ViTDetBackbone",
          "config": {
            "name": "vi_t_det_backbone",
            "trainable": true,
            "image_shape": [
              1024,
              1024,
              3
            ],
            "patch_size": 16,
            "hidden_size": 1024,
            "num_layers": 24,
            "intermediate_dim": 4096,
            "num_heads": 16,
            "num_output_channels": 256,
            "use_bias": true,
            "use_abs_pos": true,
            "use_rel_pos": true,
            "window_size": 14,
            "global_attention_layer_indices": [
              5,
              11,
              17,
              23
            ],
            "layer_norm_epsilon": 1e-06
          },
          "registered_name": "keras_hub>ViTDetBackbone"
        },
        "prompt_encoder": {
          "module": "keras_hub.src.models.sam.sam_prompt_encoder",
          "class_name": "SAMPromptEncoder",
          "config": {
            "name": "sam_prompt_encoder",
            "trainable": true,
            "dtype": {
              "module": "keras",
              "class_name": "DTypePolicy",
              "config": {
                "name": "float32"
              },
              "registered_name": null
            },
            "hidden_size": 256,
            "image_embedding_size": [
              64,
              64
            ],
            "input_image_size": [
              1024,
              1024
            ],
            "mask_in_channels": 16,
            "activation": "gelu"
          },
          "registered_name": "keras_hub>SAMPromptEncoder"
        },
        "mask_decoder": {
          "module": "keras_hub.src.models.sam.sam_mask_decoder",
          "class_name": "SAMMaskDecoder",
          "config": {
            "name": "sam_mask_decoder",
            "trainable": true,
            "dtype": {
              "module": "keras",
              "class_name": "DTypePolicy",
              "config": {
                "name": "float32"
              },
              "registered_name": null
            },
            "hidden_size": 256,
            "num_layers": 2,
            "intermediate_dim": 2048,
            "num_heads": 8,
            "embedding_dim": 256,
            "num_multimask_outputs": 3,
            "iou_head_depth": 3,
            "iou_head_hidden_dim": 256,
            "activation": "gelu"
          },
          "registered_name": "keras_hub>SAMMaskDecoder"
        }
      },
      "registered_name": "keras_hub>SAMBackbone"
    },
    "preprocessor": {
      "module": "keras_hub.src.models.sam.sam_image_segmenter_preprocessor",
      "class_name": "SAMImageSegmenterPreprocessor",
      "config": {
        "name": "sam_image_segmenter_preprocessor",
        "trainable": true,
        "dtype": {
          "module": "keras",
          "class_name": "DTypePolicy",
          "config": {
            "name": "float32"
          },
          "registered_name": null
        },
        "image_converter": {
          "module": "keras_hub.src.models.sam.sam_image_converter",
          "class_name": "SAMImageConverter",
          "config": {
            "name": "sam_image_converter",
            "trainable": true,
            "dtype": {
              "module": "keras",
              "class_name": "DTypePolicy",
              "config": {
                "name": "float32"
              },
              "registered_name": null
            },
            "image_size": [
              1024,
              1024
            ],
            "scale": 0.00392156862745098,
            "offset": null,
            "interpolation": "bilinear",
            "crop_to_aspect_ratio": true
          },
          "registered_name": "keras_hub>SAMImageConverter"
        },
        "config_file": "preprocessor.json"
      },
      "registered_name": "keras_hub>SAMImageSegmenterPreprocessor"
    },
    "name": "sam_image_segmenter"
  },
  "registered_name": "keras_hub>SAMImageSegmenter"
}