from config import DatasetName, AffectnetConf, InputDataSize, LearningConfig
# from hg_Class import HourglassNet
import tensorflow as tf
# from tensorflow import keras
# from skimage.transform import resize
from tensorflow.keras.models import Model
from tensorflow.keras.applications import mobilenet_v2, mobilenet, resnet50, densenet, resnet
from tensorflow.keras.layers import Dense, MaxPooling2D, Conv2D, Flatten, \
    BatchNormalization, Activation, GlobalAveragePooling2D, DepthwiseConv2D, \
    Dropout, ReLU, Concatenate, Input, GlobalMaxPool2D, LeakyReLU, Softmax, ELU


class CNNModel:
    def get_model(self, arch, num_of_classes, weights):
        # Select a backbone by name: 'resnet' -> ResNet50 head, 'xcp' -> Xception head.
        if arch == 'resnet':
            model = self._create_resnetemb(num_of_classes,
                                           num_of_embeddings=LearningConfig.num_embeddings,
                                           input_shape=(InputDataSize.image_input_size,
                                                        InputDataSize.image_input_size, 3),
                                           weights=weights
                                           )
        if arch == 'xcp':
            model = self._create_Xception_l2(num_of_classes,
                                             num_of_embeddings=LearningConfig.num_embeddings,
                                             input_shape=(InputDataSize.image_input_size,
                                                          InputDataSize.image_input_size, 3),
                                             weights=weights
                                             )
        return model

    def _create_resnetemb(self, num_of_classes, input_shape, weights, num_of_embeddings):
        # ImageNet-pretrained ResNet50 backbone; the 2048-d 'avg_pool' activation is used
        # as the shared feature vector for all embedding branches.
        resnet_model = resnet.ResNet50(
            input_shape=input_shape,
            include_top=True,
            weights='imagenet',
            # weights=None,
            input_tensor=None,
            pooling=None)
        # Popping the top layer from .layers does not alter the graph; the new head below
        # taps 'avg_pool' directly.
        resnet_model.layers.pop()
        avg_pool = resnet_model.get_layer('avg_pool').output  # 2048
        '''embeddings'''
        # One L2-normalized embedding branch per head, all fed from the same pooled feature.
        embeddings = []
        for i in range(num_of_embeddings):
            emb = tf.keras.layers.Dense(LearningConfig.embedding_size, activation=None)(avg_pool)
            emb_l2 = tf.keras.layers.Lambda(lambda x: tf.math.l2_normalize(x, axis=1))(emb)
            embeddings.append(emb_l2)
        if num_of_embeddings > 1:
            fused = tf.keras.layers.Concatenate(axis=1)([embeddings[i] for i in range(num_of_embeddings)])
        else:
            fused = embeddings[0]
        fused = Dropout(rate=0.5)(fused)
        '''out'''
        out_categorical = Dense(num_of_classes,
                                activation='softmax',
                                name='out')(fused)
        inp = [resnet_model.input]
        revised_model = Model(inp, [out_categorical] + [embeddings[i] for i in range(num_of_embeddings)])
        revised_model.summary()
        '''save json'''
        model_json = revised_model.to_json()
        with open("./model_archs/resnetemb.json", "w") as json_file:
            json_file.write(model_json)
        return revised_model
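
    # Note on the returned model: it has 1 + num_of_embeddings outputs, the softmax head
    # named 'out' followed by the L2-normalized embeddings. A caller could therefore
    # unpack predictions as (hypothetical variable `batch`):
    #   preds, *embs = revised_model.predict(batch)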

    def _create_Xception_l2(self, num_of_classes, num_of_embeddings, input_shape, weights):
        # ImageNet-pretrained Xception backbone without its classification top; global
        # average pooling of the last separable-conv activation gives the shared feature.
        xception_model = tf.keras.applications.Xception(
            include_top=False,
            # weights=None,
            input_tensor=None,
            weights='imagenet',
            input_shape=input_shape,
            pooling=None,
            classes=num_of_classes
        )
        act_14 = xception_model.get_layer('block14_sepconv2_act').output
        avg_pool = GlobalAveragePooling2D()(act_14)
        # Same multi-head construction as in _create_resnetemb: L2-normalized embedding
        # branches, concatenated and regularized with dropout before the softmax head.
        embeddings = []
        for i in range(num_of_embeddings):
            emb = tf.keras.layers.Dense(LearningConfig.embedding_size, activation=None)(avg_pool)
            emb_l2 = tf.keras.layers.Lambda(lambda x: tf.math.l2_normalize(x, axis=1))(emb)
            embeddings.append(emb_l2)
        if num_of_embeddings > 1:
            fused = tf.keras.layers.Concatenate(axis=1)([embeddings[i] for i in range(num_of_embeddings)])
        else:
            fused = embeddings[0]
        fused = Dropout(rate=0.5)(fused)
        '''out'''
        out_categorical = Dense(num_of_classes,
                                activation='softmax',
                                name='out')(fused)
        inp = [xception_model.input]
        revised_model = Model(inp, [out_categorical] + [embeddings[i] for i in range(num_of_embeddings)])
        revised_model.summary()
        '''save json'''
        model_json = revised_model.to_json()
        with open("./model_archs/xcp_embedding.json", "w") as json_file:
            json_file.write(model_json)
        return revised_model
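

# Hypothetical usage sketch: num_of_classes=7, the optimizer, and the loss mapping are
# illustrative assumptions, not values taken from this repository. Only the softmax head
# 'out' is given a loss here; losses on the embedding outputs are training-specific and
# therefore omitted.
if __name__ == '__main__':
    cnn = CNNModel()
    # Builds the ResNet50-based multi-embedding classifier; note that the backbone is
    # loaded with ImageNet weights above regardless of the `weights` argument.
    model = cnn.get_model(arch='resnet', num_of_classes=7, weights=None)
    model.compile(optimizer='adam',
                  loss={'out': 'categorical_crossentropy'},
                  metrics={'out': 'accuracy'})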