# Reference (Caffe implementation): https://github.com/hujie-frank/SENet
# Imports needed by the methods below (the methods are defined on the model-builder class):
from keras import backend as K
from keras.layers import (ZeroPadding1D, Conv1D, BatchNormalization, MaxPooling1D,
                          GlobalAveragePooling1D, GlobalMaxPooling1D,
                          Lambda, Concatenate, Dense)
def expand_dim_backend(self, x):
    # Reshape the globally pooled vector (batch, 256) to (batch, 1, 256) so it
    # can be fed to Conv1D layers and broadcast against the residual feature map.
    x1 = K.reshape(x, (-1, 1, 256))
    print('x1:', x1)
    return x1

def multiply(self, a):
    # Element-wise scaling of the residual features a[0] (batch, steps, 256) by the
    # channel-wise excitation weights a[1] (batch, 1, 256). np.multiply does not
    # work on symbolic tensors, so use the broadcasting tensor operator instead.
    x = a[0] * a[1]
    print('x:', x)
    return x
def make_net_Res(self, encoding):
    # Input stem: wide strided convolution followed by max pooling.
    x = ZeroPadding1D(padding=3)(encoding)
    x = Conv1D(filters=64, kernel_size=7, strides=2, padding='valid', activation='relu')(x)
    x = BatchNormalization(axis=-1, scale=True)(x)  # channels are the last axis for Conv1D
    x_pool = MaxPooling1D(pool_size=3, strides=2, padding='same')(x)
    # RESNet_1: 1x1 -> 3x1 -> 1x1 bottleneck
    x = Conv1D(filters=128, kernel_size=1, strides=1, padding='valid', activation='relu')(x_pool)
    x = BatchNormalization(axis=-1, scale=True)(x)
    x = Conv1D(filters=128, kernel_size=3, strides=1, padding='valid', activation='relu')(x)
    x = BatchNormalization(axis=-1, scale=True)(x)
    RES_1 = Conv1D(filters=256, kernel_size=1, strides=1, padding='valid', activation='relu')(x)
    x = BatchNormalization(axis=-1, scale=True)(RES_1)
    # SENet: squeeze to a per-channel descriptor, then excite through a
    # bottleneck (256 -> 16 -> 256) ending in a sigmoid gate.
    squeeze = GlobalAveragePooling1D()(x)
    squeeze = Lambda(self.expand_dim_backend)(squeeze)
    excitation = Conv1D(filters=16, kernel_size=1, strides=1, padding='valid', activation='relu')(squeeze)
    excitation = Conv1D(filters=256, kernel_size=1, strides=1, padding='valid', activation='sigmoid')(excitation)
    # Project the pooled stem features to 256 channels for the shortcut path.
    x_pool_1 = Conv1D(filters=256, kernel_size=1, strides=1, padding='valid', activation='relu')(x_pool)
    x_pool_1 = BatchNormalization(axis=-1, scale=True)(x_pool_1)
    scale = Lambda(self.multiply)([RES_1, excitation])
    # The shortcut is realised by concatenating along the temporal axis rather than by an element-wise add.
    res_1 = Concatenate(axis=1)([x_pool_1, scale])
    # RESNet_2: same bottleneck pattern applied to the concatenated features.
    x = Conv1D(filters=128, kernel_size=1, activation='relu')(res_1)
    x = BatchNormalization(axis=-1, scale=True)(x)
    x = Conv1D(filters=128, kernel_size=3, activation='relu')(x)
    x = BatchNormalization(axis=-1, scale=True)(x)
    RES_2 = Conv1D(filters=256, kernel_size=1)(x)
    # SENet
    squeeze = GlobalAveragePooling1D()(RES_2)
    squeeze = Lambda(self.expand_dim_backend)(squeeze)
    excitation = Conv1D(filters=16, kernel_size=1, strides=1, padding='valid', activation='relu')(squeeze)
    excitation = Conv1D(filters=256, kernel_size=1, strides=1, padding='valid', activation='sigmoid')(excitation)
    scale = Lambda(self.multiply)([RES_2, excitation])
    x = Concatenate(axis=1)([res_1, scale])
    # Classification head: global max pooling followed by a single sigmoid unit.
    x = GlobalMaxPooling1D()(x)
    print('x:', x)
    output = Dense(1, activation='sigmoid')(x)
    return output
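
# A minimal sketch of wiring make_net_Res into a trainable model. The input
# shape (1000, 4), the builder class name, and the compile settings below are
# illustrative assumptions, not part of the original code.
from keras.layers import Input
from keras.models import Model

encoding = Input(shape=(1000, 4))        # hypothetical encoded sequence
builder = SENetResBuilder()              # hypothetical class exposing make_net_Res
output = builder.make_net_Res(encoding)
model = Model(inputs=encoding, outputs=output)
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.summary()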