Combining a CNN and an LSTM for Classification in Keras

Original article: http://siligence.ai/article-444-1.html

The snippet below builds a single Keras model with two parallel branches, a deep CNN and an LSTM. Both branches read the same (40, 80) input, their outputs are concatenated, and a softmax layer classifies the result into 6 categories.

import keras
from keras.models import Model
from keras.layers import (Input, Reshape, ZeroPadding2D, Conv2D, LeakyReLU,
                          MaxPooling2D, Dropout, GlobalMaxPooling2D, LSTM,
                          Dense, concatenate)


def get_model():
    n_classes = 6                # number of target classes
    inp = Input(shape=(40, 80))  # shape of one input sample

    # Add a channel axis so the 2-D conv layers can consume the input
    # (channels_last, i.e. (40, 80, 1)). The model is built with the Keras
    # functional API, which takes a little more code than the Sequential
    # API but is much easier to extend, so it is the recommended style.
    reshape = Reshape((40, 80, 1))(inp)

    # Start of the DCNN branch -- this is just one possible architecture,
    # feel free to modify it.
    pre = ZeroPadding2D(padding=(1, 1))(reshape)

    # Block 1
    conv1 = Conv2D(32, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(pre)
    l1 = LeakyReLU(alpha=0.33)(conv1)

    conv2 = ZeroPadding2D(padding=(1, 1))(l1)
    conv2 = Conv2D(32, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv2)
    l2 = LeakyReLU(alpha=0.33)(conv2)

    m2 = MaxPooling2D((3, 3), strides=(3, 3))(l2)
    d2 = Dropout(0.25)(m2)
    # Block 2
    conv3 = ZeroPadding2D(padding=(1, 1))(d2)
    conv3 = Conv2D(64, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv3)
    l3 = LeakyReLU(alpha=0.33)(conv3)

    conv4 = ZeroPadding2D(padding=(1, 1))(l3)
    conv4 = Conv2D(64, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv4)
    l4 = LeakyReLU(alpha=0.33)(conv4)

    m4 = MaxPooling2D((3, 3), strides=(3, 3))(l4)
    d4 = Dropout(0.25)(m4)
    # Block 3
    conv5 = ZeroPadding2D(padding=(1, 1))(d4)
    conv5 = Conv2D(128, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv5)
    l5 = LeakyReLU(alpha=0.33)(conv5)

    conv6 = ZeroPadding2D(padding=(1, 1))(l5)
    conv6 = Conv2D(128, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv6)
    l6 = LeakyReLU(alpha=0.33)(conv6)

    m6 = MaxPooling2D((3, 3), strides=(3, 3))(l6)
    d6 = Dropout(0.25)(m6)
    # Block 4
    conv7 = ZeroPadding2D(padding=(1, 1))(d6)
    conv7 = Conv2D(256, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv7)
    l7 = LeakyReLU(alpha=0.33)(conv7)

    conv8 = ZeroPadding2D(padding=(1, 1))(l7)
    conv8 = Conv2D(256, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv8)
    l8 = LeakyReLU(alpha=0.33)(conv8)

    # End of the DCNN branch: collapse the feature maps into a flat
    # (batch, features) vector.
    g = GlobalMaxPooling2D()(l8)
    # LSTM branch. Here the LSTM reads the raw (40, 80) input directly
    # (40 time steps of 80 features); you could instead feed it the DCNN
    # output, as long as it is reshaped to (time steps, features).
    lstm1 = LSTM(256, activation='tanh', return_sequences=True)(inp)
    dl1 = Dropout(0.3)(lstm1)

    # Optional extra dense layer (not connected to the model output):
    # den1 = Dense(200, activation='relu')(dl1)
    # dl2 = Dropout(0.3)(den1)

    lstm2 = LSTM(256, activation='tanh', return_sequences=False)(dl1)
    dl2 = Dropout(0.5)(lstm2)
    # Concatenate the DCNN and LSTM branches.
    g2 = concatenate([g, dl2], axis=1)

    d10 = Dense(1024)(g2)
    l10 = LeakyReLU(alpha=0.33)(d10)
    l10 = Dropout(0.5)(l10)
    l11 = Dense(n_classes, activation='softmax')(l10)

    model = Model(inputs=inp, outputs=l11)
    model.summary()

    # Compile the model.
    adam = keras.optimizers.Adam(lr=0.0005, beta_1=0.95, beta_2=0.999,
                                 epsilon=1e-08)
    # sgd = keras.optimizers.SGD(lr=0.001, decay=1e-06, momentum=0.9, nesterov=False)
    # reduce_lr = ReduceLROnPlateau(monitor='loss', factor=0.1, patience=2,
    #                               verbose=1, min_lr=1e-08, mode='min')
    model.compile(loss='categorical_crossentropy', optimizer=adam,
                  metrics=['accuracy'])

    return model
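
To sanity-check the graph, the model can be built and fit on a batch of random data. The sketch below is illustrative only: the array names, batch size and epoch count are made up, and it assumes float inputs of shape (40, 80) with one-hot labels over the 6 classes.

import numpy as np
from keras.utils import to_categorical

# Toy data just to exercise the model: 32 random samples of shape (40, 80)
# and random integer labels in [0, 6), one-hot encoded for the softmax head.
x_train = np.random.rand(32, 40, 80).astype('float32')
y_train = to_categorical(np.random.randint(0, 6, size=32), num_classes=6)

model = get_model()
model.fit(x_train, y_train, batch_size=8, epochs=2)

In real use the random arrays would be replaced with actual features and labels, and the ReduceLROnPlateau callback that is commented out above can be passed to fit() through its callbacks argument.
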
                            ---------------  From the siligence community, author guan394077759
