A classic CNN model, written with the Keras functional API: two convolutional blocks (32 and 64 filters) of paired 3x3 convolutions with PReLU activations and batch normalization, each followed by average pooling and dropout, then global average pooling and a softmax classifier over 5 classes.

from keras.layers import (Input, Conv2D, ZeroPadding2D, AveragePooling2D,
                          GlobalAveragePooling2D, Dense, Dropout, PReLU,
                          BatchNormalization)
from keras.models import Model


def get_model(X_input):
    # Rebuild the model; unlike the original version, the input tensor is
    # passed in, so the same Input can be shared by several models.
    # Originally the input was created inside the function, e.g.
    #   inp = Input(shape=(120, 39)), then Reshape((30, 5, 1)).
    n_classes = 5

    # Block 1: two 3x3 convs with 32 filters, PReLU + BatchNorm
    conv1 = Conv2D(32, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(X_input)
    l1 = PReLU()(conv1)
    l1 = BatchNormalization()(l1)

    # ZeroPadding2D before a padding='same' conv enlarges the feature map
    # by 2 in each spatial dimension
    conv2 = ZeroPadding2D(padding=(1, 1))(l1)
    conv2 = Conv2D(32, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv2)
    l2 = PReLU()(conv2)
    l2 = BatchNormalization()(l2)

    m2 = AveragePooling2D((3, 3), strides=(3, 3))(l2)
    d2 = Dropout(0.25)(m2)
    # Block 2: same pattern with 64 filters
    conv3 = ZeroPadding2D(padding=(1, 1))(d2)
    conv3 = Conv2D(64, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv3)
    l3 = PReLU()(conv3)
    l3 = BatchNormalization()(l3)

    conv4 = ZeroPadding2D(padding=(1, 1))(l3)
    conv4 = Conv2D(64, (3, 3), padding='same',
                   kernel_initializer='glorot_uniform')(conv4)
    l4 = PReLU()(conv4)
    l4 = BatchNormalization()(l4)

    m4 = AveragePooling2D((3, 3), strides=(3, 3))(l4)
    d4 = Dropout(0.25)(m4)
    
    # Head: global average pooling -> 1024-unit dense -> softmax over 5 classes
    g = GlobalAveragePooling2D()(d4)

    den = Dense(1024)(g)
    ld = PReLU()(den)
    ld = Dropout(0.5)(ld)
    result = Dense(n_classes, activation='softmax')(ld)

    model = Model(inputs=X_input, outputs=result)
    return model
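
For completeness, a minimal usage sketch. The (30, 5, 1) input shape is an assumption taken from the commented-out Reshape in the original code, and the optimizer and loss are illustrative choices, not settings given by the author:

X_input = Input(shape=(30, 5, 1))  # assumed shape, per the original Reshape((30, 5, 1))
model = get_model(X_input)
model.compile(optimizer='adam',                 # illustrative choice
              loss='categorical_crossentropy',  # matches the 5-way softmax output
              metrics=['accuracy'])
model.summary()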
