LSTM Autoencoder

import keras
from keras import losses
from keras.models import Sequential
from keras.layers import LSTM, Dropout

def build_model(layers, seq_len):
    # layers = [input_dim, units_1, units_2, units_3, units_4, output_dim]  (six entries)
    model = Sequential()

    model.add(LSTM(layers[1], input_shape=(seq_len, layers[0]), return_sequences=True))
    # model.add(Dropout(0.2))
    model.add(LSTM(layers[2], return_sequences=True))
    model.add(LSTM(layers[3], return_sequences=True))
    model.add(LSTM(layers[4], return_sequences=True))
    model.add(LSTM(layers[5], return_sequences=True))
    # model.add(Dropout(0.2))
    model.compile(loss=losses.mean_squared_error,
                  optimizer=keras.optimizers.Adam(lr=0.001, beta_1=0.9, beta_2=0.999,
                                                  epsilon=None, decay=0.0, amsgrad=False))
    return model

# usage: layers, seq_len, trainX, trainY, batchSize and nb_epoch must be defined by the caller
model = build_model(layers, seq_len)
model.fit(trainX, trainY, batch_size=batchSize, epochs=nb_epoch)
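For reference, here is a minimal self-contained sketch of how the undefined names above (trainX, trainY, batchSize, nb_epoch, layers, seq_len) could be filled in. The dimensions and values are assumptions for illustration only, not from the original post; the key point is that an autoencoder reconstructs its own input, so the target equals the input and the last entry of layers matches the feature dimension.

import numpy as np

seq_len, n_features = 30, 4                          # assumed toy dimensions
layers = [n_features, 64, 16, 16, 64, n_features]    # narrow in the middle, widen back to input size

# synthetic data: 200 random sequences shaped (samples, seq_len, features);
# the reconstruction target is the input itself
trainX = np.random.rand(200, seq_len, n_features).astype("float32")
trainY = trainX

batchSize, nb_epoch = 32, 10                         # placeholder training settings

model = build_model(layers, seq_len)
model.fit(trainX, trainY, batch_size=batchSize, epochs=nb_epoch)

reconstructed = model.predict(trainX)                # shape (200, seq_len, n_features)
print(np.mean((reconstructed - trainX) ** 2))        # mean reconstruction error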
