Handling multi-input (hierarchical) recurrent models in Keras

Both builders below take a batch of documents, where each document is a variable number of sentences and each sentence is a sequence of max_len character indices; the first pools per-sentence BiLSTM vectors directly, while the second inserts a per-sentence attention block before pooling.

# Assumed to be defined elsewhere in the script: max_len (characters per sentence),
# char_dict_count (character vocabulary size), XueKe_count (number of subject labels).
from keras import backend as K
from keras.models import Model
from keras.layers import (Input, Dense, Embedding, LSTM, Bidirectional,
                          TimeDistributed, Lambda, Permute, Reshape, Flatten, multiply)


def create_model():
    def sum_add(x):
        # sum the per-sentence vectors over the sentence axis -> one vector per document
        x = K.sum(x, axis=1, keepdims=False)
        return x
    #############model1#########################################################
    input_1 = Input(shape=(None, max_len,), dtype='int32')  # (num_sentences, max_len) character indices; num_sentences is variable
    embedded = TimeDistributed(Embedding(char_dict_count + 2, 64, input_length=max_len, mask_zero=True))(input_1)
    rnn = TimeDistributed(Bidirectional(LSTM(64, return_sequences=False, dropout=0.5), merge_mode='concat'))(embedded)  # one 128-d vector per sentence
    out = Lambda(sum_add)(rnn)  # (batch, 128) document vector
    # out = Dense(128, activation='relu')(out)
    # out = Dropout(0.5)(out)
    output = Dense(XueKe_count, activation='sigmoid')(out)  # one sigmoid per subject label (multi-label)

    model = Model(inputs=input_1, outputs=output)
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['acc'])  # sigmoid outputs -> binary_crossentropy
    model.summary()
    # plot_model(model, 'model.png', show_shapes=True)
    return model
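
A quick smoke test of this builder might look like the sketch below. The concrete values of max_len, char_dict_count and XueKe_count, as well as the batch size and number of sentences per document, are illustrative assumptions rather than values from the original script.

import numpy as np

max_len = 20            # characters per sentence (assumed)
char_dict_count = 5000  # character vocabulary size (assumed)
XueKe_count = 10        # number of subject labels (assumed)

model = create_model()
# dummy hierarchical batch: 8 documents x 6 sentences x max_len character indices (0 is the padding index)
x_dummy = np.random.randint(1, char_dict_count + 1, size=(8, 6, max_len))
# dummy multi-label targets: one 0/1 per subject
y_dummy = np.random.randint(0, 2, size=(8, XueKe_count))
model.fit(x_dummy, y_dummy, batch_size=4, epochs=1)

Note that how well masking propagates through TimeDistributed varies between Keras versions, so the mask_zero=True variant may require a reasonably recent Keras 2.x release.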

def create_model_attention():
    def sum_add(x):
        x = K.sum(x, axis=1, keepdims=False)
        return x
    ########model2#############################################################################################
    input_1 = Input(shape=(None, max_len,), dtype='int32')  # (num_sentences, max_len) character indices
    # mask_zero is not used here: the Permute/Flatten layers in the attention block below do not support masking
    embedded = TimeDistributed(Embedding(char_dict_count + 2, 64, input_length=max_len))(input_1)
    rnn = TimeDistributed(LSTM(64, return_sequences=True, dropout=0.5))(embedded)  # (batch, num_sentences, max_len, 64)
    # rnn = TimeDistributed(Bidirectional(GRU(16, return_sequences=False, dropout=0.8), merge_mode='concat'))(embedded)
    ###########################
    a = TimeDistributed(Permute((2, 1)))(rnn)  # (batch, num_sentences, 64, max_len)
    a = TimeDistributed(Reshape((64, max_len)))(a)  # effectively a no-op; kept only to make the (features, time) layout explicit
    a = TimeDistributed(Dense(max_len, activation='softmax'))(a)  # softmax over the max_len time steps of each sentence
    a_probs = TimeDistributed(Permute((2, 1)), name='attention_vec')(a)  # back to (batch, num_sentences, max_len, 64); wrapper named so the weights can be inspected later
    output_attention_mul = multiply([rnn, a_probs], name='attention_mul')  # Keras 2 replacement for merge(..., mode='mul')
    attention_mul = TimeDistributed(Flatten())(output_attention_mul)  # (batch, num_sentences, max_len * 64)
    ##########################
    out = Lambda(sum_add)(attention_mul)
    # out = Dense(128, activation='relu')(out)
    # out = Dropout(0.5)(out)
    output = Dense(XueKe_count, activation='sigmoid')(out)
    #####################################################################################################

    model = Model(inputs=input_1, outputs=output)
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['acc'])  # sigmoid outputs -> binary_crossentropy
    model.summary()
    # plot_model(model, 'model.png', show_shapes=True)
    return model
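
Because the TimeDistributed wrapper around the final Permute is named attention_vec above, the learned per-character attention weights can be pulled out with an auxiliary model. A minimal sketch, reusing x_dummy from the smoke test above:

attention_model = create_model_attention()
attn_probe = Model(inputs=attention_model.input,
                   outputs=attention_model.get_layer('attention_vec').output)
attn_weights = attn_probe.predict(x_dummy)  # (batch, num_sentences, max_len, 64): one softmax over time steps per feature channel

Finally, both models expect each batch to be a dense (batch, num_sentences, max_len) integer array, so raw documents have to be padded on both levels. One way to do that, assuming each document is a list of character-index lists (the helper and its argument names are hypothetical), is:

import numpy as np
from keras.preprocessing.sequence import pad_sequences

def docs_to_tensor(docs, max_len, max_sentences):
    """docs: list of documents; each document is a list of character-index sequences."""
    batch = np.zeros((len(docs), max_sentences, max_len), dtype='int32')
    for i, doc in enumerate(docs):
        # pad/truncate every sentence to max_len characters (0 is the padding index)
        sents = pad_sequences(doc[:max_sentences], maxlen=max_len, padding='post', truncating='post')
        batch[i, :len(sents), :] = sents
    return batch

Index 0 is reserved for padding, which is what mask_zero=True in the first model expects.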
