Viewing the Keras training process with TensorBoard

# New version: one LSTM layer, multi-task (regression + classification).
# time_step (input window length), forward (forecast horizon), freature (features
# per timestep) and the train/test arrays are assumed to be defined earlier.
from keras.callbacks import ModelCheckpoint, TensorBoard
from keras.models import Model
from keras.layers import Input, Dense, LSTM, TimeDistributed, Cropping1D
from keras.optimizers import Adam
from keras import regularizers

tbCallBack = TensorBoard(log_dir='./logs',   # log directory
                 histogram_freq=0,           # how often (in epochs) to compute histograms; 0 = never
#                batch_size=32,              # how many samples to use when computing histograms
                 write_graph=True,           # store the network graph
                 write_grads=True,           # visualize gradient histograms (requires histogram_freq > 0)
                 write_images=True,          # visualize layer weights as images
                 embeddings_freq=0,
                 embeddings_layer_names=None,
                 embeddings_metadata=None)
checkpoint = ModelCheckpoint(filepath="coarse3.h5",
                 monitor='val_classification_acc',  # with named outputs, Keras logs the accuracy as val_<output>_acc
                 verbose=1, save_best_only=True, mode='auto', period=1)

num_input_timepoints = time_step
num_output_timepoints = forward
# model.add(LSTM(32, input_shape=(time_step2, freature), kernel_regularizer=regularizers.l2(0.0001), return_sequences=True))
inputs = Input(shape=(time_step, freature))
lstmout = LSTM(32, kernel_regularizer=regularizers.l2(0.0005), return_sequences=True)(inputs)
mainout = TimeDistributed(Dense(1, activation="tanh", kernel_regularizer=regularizers.l2(0.0005)))(lstmout)
classout = TimeDistributed(Dense(1, activation="sigmoid", kernel_regularizer=regularizers.l2(0.0005)))(lstmout)
# keep only the last num_output_timepoints steps of each output sequence
mainout2 = Cropping1D(cropping=(num_input_timepoints - num_output_timepoints, 0), name="reg")(mainout)
classout2 = Cropping1D(cropping=(num_input_timepoints - num_output_timepoints, 0), name="classification")(classout)
model = Model(inputs=inputs, outputs=[mainout2, classout2])
model.summary()
opt = Adam()
model.compile(loss={"reg": 'mean_squared_error', "classification": 'binary_crossentropy'},
              optimizer=opt,
              metrics={"reg": "mse", "classification": "acc"})
model.save("coarse2.h5")   # save the untrained model (architecture + initial weights)
model.fit(trainXs, [trainYs, trainYs2],
          epochs=40, batch_size=128, verbose=2,
          validation_data=(testXs, [testYs, testYs2]),
          callbacks=[checkpoint, tbCallBack], shuffle=True)
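
The shapes that the fit() call above expects can be sanity-checked with random data. The sizes below (time_step, forward, freature and the sample counts) are placeholder assumptions, not values from the original experiment:

import numpy as np

time_step, forward, freature = 20, 5, 8   # input window, forecast horizon, features per step (placeholders)
n_train, n_test = 256, 64

trainXs  = np.random.rand(n_train, time_step, freature)
trainYs  = np.random.rand(n_train, forward, 1)                   # regression target for the last `forward` steps
trainYs2 = np.random.randint(0, 2, size=(n_train, forward, 1))   # binary target for the classification head
testXs   = np.random.rand(n_test, time_step, freature)
testYs   = np.random.rand(n_test, forward, 1)
testYs2  = np.random.randint(0, 2, size=(n_test, forward, 1))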

In a terminal, run:

tensorboard --logdir /Users/iris/econmic/logs       # absolute path to the log directory

Then open in a browser:
http://localhost:6006
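
After training, the best weights written by the ModelCheckpoint callback (coarse3.h5) can be loaded back for evaluation. A minimal sketch, assuming the testXs/testYs/testYs2 arrays from the training script are still in scope:

from keras.models import load_model

best = load_model("coarse3.h5")              # best epoch saved by the ModelCheckpoint callback
print(best.metrics_names)                    # names of the loss and metric values returned by evaluate()
print(best.evaluate(testXs, [testYs, testYs2], verbose=0))
reg_pred, class_pred = best.predict(testXs)  # each output has shape (samples, forward, 1)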
