Plotting real-time loss and acc curves with Keras

Without further ado, here is the code. It is commented throughout; if anything is unclear, just ask in the comments.

# -*- coding: utf-8 -*-
import keras
from keras.models import Sequential
from keras.layers import Dense
import numpy as np
import matplotlib.pyplot as plt
import time
# Input training data; Keras expects NumPy arrays
x = np.array([[0, 1, 0],
              [0, 0, 1],
              [1, 3, 2],
              [3, 2, 1]])
y = np.array([0, 0, 1, 1]).T
# The simplest model type: Sequential, a linear stack of layers
simple_model = Sequential()
# Dense layers are fully connected layers
# First hidden layer: fully connected, 5 neurons, input dimension 3
simple_model.add(Dense(5, input_dim=3, activation='relu'))
# Second hidden layer: 4 neurons
simple_model.add(Dense(4, activation='relu'))
# Output layer: a single neuron
simple_model.add(Dense(1, activation='sigmoid'))
# Compile the model; this must be done before training
# compile takes three arguments: an optimizer, a loss function, and a list of metrics
simple_model.compile(optimizer='sgd', loss='mean_squared_error', metrics=['accuracy'])


class LossHistory(keras.callbacks.Callback):
    # When training begins, create containers to hold the loss and acc values
    def on_train_begin(self, logs={}):
        self.losses = {'batch': [], 'epoch': []}
        self.accuracy = {'batch': [], 'epoch': []}
        self.val_loss = {'batch': [], 'epoch': []}
        self.val_acc = {'batch': [], 'epoch': []}

    # Append data at the end of every batch
    def on_batch_end(self, batch, logs={}):
        # After each batch, append loss and acc to the containers
        # (val_loss/val_acc are only reported at the end of an epoch, so the batch-level values stay None)
        self.losses['batch'].append(logs.get('loss'))
        self.accuracy['batch'].append(logs.get('acc'))
        self.val_loss['batch'].append(logs.get('val_loss'))
        self.val_acc['batch'].append(logs.get('val_acc'))
        # Redraw roughly every five seconds (whenever the current second is a multiple of 5)
        if int(time.time()) % 5 == 0:
            self.draw_p(self.losses['batch'], 'loss', 'train_batch')
            self.draw_p(self.accuracy['batch'], 'acc', 'train_batch')
            self.draw_p(self.val_loss['batch'], 'loss', 'val_batch')
            self.draw_p(self.val_acc['batch'], 'acc', 'val_batch')
    def on_epoch_end(self, epoch, logs={}):
        # After each epoch, append loss and acc to the containers
        self.losses['epoch'].append(logs.get('loss'))
        self.accuracy['epoch'].append(logs.get('acc'))
        self.val_loss['epoch'].append(logs.get('val_loss'))
        self.val_acc['epoch'].append(logs.get('val_acc'))
        # Redraw roughly every five seconds
        if int(time.time()) % 5 == 0:
            self.draw_p(self.losses['epoch'], 'loss', 'train_epoch')
            self.draw_p(self.accuracy['epoch'], 'acc', 'train_epoch')
            self.draw_p(self.val_loss['epoch'], 'loss', 'val_epoch')
            self.draw_p(self.val_acc['epoch'], 'acc', 'val_epoch')
    # Plotting: each curve is saved to its own figure; modify this method if you want several curves in one figure
    def draw_p(self, lists, label, type):
        plt.figure()
        plt.plot(range(len(lists)), lists, 'r', label=label)
        plt.ylabel(label)
        plt.xlabel(type)
        plt.legend(loc="upper right")
        plt.savefig(type+'_'+label+'.jpg')
        plt.close()  # close the figure so figures don't pile up in memory during a long training run
    # Because the plots are only redrawn about every 5 seconds, the figures saved during training
    # may not cover the whole run (up to ~5 more seconds of training can happen after the last redraw),
    # so this method is called once after training has finished
    def end_draw(self):
        self.draw_p(self.losses['batch'], 'loss', 'train_batch')
        self.draw_p(self.accuracy['batch'], 'acc', 'train_batch')
        self.draw_p(self.val_loss['batch'], 'loss', 'val_batch')
        self.draw_p(self.val_acc['batch'], 'acc', 'val_batch')
        self.draw_p(self.losses['epoch'], 'loss', 'train_epoch')
        self.draw_p(self.accuracy['epoch'], 'acc', 'train_epoch')
        self.draw_p(self.val_loss['epoch'], 'loss', 'val_epoch')
        self.draw_p(self.val_acc['epoch'], 'acc', 'val_epoch')
logs_loss = LossHistory()

# Train the network for 20000 epochs
# Keras takes NumPy arrays as input data and labels; a model is normally trained with the fit function
simple_model.fit(x, y, epochs=20000, callbacks=[logs_loss])
# Use the trained model to make a prediction
y_ = simple_model.predict_classes(x[0:1])
print("[0,1,0]的分类结果:" + str(y[0]))

logs_loss.end_draw()
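
One thing worth noting: the callback above also collects val_loss and val_acc, but the fit() call in this example passes no validation data, so logs.get('val_loss') and logs.get('val_acc') return None and the val_* plots stay empty. As a minimal sketch (not part of the original code), one way to populate them is to let Keras hold out part of the training data with validation_split; with only four samples this is purely illustrative. Also note that newer Keras versions report accuracy under the keys 'accuracy'/'val_accuracy' rather than 'acc'/'val_acc'.

# Sketch: supply validation data so the val_* curves receive real values.
# validation_split holds out the given fraction of x/y for validation;
# with a dataset this tiny it only illustrates the mechanism.
simple_model.fit(x, y,
                 epochs=20000,
                 validation_split=0.25,
                 callbacks=[logs_loss])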

 
