Keras documentation (Chinese edition): https://keras-cn.readthedocs.io/en/latest/
Also found a very detailed code walkthrough: https://www.imooc.com/article/79490
The underlying theory is covered in a separate note.
import random
import os
os.environ['KERAS_BACKEND'] = 'tensorflow'
from keras import backend as K
from keras.layers import Convolution2D, MaxPooling2D
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.models import Sequential
from keras.models import load_model
from keras.optimizers import SGD
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import np_utils
from sklearn.model_selection import train_test_split
from load_data import load_dataset, resize_image, IMAGE_SIZE
class Dataset:
def __init__(self, path_name):
# Training set
self.train_images = None
self.train_labels = None
# Validation set
self.valid_images = None
self.valid_labels = None
# Test set
self.test_images = None
self.test_labels = None
# Path from which to load the dataset
self.path_name = path_name
# Dimension ordering used by the current Keras backend
self.input_shape = None
self.nb_classes = None
# Load the dataset, split it following cross-validation practice, and carry out the related preprocessing
def load(self, img_rows=IMAGE_SIZE, img_cols=IMAGE_SIZE,
img_channels=3):
# Load the dataset into memory
images, labels, face_num = load_dataset(self.path_name)
self.nb_classes = face_num
train_images, valid_images, train_labels, valid_labels = train_test_split(images, labels, test_size=0.3,
random_state=random.randint(0, 100))
_, test_images, _, test_labels = train_test_split(images, labels, test_size=0.5,
random_state=random.randint(0, 100))
# If the current dimension ordering is 'th' (channels_first), image data is fed as channels, rows, cols; otherwise as rows, cols, channels
# This block reshapes the training data into the dimension ordering the Keras backend expects
# if K.image_dim_ordering() == 'th':  # may fail on newer Keras versions
if K.image_data_format() == 'channels_first':
train_images = train_images.reshape(train_images.shape[0], img_channels, img_rows, img_cols)
valid_images = valid_images.reshape(valid_images.shape[0], img_channels, img_rows, img_cols)
test_images = test_images.reshape(test_images.shape[0], img_channels, img_rows, img_cols)
self.input_shape = (img_channels, img_rows, img_cols)
else:
train_images = train_images.reshape(train_images.shape[0], img_rows, img_cols, img_channels)
valid_images = valid_images.reshape(valid_images.shape[0], img_rows, img_cols, img_channels)
test_images = test_images.reshape(test_images.shape[0], img_rows, img_cols, img_channels)
self.input_shape = (img_rows, img_cols, img_channels)
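# For example, assuming IMAGE_SIZE = 64 (which matches the model summary shown below) and the
# TensorFlow backend (channels_last), train_images ends up with shape (N, 64, 64, 3) and
# self.input_shape becomes (64, 64, 3), feeding the (None, 64, 64, 32) output of the first conv layer.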
# Print the sizes of the training, validation, and test sets
print(train_images.shape[0], 'train samples')
print(valid_images.shape[0], 'valid samples')
print(test_images.shape[0], 'test samples')
'''
The model uses categorical_crossentropy as its loss function, so the class labels have to be
one-hot encoded (vectorized) according to the number of classes nb_classes. Here there are only
two classes, so after the conversion each label becomes a two-dimensional vector.
'''
train_labels = np_utils.to_categorical(train_labels, self.nb_classes)
valid_labels = np_utils.to_categorical(valid_labels, self.nb_classes)
test_labels = np_utils.to_categorical(test_labels, self.nb_classes)
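# A quick worked example of the one-hot conversion above (hypothetical labels):
# np_utils.to_categorical([0, 1, 1, 0], 2) returns
# [[1. 0.]
#  [0. 1.]
#  [0. 1.]
#  [1. 0.]]
# i.e. each integer label becomes a row with a 1 in the column of its class,
# which is the label format categorical_crossentropy expects.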
# Convert the pixel data to float so it can be normalized
train_images = train_images.astype('float32')
valid_images = valid_images.astype('float32')
test_images = test_images.astype('float32')
# Normalize each pixel value to the 0-1 range
train_images /= 255
valid_images /= 255
test_images /= 255
self.train_images = train_images
self.valid_images = valid_images
self.test_images = test_images
self.train_labels = train_labels
self.valid_labels = valid_labels
self.test_labels = test_labels
# CNN model class
class Model:
def __init__(self):
self.model = None
# Build the model
def build_model(self, dataset, nb_classes=5):
# Create an empty network model: layers are appended one after another, so it is formally called a Sequential (linear stack) model
self.model = Sequential()
# The calls below add the layers the CNN needs, in order; each add() appends one layer
self.model.add(Convolution2D(32, 3, 3, border_mode='same',
input_shape=dataset.input_shape)) # 1: 2D convolution layer (32 kernels of size 3x3)
self.model.add(Activation('relu')) # 2: activation layer
self.model.add(Convolution2D(32, 3, 3)) # 3: 2D convolution layer
self.model.add(Activation('relu')) # 4: activation layer
self.model.add(MaxPooling2D(pool_size=(2, 2))) # 5: pooling layer
self.model.add(Dropout(0.25)) # 6: Dropout layer
self.model.add(Convolution2D(64, 3, 3, border_mode='same')) # 7: 2D convolution layer
self.model.add(Activation('relu')) # 8: activation layer
self.model.add(Convolution2D(64, 3, 3)) # 9: 2D convolution layer
self.model.add(Activation('relu')) # 10: activation layer
self.model.add(MaxPooling2D(pool_size=(2, 2))) # 11: pooling layer
self.model.add(Dropout(0.25)) # 12: Dropout layer
self.model.add(Flatten()) # 13: Flatten layer
self.model.add(Dense(512)) # 14: Dense layer, also known as a fully connected layer
self.model.add(Activation('relu')) # 15: activation layer
self.model.add(Dropout(0.5)) # 16: Dropout layer
self.model.add(Dense(nb_classes)) # 17: Dense layer
self.model.add(Activation('softmax')) # 18: classification layer, outputs the final result
# Print the model summary
self.model.summary()
# Train the model
def train(self, dataset, batch_size=20, nb_epoch=10, data_augmentation=True):
sgd = SGD(lr=0.0007, decay=1e-6,
momentum=0.9, nesterov=True) # train with an SGD + momentum optimizer; first create the optimizer object
self.model.compile(loss='categorical_crossentropy',
optimizer=sgd,
metrics=['accuracy']) # carry out the actual model configuration
# Without data augmentation. "Augmentation" means creating new training data from the data we
# provide via rotation, flipping, adding noise, and so on, deliberately enlarging the training set
if not data_augmentation:
self.model.fit(dataset.train_images,
dataset.train_labels,
batch_size=batch_size,
nb_epoch=nb_epoch,
validation_data=(dataset.valid_images, dataset.valid_labels),
shuffle=True)
# Use real-time data augmentation
else:
# Define the data generator used for augmentation. It returns a generator object datagen; each
# time datagen is invoked it yields one batch of data on the fly, saving memory. It is essentially a Python generator
datagen = ImageDataGenerator(
featurewise_center=False, # whether to center the input data (zero mean over the dataset)
samplewise_center=False, # whether to set each sample's mean to 0
featurewise_std_normalization=False, # whether to divide inputs by the dataset's standard deviation
samplewise_std_normalization=False, # whether to divide each sample by its own standard deviation
zca_whitening=False, # whether to apply ZCA whitening to the input data
rotation_range=20, # range of random rotation during augmentation (degrees, 0-180)
width_shift_range=0.2, # range of random horizontal shifts (fraction of image width, float in 0-1)
height_shift_range=0.2, # same as above, but vertical
horizontal_flip=True, # whether to apply random horizontal flips
vertical_flip=False) # whether to apply random vertical flips
# Compute the statistics over the whole training set that are needed for feature-wise normalization, ZCA whitening, etc.
datagen.fit(dataset.train_images)
# Start training the model using the generator
self.model.fit_generator(datagen.flow(dataset.train_images, dataset.train_labels,
batch_size=batch_size),
samples_per_epoch=dataset.train_images.shape[0],
nb_epoch=nb_epoch,
validation_data=(dataset.valid_images, dataset.valid_labels))
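# As a quick sanity check (not part of the training flow above), a single augmented batch can
# be pulled from the generator with:
#   batch_images, batch_labels = next(datagen.flow(dataset.train_images, dataset.train_labels, batch_size=batch_size))
# Each call yields batch_size randomly transformed images plus their one-hot labels, and
# fit_generator keeps drawing such batches until roughly samples_per_epoch images are seen per epoch.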
MODEL_PATH = './model/face1.model'
def save_model(self, file_path=MODEL_PATH):
self.model.save(file_path)
def load_model(self, file_path=MODEL_PATH):
self.model = load_model(file_path)
def evaluate(self, dataset):
score = self.model.evaluate(dataset.test_images, dataset.test_labels, verbose=1)
print("%s: %.2f%%" % (self.model.metrics_names[1], score[1] * 100))
# Recognize a face
def face_predict(self, image):
# Again determine the dimension ordering from the backend (same version fix as above)
if K.image_data_format() == 'channels_first' and image.shape != (1, 3, IMAGE_SIZE, IMAGE_SIZE):
image = resize_image(image) # the size must match the training set: IMAGE_SIZE x IMAGE_SIZE
image = image.reshape((1, 3, IMAGE_SIZE, IMAGE_SIZE)) # unlike training, we predict on just one image here
elif K.image_data_format() == 'channels_last' and image.shape != (1, IMAGE_SIZE, IMAGE_SIZE, 3):
image = resize_image(image)
image = image.reshape((1, IMAGE_SIZE, IMAGE_SIZE, 3))
# Convert to float and normalize
image = image.astype('float32')
image /= 255
# Give the probability of the input belonging to each class
result_probability = self.model.predict_proba(image)
print('result:', result_probability, max(result_probability[0]))
# Give the predicted class index
result = self.model.predict_classes(image)
# Return the top-class probability and the predicted class
return max(result_probability[0]), result[0]
if __name__ == '__main__':
dataset = Dataset('./data/')
dataset.load()
model = Model()
model.build_model(dataset, dataset.nb_classes)
model.train(dataset)
model.save_model(file_path='./model/face1.model')
model.evaluate(dataset)
Training results:
374 train samples
161 valid samples
268 test samples
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_1 (Conv2D) (None, 64, 64, 32) 896
_________________________________________________________________
activation_1 (Activation) (None, 64, 64, 32) 0
_________________________________________________________________
conv2d_2 (Conv2D) (None, 62, 62, 32) 9248
_________________________________________________________________
activation_2 (Activation) (None, 62, 62, 32) 0
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 31, 31, 32) 0
_________________________________________________________________
dropout_1 (Dropout) (None, 31, 31, 32) 0
_________________________________________________________________
conv2d_3 (Conv2D) (None, 31, 31, 64) 18496
_________________________________________________________________
activation_3 (Activation) (None, 31, 31, 64) 0
_________________________________________________________________
conv2d_4 (Conv2D) (None, 29, 29, 64) 36928
_________________________________________________________________
activation_4 (Activation) (None, 29, 29, 64) 0
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 14, 14, 64) 0
_________________________________________________________________
dropout_2 (Dropout) (None, 14, 14, 64) 0
_________________________________________________________________
flatten_1 (Flatten) (None, 12544) 0
_________________________________________________________________
dense_1 (Dense) (None, 512) 6423040
_________________________________________________________________
activation_5 (Activation) (None, 512) 0
_________________________________________________________________
dropout_3 (Dropout) (None, 512) 0
_________________________________________________________________
dense_2 (Dense) (None, 2) 1026
_________________________________________________________________
activation_6 (Activation) (None, 2) 0
=================================================================
Total params: 6,489,634
Trainable params: 6,489,634
Non-trainable params: 0
Epoch 1/10
18/18 [==============================] - 9s 498ms/step - loss: 0.6350 - accuracy: 0.6412 - val_loss: 0.6009 - val_accuracy: 0.6584
Epoch 2/10
18/18 [==============================] - 6s 335ms/step - loss: 0.6079 - accuracy: 0.6469 - val_loss: 0.5700 - val_accuracy: 0.6584
Epoch 3/10
18/18 [==============================] - 6s 361ms/step - loss: 0.5663 - accuracy: 0.6667 - val_loss: 0.5245 - val_accuracy: 0.6584
Epoch 4/10
18/18 [==============================] - 7s 392ms/step - loss: 0.5204 - accuracy: 0.6864 - val_loss: 0.4376 - val_accuracy: 0.7516
Epoch 5/10
18/18 [==============================] - 7s 379ms/step - loss: 0.4186 - accuracy: 0.8083 - val_loss: 0.3016 - val_accuracy: 0.9876
Epoch 6/10
18/18 [==============================] - 7s 376ms/step - loss: 0.2713 - accuracy: 0.9322 - val_loss: 0.1445 - val_accuracy: 1.0000
Epoch 7/10
18/18 [==============================] - 6s 344ms/step - loss: 0.1640 - accuracy: 0.9598 - val_loss: 0.0638 - val_accuracy: 0.9938
Epoch 8/10
18/18 [==============================] - 7s 406ms/step - loss: 0.0831 - accuracy: 0.9802 - val_loss: 0.0338 - val_accuracy: 0.9938
Epoch 9/10
18/18 [==============================] - 8s 424ms/step - loss: 0.2613 - accuracy: 0.9011 - val_loss: 0.0490 - val_accuracy: 0.9752
Epoch 10/10
18/18 [==============================] - 8s 444ms/step - loss: 0.0632 - accuracy: 0.9861 - val_loss: 0.0322 - val_accuracy: 0.9938
268/268 [==============================] - 1s 3ms/step
accuracy: 99.25%
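Once the model is trained and saved, it can be reused to predict on a single image. A minimal usage sketch, assuming OpenCV (cv2) is installed, that './test.jpg' is a placeholder path to some face image, and that it runs alongside the Model class defined above:

import cv2

model = Model()
model.load_model(file_path='./model/face1.model')  # reload the weights saved during training
image = cv2.imread('./test.jpg')                   # placeholder test image path (BGR, any size)
probability, label = model.face_predict(image)     # face_predict resizes and normalizes the image internally
print('predicted class:', label, 'confidence:', probability)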