import os
# Image reading library
from PIL import Image
# Numerical array library
import numpy as np
import tensorflow as tf
from tensorflow.keras import layers, models
# Data folders (training data and test data)
data_dir = "data"
data_dir2 = "test"
# Train or run inference (controlled by the isTrain flag below)
# The model is saved to / loaded from the "model_save_path" directory
# Read images and labels from a folder into numpy arrays.
# The label is encoded in the file name, e.g. 1_40.jpg means the image has label 1.
def read_data(data_dir):
    datas = []
    labels = []
    fpaths = []
    for fname in os.listdir(data_dir):
        fpath = os.path.join(data_dir, fname)
        fpaths.append(fpath)
        image = Image.open(fpath)
        data = np.array(image) / 255.0
        label = int(fname.split("_")[0])
        datas.append(data)
        labels.append(label)
    datas = np.array(datas)
    labels = np.array(labels)
    print("shape of datas: {}\tshape of labels: {}".format(datas.shape, labels.shape))
    return fpaths, datas, labels
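# Note: np.array(datas) only stacks into a (N, 32, 32, 3) array if every image in the
# folder has the same size and channel count; if that is not guaranteed, resize and
# convert the images (e.g. with image.resize and image.convert("RGB")) before stacking.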
def test_data(data_dir):
    test_datas = []
    test_labels = []
    test_fpaths = []
    print(data_dir)
    for fname in os.listdir(data_dir):
        fpath = os.path.join(data_dir, fname)
        print(fpath)
        test_fpaths.append(fpath)
        image = Image.open(fpath)
        data = np.array(image) / 255.0
        print("image shape:", data.shape)
        label = int(fname.split("_")[0])
        test_datas.append(data)
        test_labels.append(label)
    test_datas = np.array(test_datas)
    test_labels = np.array(test_labels)
    print("test labels:", test_labels)
    print("shape of datas: {}\tshape of labels: {}".format(test_datas.shape, test_labels.shape))
    return test_fpaths, test_datas, test_labels
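# test_data mirrors read_data but logs each file path and image shape, which helps
# confirm that every test image matches the 32x32x3 input shape the model expects.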
fpaths, datas, labels = read_data(data_dir)
test_fpaths, test_datas, test_labels = test_data(data_dir2)
print("test data info\n",test_fpaths,test_labels)
# Count how many distinct image classes there are
num_classes = len(set(labels))
def create_model():
    model = models.Sequential()
    model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(32, 32, 3)))
    model.add(layers.MaxPooling2D((2, 2)))
    model.add(layers.Conv2D(64, (3, 3), activation='relu'))
    model.add(layers.MaxPooling2D((2, 2)))
    model.add(layers.Conv2D(64, (3, 3), activation='relu'))
    model.add(layers.Flatten())
    model.add(layers.Dense(64, activation='relu'))
    # Size the output layer from the number of classes found in the data
    model.add(layers.Dense(num_classes, activation='softmax'))
    # model.summary()
    model.compile(optimizer='adam', metrics=['accuracy'],
                  loss='sparse_categorical_crossentropy')
    return model
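# With 32x32x3 inputs, each "valid" 3x3 convolution trims a 1-pixel border and each
# 2x2 max-pool halves the spatial size: 32 -> 30 -> 15 -> 13 -> 6 -> 4, so Flatten
# sees a 4x4x64 tensor (1024 features) before the dense head.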
isTrain = False
if isTrain:
    print("Training mode")
    model = create_model()
    model.fit(datas, labels, epochs=10)
    # Save the model structure and weights (SavedModel / .pb format by default;
    # a .h5 file can be written instead via the save_format argument)
    tf.keras.models.save_model(model, "model_save_path")
    print("Model saved")
    test_loss, test_acc = model.evaluate(datas, labels)
    print('\nTest accuracy:', test_acc)
    predictions = model.predict(datas)
    print("predictions shape:", predictions.shape)
    for n in range(10):
        label_number = np.argmax(predictions[n])  # class with the highest confidence
        print("predicted:", label_number, "actual:", labels[n])
else:
    print("Inference mode")
    model = tf.keras.models.load_model("model_save_path")
    print("test_labels:", test_labels)
    predictions = model.predict(test_datas)
    p = len(test_labels)
    print("predictions result len: {}\n".format(p))
    for n in range(p):
        label_number = np.argmax(predictions[n])  # class with the highest confidence
        print("predicted: {}  test_label: {}\n".format(label_number, test_labels[n]))
saved_model_dir = "model_save_path"
# Convert the SavedModel to a TensorFlow Lite model
converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir)
tflite_model = converter.convert()
with open('model_tflite.tflite', 'wb') as f:
    f.write(tflite_model)
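# Optional sanity check (a minimal sketch, not part of the original flow): run one test
# image through the converted model with tf.lite.Interpreter and compare it with the
# Keras prediction above. Assumes test_datas holds float images of shape (N, 32, 32, 3).
interpreter = tf.lite.Interpreter(model_path='model_tflite.tflite')
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
sample = test_datas[:1].astype(np.float32)  # TFLite expects a float32 batch of size 1
interpreter.set_tensor(input_details[0]['index'], sample)
interpreter.invoke()
tflite_pred = interpreter.get_tensor(output_details[0]['index'])
print("tflite prediction:", np.argmax(tflite_pred[0]), "true label:", test_labels[0])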