# TensorFlow: training a model with a custom training loop

import os

import cv2
from keras.callbacks import ModelCheckpoint
from sklearn.model_selection import train_test_split
import numpy as np
from sklearn import metrics
import tensorflow as tf
from sklearn.utils.class_weight import compute_class_weight, compute_sample_weight

# Detect the installed TensorFlow major version and import the matching
# Keras API: standalone keras for TF1, tf.keras for TF2.
tf_version = int(tf.__version__.split(".")[0])
print('tf_version:',tf_version)
if tf_version == 1:
    import keras
    from keras.models import Model, Sequential
    from keras.layers import Conv2D, MaxPooling2D, AveragePooling2D, Flatten, Dense, Dropout
    from keras.optimizers import SGD
    from keras.preprocessing.image import ImageDataGenerator
    # BUG FIX: ResNet50 was only imported on the TF2 branch, so the
    # `ResNet50(...)` calls below raised NameError under TF1.
    from keras.applications.resnet50 import ResNet50
elif tf_version == 2:
    from tensorflow import keras
    from tensorflow.keras.models import Model, Sequential
    from tensorflow.keras.layers import Conv2D, MaxPooling2D, AveragePooling2D, Flatten, Dense, Dropout
    from tensorflow.keras.optimizers import SGD
    from tensorflow.keras.preprocessing.image import ImageDataGenerator
    from tensorflow.keras.applications.resnet import ResNet50

# num_classes = 7
# NOTE(review): this instance appears to be dead — `model` is rebound to a
# fresh ResNet50 (same arguments) further down, before any training or
# inference call; one of the two constructions is redundant.
model = ResNet50(weights=None,classes=7)

#
def get_img(img_paths, img_size):
    """Load, resize and normalise a batch of image files.

    Args:
        img_paths: iterable of file paths readable by cv2.imread.
        img_size: target height/width (images are resized to a square).

    Returns:
        float32 ndarray of shape (len(img_paths), img_size, img_size, 3)
        with RGB values scaled to [0, 1].
    """
    # BUG FIX: the buffer was allocated as uint8, so the normalised float
    # values (all < 1.0) were truncated to 0 on assignment, feeding the
    # network all-black images.  Allocate float32 instead.
    X = np.zeros((len(img_paths), img_size, img_size, 3), dtype=np.float32)
    for i, img_path in enumerate(img_paths):
        img = cv2.imread(img_path)
        # cv2 loads BGR; convert to the RGB order the model expects.
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        img = cv2.resize(img, (img_size, img_size), interpolation=cv2.INTER_AREA)
        # Scale to [0, 1] with plain numpy — no TF op (or reshape) needed.
        X[i] = img.astype(np.float32) / 255.0
    return X
def get_X_batch(X_paths, labels, batch_size, img_size):
    """Endlessly cycle over the dataset, yielding (labels, images) batches.

    The final slice of each pass may be smaller than batch_size.
    """
    while True:
        for start in range(0, len(X_paths), batch_size):
            stop = start + batch_size
            batch_images = get_img(X_paths[start:stop], img_size)
            batch_labels = labels[start:stop]
            yield batch_labels, batch_images
##########################################
# Build (file path, label) lists by walking the training directory.
# Assumed layout: dataset/train/<class-dir>/<image files>, where the first
# character of the class directory name is the integer class label.
root_path = 'dataset/train/'
filelist = []
labellist = []
batch_size = 16
for root, dirs, files in os.walk(root_path, topdown=False):
    for name in files:
        filepath = os.path.join(root, name)
        # NOTE(review): root.split('/')[2][0] hard-codes the directory depth
        # and the '/' separator — breaks on Windows paths or a different root.
        label = int(root.split('/')[2][0])
        filelist.append(filepath)
        labellist.append(label)
# print(filelist)
# Hold out 10% of the files for validation.
train_data, val_data,train_label, val_label = train_test_split(filelist,labellist,test_size=0.1)
# The validation split is loaded fully into memory at 224x224
# (val_data is rebound from a path list to an image array here).
val_data = get_img(val_data, 224)
test_dataset = tf.data.Dataset.from_tensor_slices((val_data, val_label))

# Infinite batch generator over the training files.
# BUG FIX: the generator was created with a hard-coded batch size of 1 while
# the per-epoch step count below is computed with batch_size (16), so each
# "epoch" only visited 1/16 of the training images.  Use the shared value.
train_dataset = get_X_batch(train_data, train_label, batch_size, 224)

# label,images = next(iterbatch)
# print(type(label),type(images))

def val_data():
    """Evaluate the current model on the held-out validation set.

    Returns:
        (accuracy, mean_loss) computed over the whole validation dataset.
    """
    # BUG FIX: this previously updated the *global* training accuracy metric
    # (which is never reset), so train and validation accuracy contaminated
    # each other; it also returned only the loss of the LAST sample.
    # Use a fresh local metric and average the loss over all samples.
    val_metric = tf.keras.metrics.SparseCategoricalAccuracy()
    losses = []
    for img, label in test_dataset:
        img = np.reshape(img, [1, 224, 224, 3])
        y_pred = model(img)
        val_metric.update_state(y_true=label, y_pred=y_pred)
        loss = tf.keras.losses.sparse_categorical_crossentropy(y_true=label, y_pred=y_pred)
        losses.append(tf.reduce_mean(loss))
    return val_metric.result(), tf.reduce_mean(losses)
learning_rate = 0.001
# NOTE(review): this rebuilds ResNet50, discarding the identical instance
# created near the top of the script; one of the two builds is redundant.
model = ResNet50(weights=None,classes=7)
optimizer = tf.keras.optimizers.Adam(learning_rate=learning_rate)
# Running accuracy accumulator used by the training loop below.
sparse_categorical_accuracy = tf.keras.metrics.SparseCategoricalAccuracy()

step = 0
epoch_num = 20
# Full batches per epoch (any trailing partial batch is dropped).
batches = int(len(train_data)/batch_size)
# Custom training loop: forward pass under GradientTape, manual gradient
# application, periodic validation every 10 steps.
for epoch in range(epoch_num):
    # Reset the accumulator so reported accuracy reflects this epoch only
    # (previously it accumulated across all epochs and validation calls).
    sparse_categorical_accuracy.reset_states()
    for i in range(batches):
        # Fetch the next (labels, images) batch from the infinite generator.
        label, img = next(train_dataset)
        with tf.GradientTape() as tape:
            # training=True so BatchNorm/Dropout layers run in training mode.
            y_pred = model(img, training=True)
            loss = tf.keras.losses.sparse_categorical_crossentropy(y_true=label, y_pred=y_pred)
            loss = tf.reduce_mean(loss)
        # Metric bookkeeping needs no gradients — keep it outside the tape.
        sparse_categorical_accuracy.update_state(y_true=label, y_pred=y_pred)
        train_acc = sparse_categorical_accuracy.result()
        # BUG FIX: differentiate w.r.t. trainable variables only —
        # model.variables also contains non-trainable BatchNorm statistics,
        # whose gradients come back as None.
        grads = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(grads_and_vars=zip(grads, model.trainable_variables))
        if step % 10 == 0:
            val_acc, val_loss = val_data()
            print("step %d: loss:%f,train_accuracy:%f,val_loss:%f,val_accuracy:%f" % (step, loss.numpy(), train_acc, val_loss, val_acc))
        step += 1
        # BUG FIX: removed a stray extra next(train_dataset) that was here —
        # it silently discarded every other training batch.

########################################

# (blog footer) You may also be interested in: paid column, tensorflow, keras