Deep Learning: Giant Hornet Prediction

The Deep Learning Toolbox in MATLAB R2021b is used to process the giant hornet data from the 2021 MCM Problem C.
Image Classification
Transfer learning with MobileNet-v2
main1.m

%% Prepare the workspace
clear;clc;
%% Import the data
params = load("C:\Users\28605\Desktop\美赛预备\机器学习\大黄蜂识别\params_2022_01_12__19_12_54.mat");
datasetPath = fullfile('C:\Users\28605\Desktop\美赛预备\机器学习\大黄蜂识别\dataset\');
imds = imageDatastore(datasetPath,...
    'IncludeSubfolders',true,'LabelSource','foldernames');
% display the number of images per label in the dataset
countEachLabel(imds)
trainFraction = 0.8; % use 80% of each label for training
[imdsTrain,imdsValidation] = splitEachLabel(imds,trainFraction,'randomized');
% check the size of the first image
img = readimage(imds,1);
size(img)
%% Define the CNN architecture
lgraph = layerGraph();
tempLayers = [
    imageInputLayer([254 254 3],"Name","imageinput")
    convolution2dLayer([3 3],32,"Name","Conv1","Padding","same","Stride",[2 2],"Bias",params.Conv1.Bias,"Weights",params.Conv1.Weights)
    batchNormalizationLayer("Name","bn_Conv1","Epsilon",0.001,"Offset",params.bn_Conv1.Offset,"Scale",params.bn_Conv1.Scale,"TrainedMean",params.bn_Conv1.TrainedMean,"TrainedVariance",params.bn_Conv1.TrainedVariance)
    clippedReluLayer(6,"Name","Conv1_relu")
    groupedConvolution2dLayer([3 3],1,32,"Name","expanded_conv_depthwise","Padding","same","Bias",params.expanded_conv_depthwise.Bias,"Weights",params.expanded_conv_depthwise.Weights)
    batchNormalizationLayer("Name","expanded_conv_depthwise_BN","Epsilon",0.001,"Offset",params.expanded_conv_depthwise_BN.Offset,"Scale",params.expanded_conv_depthwise_BN.Scale,"TrainedMean",params.expanded_conv_depthwise_BN.TrainedMean,"TrainedVariance",params.expanded_conv_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","expanded_conv_depthwise_relu")
    convolution2dLayer([1 1],16,"Name","expanded_conv_project","Padding","same","Bias",params.expanded_conv_project.Bias,"Weights",params.expanded_conv_project.Weights)
    batchNormalizationLayer("Name","expanded_conv_project_BN","Epsilon",0.001,"Offset",params.expanded_conv_project_BN.Offset,"Scale",params.expanded_conv_project_BN.Scale,"TrainedMean",params.expanded_conv_project_BN.TrainedMean,"TrainedVariance",params.expanded_conv_project_BN.TrainedVariance)
    convolution2dLayer([1 1],96,"Name","block_1_expand","Padding","same","Bias",params.block_1_expand.Bias,"Weights",params.block_1_expand.Weights)
    batchNormalizationLayer("Name","block_1_expand_BN","Epsilon",0.001,"Offset",params.block_1_expand_BN.Offset,"Scale",params.block_1_expand_BN.Scale,"TrainedMean",params.block_1_expand_BN.TrainedMean,"TrainedVariance",params.block_1_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_1_expand_relu")
    groupedConvolution2dLayer([3 3],1,96,"Name","block_1_depthwise","Padding","same","Stride",[2 2],"Bias",params.block_1_depthwise.Bias,"Weights",params.block_1_depthwise.Weights)
    batchNormalizationLayer("Name","block_1_depthwise_BN","Epsilon",0.001,"Offset",params.block_1_depthwise_BN.Offset,"Scale",params.block_1_depthwise_BN.Scale,"TrainedMean",params.block_1_depthwise_BN.TrainedMean,"TrainedVariance",params.block_1_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_1_depthwise_relu")
    convolution2dLayer([1 1],24,"Name","block_1_project","Padding","same","Bias",params.block_1_project.Bias,"Weights",params.block_1_project.Weights)
    batchNormalizationLayer("Name","block_1_project_BN","Epsilon",0.001,"Offset",params.block_1_project_BN.Offset,"Scale",params.block_1_project_BN.Scale,"TrainedMean",params.block_1_project_BN.TrainedMean,"TrainedVariance",params.block_1_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],144,"Name","block_2_expand","Padding","same","Bias",params.block_2_expand.Bias,"Weights",params.block_2_expand.Weights)
    batchNormalizationLayer("Name","block_2_expand_BN","Epsilon",0.001,"Offset",params.block_2_expand_BN.Offset,"Scale",params.block_2_expand_BN.Scale,"TrainedMean",params.block_2_expand_BN.TrainedMean,"TrainedVariance",params.block_2_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_2_expand_relu")
    groupedConvolution2dLayer([3 3],1,144,"Name","block_2_depthwise","Padding","same","Bias",params.block_2_depthwise.Bias,"Weights",params.block_2_depthwise.Weights)
    batchNormalizationLayer("Name","block_2_depthwise_BN","Epsilon",0.001,"Offset",params.block_2_depthwise_BN.Offset,"Scale",params.block_2_depthwise_BN.Scale,"TrainedMean",params.block_2_depthwise_BN.TrainedMean,"TrainedVariance",params.block_2_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_2_depthwise_relu")
    convolution2dLayer([1 1],24,"Name","block_2_project","Padding","same","Bias",params.block_2_project.Bias,"Weights",params.block_2_project.Weights)
    batchNormalizationLayer("Name","block_2_project_BN","Epsilon",0.001,"Offset",params.block_2_project_BN.Offset,"Scale",params.block_2_project_BN.Scale,"TrainedMean",params.block_2_project_BN.TrainedMean,"TrainedVariance",params.block_2_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    additionLayer(2,"Name","block_2_add")
    convolution2dLayer([1 1],144,"Name","block_3_expand","Padding","same","Bias",params.block_3_expand.Bias,"Weights",params.block_3_expand.Weights)
    batchNormalizationLayer("Name","block_3_expand_BN","Epsilon",0.001,"Offset",params.block_3_expand_BN.Offset,"Scale",params.block_3_expand_BN.Scale,"TrainedMean",params.block_3_expand_BN.TrainedMean,"TrainedVariance",params.block_3_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_3_expand_relu")
    groupedConvolution2dLayer([3 3],1,144,"Name","block_3_depthwise","Padding","same","Stride",[2 2],"Bias",params.block_3_depthwise.Bias,"Weights",params.block_3_depthwise.Weights)
    batchNormalizationLayer("Name","block_3_depthwise_BN","Epsilon",0.001,"Offset",params.block_3_depthwise_BN.Offset,"Scale",params.block_3_depthwise_BN.Scale,"TrainedMean",params.block_3_depthwise_BN.TrainedMean,"TrainedVariance",params.block_3_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_3_depthwise_relu")
    convolution2dLayer([1 1],32,"Name","block_3_project","Padding","same","Bias",params.block_3_project.Bias,"Weights",params.block_3_project.Weights)
    batchNormalizationLayer("Name","block_3_project_BN","Epsilon",0.001,"Offset",params.block_3_project_BN.Offset,"Scale",params.block_3_project_BN.Scale,"TrainedMean",params.block_3_project_BN.TrainedMean,"TrainedVariance",params.block_3_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],192,"Name","block_4_expand","Padding","same","Bias",params.block_4_expand.Bias,"Weights",params.block_4_expand.Weights)
    batchNormalizationLayer("Name","block_4_expand_BN","Epsilon",0.001,"Offset",params.block_4_expand_BN.Offset,"Scale",params.block_4_expand_BN.Scale,"TrainedMean",params.block_4_expand_BN.TrainedMean,"TrainedVariance",params.block_4_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_4_expand_relu")
    groupedConvolution2dLayer([3 3],1,192,"Name","block_4_depthwise","Padding","same","Bias",params.block_4_depthwise.Bias,"Weights",params.block_4_depthwise.Weights)
    batchNormalizationLayer("Name","block_4_depthwise_BN","Epsilon",0.001,"Offset",params.block_4_depthwise_BN.Offset,"Scale",params.block_4_depthwise_BN.Scale,"TrainedMean",params.block_4_depthwise_BN.TrainedMean,"TrainedVariance",params.block_4_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_4_depthwise_relu")
    convolution2dLayer([1 1],32,"Name","block_4_project","Padding","same","Bias",params.block_4_project.Bias,"Weights",params.block_4_project.Weights)
    batchNormalizationLayer("Name","block_4_project_BN","Epsilon",0.001,"Offset",params.block_4_project_BN.Offset,"Scale",params.block_4_project_BN.Scale,"TrainedMean",params.block_4_project_BN.TrainedMean,"TrainedVariance",params.block_4_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","block_4_add");
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],192,"Name","block_5_expand","Padding","same","Bias",params.block_5_expand.Bias,"Weights",params.block_5_expand.Weights)
    batchNormalizationLayer("Name","block_5_expand_BN","Epsilon",0.001,"Offset",params.block_5_expand_BN.Offset,"Scale",params.block_5_expand_BN.Scale,"TrainedMean",params.block_5_expand_BN.TrainedMean,"TrainedVariance",params.block_5_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_5_expand_relu")
    groupedConvolution2dLayer([3 3],1,192,"Name","block_5_depthwise","Padding","same","Bias",params.block_5_depthwise.Bias,"Weights",params.block_5_depthwise.Weights)
    batchNormalizationLayer("Name","block_5_depthwise_BN","Epsilon",0.001,"Offset",params.block_5_depthwise_BN.Offset,"Scale",params.block_5_depthwise_BN.Scale,"TrainedMean",params.block_5_depthwise_BN.TrainedMean,"TrainedVariance",params.block_5_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_5_depthwise_relu")
    convolution2dLayer([1 1],32,"Name","block_5_project","Padding","same","Bias",params.block_5_project.Bias,"Weights",params.block_5_project.Weights)
    batchNormalizationLayer("Name","block_5_project_BN","Epsilon",0.001,"Offset",params.block_5_project_BN.Offset,"Scale",params.block_5_project_BN.Scale,"TrainedMean",params.block_5_project_BN.TrainedMean,"TrainedVariance",params.block_5_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    additionLayer(2,"Name","block_5_add")
    convolution2dLayer([1 1],192,"Name","block_6_expand","Padding","same","Bias",params.block_6_expand.Bias,"Weights",params.block_6_expand.Weights)
    batchNormalizationLayer("Name","block_6_expand_BN","Epsilon",0.001,"Offset",params.block_6_expand_BN.Offset,"Scale",params.block_6_expand_BN.Scale,"TrainedMean",params.block_6_expand_BN.TrainedMean,"TrainedVariance",params.block_6_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_6_expand_relu")
    groupedConvolution2dLayer([3 3],1,192,"Name","block_6_depthwise","Padding","same","Stride",[2 2],"Bias",params.block_6_depthwise.Bias,"Weights",params.block_6_depthwise.Weights)
    batchNormalizationLayer("Name","block_6_depthwise_BN","Epsilon",0.001,"Offset",params.block_6_depthwise_BN.Offset,"Scale",params.block_6_depthwise_BN.Scale,"TrainedMean",params.block_6_depthwise_BN.TrainedMean,"TrainedVariance",params.block_6_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_6_depthwise_relu")
    convolution2dLayer([1 1],64,"Name","block_6_project","Padding","same","Bias",params.block_6_project.Bias,"Weights",params.block_6_project.Weights)
    batchNormalizationLayer("Name","block_6_project_BN","Epsilon",0.001,"Offset",params.block_6_project_BN.Offset,"Scale",params.block_6_project_BN.Scale,"TrainedMean",params.block_6_project_BN.TrainedMean,"TrainedVariance",params.block_6_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],384,"Name","block_7_expand","Padding","same","Bias",params.block_7_expand.Bias,"Weights",params.block_7_expand.Weights)
    batchNormalizationLayer("Name","block_7_expand_BN","Epsilon",0.001,"Offset",params.block_7_expand_BN.Offset,"Scale",params.block_7_expand_BN.Scale,"TrainedMean",params.block_7_expand_BN.TrainedMean,"TrainedVariance",params.block_7_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_7_expand_relu")
    groupedConvolution2dLayer([3 3],1,384,"Name","block_7_depthwise","Padding","same","Bias",params.block_7_depthwise.Bias,"Weights",params.block_7_depthwise.Weights)
    batchNormalizationLayer("Name","block_7_depthwise_BN","Epsilon",0.001,"Offset",params.block_7_depthwise_BN.Offset,"Scale",params.block_7_depthwise_BN.Scale,"TrainedMean",params.block_7_depthwise_BN.TrainedMean,"TrainedVariance",params.block_7_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_7_depthwise_relu")
    convolution2dLayer([1 1],64,"Name","block_7_project","Padding","same","Bias",params.block_7_project.Bias,"Weights",params.block_7_project.Weights)
    batchNormalizationLayer("Name","block_7_project_BN","Epsilon",0.001,"Offset",params.block_7_project_BN.Offset,"Scale",params.block_7_project_BN.Scale,"TrainedMean",params.block_7_project_BN.TrainedMean,"TrainedVariance",params.block_7_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","block_7_add");
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],384,"Name","block_8_expand","Padding","same","Bias",params.block_8_expand.Bias,"Weights",params.block_8_expand.Weights)
    batchNormalizationLayer("Name","block_8_expand_BN","Epsilon",0.001,"Offset",params.block_8_expand_BN.Offset,"Scale",params.block_8_expand_BN.Scale,"TrainedMean",params.block_8_expand_BN.TrainedMean,"TrainedVariance",params.block_8_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_8_expand_relu")
    groupedConvolution2dLayer([3 3],1,384,"Name","block_8_depthwise","Padding","same","Bias",params.block_8_depthwise.Bias,"Weights",params.block_8_depthwise.Weights)
    batchNormalizationLayer("Name","block_8_depthwise_BN","Epsilon",0.001,"Offset",params.block_8_depthwise_BN.Offset,"Scale",params.block_8_depthwise_BN.Scale,"TrainedMean",params.block_8_depthwise_BN.TrainedMean,"TrainedVariance",params.block_8_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_8_depthwise_relu")
    convolution2dLayer([1 1],64,"Name","block_8_project","Padding","same","Bias",params.block_8_project.Bias,"Weights",params.block_8_project.Weights)
    batchNormalizationLayer("Name","block_8_project_BN","Epsilon",0.001,"Offset",params.block_8_project_BN.Offset,"Scale",params.block_8_project_BN.Scale,"TrainedMean",params.block_8_project_BN.TrainedMean,"TrainedVariance",params.block_8_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","block_8_add");
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],384,"Name","block_9_expand","Padding","same","Bias",params.block_9_expand.Bias,"Weights",params.block_9_expand.Weights)
    batchNormalizationLayer("Name","block_9_expand_BN","Epsilon",0.001,"Offset",params.block_9_expand_BN.Offset,"Scale",params.block_9_expand_BN.Scale,"TrainedMean",params.block_9_expand_BN.TrainedMean,"TrainedVariance",params.block_9_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_9_expand_relu")
    groupedConvolution2dLayer([3 3],1,384,"Name","block_9_depthwise","Padding","same","Bias",params.block_9_depthwise.Bias,"Weights",params.block_9_depthwise.Weights)
    batchNormalizationLayer("Name","block_9_depthwise_BN","Epsilon",0.001,"Offset",params.block_9_depthwise_BN.Offset,"Scale",params.block_9_depthwise_BN.Scale,"TrainedMean",params.block_9_depthwise_BN.TrainedMean,"TrainedVariance",params.block_9_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_9_depthwise_relu")
    convolution2dLayer([1 1],64,"Name","block_9_project","Padding","same","Bias",params.block_9_project.Bias,"Weights",params.block_9_project.Weights)
    batchNormalizationLayer("Name","block_9_project_BN","Epsilon",0.001,"Offset",params.block_9_project_BN.Offset,"Scale",params.block_9_project_BN.Scale,"TrainedMean",params.block_9_project_BN.TrainedMean,"TrainedVariance",params.block_9_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    additionLayer(2,"Name","block_9_add")
    convolution2dLayer([1 1],384,"Name","block_10_expand","Padding","same","Bias",params.block_10_expand.Bias,"Weights",params.block_10_expand.Weights)
    batchNormalizationLayer("Name","block_10_expand_BN","Epsilon",0.001,"Offset",params.block_10_expand_BN.Offset,"Scale",params.block_10_expand_BN.Scale,"TrainedMean",params.block_10_expand_BN.TrainedMean,"TrainedVariance",params.block_10_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_10_expand_relu")
    groupedConvolution2dLayer([3 3],1,384,"Name","block_10_depthwise","Padding","same","Bias",params.block_10_depthwise.Bias,"Weights",params.block_10_depthwise.Weights)
    batchNormalizationLayer("Name","block_10_depthwise_BN","Epsilon",0.001,"Offset",params.block_10_depthwise_BN.Offset,"Scale",params.block_10_depthwise_BN.Scale,"TrainedMean",params.block_10_depthwise_BN.TrainedMean,"TrainedVariance",params.block_10_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_10_depthwise_relu")
    convolution2dLayer([1 1],96,"Name","block_10_project","Padding","same","Bias",params.block_10_project.Bias,"Weights",params.block_10_project.Weights)
    batchNormalizationLayer("Name","block_10_project_BN","Epsilon",0.001,"Offset",params.block_10_project_BN.Offset,"Scale",params.block_10_project_BN.Scale,"TrainedMean",params.block_10_project_BN.TrainedMean,"TrainedVariance",params.block_10_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],576,"Name","block_11_expand","Padding","same","Bias",params.block_11_expand.Bias,"Weights",params.block_11_expand.Weights)
    batchNormalizationLayer("Name","block_11_expand_BN","Epsilon",0.001,"Offset",params.block_11_expand_BN.Offset,"Scale",params.block_11_expand_BN.Scale,"TrainedMean",params.block_11_expand_BN.TrainedMean,"TrainedVariance",params.block_11_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_11_expand_relu")
    groupedConvolution2dLayer([3 3],1,576,"Name","block_11_depthwise","Padding","same","Bias",params.block_11_depthwise.Bias,"Weights",params.block_11_depthwise.Weights)
    batchNormalizationLayer("Name","block_11_depthwise_BN","Epsilon",0.001,"Offset",params.block_11_depthwise_BN.Offset,"Scale",params.block_11_depthwise_BN.Scale,"TrainedMean",params.block_11_depthwise_BN.TrainedMean,"TrainedVariance",params.block_11_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_11_depthwise_relu")
    convolution2dLayer([1 1],96,"Name","block_11_project","Padding","same","Bias",params.block_11_project.Bias,"Weights",params.block_11_project.Weights)
    batchNormalizationLayer("Name","block_11_project_BN","Epsilon",0.001,"Offset",params.block_11_project_BN.Offset,"Scale",params.block_11_project_BN.Scale,"TrainedMean",params.block_11_project_BN.TrainedMean,"TrainedVariance",params.block_11_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","block_11_add");
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],576,"Name","block_12_expand","Padding","same","Bias",params.block_12_expand.Bias,"Weights",params.block_12_expand.Weights)
    batchNormalizationLayer("Name","block_12_expand_BN","Epsilon",0.001,"Offset",params.block_12_expand_BN.Offset,"Scale",params.block_12_expand_BN.Scale,"TrainedMean",params.block_12_expand_BN.TrainedMean,"TrainedVariance",params.block_12_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_12_expand_relu")
    groupedConvolution2dLayer([3 3],1,576,"Name","block_12_depthwise","Padding","same","Bias",params.block_12_depthwise.Bias,"Weights",params.block_12_depthwise.Weights)
    batchNormalizationLayer("Name","block_12_depthwise_BN","Epsilon",0.001,"Offset",params.block_12_depthwise_BN.Offset,"Scale",params.block_12_depthwise_BN.Scale,"TrainedMean",params.block_12_depthwise_BN.TrainedMean,"TrainedVariance",params.block_12_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_12_depthwise_relu")
    convolution2dLayer([1 1],96,"Name","block_12_project","Padding","same","Bias",params.block_12_project.Bias,"Weights",params.block_12_project.Weights)
    batchNormalizationLayer("Name","block_12_project_BN","Epsilon",0.001,"Offset",params.block_12_project_BN.Offset,"Scale",params.block_12_project_BN.Scale,"TrainedMean",params.block_12_project_BN.TrainedMean,"TrainedVariance",params.block_12_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    additionLayer(2,"Name","block_12_add")
    convolution2dLayer([1 1],576,"Name","block_13_expand","Padding","same","Bias",params.block_13_expand.Bias,"Weights",params.block_13_expand.Weights)
    batchNormalizationLayer("Name","block_13_expand_BN","Epsilon",0.001,"Offset",params.block_13_expand_BN.Offset,"Scale",params.block_13_expand_BN.Scale,"TrainedMean",params.block_13_expand_BN.TrainedMean,"TrainedVariance",params.block_13_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_13_expand_relu")
    groupedConvolution2dLayer([3 3],1,576,"Name","block_13_depthwise","Padding","same","Stride",[2 2],"Bias",params.block_13_depthwise.Bias,"Weights",params.block_13_depthwise.Weights)
    batchNormalizationLayer("Name","block_13_depthwise_BN","Epsilon",0.001,"Offset",params.block_13_depthwise_BN.Offset,"Scale",params.block_13_depthwise_BN.Scale,"TrainedMean",params.block_13_depthwise_BN.TrainedMean,"TrainedVariance",params.block_13_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_13_depthwise_relu")
    convolution2dLayer([1 1],160,"Name","block_13_project","Padding","same","Bias",params.block_13_project.Bias,"Weights",params.block_13_project.Weights)
    batchNormalizationLayer("Name","block_13_project_BN","Epsilon",0.001,"Offset",params.block_13_project_BN.Offset,"Scale",params.block_13_project_BN.Scale,"TrainedMean",params.block_13_project_BN.TrainedMean,"TrainedVariance",params.block_13_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],960,"Name","block_14_expand","Padding","same","Bias",params.block_14_expand.Bias,"Weights",params.block_14_expand.Weights)
    batchNormalizationLayer("Name","block_14_expand_BN","Epsilon",0.001,"Offset",params.block_14_expand_BN.Offset,"Scale",params.block_14_expand_BN.Scale,"TrainedMean",params.block_14_expand_BN.TrainedMean,"TrainedVariance",params.block_14_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_14_expand_relu")
    groupedConvolution2dLayer([3 3],1,960,"Name","block_14_depthwise","Padding","same","Bias",params.block_14_depthwise.Bias,"Weights",params.block_14_depthwise.Weights)
    batchNormalizationLayer("Name","block_14_depthwise_BN","Epsilon",0.001,"Offset",params.block_14_depthwise_BN.Offset,"Scale",params.block_14_depthwise_BN.Scale,"TrainedMean",params.block_14_depthwise_BN.TrainedMean,"TrainedVariance",params.block_14_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_14_depthwise_relu")
    convolution2dLayer([1 1],160,"Name","block_14_project","Padding","same","Bias",params.block_14_project.Bias,"Weights",params.block_14_project.Weights)
    batchNormalizationLayer("Name","block_14_project_BN","Epsilon",0.001,"Offset",params.block_14_project_BN.Offset,"Scale",params.block_14_project_BN.Scale,"TrainedMean",params.block_14_project_BN.TrainedMean,"TrainedVariance",params.block_14_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","block_14_add");
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    convolution2dLayer([1 1],960,"Name","block_15_expand","Padding","same","Bias",params.block_15_expand.Bias,"Weights",params.block_15_expand.Weights)
    batchNormalizationLayer("Name","block_15_expand_BN","Epsilon",0.001,"Offset",params.block_15_expand_BN.Offset,"Scale",params.block_15_expand_BN.Scale,"TrainedMean",params.block_15_expand_BN.TrainedMean,"TrainedVariance",params.block_15_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_15_expand_relu")
    groupedConvolution2dLayer([3 3],1,960,"Name","block_15_depthwise","Padding","same","Bias",params.block_15_depthwise.Bias,"Weights",params.block_15_depthwise.Weights)
    batchNormalizationLayer("Name","block_15_depthwise_BN","Epsilon",0.001,"Offset",params.block_15_depthwise_BN.Offset,"Scale",params.block_15_depthwise_BN.Scale,"TrainedMean",params.block_15_depthwise_BN.TrainedMean,"TrainedVariance",params.block_15_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_15_depthwise_relu")
    convolution2dLayer([1 1],160,"Name","block_15_project","Padding","same","Bias",params.block_15_project.Bias,"Weights",params.block_15_project.Weights)
    batchNormalizationLayer("Name","block_15_project_BN","Epsilon",0.001,"Offset",params.block_15_project_BN.Offset,"Scale",params.block_15_project_BN.Scale,"TrainedMean",params.block_15_project_BN.TrainedMean,"TrainedVariance",params.block_15_project_BN.TrainedVariance)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    additionLayer(2,"Name","block_15_add")
    convolution2dLayer([1 1],960,"Name","block_16_expand","Padding","same","Bias",params.block_16_expand.Bias,"Weights",params.block_16_expand.Weights)
    batchNormalizationLayer("Name","block_16_expand_BN","Epsilon",0.001,"Offset",params.block_16_expand_BN.Offset,"Scale",params.block_16_expand_BN.Scale,"TrainedMean",params.block_16_expand_BN.TrainedMean,"TrainedVariance",params.block_16_expand_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_16_expand_relu")
    groupedConvolution2dLayer([3 3],1,960,"Name","block_16_depthwise","Padding","same","Bias",params.block_16_depthwise.Bias,"Weights",params.block_16_depthwise.Weights)
    batchNormalizationLayer("Name","block_16_depthwise_BN","Epsilon",0.001,"Offset",params.block_16_depthwise_BN.Offset,"Scale",params.block_16_depthwise_BN.Scale,"TrainedMean",params.block_16_depthwise_BN.TrainedMean,"TrainedVariance",params.block_16_depthwise_BN.TrainedVariance)
    clippedReluLayer(6,"Name","block_16_depthwise_relu")
    convolution2dLayer([1 1],320,"Name","block_16_project","Padding","same","Bias",params.block_16_project.Bias,"Weights",params.block_16_project.Weights)
    batchNormalizationLayer("Name","block_16_project_BN","Epsilon",0.001,"Offset",params.block_16_project_BN.Offset,"Scale",params.block_16_project_BN.Scale,"TrainedMean",params.block_16_project_BN.TrainedMean,"TrainedVariance",params.block_16_project_BN.TrainedVariance)
    convolution2dLayer([1 1],1280,"Name","Conv_1","Bias",params.Conv_1.Bias,"Weights",params.Conv_1.Weights)
    batchNormalizationLayer("Name","Conv_1_bn","Epsilon",0.001,"Offset",params.Conv_1_bn.Offset,"Scale",params.Conv_1_bn.Scale,"TrainedMean",params.Conv_1_bn.TrainedMean,"TrainedVariance",params.Conv_1_bn.TrainedVariance)
    clippedReluLayer(6,"Name","out_relu")
    globalAveragePooling2dLayer("Name","global_average_pooling2d_1")
    fullyConnectedLayer(2,"Name","Logits")
    softmaxLayer("Name","softmax")
    classificationLayer("Name","classoutput")];
lgraph = addLayers(lgraph,tempLayers);

% Clean up the helper variable
clear tempLayers;

lgraph = connectLayers(lgraph,"block_1_project_BN","block_2_expand");
lgraph = connectLayers(lgraph,"block_1_project_BN","block_2_add/in2");
lgraph = connectLayers(lgraph,"block_2_project_BN","block_2_add/in1");
lgraph = connectLayers(lgraph,"block_3_project_BN","block_4_expand");
lgraph = connectLayers(lgraph,"block_3_project_BN","block_4_add/in2");
lgraph = connectLayers(lgraph,"block_4_project_BN","block_4_add/in1");
lgraph = connectLayers(lgraph,"block_4_add","block_5_expand");
lgraph = connectLayers(lgraph,"block_4_add","block_5_add/in2");
lgraph = connectLayers(lgraph,"block_5_project_BN","block_5_add/in1");
lgraph = connectLayers(lgraph,"block_6_project_BN","block_7_expand");
lgraph = connectLayers(lgraph,"block_6_project_BN","block_7_add/in2");
lgraph = connectLayers(lgraph,"block_7_project_BN","block_7_add/in1");
lgraph = connectLayers(lgraph,"block_7_add","block_8_expand");
lgraph = connectLayers(lgraph,"block_7_add","block_8_add/in2");
lgraph = connectLayers(lgraph,"block_8_project_BN","block_8_add/in1");
lgraph = connectLayers(lgraph,"block_8_add","block_9_expand");
lgraph = connectLayers(lgraph,"block_8_add","block_9_add/in2");
lgraph = connectLayers(lgraph,"block_9_project_BN","block_9_add/in1");
lgraph = connectLayers(lgraph,"block_10_project_BN","block_11_expand");
lgraph = connectLayers(lgraph,"block_10_project_BN","block_11_add/in2");
lgraph = connectLayers(lgraph,"block_11_project_BN","block_11_add/in1");
lgraph = connectLayers(lgraph,"block_11_add","block_12_expand");
lgraph = connectLayers(lgraph,"block_11_add","block_12_add/in2");
lgraph = connectLayers(lgraph,"block_12_project_BN","block_12_add/in1");
lgraph = connectLayers(lgraph,"block_13_project_BN","block_14_expand");
lgraph = connectLayers(lgraph,"block_13_project_BN","block_14_add/in2");
lgraph = connectLayers(lgraph,"block_14_project_BN","block_14_add/in1");
lgraph = connectLayers(lgraph,"block_14_add","block_15_expand");
lgraph = connectLayers(lgraph,"block_14_add","block_15_add/in2");
lgraph = connectLayers(lgraph,"block_15_project_BN","block_15_add/in1");
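% Optional sanity check (an addition, not in the original script): analyzeNetwork
% validates the assembled layer graph and flags any missing or mismatched connections.
% analyzeNetwork(lgraph)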
options = trainingOptions('sgdm',...
    'MaxEpochs',50,...
    'ValidationData',imdsValidation,...
    'ValidationFrequency',30,...
    'Verbose',false,...
    'InitialLearnRate',1e-3,...
    'Plots','training-progress',...
    'Shuffle','every-epoch',...
    'ExecutionEnvironment','cpu');
%% Train and save the network
net = trainNetwork(imdsTrain,lgraph,options);
save('CNNtestmini2','net')
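
A quick evaluation step can be appended to main1.m (an addition, not in the original script): classify the held-out validation images and compute the accuracy. augmentedImageDatastore is used here only to resize images to the 254x254 input the network expects.

% Hedged sketch: accuracy on the validation split
augVal = augmentedImageDatastore([254 254],imdsValidation); % resize to the input size
YPred = classify(net,augVal);
validationAccuracy = mean(YPred == imdsValidation.Labels)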

Note: params and lgraph are generated automatically by the Deep Learning Toolbox and need to be added to the script manually.
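
As an alternative to pasting the exported layer graph, the pretrained network can be loaded and its classification head replaced in a few lines. The sketch below is not the original workflow: it assumes the "Deep Learning Toolbox Model for MobileNet-v2 Network" support package is installed, and the layer names ("Logits", "ClassificationLayer_Logits") follow MATLAB's pretrained mobilenetv2 and should be confirmed with analyzeNetwork before use.

% Hedged alternative to the exported layer graph in main1.m
pretrained = mobilenetv2;                                   % pretrained on ImageNet
lgraph = layerGraph(pretrained);
newFC = fullyConnectedLayer(2,"Name","Logits", ...          % two classes, as above
    "WeightLearnRateFactor",10,"BiasLearnRateFactor",10);
lgraph = replaceLayer(lgraph,"Logits",newFC);
lgraph = replaceLayer(lgraph,"ClassificationLayer_Logits", ...
    classificationLayer("Name","classoutput"));
inputSize = pretrained.Layers(1).InputSize;                 % 224x224x3 for this model
augTrain = augmentedImageDatastore(inputSize(1:2),imdsTrain);
augVal   = augmentedImageDatastore(inputSize(1:2),imdsValidation);
% To retrain, rebuild trainingOptions with 'ValidationData',augVal and call
% net = trainNetwork(augTrain,lgraph,options);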

Prediction code
main2.m

clear;clc;
dbstop if error
load CNNtestmini2.mat
net.Layers % inspect the layers of the loaded network
%read the image to classify
[file,path] = uigetfile('*');
image = fullfile(path,file);
I = imresize(imread(image),[254,254]);
%adjust size of image
% sz = net.Layers(1).InputSize;
% I = I(1:sz(1),1:sz(2),1:sz(3));
%classify
label = classify(net,I);
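
A small usage addition, not part of the original main2.m: re-run classify to get the class scores and show the image together with the predicted label.

% Optional: visualize the prediction with its confidence
[label,scores] = classify(net,I);
figure
imshow(I)
title(sprintf('%s (%.1f%%)',string(label),100*max(scores)))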

Text Classification
Built using an LSTM network
main3.m

clear;clc;
dbstop if error
%% Import the dataset
filename = "data.xlsx";
data = readtable(filename,"TextType","string");
data = table2cell(data);
for i = size(data,1):-1:1
    if ismissing(data{i,1})
        data(i,:) = [];
    end
end
data = data([1:10,12:1000],:); % keep rows 1-1000, excluding row 11 (held out for prediction in main4.m)
data = cell2table(data,"VariableNames",{'Description','Category'});
head(data);
data.Category = categorical(data.Category);
figure(1)
histogram(data.Category);
xlabel('Class');
ylabel('Frequency');
title('Class Distribution');
cvp = cvpartition(data.Category,'HoldOut',0.2);
dataTrain = data(training(cvp),:);
dataValidation = data(test(cvp),:);
textDataTrain = dataTrain.Description;
textDataValidation = dataValidation.Description;
YTrain = dataTrain.Category;
YValidation = dataValidation.Category;

figure(2)
wordcloud(textDataTrain);
title("Training Data");

% Text preprocessing
% documentsTrain = preprocessText(textDataTrain);
documentsTrain = tokenizedDocument(textDataTrain);
documentsTrain = lower(documentsTrain);
documentsTrain = erasePunctuation(documentsTrain);
% documentsValidation = preprocessText(textDataValidation);
documentsValidation = tokenizedDocument(textDataValidation);
documentsValidation = lower(documentsValidation);
documentsValidation = erasePunctuation(documentsValidation);
documentsTrain(1:5);
% Build a word encoding from the training documents
enc = wordEncoding(documentsTrain);
documentLengths = doclength(documentsTrain);
figure(3)
histogram(documentLengths)
title("Document Lengths");
xlabel("Length");
ylabel("Number of Documents");
sequenceLength = 60;
XTrain = doc2sequence(enc,documentsTrain,'Length',sequenceLength);
XTrain(1:5);
XValidation = doc2sequence(enc,documentsValidation,'Length',sequenceLength);

% Build the LSTM model
inputSize = 1;
embeddingDimension = 50;
numHiddenUnits = 80;
numWords = enc.NumWords;
numClasses = numel(categories(YTrain));
layers = [...
    sequenceInputLayer(inputSize)
    wordEmbeddingLayer(embeddingDimension,numWords)
    lstmLayer(numHiddenUnits,'OutputMode','last')
    fullyConnectedLayer(numClasses)
    softmaxLayer
    classificationLayer];
% layers = [
%     sequenceInputLayer(1,"Name","input")
%     lstmLayer(128,"Name","lstm","OutputMode","last")
%     dropoutLayer(0.5,"Name","dropout")
%     fullyConnectedLayer(2,"Name","fc")
%     softmaxLayer("Name","softmax")
%     classificationLayer("Name","classification")];
% Set the training options
options = trainingOptions("adam",...
    "MiniBatchSize",16,...
    'Plots','training-progress',...
    'GradientThreshold',2,...
    'MaxEpochs',60,...
    'ValidationData',{XValidation,YValidation},...
    'Shuffle','every-epoch',...
    'Verbose',false);
% YTrain = cellstr(YTrain);
net = trainNetwork(XTrain,YTrain,layers,options);
save('LSTMtestmini2','net','enc') % save the word encoding so main4.m can reuse it
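
A quick check worth adding at the end of main3.m (an addition, not in the original): accuracy on the held-out validation sequences.

% Hedged sketch: validation accuracy of the trained LSTM
YPred = classify(net,XValidation);
validationAccuracy = mean(YPred == YValidation)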

Prediction code
main4.m

clear;clc;
dbstop if error
load LSTMtestmini2.mat
filename = "data.xlsx";
data = readtable(filename,"TextType","string");
data = table2cell(data);
for i = size(data,1):-1:1
    if ismissing(data{i,1})
        data(i,:) = [];
    end
end
reportsNew = data{11,1}; % the report held out from training in main3.m
% Text preprocessing (must mirror the training pipeline in main3.m)
documentsNew = tokenizedDocument(reportsNew);
documentsNew = lower(documentsNew);
documentsNew = erasePunctuation(documentsNew);
% Reuse the word encoding 'enc' saved with the network in main3.m; rebuilding an
% encoding from the new text would give word indices that do not match the
% trained embedding layer.
sequenceLength = 60; % same truncation/padding length as used in training
XNew = doc2sequence(enc,documentsNew,'Length',sequenceLength);
[labelsNew,scores] = classify(net,XNew);
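
Finally, a small reporting sketch (an addition, not in the original): print the predicted category together with its score, where scores is the row of class probabilities returned by classify.

% Usage sketch: report the prediction
fprintf('Predicted category: %s (score %.3f)\n',string(labelsNew),max(scores));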

