SVM Learning and Application

Learning and applying SVM classifiers in MATLAB

%% Train an SVM Classifier
clc
clear
close all;
% Load Fisher's iris data set. Remove the sepal measurements (columns 1 and 2)
% and all setosa observations, keeping only the petal lengths and widths of
% versicolor and virginica.
load fisheriris
inds = ~strcmp(species,'setosa');
X = meas(inds,3:4);
y = species(inds);

% Train a support vector machine classifier using the processed data set.
SVMModel = fitcsvm(X,y)

% SVMModel is a trained ClassificationSVM classifier. Its properties can be
% displayed with dot notation, for example:
classOrder = SVMModel.ClassNames

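% Supplementary sketch (not part of the original example): a couple of other
% documented ClassificationSVM properties can be read the same way.
numObs = SVMModel.NumObservations       % number of training observations
svLabels = SVMModel.SupportVectorLabels % +1/-1 labels of the support vectors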
% Draw a scatter plot of the data and circle the support vectors. The support
% vectors are the observations that occur on or beyond their estimated class
% boundaries.
sv = SVMModel.SupportVectors;
figure
gscatter(X(:,1),X(:,2),y)
hold on
plot(sv(:,1),sv(:,2),'ko','MarkerSize',10)
legend('versicolor','virginica','Support Vector')
hold off
sv = SVMModel.SupportVectors; % Support vectors
beta = SVMModel.Beta; % Linear predictor coefficients
b = SVMModel.Bias; % Bias term
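% Supplementary sketch (not part of the original example): classify new
% observations with the trained model. The petal measurements below are
% made-up values used only for illustration.
newX = [5.0 1.8; 4.2 1.3];             % [petal length, petal width] in cm
[newLabels,newScores] = predict(SVMModel,newX)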
%% Decision Boundary and Margin Lines of the Binary SVM Classifier
figure
hold on
gscatter(X(:,1),X(:,2),y)
plot(sv(:,1),sv(:,2),'ko','MarkerSize',10)

X1 = linspace(min(X(:,1)),max(X(:,1)),100);
X2 = -(beta(1)/beta(2)*X1)-b/beta(2);
plot(X1,X2,'-')

m = 1/sqrt(beta(1)^2 + beta(2)^2);  % Margin half-width
X1margin_low = X1+beta(1)*m^2;
X2margin_low = X2+beta(2)*m^2;
X1margin_high = X1-beta(1)*m^2;
X2margin_high = X2-beta(2)*m^2;
plot(X1margin_high,X2margin_high,'b--')
plot(X1margin_low,X2margin_low,'r--')
xlabel('X_1 (Petal Length in cm)')
ylabel('X_2 (Petal Width in cm)')
legend('versicolor','virginica','Support Vector', ...
    'Boundary Line','Upper Margin','Lower Margin')
hold off
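% Supplementary sketch (not part of the original example): the two dashed
% margin lines are the level sets x*beta + b = +1 and x*beta + b = -1, so the
% full margin width is 2/norm(beta) = 2*m.
marginWidth = 2*m
% Decision values of the training points; observations with |f| <= 1 lie on or
% inside the margin band plotted above.
f = X*beta + b;
numInsideMargin = sum(abs(f) <= 1)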
%% Detect Outliers Using a One-Class SVM
% Use the sepal measurements of all iris observations as predictors and treat
% the data as a single class (y is all ones).
X = meas(:,1:2);
y = ones(size(X,1),1);
rng(1); % For reproducibility
% Train a one-class SVM, assuming that 5% of the observations are outliers.
SVMModel = fitcsvm(X,y,'KernelScale','auto','Standardize',true,...
    'OutlierFraction',0.05);
svInd = SVMModel.IsSupportVector;
h = 0.02; % Mesh grid step size
[X1,X2] = meshgrid(min(X(:,1)):h:max(X(:,1)),...
    min(X(:,2)):h:max(X(:,2)));
[~,score] = predict(SVMModel,[X1(:),X2(:)]);
scoreGrid = reshape(score,size(X1,1),size(X2,2));

figure
plot(X(:,1),X(:,2),'k.')
hold on
plot(X(svInd,1),X(svInd,2),'ro','MarkerSize',10)
contour(X1,X2,scoreGrid)
colorbar;
title('{\bf Iris Outlier Detection via One-Class SVM}')
xlabel('Sepal Length (cm)')
ylabel('Sepal Width (cm)')
legend('Observation','Support Vector')
hold off

% Cross-validate the one-class SVM and estimate the fraction of cross-validated
% observations with negative scores; this outlier rate should be close to the
% specified OutlierFraction of 5%.
CVSVMModel = crossval(SVMModel);
[~,scorePred] = kfoldPredict(CVSVMModel);
outlierRate = mean(scorePred<0)
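% Supplementary sketch (not part of the original example): flag the training
% observations that the one-class SVM itself scores negatively, i.e. the points
% it treats as outliers; their fraction should also be close to 5%.
[~,trainScore] = resubPredict(SVMModel);
outlierIdx = trainScore < 0;
numTrainOutliers = nnz(outlierIdx)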

%% Optimize an SVM Classifier by Tuning Hyperparameters Automatically
load ionosphere
rng default % For reproducibility
% Find hyperparameter values that minimize the cross-validation loss by using
% automatic hyperparameter optimization.
Mdl = fitcsvm(X,Y,'OptimizeHyperparameters','auto', ...
    'HyperparameterOptimizationOptions',struct('AcquisitionFunctionName', ...
    'expected-improvement-plus'))
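% Supplementary sketch (not part of the original example): inspect the outcome
% of the optimization. With the default optimizer settings,
% HyperparameterOptimizationResults is a BayesianOptimization object.
results = Mdl.HyperparameterOptimizationResults;
bestPoint(results)              % hyperparameter values at the best point found
Mdl.KernelParameters.Scale      % kernel scale actually used by the final model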
%% Train and Cross-Validate an SVM Classifier
load ionosphere
rng(1); % For reproducibility
SVMModel = fitcsvm(X,Y,'Standardize',true,'KernelFunction','RBF',...
    'KernelScale','auto');
CVSVMModel = crossval(SVMModel);
% CVSVMModel is a ClassificationPartitionedModel cross-validated classifier.
% Estimate the out-of-sample misclassification rate.
classLoss = kfoldLoss(CVSVMModel)
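% Supplementary sketch (not part of the original example): kfoldPredict returns
% the cross-validated label predictions, from which a confusion matrix can be
% computed to break the misclassification rate down by class.
predictedLabels = kfoldPredict(CVSVMModel);
confusionmat(Y,predictedLabels)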

Reference: MATLAB documentation (MathWorks)
