Building a Least Squares Support Vector Machine (LS-SVM) Prediction Model with Particle Swarm Optimization (MATLAB Implementation)

First, download the LS-SVM toolbox for MATLAB (LS-SVMlab); it can be found online. Installation details are not covered here.
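If MATLAB cannot find the toolbox functions (trainlssvm, simlssvm), add the toolbox folder to the path first. A minimal sketch, assuming the toolbox was unpacked into a folder named LSSVMlabv1_8 (adjust to your actual location):

% Add the LS-SVMlab toolbox to the MATLAB path (folder name is an assumption)
addpath(genpath('LSSVMlabv1_8'));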

We first use the PSO algorithm to search for the two LS-SVM hyperparameters, gamma (regularization) and sig2 (RBF kernel width). The theory behind LS-SVM is not covered here.
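For orientation, here is a minimal sketch of the objective that PSO minimizes: train an LS-SVM with a candidate (gamma, sig2) pair and return its RMSE on held-out data. The helper name lssvm_fitness and its file are assumptions for illustration only; the pso.m script below inlines exactly this logic.

% Minimal sketch (assumed helper, not part of the original post); save as lssvm_fitness.m.
% Xtr/Xte are N-by-n input matrices, Ytr/Yte are N-by-1 output vectors.
function rmse = lssvm_fitness(gamma, sig2, Xtr, Ytr, Xte, Yte)
    type = 'f';   % function estimation (regression)
    [alpha, b] = trainlssvm({Xtr, Ytr, type, gamma, sig2, 'RBF_kernel'});
    Ypred = simlssvm({Xtr, Ytr, type, gamma, sig2, 'RBF_kernel'}, {alpha, b}, Xte);
    rmse = sqrt(mean((Ypred - Yte).^2));   % root-mean-square error on the held-out set
end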

pso.m

clc;
clear all;
n = 2;   % n is the number of input (independent) variables
m = 1;   % m is the number of output (dependent) variables
%% Load training data
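% NOTE (assumption): the matrix "data" (one row per sample, columns = [n inputs, m outputs])
% is assumed to already be in the workspace; the original post does not show how it is loaded.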
for train_num = 2:34   % loop over different numbers of training samples
train_Data = data(1:train_num,:);

% Normalize the input features
[train_Input,minI,maxI] = premnmx(train_Data(:,1:n)');

% Build the (normalized) output matrix
[train_Output,minO,maxO] = premnmx(train_Data(:,n+1:end)');

%% Load test data
test_Data = data(train_num+1:end,:);
% Normalize the test inputs with the training-set ranges
test_Input = tramnmx(test_Data(:,1:n)',minI,maxI);
% Build the (normalized) test output matrix
test_Output = tramnmx(test_Data(:,n+1:end)',minO,maxO);

% PSO parameters
c1 = 2;         % acceleration coefficient toward the personal best (local search)
c2 = 2;         % acceleration coefficient toward the global best (global search)
sizepop = 20;   % swarm size
k = 100;        % maximum number of iterations
w = 0.9;        % inertia weight

% Number of parameters to optimize (gamma and sig2)
npar = 2;
param = rand(sizepop,npar);   % particle positions
speed = rand(sizepop,npar);   % particle velocities
popmin = 0.01;   % lower bound on the parameters
popmax = 100;    % upper bound on the parameters
vmin = -1;       % minimum velocity
vmax = 1;        % maximum velocity
type = 'f';   % 'f' = function estimation (regression)
% Evaluate the initial swarm: fitness of each particle = RMSE on the test set
for i=1:sizepop
    gamma = param(i,1);
    sig2 = param(i,2);
    [alpha,b] = trainlssvm({train_Input',train_Output',type,gamma,sig2,'RBF_kernel'});
    predict = simlssvm({train_Input',train_Output',type,gamma,sig2,'RBF_kernel'},{alpha,b},test_Input');
    score = sqrt(sum((predict-test_Output').^2)/size(test_Output',1));
    fitness(i,:) = score;
    lbest(i,:) = param(i,:);
end
[value,index] = min(fitness);
gbest = param(index,:);   % global best position
fitnessbest = value;      % global best fitness
% PSO iterations
for T=1:k
    for i=1:sizepop
        % Velocity update
        speed(i,:) = w*speed(i,:)+c1*rand*(lbest(i,:)-param(i,:))+c2*rand*(gbest-param(i,:));
        speed(i,find(speed(i,:)>vmax)) = vmax;
        speed(i,find(speed(i,:)<vmin)) = vmin;
        % Position update
        param(i,:) = param(i,:) + speed(i,:);
        param(i,find(param(i,:)>popmax)) = popmax;
        param(i,find(param(i,:)<popmin)) = popmin;
        % Random mutation to keep the swarm diverse
        pick = rand;
        if pick > 0.9
            kk = ceil(2*pick);
            param(i,kk) = pick;
        end
        % Evaluate the new position
        gamma = param(i,1);
        sig2 = param(i,2);
        [alpha,b] = trainlssvm({train_Input',train_Output',type,gamma,sig2,'RBF_kernel'});
        predict = simlssvm({train_Input',train_Output',type,gamma,sig2,'RBF_kernel'},{alpha,b},test_Input');
        fit = sqrt(sum((predict-test_Output').^2)/size(test_Output',1));
        % Update the personal and global bests
        if fit < fitness(i)
            fitness(i) = fit;
            lbest(i,:) = param(i,:);
        end
        if fit < fitnessbest
            fitnessbest = fit;
            gbest = param(i,:);
        end
    end
end
end   % train_num loop
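After the loops finish, gbest holds the best (gamma, sig2) pair found for the last value of train_num and fitnessbest its test RMSE. A display line such as the following (an assumed convenience, not in the original script) makes it easy to copy the optimum into predict.m:

% Print the optimum found (assumed convenience line, not in the original post)
fprintf('best gamma = %.4f, best sig2 = %.4f, test RMSE = %.4f\n', gbest(1), gbest(2), fitnessbest);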

Next, run predict.m to make the predictions, plugging in the gamma and sig2 values found by pso.m.

clc;
clear all;
n = 2;   % n is the number of input (independent) variables
m = 1;   % m is the number of output (dependent) variables
%% Load training data
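% NOTE (assumption): as in pso.m, the matrix "data" is assumed to already be in the workspace.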
train_num = 20;   % number of training samples
train_Data = data(1:train_num,:);
% Normalize the input features
[train_Input,minI,maxI] = premnmx(train_Data(:,1:n)');
% Build the (normalized) output matrix
[train_Output,minO,maxO] = premnmx(train_Data(:,n+1:end)');
gam = 16.6576;    % gamma found by pso.m
sig2 = 0.56056;   % sig2 found by pso.m
type = 'function estimation';
tic;   % start timing
% Train the LS-SVM on the training data with trainlssvm()
[alpha,b] = trainlssvm({train_Input',train_Output',type,gam,sig2,'RBF_kernel'});
SVMtrain_Output = simlssvm({train_Input',train_Output',type,gam,sig2,'RBF_kernel','preprocess'},{alpha,b},train_Input');
toc;   % stop timing
% Map the normalized outputs back to the original scale
train_Target = postmnmx(train_Output',minO,maxO);   % true training outputs (train_Output itself stays normalized for simlssvm below)
SVMtrain_Output = postmnmx(SVMtrain_Output',minO,maxO);   % predicted training outputs
% Training error
train_err = train_Target - SVMtrain_Output';
n1 = length(SVMtrain_Output);
train_RMSE = sqrt(sum((train_err).^2)/n1);

%% Load test data
test_Data = data(train_num+1:end,:);
% Normalize the test inputs with the training-set ranges
test_Input = tramnmx(test_Data(:,1:n)',minI,maxI)';
% Build the (normalized) test output matrix
test_Output = tramnmx(test_Data(:,n+1:end)',minO,maxO)';

SVMtest_Output = simlssvm({train_Input',train_Output',type,gam,sig2,'RBF_kernel','preprocess'},{alpha,b},test_Input);
test_Output = postmnmx(test_Output,minO,maxO);
SVMtest_Output = postmnmx(SVMtest_Output',minO,maxO);

% Test error
test_err = test_Output - SVMtest_Output';
n2 = length(SVMtest_Output');
test_RMSE = sqrt(sum((test_err).^2)/n2);

%% Visualize the prediction results
figure(6);
%subplot(2,1,1);   % optional: 2-by-1 subplot layout, first subplot
plot(SVMtest_Output,':og');   % predicted test outputs (green circles)
hold on;
plot(test_Output','-*b');     % true test outputs (blue asterisks)
legend('Predicted output','Expected output');
title('LS-SVM prediction of natural gas data','fontsize',12)
%plotlssvm({train_Input',train_Output',type,gam,sig2,'RBF_kernel','preprocess'},{alpha,b});


 
