New to RBF networks. The program below trains fine, but the prediction results are way off. Hoping an expert can explain why, thanks~~ The questions I'm unsure about are marked in the code comments (originally highlighted in blue); I hope someone can answer them.
The code is as follows:
% RBF network modeling
% Normalized modeling data set
clear all
clc
data=[0.26667 0.47647 0.35 0.30741 0.39565 0.9;
0.2 0.75882 0.35 0.27778 0.53479 0.46001;
0.2 0.52353 0.4 0.33704 0.32609 0.48001;
0.66667 0.52353 0.29999 0.21852 0.27391 0.50001;
0.26667 0.75882 0.6 0.33704 0.32609 0.44001;
0.16667 0.57059 0.29999 0.27778 0.3087 0.46001;
0.2 0.71176 0.4 0.36667 0.44783 0.48001;
0.23333 0.71176 0.5 0.30741 0.15218 0.34001;
0.16667 0.42941 0.5 0.24815 0.5 0.48001;
0.13333 0.57059 0.5 0.3963 0.41305 0.48001;
0.16667 0.75882 0.5 0.21852 0.3087 0.52001;
0.16667 0.71176 0.4 0.27778 0.48261 0.44001;
0.23333 0.75882 0.5 0.33704 0.3087 0.48001;
0.13333 0.75882 0.5 0.33704 0.23913 0.42001;
0.16667 0.75882 0.2 0.18889 0.13478 0.1;
0.13333 0.71176 0.35 0.27778 0.32609 0.14;
0.2 0.24119 0.35 0.24815 0.3087 0.88001;
0.70001 0.47647 0.25 0.33704 0.20435 0.38001;
0.4 0.52353 0.2 0.15927 0.23913 0.34001;
0.26667 0.47647 0.35 0.12963 0.16957 0.36;
0.16667 0.6647 0.35 0.27778 0.27391 0.40001;
0.3 0.61764 0.55 0.24815 0.25652 0.62001;
0.13333 0.75882 0.6 0.30741 0.20435 0.62001;
0.3 0.71176 0.55 0.18889 0.20435 0.70001;
0.13333 0.57059 0.5 0.30741 0.29131 0.72001;
0.16667 0.80588 0.5 0.21852 0.23913 0.60001;
0.1 0.61764 0.6 0.21852 0.16957 0.62001;
0.23333 0.61764 0.5 0.3963 0.3087 0.70001;
0.2 0.80588 0.45 0.18889 0.1 0.48001;
0.23333 0.6647 0.45 0.1 0.11739 0.54001;
0.2 0.52353 0.29999 0.30741 0.16957 0.50001;
0.16667 0.71176 0.6 0.24815 0.23913 0.70001;
0.1 0.85293 0.6 0.12963 0.16957 0.52001;
0.3 0.61764 0.5 0.21852 0.13478 0.64001;
0.13333 0.61764 0.4 0.27778 0.23913 0.62001;
0.16667 0.80588 0.6 0.3963 0.23913 0.36;
0.23333 0.75882 0.75 0.30741 0.1 0.66001;
0.13333 0.9 0.6 0.27778 0.20435 0.44001;
0.16667 0.61764 0.29999 0.27778 0.32609 0.60001;
0.16667 0.71176 0.6 0.27778 0.27391 0.52001;
0.1 0.85293 0.45 0.1 0.23913 0.54001;
0.1 0.71176 0.69999 0.27778 0.43044 0.76001;
0.1 0.38236 0.6 0.30741 0.32609 0.64001;
0.1 0.75882 0.6 0.33704 0.25652 0.46001;
0.73333 0.71176 0.25 0.45556 0.5 0.50001;
0.56667 0.61764 0.35 0.33704 0.34348 0.74;
0.13333 0.6647 0.75 0.75186 0.69131 0.56001;
0.83334 0.61764 0.25 0.60371 0.44783 0.38001;
0.9 0.33529 0.2 0.3963 0.3087 0.54001;
0.4 0.61764 0.5 0.3963 0.44783 0.48001;
0.1 0.57059 0.4 0.57408 0.7261 0.88001;
0.16667 0.61764 0.35 0.27778 0.34348 0.38001;
0.1 0.47647 0.35 0.75186 0.83045 0.62001;
0.2 0.47647 0.35 0.81111 0.83045 0.70001;
0.1 0.33529 0.6 0.9 0.9 0.50001;
0.13333 0.24119 0.4 0.81111 0.86522 0.58356;
0.4 0.80588 0.6 0.48519 0.55218 0.42001;
0.5 0.42941 0.45 0.57408 0.37826 0.56001;
0.16667 0.28824 0.25 0.75186 0.79567 0.54001;
0.23333 0.33529 0.4 0.84075 0.65653 0.68001;
0.13333 0.57059 0.6 0.57408 0.67392 0.46001;
0.5 0.57059 0.5 0.51482 0.55218 0.44001;
0.1 0.6647 0.69999 0.57408 0.65653 0.88001;
0.3 0.75882 0.75 0.21852 0.37826 0.70001;
0.23333 0.57059 0.6 0.3963 0.41305 0.48001;
0.59999 0.80588 0.45 0.24815 0.25652 0.42001;
0.70001 0.28824 0.65 0.63334 0.58696 0.60001;
0.4 0.61764 0.4 0.33704 0.34348 0.34001;
0.46666 0.1 0.5 0.63334 0.65653 0.54001;
0.23333 0.33529 0.25 0.84075 0.7261 0.50001;
0.2 0.47647 0.1 0.27778 0.3087 0.42001;
0.36667 0.52353 0.5 0.51482 0.83045 0.62001;
0.4 0.47647 0.45 0.33704 0.32609 0.50001;
0.33333 0.33529 0.4 0.63334 0.7261 0.46001;
0.23333 0.24119 0.55 0.63334 0.7261 0.53778;
0.13333 0.24119 0.45 0.33704 0.3087 0.76001;
0.33333 0.47647 0.45 0.42593 0.41305 0.70001;
0.5 0.1 0.35 0.3963 0.48261 0.70001;
0.46666 0.52353 0.4 0.63334 0.86522 0.52001;
0.26667 0.33529 0.25 0.69259 0.83045 0.68001;
0.26667 0.71176 0.6 0.42593 0.55218 0.34001;
0.4 0.42941 0.5 0.45556 0.43044 0.52001;
0.23333 0.47647 0.6 0.54445 0.62175 0.52001;
0.36667 0.47647 0.4 0.69259 0.69131 0.52001;
0.26667 0.61764 0.29999 0.63334 0.81306 0.44001;
0.1 0.33529 0.65 0.3963 0.23913 0.44001;
0.13333 0.57059 0.6 0.51482 0.55218 0.38001;
0.2 0.33529 0.55 0.45556 0.55218 0.52001;
0.33333 0.24119 0.29999 0.57408 0.83045 0.64001;
0.16667 0.42941 0.5 0.45556 0.76088 0.58001;
0.13333 0.71176 0.65 0.45556 0.37826 0.22001;
0.1 0.71176 0.6 0.63334 0.69131 0.28001;
0.2 0.52353 0.9 0.36667 0.48261 0.36446;
0.16667 0.71176 0.8 0.36667 0.32609 0.42001;
0.13333 0.42941 0.55 0.51482 0.48261 0.58001;
0.4 0.47647 0.5 0.30741 0.37826 0.48001;
0.8 0.52353 0.4 0.30741 0.34348 0.48001;
0.1 0.6647 0.69999 0.51482 0.5 0.38001;
0.16667 0.6647 0.55 0.45556 0.32609 0.34001;
0.23333 0.61764 0.65 0.51482 0.41305 0.2;
0.3 0.71176 0.5 0.33704 0.5174 0.70001;
0.13333 0.57059 0.5 0.33704 0.44783 0.60001;
0.3 0.80588 0.69999 0.54445 0.5174 0.50001;
0.13333 0.75882 0.65 0.3963 0.37826 0.28001;
0.16667 0.71176 0.65 0.57408 0.5 0.70001;
0.16667 0.75882 0.6 0.45556 0.62175 0.44001;
0.26667 0.61764 0.65 0.3963 0.34348 0.74;
0.36667 0.57059 0.5 0.30741 0.39565 0.62001;
0.16667 0.57059 0.65 0.3963 0.29131 0.62001;
0.2 0.52353 0.5 0.48519 0.39565 0.44001;
0.4 0.52353 0.45 0.33704 0.34348 0.56001;
0.13333 0.52353 0.5 0.33704 0.29131 0.52001;
0.26667 0.42941 0.5 0.75186 0.65653 0.76001;
0.13333 0.24119 0.5 0.54445 0.58696 0.64001;
0.13333 0.14707 0.1 0.51482 0.5174 0.58001;
0.2 0.24119 0.29999 0.60371 0.62175 0.74;
0.26667 0.42941 0.25 0.57408 0.65653 0.60001;
0.56667 0.19412 0.4 0.78148 0.69131 0.70001;
0.4 0.57059 0.25 0.60371 0.58696 0.50001;
0.33333 0.47647 0.25 0.48519 0.65653 0.72001;
0.4 0.38236 0.29999 0.42593 0.23913 0.58001;
0.13333 0.61764 0.65 0.57408 0.55218 0.70001;
0.23333 0.42941 0.69999 0.63334 0.48261 0.62001];
% Training data
p=data(1:100,1:5);
t=data(1:100,6);
p=p';
t=t';
% Test data
p_test=data(101:123,1:5);
t_test=data(101:123,6);
p_test=p_test';
t_test=t_test';
% ----------------- Initialization -----------------
[~,Q]=size(p);
SamNum = Q; % total number of samples
InDim = size(p,1); % sample input dimension -- question: are there any requirements on how the input dimension is set?
ClusterNum = 5; % number of hidden nodes, i.e. number of cluster centers
Overlap = 1; % overlap coefficient of the hidden nodes
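% Note on the question above: InDim must equal the number of input features
% (the row count of p, here 5), because the convergence check at the end of the
% clustering loop compares EqualNum against InDim*ClusterNum, i.e. the total
% number of elements in Centers.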
% Choose the initial cluster centers
Centers = p(:,1:ClusterNum);
NumberInClusters = zeros(ClusterNum,1); % number of samples in each cluster, initialized to zero
IndexInClusters = zeros(ClusterNum,SamNum); % indices of the samples contained in each cluster
while 1
    NumberInClusters = zeros(ClusterNum,1); % number of samples in each cluster, reset to zero
    IndexInClusters = zeros(ClusterNum,SamNum); % indices of the samples contained in each cluster
    % Assign every sample to a cluster by the minimum-distance rule
    for i = 1:SamNum
        % Distances from this sample to all cluster centers
        AllDistance = dist(Centers',p(:,i));
        % Assign the sample to the nearest center
        [~,Pos] = min(AllDistance);
        NumberInClusters(Pos) = NumberInClusters(Pos) + 1;
        IndexInClusters(Pos,NumberInClusters(Pos)) = i;
    end
    % Save the old cluster centers
    OldCenters = Centers;
    % Recompute each cluster center as the mean of its samples
    for i = 1:ClusterNum
        Index = IndexInClusters(i,1:NumberInClusters(i));
        Centers(:,i) = mean(p(:,Index),2); % mean along dim 2 keeps a column center even for a single-sample cluster (assumes no cluster is empty)
    end
    % Stop clustering once the new centers are identical to the old ones
    EqualNum = sum(sum(Centers==OldCenters));
    if EqualNum == InDim*ClusterNum
        break
    end
end
% Determine each hidden node's spread (width) from the distances between the cluster centers
AllDistances = dist(Centers',Centers); % matrix of distances between the hidden-node centers
Maximum = max(max(AllDistances)); % largest distance in the matrix
for i = 1:ClusterNum % replace the zeros on the diagonal with a larger value
    AllDistances(i,i) = Maximum+1;
end
spread = Overlap*min(min(AllDistances)) % use the smallest distance between hidden nodes as the spread
%spread = Overlap*min(AllDistances)' % (per-node variant) smallest distance from each node to the others
% Question about spread: with min(min()) it is a single number, but without the
% double min, spread is not a scalar -- how should this be handled???
%---------------------------------------------------
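% Note on the question above: for a matrix, min(AllDistances) returns a row
% vector holding the minimum of each column, so a second min() is needed to
% reduce it to the single scalar spread that newrbe expects. Quick illustration
% with made-up values:
%   A = [3 1; 2 5];
%   min(A)       % -> [2 1]   (column-wise minima, a vector)
%   min(min(A))  % -> 1       (overall scalar minimum)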
% Training and testing
net = newrbe(p,t,spread);
a = sim(net,p) % simulate on the training inputs -- outputs are the fitted values
err1 = sum((t-a).^2) % sum of squared training errors
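% Note: newrbe builds an exact-interpolation network with one hidden neuron per
% training sample (the K-means centers computed above are not passed to it), so
% err1 is expected to be close to zero no matter how well the model generalizes.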
%---------------------------------------------------
% Plot the results
%subplot(1,2,1);
figure(1)
plot(t,'r+:')
hold on
plot(a,'bo:')
title('RBF network fit -- training set (+ = actual, o = predicted)');
legend('Actual value','Predicted value');
ylabel('Sample output');
xlabel('Sample index');
axis([1,105,0,1]);
b=sim(net,p_test)
err2=sum((t_test-b).^2) % sum of squared test errors
%subplot(1,2,2);
figure(2)
plot(t_test,'r+:')
hold on
plot(b,'bo:')
title('RBF network fit -- test set (+ = actual, o = predicted)');
legend('Actual value','Predicted value');
ylabel('Sample output');
xlabel('Sample index');
axis([0,25,-20,5]);
Result screenshots:
Training plot
Test result plot
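For comparison: newrbe places one hidden neuron on every training sample, and the K-means centers computed above are never passed to it, so the training fit is essentially exact while the test predictions can swing wildly. Below is a minimal sketch of an alternative call, assuming the Neural Network Toolbox function newrb (which adds neurons one at a time until an error goal is met) and reusing the p, t, p_test, t_test and spread variables defined above; the error goal and neuron cap here are arbitrary values to experiment with, not tuned settings.

% Alternative sketch: grow a smaller RBF network instead of exact interpolation
goal = 0.01;                              % arbitrary mean-squared-error goal (assumption -- tune as needed)
maxNeurons = 20;                          % arbitrary cap on the number of hidden neurons (assumption)
net2 = newrb(p,t,goal,spread,maxNeurons); % adds neurons until the goal or the cap is reached
a2 = sim(net2,p);                         % fitted values on the training set
b2 = sim(net2,p_test);                    % predictions on the test set
err1_alt = sum((t-a2).^2)                 % training error of the smaller network
err2_alt = sum((t_test-b2).^2)            % test error -- compare with err2 above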