The derivation below follows Andrew Ng's open-course lecture notes. First, some trace and matrix-gradient facts:
$\operatorname{tr} A = \sum_{i=1}^{n} A_{ii}$
$\operatorname{tr} AB = \operatorname{tr} BA$
$\operatorname{tr} ABC = \operatorname{tr} CAB = \operatorname{tr} BCA$
$\operatorname{tr} A = \operatorname{tr} A^{T}$
if $a \in \mathbb{R}$, then $\operatorname{tr} a = a$
$\nabla_{A} \operatorname{tr} AB = B^{T}$
$\nabla_{A} \operatorname{tr} ABA^{T}C = CAB + C^{T}AB^{T}$
$\nabla_{\theta} J = \begin{bmatrix} \frac{\partial J}{\partial \theta_{0}} \\ \frac{\partial J}{\partial \theta_{1}} \\ \vdots \\ \frac{\partial J}{\partial \theta_{n}} \end{bmatrix}$
$\nabla_{A} f(A) = \begin{bmatrix} \frac{\partial f}{\partial A_{11}} & \cdots & \frac{\partial f}{\partial A_{1n}} \\ \vdots & \ddots & \vdots \\ \frac{\partial f}{\partial A_{n1}} & \cdots & \frac{\partial f}{\partial A_{nn}} \end{bmatrix}$
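As a quick sanity check (an added illustration, not part of the original notes; it assumes only base MATLAB), the identity $\nabla_{A} \operatorname{tr} AB = B^{T}$ can be verified with finite differences on random matrices:

% Finite-difference check of grad_A tr(AB) = B' on random matrices.
n = 4;
A = randn(n); B = randn(n);
G = zeros(n);          % numerical gradient of tr(AB) w.r.t. A
h = 1e-6;
for i = 1:n
    for j = 1:n
        E = zeros(n); E(i,j) = h;   % perturb a single entry of A
        G(i,j) = (trace((A+E)*B) - trace(A*B)) / h;
    end
end
max(max(abs(G - B')))  % near zero (rounding only, since tr(AB) is linear in A)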
For the cost function $J = \sum_{i}\left(d^{(i)} - w^{T}x^{(i)}\right)^{2} + \lambda w^{T}w$, minimizing it requires $\nabla_{w}J = 0$.
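Writing the samples as the columns of an $n \times m$ matrix $X$ and stacking the labels into $d$, setting the gradient to zero yields the closed-form solution that the function below implements:

$\nabla_{w}J = -2X\left(d - X^{T}w\right) + 2\lambda w = 0 \;\Longrightarrow\; \left(XX^{T} + \lambda I\right)w = Xd \;\Longrightarrow\; w = \left(XX^{T} + \lambda I\right)^{-1}Xd$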
function [w, errRate, cost] = RegularizedLeastSquares(x, d, lambda)
% Regularized least-squares (ridge) classifier.
%   x      : n-by-m matrix, one sample per column
%   d      : m-by-1 label vector in {+1, -1}
%   lambda : regularization coefficient (defaults to 0)
if nargin == 2
    lambda = 0;
end
[n, m] = size(x);
% closed-form solution w = (X*X' + lambda*I)^(-1) * X * d
w = (x*x' + lambda*eye(n)) \ (x*d);
y = sign(x'*w);
% error rate
errRate = sum(d ~= y) / m;
% cost function J, using the raw linear output x'*w as in the definition above
cost = sum((d - x'*w).^2) + lambda*(w'*w);
end
Test script. See http://blog.csdn.net/jinjgkfadfaf/article/details/53200819 for how the random sample points are generated.
close all;
% generate random sample points
n1 = 1000; n2 = 1000; n = n1 + n2;
[x1, y1, x2, y2] = GenRandomData(8, -5, 3, n1, n2);
samps = [ones(n1,1), x1, y1; ones(n2,1), x2, y2];   % prepend a bias column of ones
d = [ones(n1,1); -ones(n2,1)];                      % labels: +1 for class 1, -1 for class 2
% shuffle the sample order (optional)
% randI = randperm(n);
% samps(randI,:) = samps(1:n,:);
% d(randI) = d(1:n);
% train; the Rosenblatt perceptron runs are kept (commented out) for comparison
w0 = [-10; -10; -10]; etaLim = 5e-6; epochN = 5000;  % parameters for the perceptron runs
x = samps';
%[w, err] = Rosenblatt(x, d, w0, etaLim, epochN)
%[w, err] = RosenblattStpDes(x, d, w0, etaLim, epochN)
lambda = 0.1;
[w, errRate, cost] = RegularizedLeastSquares(x, d, lambda)
% plot the samples and the decision boundary
figure;
plot(x1,y1,'Marker','x','Color','r','LineStyle','none');
hold on;
plot(x2,y2,'Marker','o','Color','b','LineStyle','none');
xx = -10:0.1:10;
yy = -(w(2)*xx + w(1)) / w(3);   % decision boundary: w(1) + w(2)*x + w(3)*y = 0
plot(xx,yy,'k');
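To see how the regularization strength changes the result, a quick sweep over $\lambda$ can be run on the same data (a minimal sketch reusing samps and d from the script above; the particular grid of values is arbitrary):

% Compare error rate and cost across a few regularization strengths.
for lambda = [0, 0.01, 0.1, 1, 10, 100]
    [w, errRate, cost] = RegularizedLeastSquares(samps', d, lambda);
    fprintf('lambda = %-6g  errRate = %.4f  cost = %.2f\n', lambda, errRate, cost);
end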