Coursera Machine Learning Course Notes, Week 4: One-vs-All Logistic Regression

ex3

GitHub: https://github.com/DLW3D/coursera-machine-learning-ex
Exercise files download: https://s3.amazonaws.com/spark-public/ml/exercises/on-demand/machine-learning-ex3.zip

One-vs-All Logistic Regression

To classify K classes with binary logistic regression, we train K separate classifiers: classifier c treats examples of class c as positive (y = 1) and every other example as negative (y = 0). At prediction time we run all K classifiers on an example and pick the class whose classifier outputs the highest probability.

Cost Function

lrCostFunction.m

function [J, grad] = lrCostFunction(theta, X, y, lambda)
	m = length(y);      % number of training examples
	n = length(theta);  % number of parameters
	h = 1 ./ (1 + exp(-X*theta));  % m x 1 vector of predictions (sigmoid)
	% Regularized cost; theta(1), the bias term, is not regularized
	J = (-y' * log(h) - (1-y)' * log(1-h))/m + lambda/(2*m)*(theta(2:n)'*theta(2:n));  % scalar
	grad1 = ((h-y)'*X(:,1)/m)';                            % 1 x 1, bias gradient (unregularized)
	grad2 = ((h-y)'*X(:,2:n)/m)' + lambda/m.*theta(2:n);   % (n-1) x 1, regularized gradient
	grad = [grad1; grad2];
	grad = grad(:);
end
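
For reference, these are the regularized cost and gradient the function computes (the bias parameter $\theta_0$ is not regularized):

$$
J(\theta) = \frac{1}{m}\sum_{i=1}^{m}\Big[-y^{(i)}\log h_\theta(x^{(i)}) - (1-y^{(i)})\log\big(1-h_\theta(x^{(i)})\big)\Big] + \frac{\lambda}{2m}\sum_{j=1}^{n}\theta_j^2
$$

$$
\frac{\partial J}{\partial \theta_0} = \frac{1}{m}\sum_{i=1}^{m}\big(h_\theta(x^{(i)}) - y^{(i)}\big)x_0^{(i)},
\qquad
\frac{\partial J}{\partial \theta_j} = \frac{1}{m}\sum_{i=1}^{m}\big(h_\theta(x^{(i)}) - y^{(i)}\big)x_j^{(i)} + \frac{\lambda}{m}\theta_j \quad (j \geq 1)
$$

where $h_\theta(x) = \dfrac{1}{1 + e^{-\theta^{T}x}}$ is the sigmoid hypothesis.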

One-vs-All Training

oneVsAll.m

function [all_theta] = oneVsAll(X, y, num_labels, lambda)
	m = size(X, 1);
	n = size(X, 2);

	% Add ones to the X data matrix (bias column)
	X = [ones(m, 1) X];

	all_theta = zeros(num_labels, n + 1);
	for c = 1:num_labels
	    % Train one binary classifier per class:
	    % (y == c) is 1 for examples of class c and 0 for everything else
	    theta = zeros(n + 1, 1);
	    options = optimset('GradObj', 'on', 'MaxIter', 50);
	    [theta] = ...
	        fmincg(@(t)(lrCostFunction(t, X, (y == c), lambda)), ...
	            theta, options);
	    all_theta(c, :) = theta';
	end
end
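
fmincg.m is a conjugate-gradient minimizer shipped with the exercise files; it is used like fminunc but handles a large number of parameters more efficiently. As a minimal sketch (assuming lrCostFunction and the data are already in the workspace, and using a hypothetical single class label c), the built-in fminunc could be substituted:

% Sketch: train the classifier for a single class c with fminunc instead of fmincg.
% Assumes X already contains the bias column of ones.
c = 1;                                    % hypothetical class label
initial_theta = zeros(size(X, 2), 1);
options = optimset('GradObj', 'on', 'MaxIter', 50);
[theta, cost] = fminunc(@(t) lrCostFunction(t, X, (y == c), lambda), ...
                        initial_theta, options);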

Predict for One-vs-All

predictOneVsAll.m

function p = predictOneVsAll(all_theta, X)
	m = size(X, 1);
	num_labels = size(all_theta, 1);

	% Add ones to the X data matrix (bias column)
	X = [ones(m, 1) X];

	h = 1 ./ (1 + exp(-X * all_theta'));  % m x num_labels matrix of class probabilities
	[~, p] = max(h, [], 2);               % index of the most probable class for each row
end
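
max(h, [], 2) takes the maximum along each row; its second output is the column index of that maximum, which is exactly the predicted class label. A minimal sketch with a made-up 2 x 3 probability matrix:

% Sketch: how max over dimension 2 turns probabilities into labels.
h = [0.1 0.7 0.2;     % row 1: class 2 has the highest probability
     0.6 0.3 0.1];    % row 2: class 1 has the highest probability
[~, p] = max(h, [], 2);
disp(p')              % prints 2 1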

Running One-vs-All Logistic Regression

% Load Training Data
load('ex3data1.mat'); % training data stored in arrays X, y
input_layer_size  = 400;  % 20x20 Input Images of Digits
num_labels = 10;          % 10 labels, from 1 to 10
                          % (note that we have mapped "0" to label 10)

fprintf('\nTraining One-vs-All Logistic Regression...\n')

lambda = 0.1;
[all_theta] = oneVsAll(X, y, num_labels, lambda);

pred = predictOneVsAll(all_theta, X);

fprintf('\nTraining Set Accuracy: %f\n', mean(double(pred == y)) * 100);
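
With lambda = 0.1 and 50 iterations per classifier, the reported training-set accuracy should come out at roughly 95%. As a quick visual sanity check (a sketch assuming the displayData helper provided with the exercise files is on the path), a few random digits can be shown next to their predicted labels:

% Sketch: display a few random training images and their predicted labels.
% Assumes displayData.m (provided with the exercise files) is on the path.
m = size(X, 1);
rand_idx = randperm(m);
for i = 1:5
    example = X(rand_idx(i), :);
    displayData(example);                          % draw the 20x20 digit image
    pred_i = predictOneVsAll(all_theta, example);
    fprintf('Prediction: %d (true label: %d)\n', pred_i, y(rand_idx(i)));
    pause;                                         % press any key for the next example
end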
