吴恩达机器学习第六周编程作业ex5答案

linearRegCostFunction.m

% Regularized linear regression cost J and gradient grad.
% The bias term theta(1) must NOT be regularized, so we build a copy of
% theta with its first entry zeroed and use it only in the penalty terms.
h = X * theta;                 % hypothesis values, m x 1
err = h - y;                   % residuals
reg_theta = [0; theta(2:end)]; % theta with bias zeroed (excluded from penalty)
J = (err' * err) / (2 * m) + (lambda / (2 * m)) * (reg_theta' * reg_theta);
grad = (X' * err) / m + (lambda / m) * reg_theta; % bias row gets no penalty

learningCurve.m

% Learning curves: for each training-set size sz, fit on the first sz
% examples and record the (unregularized) squared error on that training
% subset and on the FULL cross-validation set.
num_val = size(Xval, 1);
for sz = 1:m
    Xsub = X(1:sz, :);
    ysub = y(1:sz, :);
    theta = trainLinearReg(Xsub, ysub, lambda);
    train_res = Xsub * theta - ysub;     % residuals on the training subset
    val_res = Xval * theta - yval;       % residuals on the whole CV set
    error_train(sz) = (train_res' * train_res) / (2 * sz);
    error_val(sz) = (val_res' * val_res) / (2 * num_val);
end

polyFeatures.m

% Map the column vector X to polynomial features: column i of X_poly
% holds X raised element-wise to the i-th power (no bias column here).
X_poly = bsxfun(@power, X, 1:p);

validationCurve.m

% Validation curve: for each candidate lambda, train on the full training
% set and record train/CV errors. The reported errors deliberately omit
% the regularization term (lambda is used only for fitting).
num_train = size(X, 1);
num_val = size(Xval, 1);
for k = 1:length(lambda_vec)
    lambda = lambda_vec(k);
    theta = trainLinearReg(X, y, lambda);
    train_res = X * theta - y;
    val_res = Xval * theta - yval;
    error_train(k) = (train_res' * train_res) / (2 * num_train);
    error_val(k) = (val_res' * val_res) / (2 * num_val);
end

你可能感兴趣的:(机器学习)