Stanford OpenClassroom Machine Learning — MATLAB Exercises

Linear Regression

http://openclassroom.stanford.edu/MainFolder/DocumentPage.php?course=DeepLearning&doc=exercises/ex2/ex2.html

% Exercise 2: univariate linear regression (age -> height),
% fit by batch gradient descent with a fixed learning rate.
clear all; close all; clc;
x = load('c:\ex2Data\ex2x.dat');   % ages (m x 1)
y = load('c:\ex2Data\ex2y.dat');   % heights in meters (m x 1)

figure % open a new figure window
plot(x, y, 'o');
ylabel('height in meters')
xlabel('age in years')

m = length(y);                    % number of training examples
x = [ones(m,1), x];               % prepend intercept column of ones
theta = zeros(size(x(1,:)))';     % parameters initialized to zero
alpha = 0.07;                     % learning rate
MAX_ITERATION = 1500;             % fixed typo; semicolon suppresses console echo

% Batch gradient descent: theta := theta - alpha * (1/m) * X'(X*theta - y)
for i = 1:MAX_ITERATION
    grad = (1/m) .* x' * (x * theta - y);
    theta = theta - alpha * grad;
end

hold on;
plot(x(:,2), x * theta, '-');     % fitted line over the training data
legend('Training data', 'Linear regression')  % fixed typo: 'Liner'


 


Multivariate Linear Regression

http://openclassroom.stanford.edu/MainFolder/DocumentPage.php?course=DeepLearning&doc=exercises/ex3/ex3.html

% Exercise 3: multivariate linear regression with feature standardization,
% fit by batch gradient descent; records the cost J at each iteration.
clear all; close all; clc;
x = load('c:\ex3Data\ex3x.dat');  % features (m x 2)
y = load('c:\ex3Data\ex3y.dat');  % targets  (m x 1)

m = length(y);                    % number of training examples
x = [ones(m,1), x];               % prepend intercept column of ones

% Standardize the non-intercept features to zero mean / unit variance.
% Column 1 is all ones (std 0) and is deliberately left untouched.
sigma = std(x);
mu = mean(x);
x(:,2) = (x(:,2) - mu(2)) ./ sigma(2);
x(:,3) = (x(:,3) - mu(3)) ./ sigma(3);

theta = zeros(size(x(1,:)))';     % parameters initialized to zero
alpha = 0.3;                      % learning rate
J = zeros(50, 1);                 % cost history

for num_iterations = 1:50
    % Cost J(theta) = (1/(2m)) * ||X*theta - y||^2.
    % BUG FIX: the original wrote (1/2*m), which by operator precedence
    % equals m/2 and scales J by m^2 instead of dividing by 2m.
    J(num_iterations) = (1/(2*m)) .* (x * theta - y)' * (x * theta - y);

    grad = (1/m) .* x' * (x * theta - y);
    theta = theta - alpha * grad;
end

figure; 
plot(0:49, J(1:50), '-');
xlabel('number of iterations');   % fixed typo: 'interations'
ylabel('cost j');
hold on;

Logistic Regression and Newton's Method

http://openclassroom.stanford.edu/MainFolder/DocumentPage.php?course=DeepLearning&doc=exercises/ex4/ex4.html

% Exercise 4: logistic regression fit by Newton's method, then plot the
% resulting linear decision boundary and the cost J per iteration.
clear all; close all; clc;
x = load('c:\ex4Data\ex4x.dat');  % two exam scores per student (m x 2)
y = load('c:\ex4Data\ex4y.dat');  % label: 1 = admitted, 0 = not admitted

[m, n] = size(x);
x = [ones(m,1), x];               % prepend intercept column of ones

% Row indices of positive and negative examples, for plotting.
pos = find(y == 1);
neg = find(y == 0);

figure
plot(x(pos,2), x(pos,3), '+'); hold on
plot(x(neg,2), x(neg,3), 'o'); hold on
xlabel('exam 1 score');
ylabel('exam 2 score');

theta = zeros(n+1, 1);
% Sigmoid function. Anonymous function replaces the deprecated inline().
g = @(z) 1.0 ./ (1.0 + exp(-z));

% Newton's method
MAX_ITR = 20;
J = zeros(MAX_ITR, 1);

for i = 1:MAX_ITR
    z = x * theta;
    h = g(z);                     % predicted probabilities P(y=1|x)

    % Gradient and Hessian of the average logistic log-loss:
    % H = (1/m) * X' * diag(h .* (1-h)) * X
    grad = (1/m) .* x' * (h - y);
    H = (1/m) .* x' * diag(h) * diag(1-h) * x;

    % Negative average log-likelihood.
    J(i) = (1/m) * sum(-y .* log(h) - (1-y) .* log(1-h));

    % Solve H * step = grad; backslash is faster and numerically more
    % accurate than forming inv(H) explicitly.
    theta = theta - H \ grad;
end

theta

% Calculate the probability that a student with
% score 20 on exam 1 and score 80 on exam 2
% will NOT be admitted.
prob = 1 - g([1, 20, 80] * theta)

% Plot Newton's method result.
% Only two points are needed to define a line, so choose two endpoints.
plot_x = [min(x(:,2))-2, max(x(:,2))+2];
plot_y = (-1 ./ theta(3)) .* (theta(2) .* plot_x + theta(1));
plot(plot_x, plot_y)
legend('Admitted', 'Not admitted', 'Decision Boundary');
hold off

% Plot J over iterations.
figure
plot(0:MAX_ITR-1, J, 'o--', 'MarkerFaceColor', 'r', 'MarkerSize', 8)
xlabel('Iteration'); ylabel('J')
% Display J
J


 

你可能感兴趣的:(matlab)