Andrew Ng Machine Learning ex1

clear; close all; clc             % initialize the workspace
fprintf('Running warmUpExercise ... \n');
fprintf('5x5 Identity Matrix: \n');
warmUpExercise()

fprintf('Program paused. Press enter to continue.\n');
pause;
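The script calls warmUpExercise, whose file is not reproduced in this post. A minimal sketch consistent with the "5x5 Identity Matrix" prompt above (the standard ex1 answer) would be:

    function A = warmUpExercise()
    %WARMUPEXERCISE Example function in Octave/MATLAB
    %   A = WARMUPEXERCISE() returns the 5x5 identity matrix
    A = eye(5);   % eye(n) builds the n-by-n identity matrix
    end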

fprintf('Plotting Data ...\n')
data = load('ex1data1.txt');       % load the training data
x = data(:, 1); y = data(:, 2);
m = length(y);                     % number of training examples
plot(x, y, 'rx', 'MarkerSize', 10);      % plot the data
ylabel('Profit in $10,000s');            % set the y-axis label
xlabel('Population of City in 10,000s'); % set the x-axis label
fprintf('Program paused. Press enter to continue.\n');
pause;

Usage of plot

plot - 2-D line plot

    This MATLAB function creates a 2-D line plot of the data in Y versus the corresponding values in X.
    If X and Y are both vectors, they must have the same length, and plot plots Y versus X.
    If X and Y are both matrices, they must have the same size, and plot plots the columns of Y versus the columns of X.
    If one of X or Y is a vector and the other a matrix, one dimension of the matrix must equal the vector's length: if the number of rows matches, plot plots each matrix column versus the vector; if the number of columns matches, it plots each matrix row versus the vector; if the matrix is square, each column is plotted versus the vector.
    If one of X or Y is a scalar and the other a scalar or vector, plot draws discrete points; to see the points you must specify a marker symbol, e.g. plot(X,Y,'o').

    plot(X,Y)
    plot(X,Y,LineSpec)
    plot(X1,Y1,...,Xn,Yn)
    plot(X1,Y1,LineSpec1,...,Xn,Yn,LineSpecn)
    plot(Y)
    plot(Y,LineSpec)
    plot(___,Name,Value)
    plot(ax,___)
    h = plot(___)

    See also gca, hold, legend, loglog, plot3, title, xlabel, xlim, ylabel, ylim,
    yyaxis, Line properties
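A few toy calls (standalone demo data, unrelated to ex1) illustrating the vector/matrix cases described above:

    t = 0:0.1:2*pi;        % row vector of sample points
    Y = [sin(t); cos(t)];  % 2-row matrix whose column count equals length(t)
    plot(t, Y)             % column count matches the vector length, so each
                           % row of Y is plotted against t (two curves)
    plot(3, 4, 'o')        % scalar vs. scalar: one discrete point; without a
                           % marker symbol nothing would be visible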

x = [ones(m, 1), data(:,1)];    % Add a column of ones to x (the intercept term)
theta = zeros(2, 1);            % initialize fitting parameters

% Some gradient descent settings
iterations = 1500;   % number of gradient descent iterations
alpha = 0.01;        % learning rate
fprintf('\nTesting the cost function ...\n')

% compute and display initial cost
J = computeCost(x, y, theta);  % compute the initial cost
fprintf('With theta = [0 ; 0]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 32.07\n');

% further testing of the cost function
J = computeCost(x, y, [-1 ; 2]);
fprintf('\nWith theta = [-1 ; 2]\nCost computed = %f\n', J);
fprintf('Expected cost value (approx) 54.24\n');

fprintf('Program paused. Press enter to continue.\n');
pause;
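For reference, computeCost (listed further down) returns the standard squared-error cost for linear regression:

    J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \left( \theta^\top x^{(i)} - y^{(i)} \right)^2

With theta = [0; 0] every prediction is zero, so the cost reduces to sum(y.^2)/(2*m), which for this dataset comes out to roughly 32.07, matching the expected value printed above.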
fprintf('\nRunning Gradient Descent ...\n')

% run gradient descent
theta = gradientDescent(x, y, theta, alpha, iterations);

% print theta to screen
fprintf('Theta found by gradient descent:\n');
fprintf('%f\n', theta);
fprintf('Expected theta values (approx)\n');
fprintf(' -3.6303\n  1.1664\n\n');

% Plot the linear fit
hold on;                   % keep previous plot visible
plot(x(:,2), x*theta, '-') % x-axis: second column of x; y-axis: fitted values x*theta
legend('Training data', 'Linear regression')
hold off                   % don't overlay any more plots on this figure

% Predict values for population sizes of 35,000 and 70,000
predict1 = [1, 3.5] * theta;
fprintf('For population = 35,000, we predict a profit of %f\n',...
    predict1*10000);
predict2 = [1, 7] * theta;
fprintf('For population = 70,000, we predict a profit of %f\n',...
    predict2*10000);

fprintf('Program paused. Press enter to continue.\n');
pause;
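Several populations can also be predicted in one shot by stacking rows. A small sketch (populations, X_new, and profits are hypothetical names, not part of the exercise code):

    populations = [3.5; 7; 10];                       % sizes in units of 10,000s
    X_new   = [ones(size(populations)), populations]; % prepend the intercept column
    profits = X_new * theta * 10000;                  % predicted profits in dollars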

Cost function

function J = computeCost(x, y, theta)
%COMPUTECOST Compute cost for linear regression
%   J = COMPUTECOST(X, y, theta) computes the cost of using theta as the
%   parameter for linear regression to fit the data points in X and y

% Initialize some useful values
m = length(y); % number of training examples

% You need to return the following variables correctly 
J = 0;

% ====================== YOUR CODE HERE ======================
% Instructions: Compute the cost of a particular choice of theta
%               You should set J to the cost.

h = x*theta - y;          % residuals: predictions minus targets
J = 1/(2*m) * sum(h.^2);  % sum of squared residuals, scaled by 1/(2m)



% =========================================================================

end
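A quick hand-checkable sanity test on toy data (not part of the exercise):

    x_toy = [1 1; 1 2; 1 3];            % three examples, intercept column included
    y_toy = [2; 4; 6];                  % y = 2*x exactly
    computeCost(x_toy, y_toy, [0; 2])   % perfect fit -> cost 0
    computeCost(x_toy, y_toy, [0; 0])   % (4 + 16 + 36) / (2*3) = 9.3333 (approx)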

Gradient descent

function [theta, J_history] = gradientDescent(x, y, theta, alpha, num_iters)
%GRADIENTDESCENT Performs gradient descent to learn theta
%   theta = GRADIENTDESCENT(X, y, theta, alpha, num_iters) updates theta by 
%   taking num_iters gradient steps with learning rate alpha

% Initialize some useful values
m = length(y); % number of training examples
J_history = zeros(num_iters, 1);

for iter = 1:num_iters

    % ====================== YOUR CODE HERE ======================
    % Instructions: Perform a single gradient step on the parameter vector
    %               theta. 
    %
    % Hint: While debugging, it can be useful to print out the values
    %       of the cost function (computeCost) and gradient here.
    %
    theta = theta - alpha/m * x' * (x*theta - y);  % vectorized gradient step

    % ============================================================

    % Save the cost J in every iteration    
    J_history(iter) = computeCost(x, y, theta);

end

end
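The main script above discards the returned J_history, but plotting it is a handy convergence check. A sketch, assuming the call is changed to capture both outputs:

    [theta, J_history] = gradientDescent(x, y, theta, alpha, iterations);
    figure;
    plot(1:numel(J_history), J_history, '-b', 'LineWidth', 2);
    xlabel('Number of iterations');
    ylabel('Cost J');
    % J should decrease monotonically; if it grows, alpha is too large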

%% ============= Part 4: Visualizing J(theta_0, theta_1) =============
fprintf('Visualizing J(theta_0, theta_1) ...\n')

% Grid over which we will calculate J
theta0_vals = linspace(-10, 10, 100);  % row vector of 100 evenly spaced points between -10 and 10
theta1_vals = linspace(-1, 4, 100);    % row vector of 100 evenly spaced points between -1 and 4

% initialize J_vals to a matrix of 0's
J_vals = zeros(length(theta0_vals), length(theta1_vals));

% Fill out J_vals
for i = 1:length(theta0_vals)          % iterate over the theta0 grid
    for j = 1:length(theta1_vals)      % iterate over the theta1 grid
	  t = [theta0_vals(i); theta1_vals(j)];
	  J_vals(i,j) = computeCost(x, y, t);   % cost at this (theta0, theta1) pair
    end
end
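If you prefer to avoid the double loop, an equivalent vectorized sketch using meshgrid and arrayfun is shown below; note that meshgrid already produces the orientation surf expects, so the transpose in the next step would then be unnecessary:

    [T0, T1] = meshgrid(theta0_vals, theta1_vals);  % T0 varies along columns, T1 along rows
    J_vals = arrayfun(@(t0, t1) computeCost(x, y, [t0; t1]), T0, T1);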

% Because of the way meshgrids work in the surf command, we need to
% transpose J_vals before calling surf, or else the axes will be flipped
J_vals = J_vals';
% Surface plot (曲面圖)
figure;
surf(theta0_vals, theta1_vals, J_vals) % 3-D surface plot: J_vals drawn as heights over the theta0-theta1 grid
xlabel('\theta_0'); ylabel('\theta_1');

% Contour plot
figure;
% Plot J_vals as 20 contours spaced logarithmically between 0.01 and 1000
contour(theta0_vals, theta1_vals, J_vals, logspace(-2, 3, 20))  % contour plot of the cost surface
xlabel('\theta_0'); ylabel('\theta_1');
hold on;
plot(theta(1), theta(2), 'rx', 'MarkerSize', 10, 'LineWidth', 2);  % mark the theta found by gradient descent