From 35973be7d1bc90b518f39e58f1030bc9b257284e Mon Sep 17 00:00:00 2001
From: Ryan W Tenney
Date: Sun, 23 Oct 2011 21:19:32 -0400
Subject: [PATCH] Adding my solutions to ex1.

---
 ex1/computeCost.m          |  3 +++
 ex1/computeCostMulti.m     |  3 +++
 ex1/ex1.m                  | 10 +++++-----
 ex1/ex1_multi.m            | 13 ++++++++++---
 ex1/featureNormalize.m     |  9 +++++++++
 ex1/gradientDescent.m      |  9 ++++++++-
 ex1/gradientDescentMulti.m | 18 ++++++++++++++----
 ex1/normalEqn.m            |  1 +
 ex1/plotData.m             |  5 ++++-
 ex1/warmUpExercise.m       |  4 ++--
 10 files changed, 59 insertions(+), 16 deletions(-)

diff --git a/ex1/computeCost.m b/ex1/computeCost.m
index 93eb14c..bbeab1b 100644
--- a/ex1/computeCost.m
+++ b/ex1/computeCost.m
@@ -14,7 +14,10 @@
 % You should set J to the cost.
 
+predictions = X*theta;
+sqrerrors = (predictions-y).^2;
+J = 1/(2*m) * sum(sqrerrors);
 
 % =========================================================================
diff --git a/ex1/computeCostMulti.m b/ex1/computeCostMulti.m
index d9a3d68..617e107 100644
--- a/ex1/computeCostMulti.m
+++ b/ex1/computeCostMulti.m
@@ -14,7 +14,10 @@
 % You should set J to the cost.
 
+predictions = X*theta;
+sqrerrors = (predictions-y).^2;
+J = 1/(2*m) * sum(sqrerrors);
 
 % =========================================================================
diff --git a/ex1/ex1.m b/ex1/ex1.m
index f3aacb5..73fff4b 100644
--- a/ex1/ex1.m
+++ b/ex1/ex1.m
@@ -30,10 +30,10 @@
 % Complete warmUpExercise.m
 fprintf('Running warmUpExercise ... \n');
 fprintf('5x5 Identity Matrix: \n');
-warmUpExercise()
+%warmUpExercise()
 
-fprintf('Program paused. Press enter to continue.\n');
-pause;
+%fprintf('Program paused. Press enter to continue.\n');
+%pause;
 
 
 %% ======================= Part 2: Plotting =======================
@@ -46,8 +46,8 @@
 % Note: You have to complete the code in plotData.m
 plotData(X, y);
 
-fprintf('Program paused. Press enter to continue.\n');
-pause;
+%fprintf('Program paused. Press enter to continue.\n');
+%pause;
 
 %% =================== Part 3: Gradient descent ===================
 fprintf('Running Gradient Descent ...\n')
diff --git a/ex1/ex1_multi.m b/ex1/ex1_multi.m
index ac68dbb..4872d79 100644
--- a/ex1/ex1_multi.m
+++ b/ex1/ex1_multi.m
@@ -82,7 +82,7 @@
 fprintf('Running gradient descent ...\n');
 
 % Choose some alpha value
-alpha = 0.01;
+alpha = 0.1;
 num_iters = 100;
 
 % Init Theta and Run Gradient Descent
@@ -104,8 +104,15 @@
 % ====================== YOUR CODE HERE ======================
 % Recall that the first column of X is all-ones. Thus, it does
 % not need to be normalized.
-price = 0; % You should change this
+params = [1650 3];
+
+params = (params - mu) ./ sigma;
+
+params = [1 params];
+
+%price = theta(1) + ((1650 - mu(1)) * ((theta(2) * sigma(1)) + mu(1))) + (3 * ((theta(3) * sigma(2)) + mu(2)));
+price = params * theta;
 
 % ============================================================
@@ -149,8 +156,8 @@
 % Estimate the price of a 1650 sq-ft, 3 br house
 % ====================== YOUR CODE HERE ======================
-price = 0; % You should change this
+price = theta(1) + (1650 * theta(2)) + (3 * theta(3));
 
 % ============================================================
diff --git a/ex1/featureNormalize.m b/ex1/featureNormalize.m
index bb5d072..1705181 100644
--- a/ex1/featureNormalize.m
+++ b/ex1/featureNormalize.m
@@ -6,6 +6,7 @@
 % working with learning algorithms.
 
 % You need to set these values correctly
+m = size(X, 2);
 X_norm = X;
 mu = zeros(1, size(X, 2));
 sigma = zeros(1, size(X, 2));
@@ -28,6 +29,14 @@
 
+for dim = 1:m
+
+    feature = X(:, dim);
+    feature -= (mu(1, dim) = mean(feature));
+    feature /= (sigma(1, dim) = std(feature));
+    X_norm(:, dim) = feature;
+
+end
diff --git a/ex1/gradientDescent.m b/ex1/gradientDescent.m
index 3f66abf..59e82fa 100644
--- a/ex1/gradientDescent.m
+++ b/ex1/gradientDescent.m
@@ -7,6 +7,8 @@
 m = length(y); % number of training examples
 J_history = zeros(num_iters, 1);
 
+aa = alpha * (1/m);
+
 for iter = 1:num_iters
 
     % ====================== YOUR CODE HERE ======================
@@ -18,9 +20,14 @@
     %
 
+    deviation = (X*theta)-y;
+    theta -= aa * [
+        sum(deviation);
+        sum(deviation .* X(:,2))
+    ];
 
-
+    computeCost(X, y, theta)
 
     % ============================================================
diff --git a/ex1/gradientDescentMulti.m b/ex1/gradientDescentMulti.m
index e5e1b7e..f626ecc 100644
--- a/ex1/gradientDescentMulti.m
+++ b/ex1/gradientDescentMulti.m
@@ -5,8 +5,12 @@
 % Initialize some useful values
 m = length(y); % number of training examples
+n = length(X(1, :));
+
 J_history = zeros(num_iters, 1);
 
+aa = alpha * (1/m);
+
 for iter = 1:num_iters
 
     % ====================== YOUR CODE HERE ======================
@@ -17,12 +21,18 @@
     % of the cost function (computeCostMulti) and gradient here.
     %
 
+    deviation = (X*theta)-y;
+    tmp = zeros(n, 1);
+    for feature = 1:n
+        tmp(feature) = sum(deviation .* X(:, feature));
+    end
 
-
-
-
-
+    theta -= aa * tmp;
+    %theta -= aa * [
+    %    sum(deviation);
+    %    sum(deviation .* X(:,2))
+    %];
diff --git a/ex1/normalEqn.m b/ex1/normalEqn.m
index d32cd8e..176f05d 100644
--- a/ex1/normalEqn.m
+++ b/ex1/normalEqn.m
@@ -13,6 +13,7 @@
 % ---------------------- Sample Solution ----------------------
 
+theta = pinv(X' * X) * X' * y;
 
 % -------------------------------------------------------------
diff --git a/ex1/plotData.m b/ex1/plotData.m
index 3a10239..63cf83f 100644
--- a/ex1/plotData.m
+++ b/ex1/plotData.m
@@ -14,11 +14,14 @@ function plotData(x, y)
 % appear as red crosses. Furthermore, you can make the
 % markers larger by using plot(..., 'rx', 'MarkerSize', 10);
 
-figure; % open a new figure window
+plot(x, y, 'rx', 'MarkerSize', 10);
+ylabel('Profit in $10,000s');
+xlabel('Population of City in 10,000s');
+
+figure; % open a new figure window
 
 % ============================================================
diff --git a/ex1/warmUpExercise.m b/ex1/warmUpExercise.m
index 4ab8187..4821309 100644
--- a/ex1/warmUpExercise.m
+++ b/ex1/warmUpExercise.m
@@ -2,7 +2,7 @@
 %WARMUPEXERCISE Example function in octave
 %   A = WARMUPEXERCISE() is an example function that returns the 5x5 identity matrix
 
-A = [];
+%A = [];
 % ============= YOUR CODE HERE ==============
 % Instructions: Return the 5x5 identity matrix
 % In octave, we return values by defining which variables
@@ -10,7 +10,7 @@
 % and then set them accordingly.
 
 
-
+A = eye(5);
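A note on the gradient step in this patch: the hand-built two-element column in gradientDescent.m and the per-feature loop in gradientDescentMulti.m both assemble the batch gradient (1/m) * X' * (X*theta - y) one component at a time, since tmp(feature) = sum(deviation .* X(:, feature)) is exactly the feature-th entry of X' * deviation. Below is a minimal sketch of the equivalent fully vectorized step; it is not part of the committed patch and assumes the exercise's usual shapes (X is m-by-(n+1) with a leading column of ones, y is m-by-1, theta is (n+1)-by-1).

% Sketch only (not in the patch above): one vectorized gradient-descent
% step that covers both the single- and multi-variable cases.
% Assumes X (m x (n+1), leading ones column), y (m x 1), theta ((n+1) x 1),
% alpha (learning rate), and m = length(y), as in the course scaffolding.
deviation = X*theta - y;                      % m x 1 vector of prediction errors
J = (deviation' * deviation) / (2*m);         % cost at the current theta; same
                                              % value as computeCost/computeCostMulti
theta = theta - (alpha/m) * (X' * deviation); % update every theta(j) at once

Because X' * deviation equals the tmp vector built by the loop, this step produces the same iterates as the committed gradientDescentMulti.m for any number of features, without the inner for loop.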