Adding my solutions to ex1.
ryantenney committed Oct 24, 2011
1 parent bedf0a6 commit 35973be
Showing 10 changed files with 59 additions and 16 deletions.
3 changes: 3 additions & 0 deletions ex1/computeCost.m
@@ -14,7 +14,10 @@
% You should set J to the cost.


predictions = X*theta;
sqrerrors = (predictions-y).^2;

J = 1/(2*m) * sum(sqrerrors);


% =========================================================================
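For reference, the sum above is equivalent to a single inner product. A minimal vectorized sketch, assuming the same X, y, and theta shapes used in the exercise:

errors = X*theta - y;              % m x 1 residuals
J = (errors' * errors) / (2*m);    % same value as 1/(2*m) * sum(sqrerrors)

A quick toy check with hypothetical values: X = [1 1; 1 2; 1 3], y = [1; 2; 3], theta = [0; 1] is a perfect fit, so J = 0.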
3 changes: 3 additions & 0 deletions ex1/computeCostMulti.m
@@ -14,7 +14,10 @@
% You should set J to the cost.


predictions = X*theta;
sqrerrors = (predictions-y).^2;

J = 1/(2*m) * sum(sqrerrors);


% =========================================================================
10 changes: 5 additions & 5 deletions ex1/ex1.m
@@ -30,10 +30,10 @@
% Complete warmUpExercise.m
fprintf('Running warmUpExercise ... \n');
fprintf('5x5 Identity Matrix: \n');
warmUpExercise()
%warmUpExercise()

fprintf('Program paused. Press enter to continue.\n');
pause;
%fprintf('Program paused. Press enter to continue.\n');
%pause;


%% ======================= Part 2: Plotting =======================
@@ -46,8 +46,8 @@
% Note: You have to complete the code in plotData.m
plotData(X, y);

fprintf('Program paused. Press enter to continue.\n');
pause;
%fprintf('Program paused. Press enter to continue.\n');
%pause;

%% =================== Part 3: Gradient descent ===================
fprintf('Running Gradient Descent ...\n')
13 changes: 10 additions & 3 deletions ex1/ex1_multi.m
@@ -82,7 +82,7 @@
fprintf('Running gradient descent ...\n');

% Choose some alpha value
alpha = 0.01;
alpha = 0.1;
num_iters = 100;

% Init Theta and Run Gradient Descent
@@ -104,8 +104,15 @@
% ====================== YOUR CODE HERE ======================
% Recall that the first column of X is all-ones. Thus, it does
% not need to be normalized.
price = 0; % You should change this

params = [1650 3];

params = (params - mu) ./ sigma;

params = [1 params];

price = params * theta;

% ============================================================
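Note that the prediction must reuse the training-set mu and sigma computed by featureNormalize. A one-line sketch of the same estimate, assuming mu and sigma are still in scope:

price = [1, ([1650 3] - mu) ./ sigma] * theta;   % identical to the params pipeline above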

@@ -149,8 +156,8 @@

% Estimate the price of a 1650 sq-ft, 3 br house
% ====================== YOUR CODE HERE ======================
price = 0; % You should change this

price = theta(1) + (1650 * theta(2)) + (3 * theta(3));

% ============================================================
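With the normal equations no feature scaling is applied, so the same estimate collapses to one dot product; a minimal equivalent sketch:

price = [1 1650 3] * theta;   % same value as the expanded theta(1) + ... form above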

9 changes: 9 additions & 0 deletions ex1/featureNormalize.m
@@ -6,6 +6,7 @@
% working with learning algorithms.

% You need to set these values correctly
n = size(X, 2);   % number of feature columns
X_norm = X;
mu = zeros(1, size(X, 2));
sigma = zeros(1, size(X, 2));
@@ -28,6 +29,14 @@



for dim = 1:n
	feature = X(:, dim);
	mu(1, dim) = mean(feature);      % per-feature mean
	sigma(1, dim) = std(feature);    % per-feature standard deviation
	X_norm(:, dim) = (feature - mu(1, dim)) / sigma(1, dim);
end
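
The per-column loop can also be written without an explicit loop. A sketch using bsxfun to broadcast mu and sigma across the rows of X (equivalent output, assuming each column of X is one feature):

mu = mean(X);
sigma = std(X);
X_norm = bsxfun(@rdivide, bsxfun(@minus, X, mu), sigma);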



9 changes: 8 additions & 1 deletion ex1/gradientDescent.m
@@ -7,6 +7,8 @@
m = length(y); % number of training examples
J_history = zeros(num_iters, 1);

aa = alpha / m;   % step size: learning rate scaled by 1/m

for iter = 1:num_iters

% ====================== YOUR CODE HERE ======================
@@ -18,9 +20,14 @@
%


deviation = (X*theta) - y;   % m x 1 vector of prediction errors

% simultaneous update of both parameters
theta = theta - aa * [
	sum(deviation);              % partial derivative w.r.t. theta(1)
	sum(deviation .* X(:,2))     % partial derivative w.r.t. theta(2)
];

computeCost(X, y, theta)     % left unsuppressed: prints the cost each iteration for debugging


% ============================================================
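The bracketed update above hard-codes two parameters. As a sketch, the fully vectorized form of the same update works for any number of features:

theta = theta - (alpha/m) * (X' * ((X*theta) - y));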
18 changes: 14 additions & 4 deletions ex1/gradientDescentMulti.m
@@ -5,8 +5,12 @@

% Initialize some useful values
m = length(y); % number of training examples
n = size(X, 2);   % number of parameters (columns of X, including the bias column)

J_history = zeros(num_iters, 1);

aa = alpha / m;   % step size: learning rate scaled by 1/m

for iter = 1:num_iters

% ====================== YOUR CODE HERE ======================
@@ -17,12 +21,18 @@
% of the cost function (computeCostMulti) and gradient here.
%

deviation = (X*theta) - y;   % m x 1 vector of prediction errors
tmp = zeros(n, 1);

% gradient, one component per parameter
for feature = 1:n
	tmp(feature) = sum(deviation .* X(:, feature));
end

theta = theta - aa * tmp;    % simultaneous update of all parameters
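
The per-feature loop builds X' * deviation one entry at a time, so the whole gradient collapses to a single matrix product; a minimal equivalent sketch:

theta = theta - aa * (X' * deviation);   % same update as the tmp loop above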



1 change: 1 addition & 0 deletions ex1/normalEqn.m
@@ -13,6 +13,7 @@
% ---------------------- Sample Solution ----------------------


theta = pinv(X' * X) * X' * y;


% -------------------------------------------------------------
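pinv keeps the solve well-defined even when X' * X is singular. When X' * X is well-conditioned, a common alternative sketch is the backslash solve, which is generally preferred numerically:

theta = (X' * X) \ (X' * y);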
5 changes: 4 additions & 1 deletion ex1/plotData.m
@@ -14,11 +14,14 @@ function plotData(x, y)
% appear as red crosses. Furthermore, you can make the
% markers larger by using plot(..., 'rx', 'MarkerSize', 10);

figure; % open a new figure window


plot(x, y, 'rx', 'MarkerSize', 10);
ylabel('Profit in $10,000s');
xlabel('Population of City in 10,000s');


figure; % open a new figure window


% ============================================================
4 changes: 2 additions & 2 deletions ex1/warmUpExercise.m
@@ -2,15 +2,15 @@
%WARMUPEXERCISE Example function in octave
% A = WARMUPEXERCISE() is an example function that returns the 5x5 identity matrix

A = [];
%A = [];
% ============= YOUR CODE HERE ==============
% Instructions: Return the 5x5 identity matrix
% In octave, we return values by defining which variables
% represent the return values (at the top of the file)
% and then set them accordingly.



A = eye(5);



