
Commit b752ab8

Merge commit (2 parents: 062f9d7 + 8114c18)

4 files changed: 26 additions & 31 deletions

ex5/mlclass-ex5/learningCurve.m

Lines changed: 6 additions & 9 deletions
@@ -53,16 +53,13 @@
 % end
 %

-% ---------------------- Sample Solution ----------------------
-
-
-
-
-
-
-
-% -------------------------------------------------------------
+for i = 1:m
+    theta = trainLinearReg(X(1:i,:), y(1:i), lambda);
+    error_train(i) = linearRegCostFunction(X(1:i,:), y(1:i), theta, 0);
+    error_val(i) = linearRegCostFunction(Xval, yval, theta, 0);
+end

+%
 % =========================================================================

 end
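
The loop trains on the first i examples only and then reports the unregularized cost (the trailing 0 passed to linearRegCostFunction) on that training subset and on the full validation set. A minimal usage sketch, assuming the exercise's driver script has already loaded X, y, Xval, and yval; the bias-column augmentation and plot labels are illustrative, not part of this commit:

m = size(X, 1);
lambda = 0;

% learning curve over increasing training-set sizes
[error_train, error_val] = ...
    learningCurve([ones(m, 1) X], y, ...
                  [ones(size(Xval, 1), 1) Xval], yval, lambda);

plot(1:m, error_train, 1:m, error_val);
legend('Train', 'Cross Validation');
xlabel('Number of training examples');
ylabel('Error');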

ex5/mlclass-ex5/linearRegCostFunction.m

Lines changed: 10 additions & 10 deletions
@@ -19,16 +19,16 @@
 % You should set J to the cost and grad to the gradient.
 %

-
-
-
-
-
-
-
-
-
-
+% calculate cost function
+diff = X*theta - y;
+% calculate penalty,
+% excluding the first theta value
+theta1 = [0; theta(2:end, :)];
+p = lambda * (theta1' * theta1);
+J = (diff'*diff)/(2*m) + p/(2*m);
+
+% calculate gradients
+grad = (X'*diff + lambda*theta1)/m;

 % =========================================================================
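
In vectorized form this implements the regularized linear-regression cost and gradient, with the bias term kept out of the penalty by zeroing the first entry of theta1:

    J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \left( \theta^T x^{(i)} - y^{(i)} \right)^2 + \frac{\lambda}{2m} \sum_{j=1}^{n} \theta_j^2

    \frac{\partial J}{\partial \theta_j} = \frac{1}{m} \sum_{i=1}^{m} \left( \theta^T x^{(i)} - y^{(i)} \right) x_j^{(i)} + \frac{\lambda}{m} \theta_j \quad (\text{the } \lambda \text{ term is dropped for } j = 0)

Because theta1 carries a zero in the bias position, the single expression for grad covers the j = 0 and j >= 1 cases at once.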

ex5/mlclass-ex5/polyFeatures.m

Lines changed: 4 additions & 4 deletions
@@ -15,10 +15,10 @@
 %
 %

-
-
-
-
+X_poly(:,1) = X;
+for i = 2:p
+    X_poly(:,i) = X .* X_poly(:,i-1);
+end

 % =========================================================================
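
Column i of X_poly ends up holding X.^i, each column built by an elementwise multiply of X with the previous column. A tiny worked example with illustrative values, assuming the template's preallocation X_poly = zeros(numel(X), p) above this block:

X = [2; 3];
p = 3;
% polyFeatures(X, p) returns
%     2    4    8
%     3    9   27
% columns: X, X.^2, X.^3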

ex5/mlclass-ex5/validationCurve.m

Lines changed: 6 additions & 8 deletions
@@ -39,14 +39,12 @@
 %
 %

-
-
-
-
-
-
-
-
+for i = 1:length(lambda_vec)
+    lambda = lambda_vec(i);
+    theta = trainLinearReg(X, y, lambda);
+    error_train(i) = linearRegCostFunction(X, y, theta, 0);
+    error_val(i) = linearRegCostFunction(Xval, yval, theta, 0);
+end

 % =========================================================================
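
Each candidate lambda is used only for training; both error measurements pass 0 as the regularization argument, so the two curves compare raw fit across lambdas. A sketch of how the curve might be plotted and the best lambda chosen, assuming X and Xval here are the feature matrices the exercise script prepares; the plotting and selection code is illustrative:

[lambda_vec, error_train, error_val] = validationCurve(X, y, Xval, yval);

plot(lambda_vec, error_train, lambda_vec, error_val);
legend('Train', 'Cross Validation');
xlabel('lambda');
ylabel('Error');

% pick the lambda with the lowest cross-validation error
[~, idx] = min(error_val);
best_lambda = lambda_vec(idx);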
