
Commit af1b87b

Author: Joe | GJoeG&MM
Message: feat: add Stanford ML course exercises and supporting files
Parent: 2e69e79

Note: some content is hidden; large commits have some content collapsed by default.

72 files changed: +1128 / -461 lines
(file name hidden; see note above)

Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+# Created by Octave 10.2.0, Tue Jun 10 08:32:55 2025 UTC <unknown@JOE-DIAQDC4>
+# name: email
+# type: sq_string
+# elements: 1
+# length: 21
+lorenzosca7@gmail.com
+
+
+# name: token
+# type: sq_string
+# elements: 1
+# length: 16
+Ak3sUtRKkXjNx8m0
+
+

Certifications/Stanford Machine Learning/ex2/costFunction.m

Lines changed: 7 additions & 3 deletions
@@ -20,10 +20,14 @@
 % Note: grad should have the same dimensions as theta
 %

-h_theta = sigmoid(X*theta);
-J = (1 / m) * ((-y' * log(h_theta)) - (1 - y)' * log(1 - h_theta));
+% Compute hypothesis using sigmoid function
+h = sigmoid(X * theta);

-grad = (1 / m) * (h_theta - y)' * X;
+% Calculate cost function
+J = (1/m) * sum(-y .* log(h) - (1 - y) .* log(1 - h));
+
+% Calculate gradient
+grad = (1/m) * (X' * (h - y));

 % =============================================================
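For reference, the vectorized cost and gradient introduced above can be sanity-checked on a tiny hand-built dataset. A minimal standalone Octave sketch (the sigmoid is inlined here only to keep the snippet self-contained; the course supplies it as sigmoid.m, which is not shown in this section):

g = @(z) 1 ./ (1 + exp(-z));                            % logistic sigmoid
X = [ones(3, 1), [1; 2; 3]];                            % m = 3 examples, intercept column added
y = [0; 0; 1];
theta = zeros(2, 1);
m = length(y);
h = g(X * theta);                                       % hypothesis, m x 1
J = (1/m) * sum(-y .* log(h) - (1 - y) .* log(1 - h));  % scalar cost
grad = (1/m) * (X' * (h - y));                          % same dimensions as theta
fprintf('J = %f (expect log(2) = %f at theta = 0)\n', J, log(2));

At theta = 0 every hypothesis value is 0.5, so the cost is exactly log(2) ≈ 0.693, matching the 'Expected cost (approx): 0.693' printout that ex2.m adds below.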

Certifications/Stanford Machine Learning/ex2/costFunctionReg.m

Lines changed: 22 additions & 6 deletions
@@ -11,11 +11,27 @@
 J = 0;
 grad = zeros(size(theta));

-h_theta = sigmoid(X*theta);
-J = (1/m) * (-y' * log(h_theta) - (1-y)' * log(1-h_theta)) + (lambda/(2*m)) * (theta(2:length(theta)))' * theta(2:length(theta));
+% ====================== YOUR CODE HERE ======================
+% Instructions: Compute the cost of a particular choice of theta.
+%               You should set J to the cost.
+%               Compute the partial derivatives and set grad to the partial
+%               derivatives of the cost w.r.t. each parameter in theta

-thetaZero = theta;
-thetaZero(1) = 0;
+% Compute hypothesis using sigmoid function
+h = sigmoid(X * theta);

-grad = ((1 / m) * (h_theta - y)' * X) + lambda / m * thetaZero';
-end
+% Calculate cost function with regularization term
+% Note that theta(1) is not regularized
+J = (1/m) * sum(-y .* log(h) - (1 - y) .* log(1 - h)) + (lambda/(2*m)) * sum(theta(2:end).^2);
+
+% Calculate gradient with regularization
+% First calculate the gradient without regularization
+grad_no_reg = (1/m) * (X' * (h - y));
+
+% Add regularization term to all theta values except theta(1)
+grad = grad_no_reg;
+grad(2:end) = grad(2:end) + (lambda/m) * theta(2:end);
+
+% =============================================================
+
+end
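The key detail in the rewrite above is that the bias term theta(1) is excluded from both the penalty in J and the regularization term of the gradient. A small standalone Octave check (sigmoid inlined again; the data values are arbitrary):

g = @(z) 1 ./ (1 + exp(-z));                            % logistic sigmoid
X = [ones(4, 1), [1 2; 2 3; 3 4; 4 5]];                 % 4 examples: intercept + 2 features
y = [1; 0; 1; 0];
theta = ones(3, 1);
lambda = 10;
m = length(y);
h = g(X * theta);
J = (1/m) * sum(-y .* log(h) - (1 - y) .* log(1 - h)) ...
    + (lambda/(2*m)) * sum(theta(2:end).^2);            % theta(1) left out of the penalty
grad = (1/m) * (X' * (h - y));
grad(2:end) = grad(2:end) + (lambda/m) * theta(2:end);  % bias gradient stays unregularized

Setting lambda = 0 should reduce both J and grad to the unregularized results of costFunction.m, which is a quick way to confirm the wiring.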

Certifications/Stanford Machine Learning/ex2/ex2.m

Lines changed: 22 additions & 8 deletions
@@ -24,8 +24,7 @@
 % contains the label.

 data = load('ex2data1.txt');
-X = data(:, [1, 2]); %Feature 1 and Feature 2
-y = data(:, 3); %Negative or Positive
+X = data(:, [1, 2]); y = data(:, 3);

 %% ==================== Part 1: Plotting ====================
 % We start the exercise by first plotting the data to understand the
@@ -59,7 +58,7 @@
 [m, n] = size(X);

 % Add intercept term to x and X_test
-X = [ones(m, 1) X]; %Concatenating Column Vector 1's(Feature 0) with Column Vector X (Feature 1, Feature 2)
+X = [ones(m, 1) X];

 % Initialize fitting parameters
 initial_theta = zeros(n + 1, 1);
@@ -68,12 +67,25 @@
 [cost, grad] = costFunction(initial_theta, X, y);

 fprintf('Cost at initial theta (zeros): %f\n', cost);
+fprintf('Expected cost (approx): 0.693\n');
 fprintf('Gradient at initial theta (zeros): \n');
 fprintf(' %f \n', grad);
+fprintf('Expected gradients (approx):\n -0.1000\n -12.0092\n -11.2628\n');
+
+% Compute and display cost and gradient with non-zero theta
+test_theta = [-24; 0.2; 0.2];
+[cost, grad] = costFunction(test_theta, X, y);
+
+fprintf('\nCost at test theta: %f\n', cost);
+fprintf('Expected cost (approx): 0.218\n');
+fprintf('Gradient at test theta: \n');
+fprintf(' %f \n', grad);
+fprintf('Expected gradients (approx):\n 0.043\n 2.566\n 2.647\n');

 fprintf('\nProgram paused. Press enter to continue.\n');
 pause;

+
 %% ============= Part 3: Optimizing using fminunc =============
 % In this exercise, you will use a built-in function (fminunc) to find the
 % optimal parameters theta.
@@ -85,13 +97,14 @@
 % This function will return theta and the cost
 [theta, cost] = ...
     fminunc(@(t)(costFunction(t, X, y)), initial_theta, options);
-% 't' is passed as dummy parameter which is initialized with 'initial_theta'
-% first then subsequent values are choosen by fminunc

 % Print theta to screen
 fprintf('Cost at theta found by fminunc: %f\n', cost);
+fprintf('Expected cost (approx): 0.203\n');
 fprintf('theta: \n');
 fprintf(' %f \n', theta);
+fprintf('Expected theta (approx):\n');
+fprintf(' -25.161\n 0.206\n 0.201\n');

 % Plot Boundary
 plotDecisionBoundary(theta, X, y);
@@ -125,13 +138,14 @@

 prob = sigmoid([1 45 85] * theta);
 fprintf(['For a student with scores 45 and 85, we predict an admission ' ...
-         'probability of %f\n\n'], prob);
+         'probability of %f\n'], prob);
+fprintf('Expected value: 0.775 +/- 0.002\n\n');

 % Compute accuracy on our training set
 p = predict(theta, X);

 fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);
+fprintf('Expected accuracy (approx): 89.0\n');
+fprintf('\n');

-fprintf('\nProgram paused. Press enter to continue.\n');
-pause;
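For context, the retained fminunc call relies on an options struct defined a few lines earlier in ex2.m, outside this hunk. A minimal sketch of that pattern, assuming costFunction.m is on the Octave path and X, y are the intercept-augmented data from Part 2:

% Supply the analytic gradient and cap the iteration count.
options = optimset('GradObj', 'on', 'MaxIter', 400);
initial_theta = zeros(size(X, 2), 1);
[theta, cost] = fminunc(@(t)(costFunction(t, X, y)), initial_theta, options);

With 'GradObj' set to 'on', fminunc uses the second return value of costFunction as the gradient instead of estimating it numerically, which is why the exercise has you return both J and grad from one function.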

Binary file not shown.

Certifications/Stanford Machine Learning/ex2/ex2_reg.m

Lines changed: 26 additions & 6 deletions
@@ -2,7 +2,7 @@
 %
 % Instructions
 % ------------
-% 
+%
 %
 % This file contains code that helps you get started on the second part
 % of the exercise which covers regularization with logistic regression.
 %
@@ -29,7 +29,7 @@

 plotData(X, y);

-% Put some labels 
+% Put some labels
 hold on;

 % Labels and Legend
@@ -43,8 +43,8 @@

 %% =========== Part 1: Regularized Logistic Regression ============
 % In this part, you are given a dataset with data points that are not
-% linearly separable. However, you would still like to use logistic 
-% regression to classify the data points. 
+% linearly separable. However, you would still like to use logistic
+% regression to classify the data points.
 %
 % To do so, you introduce more features to use -- in particular, you add
 % polynomial features to our data matrix (similar to polynomial
@@ -68,13 +68,33 @@
 [cost, grad] = costFunctionReg(initial_theta, X, y, lambda);

 fprintf('Cost at initial theta (zeros): %f\n', cost);
+fprintf('Expected cost (approx): 0.693\n');
+fprintf('Gradient at initial theta (zeros) - first five values only:\n');
+fprintf(' %f \n', grad(1:5));
+fprintf('Expected gradients (approx) - first five values only:\n');
+fprintf(' 0.0085\n 0.0188\n 0.0001\n 0.0503\n 0.0115\n');
+
+fprintf('\nProgram paused. Press enter to continue.\n');
+pause;
+
+% Compute and display cost and gradient
+% with all-ones theta and lambda = 10
+test_theta = ones(size(X,2),1);
+[cost, grad] = costFunctionReg(test_theta, X, y, 10);
+
+fprintf('\nCost at test theta (with lambda = 10): %f\n', cost);
+fprintf('Expected cost (approx): 3.16\n');
+fprintf('Gradient at test theta - first five values only:\n');
+fprintf(' %f \n', grad(1:5));
+fprintf('Expected gradients (approx) - first five values only:\n');
+fprintf(' 0.3460\n 0.1614\n 0.1948\n 0.2269\n 0.0922\n');

 fprintf('\nProgram paused. Press enter to continue.\n');
 pause;

 %% ============= Part 2: Regularization and Accuracies =============
 % Optional Exercise:
-% In this part, you will get to try different values of lambda and 
+% In this part, you will get to try different values of lambda and
 % see how regularization affects the decision boundary
 %
 % Try the following values of lambda (0, 1, 10, 100).
@@ -112,5 +132,5 @@
 p = predict(theta, X);

 fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);
-
+fprintf('Expected accuracy (with lambda = 1): 83.1 (approx)\n');
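As a sketch of the optional exercise, a hypothetical sweep over the suggested lambda values (assuming the polynomial feature matrix X, labels y, and the rest of ex2_reg.m's setup are in scope; this loop is not part of the committed script):

for lambda = [0 1 10 100]
  initial_theta = zeros(size(X, 2), 1);
  options = optimset('GradObj', 'on', 'MaxIter', 400);
  theta = fminunc(@(t)(costFunctionReg(t, X, y, lambda)), initial_theta, options);
  p = predict(theta, X);
  fprintf('lambda = %3d: train accuracy = %.1f\n', lambda, mean(double(p == y)) * 100);
end

Small lambda values overfit the training set (a wiggly decision boundary and higher train accuracy); large values underfit, flattening the boundary and dropping accuracy.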

Certifications/Stanford Machine Learning/ex2/lib/submitWithConfiguration.m

Lines changed: 69 additions & 12 deletions
@@ -22,15 +22,17 @@ function submitWithConfiguration(conf)
     response = submitParts(conf, email, token, parts);
   catch
     e = lasterror();
-    fprintf( ...
-      '!! Submission failed: unexpected error: %s\n', ...
-      e.message);
-    fprintf('!! Please try again later.\n');
+    fprintf('\n!! Submission failed: %s\n', e.message);
+    fprintf('\n\nFunction: %s\nFileName: %s\nLineNumber: %d\n', ...
+      e.stack(1,1).name, e.stack(1,1).file, e.stack(1,1).line);
+    fprintf('\nPlease correct your code and resubmit.\n');
     return
   end

   if isfield(response, 'errorMessage')
     fprintf('!! Submission failed: %s\n', response.errorMessage);
+  elseif isfield(response, 'errorCode')
+    fprintf('!! Submission failed: %s\n', response.message);
   else
     showFeedback(parts, response);
     save(tokenFile, 'email', 'token');
@@ -62,13 +64,13 @@ function submitWithConfiguration(conf)
 function response = submitParts(conf, email, token, parts)
   body = makePostBody(conf, email, token, parts);
   submissionUrl = submissionUrl();
-  params = {'jsonBody', body};
-  responseBody = urlread(submissionUrl, 'post', params);
-  response = loadjson(responseBody);
+  responseBody = getResponse(submissionUrl, body);
+  jsonResponse = validateResponse(responseBody);
+  response = loadjson(jsonResponse);
 end

 function body = makePostBody(conf, email, token, parts)
-  bodyStruct.assignmentSlug = conf.assignmentSlug;
+  bodyStruct.assignmentKey = conf.assignmentKey;
   bodyStruct.submitterEmail = email;
   bodyStruct.secret = token;
   bodyStruct.parts = makePartsStruct(conf, parts);
@@ -100,26 +102,81 @@ function showFeedback(parts, response)
   fprintf('== \n');
   fprintf('== %43s | %9s | %-s\n', 'Part Name', 'Score', 'Feedback');
   fprintf('== %43s | %9s | %-s\n', '---------', '-----', '--------');
+
   for part = parts
     score = '';
     partFeedback = '';
-    partFeedback = response.partFeedbacks.(makeValidFieldName(part{:}.id));
-    partEvaluation = response.partEvaluations.(makeValidFieldName(part{:}.id));
+    % NEW: parse the response body
+    disp(response)
+    partFeedback = response.linked.onDemandProgrammingScriptEvaluations_0x2E_v1{1}(1).parts.(makeValidFieldName(part{:}.id)).feedback;
+    partEvaluation = response.linked.onDemandProgrammingScriptEvaluations_0x2E_v1{1}(1).parts.(makeValidFieldName(part{:}.id));
     score = sprintf('%d / %3d', partEvaluation.score, partEvaluation.maxScore);
     fprintf('== %43s | %9s | %-s\n', part{:}.name, score, partFeedback);
   end
-  evaluation = response.evaluation;
+  evaluation = response.linked.onDemandProgrammingScriptEvaluations_0x2E_v1{1}(1);
   totalScore = sprintf('%d / %d', evaluation.score, evaluation.maxScore);
   fprintf('== --------------------------------\n');
   fprintf('== %43s | %9s | %-s\n', '', totalScore, '');
   fprintf('== \n');
 end

+% use urlread or curl to send submit results to the grader and get a response
+function response = getResponse(url, body)
+  % NEW: curl submission for Windows and Mac
+  if ispc
+    % escape double quotes so the JSON body survives the Windows command shell
+    new_body = regexprep(body, '\"', '\\"');
+    json_command = sprintf('curl -X POST -H "Cache-Control: no-cache" -H "Content-Type: application/json" -d "%s" --ssl-no-revoke "%s"', new_body, url);
+    [code, response] = dos(json_command); % dos() runs the command on Windows
+
+    new_response = regexp(response, '\{(.)*', 'match');
+    response = new_response{1,1};
+
+    % test the success code
+    if (code ~= 0)
+      fprintf('[error] submission with curl was not successful\n');
+    end
+  else
+    json_command = sprintf('curl -X POST -H "Cache-Control: no-cache" -H "Content-Type: application/json" -d '' %s '' --ssl-no-revoke ''%s''', body, url);
+    [code, response] = system(json_command);
+    % test the success code
+    if (code ~= 0)
+      fprintf('[error] submission with curl was not successful\n');
+    end
+  end
+end
+
+% validate the grader's response
+function response = validateResponse(resp)
+  % test if the response is JSON or an HTML page
+  isJson = length(resp) > 0 && resp(1) == '{';
+  isHtml = findstr(lower(resp), '<html');
+
+  if (isJson)
+    response = resp;
+  elseif (isHtml)
+    % the response is HTML, so it's probably an error message
+    printHTMLContents(resp);
+    error('Grader response is an HTML message');
+  else
+    error('Grader sent no response');
+  end
+end
+
+% parse an HTML response and print its contents
+function printHTMLContents(response)
+  strippedResponse = regexprep(response, '<[^>]+>', ' ');
+  strippedResponse = regexprep(strippedResponse, '[\t ]+', ' ');
+  fprintf(strippedResponse);
+end
+
+
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 %
 % Service configuration
 %
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 function submissionUrl = submissionUrl()
-  submissionUrl = 'https://www-origin.coursera.org/api/onDemandProgrammingImmediateFormSubmissions.v1';
+  submissionUrl = 'https://www.coursera.org/api/onDemandProgrammingScriptSubmissions.v1?includes=evaluation';
 end
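The subtle part of the new getResponse is the ispc branch's quote handling: cmd.exe strips bare double quotes, so each quote in the JSON body has to be backslash-escaped before it is interpolated into the curl command line. A small Octave check of just that step (the body value here is hypothetical):

body = '{"assignmentKey":"abc","secret":"xyz"}';  % hypothetical JSON body
new_body = regexprep(body, '\"', '\\"');          % prefix every " with a backslash
disp(new_body)                                    % {\"assignmentKey\":\"abc\",\"secret\":\"xyz\"}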

Certifications/Stanford Machine Learning/ex2/plotData.m

Lines changed: 17 additions & 7 deletions
@@ -6,13 +6,23 @@ function plotData(X, y)
 % Create New Figure
 figure; hold on;

-% Find Indices of Positive and Negative Examples
-pos = find(y == 1); neg = find(y == 0);
-% Plot Examples
-plot(X(pos, 1), X(pos, 2), 'k+','LineWidth', 2, ...
-'MarkerSize', 7);
-plot(X(neg, 1), X(neg, 2), 'ko', 'MarkerFaceColor', 'y', ...
-'MarkerSize', 7);
+% ====================== YOUR CODE HERE ======================
+% Instructions: Plot the positive and negative examples on a
+%               2D plot, using the option 'k+' for the positive
+%               examples and 'ko' for the negative examples.
+%
+
+
+
+
+
+
+
+
+
+% =========================================================================
+
+

 hold off;
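Note that this hunk reverts plotData.m to the unimplemented course template: the deleted lines were a working solution. For reference, that deleted approach, lightly tidied (find the indices of each class, then plot each with its own marker):

% Find indices of positive and negative examples
pos = find(y == 1); neg = find(y == 0);
% Plot examples
plot(X(pos, 1), X(pos, 2), 'k+', 'LineWidth', 2, 'MarkerSize', 7);
plot(X(neg, 1), X(neg, 2), 'ko', 'MarkerFaceColor', 'y', 'MarkerSize', 7);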

Certifications/Stanford Machine Learning/ex2/predict.m

Lines changed: 15 additions & 1 deletion
@@ -9,6 +9,20 @@
 % You need to return the following variables correctly
 p = zeros(m, 1);

-p = sigmoid(X * theta) >= 0.5;
+% ====================== YOUR CODE HERE ======================
+% Instructions: Complete the following code to make predictions using
+%               your learned logistic regression parameters.
+%               You should set p to a vector of 0's and 1's
+%
+
+% Calculate the probability using sigmoid function
+prob = sigmoid(X * theta);
+
+% Convert probabilities to binary predictions (0 or 1)
+% Predict 1 if probability >= 0.5, otherwise predict 0
+p = prob >= 0.5;
+
+% =========================================================================
+

 end
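Since the sigmoid is monotone with sigmoid(0) = 0.5, thresholding the probability at 0.5 is equivalent to testing the sign of X * theta. A quick standalone Octave check (sigmoid inlined; the numbers are arbitrary):

g = @(z) 1 ./ (1 + exp(-z));     % logistic sigmoid
X = [1 2 3; 1 -1 0.5];           % two examples, intercept term included
theta = [-1; 0.5; 0.2];
p1 = g(X * theta) >= 0.5;        % the rule used in predict.m
p2 = (X * theta) >= 0;           % equivalent decision rule
assert(isequal(p1, p2))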
