### Usage

* [Cost](#cost)
* [Feature normalization](#feature-normalization)
* [Gradient Descent](#gradient-descent)
* [Prediction](#prediction)

The first column of the training set x must be all 1s; this is the intercept (bias) column.
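For example, raw feature rows can be given that leading intercept column with plain NumPy (this is ordinary NumPy, not a pyml helper; the variable names are illustrative):

```python
import numpy as np

features = np.array([[1.0], [2.0], [3.0]])                   # raw single-feature rows
x = np.hstack([np.ones((features.shape[0], 1)), features])   # prepend the column of 1s
# x is now [[1., 1.], [1., 2.], [1., 3.]], the same layout as list_x below
```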
The following code is assumed in all the examples.
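The snippets build matrices with np and call functions on pyml, so the setup amounts to two imports; the NumPy alias is taken straight from the examples, while the exact way pyml is imported is an assumption:

```python
import numpy as np   # the examples construct inputs with np.matrix
import pyml          # assumed import of this library's top-level module
```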
```
Normalized x: [[ 1.         -1.22474487 -1.22474487]
 [ 1.          0.          0.        ]
 [ 1.          1.22474487  1.22474487]]
```
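The normalized matrix above is simply the z-score of each feature column: subtract the column mean and divide by the column standard deviation, leaving the leading column of 1s untouched. A minimal sketch of that transform (an illustration of the idea, not necessarily pyml's exact normalize_features):

```python
import numpy as np

def normalize_features_sketch(x):
    """Z-score each feature column; illustrative stand-in for pyml.normalize_features."""
    x = np.asarray(x, dtype=float)
    mu = x.mean(axis=0)           # per-column mean
    sigma = x.std(axis=0)         # per-column (population) standard deviation
    mu[0], sigma[0] = 0.0, 1.0    # keep the intercept column of 1s unchanged
    return (x - mu) / sigma, mu, sigma
```

Normalizing is what lets a single learning rate work well in gradient descent when the feature scales differ.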
#### Gradient Descent

```python
list_x = [[1, 1], [1, 2], [1, 3]]
list_theta = [[0], [0]]
list_y = [[10], [20], [30]]

x = np.matrix(list_x)
theta = np.matrix(list_theta)
y = np.matrix(list_y)

(x, mu, sigma) = pyml.normalize_features(x)

alpha = 0.03
num_iters = 2000

theta = pyml.gradient_descent(x, y, theta, alpha, num_iters)
print('Theta: ', theta)
```

```
Theta: [[ 20.        ]
 [  8.16496581]]
```
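For a sense of what that call computes: batch gradient descent for linear regression repeatedly steps theta against the gradient of the mean squared error cost, theta := theta - (alpha / m) * X^T (X theta - y). A bare-bones version of the loop (an illustration, not pyml's actual gradient_descent):

```python
import numpy as np

def gradient_descent_sketch(x, y, theta, alpha, num_iters):
    """Plain batch gradient descent for linear regression (illustration only)."""
    x, y, theta = np.asarray(x, float), np.asarray(y, float), np.asarray(theta, float)
    m = x.shape[0]                             # number of training examples
    for _ in range(num_iters):
        gradient = x.T @ (x @ theta - y) / m   # gradient of the mean squared error cost
        theta = theta - alpha * gradient       # move downhill with step size alpha
    return theta
```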
#### Prediction

```python
list_x = [[1, 1], [1, 2], [1, 3]]
list_theta = [[0], [0]]
list_y = [[10], [20], [30]]

x = np.matrix(list_x)
theta = np.matrix(list_theta)
y = np.matrix(list_y)

(x, mu, sigma) = pyml.normalize_features(x)

alpha = 0.03
num_iters = 2000

theta = pyml.gradient_descent(x, y, theta, alpha, num_iters)

vals = [[1, 4], [1, 5]]
predictions = pyml.predict(np.matrix(vals), theta, mu, sigma)
print('Predictions: ', predictions)
```

```
Predictions: [[ 40.]
 [ 50.]]
```
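Because theta was fitted on normalized features, predict has to apply the same mu and sigma to new rows before multiplying by theta. A sketch of that logic, assuming mu and sigma are per-column values with the intercept column mapped to mu = 0 and sigma = 1 as in the normalization sketch above (pyml's real predict may differ in detail):

```python
import numpy as np

def predict_sketch(vals, theta, mu, sigma):
    """Scale new inputs like the training data, then apply theta (illustration only)."""
    vals = np.asarray(vals, dtype=float)
    normalized = (vals - mu) / sigma           # same z-scoring as during training
    return normalized @ np.asarray(theta, dtype=float)
```

With the numbers above, the feature value 4 scales to (4 - 2) / 0.816 ≈ 2.449, and 20 + 8.165 * 2.449 ≈ 40, which matches the printed prediction.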