# https://deeplearningcourses.com/c/data-science-logistic-regression-in-python
# https://www.udemy.com/data-science-logistic-regression-in-python

+ from __future__ import print_function, division
+ from builtins import range
+ # Note: you may need to update your version of future
+ # sudo pip install -U future
+
+
+
import numpy as np
import matplotlib.pyplot as plt

# distance from origin is radius + random normal
# angle theta is uniformly distributed between (0, 2pi)
- R1 = np.random.randn(N / 2) + R_inner
- theta = 2 * np.pi * np.random.random(N / 2)
+ R1 = np.random.randn(N // 2) + R_inner
+ theta = 2 * np.pi * np.random.random(N // 2)
X_inner = np.concatenate([[R1 * np.cos(theta)], [R1 * np.sin(theta)]]).T

- R2 = np.random.randn(N / 2) + R_outer
- theta = 2 * np.pi * np.random.random(N / 2)
+ R2 = np.random.randn(N // 2) + R_outer
+ theta = 2 * np.pi * np.random.random(N // 2)
X_outer = np.concatenate([[R2 * np.cos(theta)], [R2 * np.sin(theta)]]).T

X = np.concatenate([X_inner, X_outer])
- T = np.array([0]*(N / 2) + [1]*(N / 2)) # labels: first 50 are 0, last 50 are 1
+ T = np.array([0]*(N // 2) + [1]*(N // 2)) # labels: first 50 are 0, last 50 are 1

plt.scatter(X[:,0], X[:,1], c=T)
plt.show()
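
The lines between this block and the next hunk (the feature matrix, the sigmoid, and the initial predictions Y) are not shown here. For context only, a minimal sketch of what that section presumably sets up; the names ones, r, Xb and w are assumptions, inferred from how Xb and Y are used further down:

ones = np.ones((N, 1))                           # bias column
r = np.sqrt((X * X).sum(axis=1)).reshape(-1, 1)  # radius of each point; the feature that makes the donut separable
Xb = np.concatenate((ones, r, X), axis=1)        # bias, radius, then the raw coordinates
w = np.random.randn(Xb.shape[1])                 # randomly initialize the weights
Y = sigmoid(Xb.dot(w))                           # model output used by the training loop below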
@@ -53,24 +60,17 @@ def sigmoid(z):

# calculate the cross-entropy error
def cross_entropy(T, Y):
-     # E = 0
-     # for i in xrange(N):
-     #     if T[i] == 1:
-     #         E -= np.log(Y[i])
-     #     else:
-     #         E -= np.log(1 - Y[i])
-     # return E
    return -(T*np.log(Y) + (1 - T)*np.log(1 - Y)).sum()
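
The vectorized return computes the same quantity as the commented-out loop removed above: the binary cross-entropy summed over all samples. A quick check with made-up values (illustrative only):

cross_entropy(np.array([1, 0]), np.array([0.9, 0.2]))  # = -(log(0.9) + log(0.8)) ≈ 0.33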


# let's do gradient descent 5000 times
learning_rate = 0.0001
error = []
- for i in xrange(5000):
+ for i in range(5000):
    e = cross_entropy(T, Y)
    error.append(e)
    if i % 500 == 0:
-         print e
+         print(e)

    # gradient descent weight update with regularization
    # w += learning_rate * ( np.dot((T - Y).T, Xb) - 0.01*w ) # old
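
The updated weight-update line itself falls outside this hunk. A minimal sketch of a regularized update of that form, assuming Xb, Y and sigmoid as sketched earlier and reusing the 0.01 penalty from the old comment:

    w += learning_rate * ( Xb.T.dot(T - Y) - 0.01*w )  # gradient step on the log-likelihood with L2 shrinkage
    Y = sigmoid(Xb.dot(w))                             # recompute predictions with the new weights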
@@ -83,5 +83,5 @@ def cross_entropy(T, Y):
plt.title("Cross-entropy per iteration")
plt.show()

- print "Final w:", w
- print "Final classification rate:", 1 - np.abs(T - np.round(Y)).sum() / N
+ print("Final w:", w)
+ print("Final classification rate:", 1 - np.abs(T - np.round(Y)).sum() / N)