# https://deeplearningcourses.com/c/unsupervised-deep-learning-in-python
# https://www.udemy.com/unsupervised-deep-learning-in-python
+from __future__ import print_function, division
+from builtins import range, input
+# Note: you may need to update your version of future
+# sudo pip install -U future
+
import numpy as np
import theano
import theano.tensor as T
@@ -16,7 +21,7 @@ def __init__(self, M, an_id):

    def fit(self, X, learning_rate=0.5, mu=0.99, epochs=1, batch_sz=100, show_fig=False):
        N, D = X.shape
-        n_batches = N / batch_sz
+        n_batches = N // batch_sz
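+        # integer division: n_batches must be an int so range(n_batches) works in Python 3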

        W0 = init_weights((D, self.M))
        self.W = theano.shared(W0, 'W_%s' % self.id)
@@ -43,8 +48,15 @@ def fit(self, X, learning_rate=0.5, mu=0.99, epochs=1, batch_sz=100, show_fig=Fa
            outputs=H,
        )

+        # save this for later so we can call it to
+        # create reconstructions of input
+        self.predict = theano.function(
+            inputs=[X_in],
+            outputs=X_hat,
+        )
+
        # cost = ((X_in - X_hat) * (X_in - X_hat)).sum() / N
-        cost = -(X_in * T.log(X_hat) + (1 - X_in) * T.log(1 - X_hat)).sum() / (batch_sz * D)
+        cost = -(X_in * T.log(X_hat) + (1 - X_in) * T.log(1 - X_hat)).flatten().mean()
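+        # the mean over all batch_sz * D elements equals the old sum / (batch_sz * D)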
        cost_op = theano.function(
            inputs=[X_in],
            outputs=cost,
@@ -61,15 +73,16 @@ def fit(self, X, learning_rate=0.5, mu=0.99, epochs=1, batch_sz=100, show_fig=Fa
        )

        costs = []
-        print "training autoencoder: %s" % self.id
-        for i in xrange(epochs):
-            print "epoch:", i
+        print("training autoencoder: %s" % self.id)
+        for i in range(epochs):
+            print("epoch:", i)
            X = shuffle(X)
-            for j in xrange(n_batches):
+            for j in range(n_batches):
                batch = X[j*batch_sz:(j*batch_sz + batch_sz)]
                train_op(batch)
-                the_cost = cost_op(X) # technically we could also get the cost for Xtest here
-                print "j / n_batches:", j, "/", n_batches, "cost:", the_cost
+                the_cost = cost_op(batch) # technically we could also get the cost for Xtest here
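+                # print progress only every 10th batch to keep the console output short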
+                if j % 10 == 0:
+                    print("j / n_batches:", j, "/", n_batches, "cost:", the_cost)
                costs.append(the_cost)
        if show_fig:
            plt.plot(costs)
@@ -162,19 +175,19 @@ def fit(self, X, Y, Xtest, Ytest, pretrain=True, learning_rate=0.01, mu=0.99, re
            updates=updates,
        )

-        n_batches = N / batch_sz
+        n_batches = N // batch_sz
        costs = []
-        print "supervised training..."
-        for i in xrange(epochs):
-            print "epoch:", i
+        print("supervised training...")
+        for i in range(epochs):
+            print("epoch:", i)
            X, Y = shuffle(X, Y)
-            for j in xrange(n_batches):
+            for j in range(n_batches):
                Xbatch = X[j*batch_sz:(j*batch_sz + batch_sz)]
                Ybatch = Y[j*batch_sz:(j*batch_sz + batch_sz)]
                train_op(Xbatch, Ybatch)
                the_cost, the_prediction = cost_predict_op(Xtest, Ytest)
                error = error_rate(the_prediction, Ytest)
-                print "j / n_batches:", j, "/", n_batches, "cost:", the_cost, "error:", error
+                print("j / n_batches:", j, "/", n_batches, "cost:", the_cost, "error:", error)
                costs.append(the_cost)
        plt.plot(costs)
        plt.show()
@@ -202,5 +215,32 @@ def main():
    dnn.fit(Xtrain, Ytrain, Xtest, Ytest, pretrain=False, epochs=10)


+def test_single_autoencoder():
+    Xtrain, Ytrain, Xtest, Ytest = getKaggleMNIST()
+
+    autoencoder = AutoEncoder(300, 0)
+    autoencoder.fit(Xtrain, epochs=2, show_fig=True)
+
+    done = False
+    while not done:
+        i = np.random.choice(len(Xtest))
+        x = Xtest[i]
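+        # wrap x in a list so predict() receives a 2-D batch of shape (1, D)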
+        y = autoencoder.predict([x])
+        plt.subplot(1, 2, 1)
+        plt.imshow(x.reshape(28, 28), cmap='gray')
+        plt.title('Original')
+
+        plt.subplot(1, 2, 2)
+        plt.imshow(y.reshape(28, 28), cmap='gray')
+        plt.title('Reconstructed')
+
+        plt.show()
+
+        ans = input("Generate another?")
+        if ans and ans[0] in ('n', 'N'):
+            done = True
+
+
if __name__ == '__main__':
-    main()
+    # main()
+    test_single_autoencoder()