
Commit f506d49

theano autoencoder
1 parent 16faec6 commit f506d49

1 file changed (+55, -15 lines)


unsupervised_class2/autoencoder.py

+55 -15
@@ -1,5 +1,10 @@
 # https://deeplearningcourses.com/c/unsupervised-deep-learning-in-python
 # https://www.udemy.com/unsupervised-deep-learning-in-python
+from __future__ import print_function, division
+from builtins import range, input
+# Note: you may need to update your version of future
+# sudo pip install -U future
+
 import numpy as np
 import theano
 import theano.tensor as T
@@ -16,7 +21,7 @@ def __init__(self, M, an_id):

     def fit(self, X, learning_rate=0.5, mu=0.99, epochs=1, batch_sz=100, show_fig=False):
         N, D = X.shape
-        n_batches = N / batch_sz
+        n_batches = N // batch_sz

         W0 = init_weights((D, self.M))
         self.W = theano.shared(W0, 'W_%s' % self.id)
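The switch to `//` matters once `division` is imported from `__future__` (and under Python 3): `/` on two ints returns a float, which cannot be fed to `range()`. A quick illustrative check, with sizes chosen arbitrarily rather than taken from the dataset:

N, batch_sz = 41000, 100   # illustrative sizes only
print(N / batch_sz)        # 410.0 -- a float; range(410.0) raises TypeError
print(N // batch_sz)       # 410   -- an int, safe to use as a loop bound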
@@ -43,8 +48,15 @@ def fit(self, X, learning_rate=0.5, mu=0.99, epochs=1, batch_sz=100, show_fig=Fa
             outputs=H,
         )

+        # save this for later so we can call it to
+        # create reconstructions of input
+        self.predict = theano.function(
+            inputs=[X_in],
+            outputs=X_hat,
+        )
+
         # cost = ((X_in - X_hat) * (X_in - X_hat)).sum() / N
-        cost = -(X_in * T.log(X_hat) + (1 - X_in) * T.log(1 - X_hat)).sum() / (batch_sz * D)
+        cost = -(X_in * T.log(X_hat) + (1 - X_in) * T.log(1 - X_hat)).flatten().mean()
         cost_op = theano.function(
            inputs=[X_in],
            outputs=cost,
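A note on the cost rewrite: for a full batch of batch_sz rows, summing the per-element cross-entropy and dividing by batch_sz * D gives the same number as taking the mean over the flattened elements, so the new expression is numerically equivalent while no longer hard-coding the batch size. A minimal NumPy sanity check with stand-in arrays (not the repo's Theano graph):

import numpy as np

batch_sz, D = 100, 784                                # sizes assumed for illustration
rng = np.random.default_rng(0)
X_in = rng.uniform(0.01, 0.99, size=(batch_sz, D))    # stand-in for inputs
X_hat = rng.uniform(0.01, 0.99, size=(batch_sz, D))   # stand-in for reconstructions

old_cost = -(X_in * np.log(X_hat) + (1 - X_in) * np.log(1 - X_hat)).sum() / (batch_sz * D)
new_cost = -(X_in * np.log(X_hat) + (1 - X_in) * np.log(1 - X_hat)).flatten().mean()
assert np.isclose(old_cost, new_cost)                 # same value, different bookkeeping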
@@ -61,15 +73,16 @@ def fit(self, X, learning_rate=0.5, mu=0.99, epochs=1, batch_sz=100, show_fig=Fa
         )

         costs = []
-        print "training autoencoder: %s" % self.id
-        for i in xrange(epochs):
-            print "epoch:", i
+        print("training autoencoder: %s" % self.id)
+        for i in range(epochs):
+            print("epoch:", i)
             X = shuffle(X)
-            for j in xrange(n_batches):
+            for j in range(n_batches):
                 batch = X[j*batch_sz:(j*batch_sz + batch_sz)]
                 train_op(batch)
-                the_cost = cost_op(X) # technically we could also get the cost for Xtest here
-                print "j / n_batches:", j, "/", n_batches, "cost:", the_cost
+                the_cost = cost_op(batch) # technically we could also get the cost for Xtest here
+                if j % 10 == 0:
+                    print("j / n_batches:", j, "/", n_batches, "cost:", the_cost)
                 costs.append(the_cost)
         if show_fig:
             plt.plot(costs)
@@ -162,19 +175,19 @@ def fit(self, X, Y, Xtest, Ytest, pretrain=True, learning_rate=0.01, mu=0.99, re
             updates=updates,
         )

-        n_batches = N / batch_sz
+        n_batches = N // batch_sz
         costs = []
-        print "supervised training..."
-        for i in xrange(epochs):
-            print "epoch:", i
+        print("supervised training...")
+        for i in range(epochs):
+            print("epoch:", i)
             X, Y = shuffle(X, Y)
-            for j in xrange(n_batches):
+            for j in range(n_batches):
                 Xbatch = X[j*batch_sz:(j*batch_sz + batch_sz)]
                 Ybatch = Y[j*batch_sz:(j*batch_sz + batch_sz)]
                 train_op(Xbatch, Ybatch)
                 the_cost, the_prediction = cost_predict_op(Xtest, Ytest)
                 error = error_rate(the_prediction, Ytest)
-                print "j / n_batches:", j, "/", n_batches, "cost:", the_cost, "error:", error
+                print("j / n_batches:", j, "/", n_batches, "cost:", the_cost, "error:", error)
                 costs.append(the_cost)
         plt.plot(costs)
         plt.show()
@@ -202,5 +215,32 @@ def main():
     dnn.fit(Xtrain, Ytrain, Xtest, Ytest, pretrain=False, epochs=10)


+def test_single_autoencoder():
+    Xtrain, Ytrain, Xtest, Ytest = getKaggleMNIST()
+
+    autoencoder = AutoEncoder(300, 0)
+    autoencoder.fit(Xtrain, epochs=2, show_fig=True)
+
+    done = False
+    while not done:
+        i = np.random.choice(len(Xtest))
+        x = Xtest[i]
+        y = autoencoder.predict([x])
+        plt.subplot(1, 2, 1)
+        plt.imshow(x.reshape(28, 28), cmap='gray')
+        plt.title('Original')
+
+        plt.subplot(1, 2, 2)
+        plt.imshow(y.reshape(28, 28), cmap='gray')
+        plt.title('Reconstructed')
+
+        plt.show()
+
+        ans = input("Generate another?")
+        if ans and ans[0] in ('n', 'N'):
+            done = True
+
+
 if __name__ == '__main__':
-    main()
+    # main()
+    test_single_autoencoder()
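Because self.predict is a saved theano.function, it can reconstruct a whole batch at once, not only the single image drawn in test_single_autoencoder. A hedged usage sketch, assuming the names defined in this file (AutoEncoder, getKaggleMNIST); the reconstruction-error line is an illustrative addition, not part of the commit:

import numpy as np

# Assumes AutoEncoder and getKaggleMNIST from this file are importable here.
Xtrain, Ytrain, Xtest, Ytest = getKaggleMNIST()
ae = AutoEncoder(300, 0)
ae.fit(Xtrain, epochs=1)

X_hat = ae.predict(Xtest[:100])   # the saved theano.function; shape (100, 784)
mse = np.mean((Xtest[:100] - X_hat) ** 2)
print("mean squared reconstruction error on 100 test images:", mse)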
