@@ -24,6 +24,7 @@ def fit(self, X, learning_rate=0.5, mu=0.99, epochs=1, batch_sz=100, show_fig=False):
         self.params = [self.W, self.bh, self.bo]
         self.forward_params = [self.W, self.bh]
 
+        # TODO: technically these should be reset before doing backprop
         self.dW = theano.shared(np.zeros(W0.shape), 'dW_%s' % self.id)
         self.dbh = theano.shared(np.zeros(self.M), 'dbh_%s' % self.id)
         self.dbo = theano.shared(np.zeros(D), 'dbo_%s' % self.id)
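The momentum caches above (self.dW, self.dbh, self.dbo) carry the previous update for each parameter, which is why the TODO notes they should be zeroed before a fresh round of backprop. A minimal sketch of how such caches typically feed the Theano update list, assuming cost, mu, and learning_rate are in scope (variable names here are illustrative, not necessarily the repo's):

    self.dparams = [self.dW, self.dbh, self.dbo]
    updates = []
    for p, dp in zip(self.params, self.dparams):
        g = T.grad(cost, p)             # gradient of the cost w.r.t. p
        step = mu*dp - learning_rate*g  # momentum-smoothed step
        updates.append((dp, step))      # remember the step for next time
        updates.append((p, p + step))   # apply it to the parameter
    train_op = theano.function(inputs=[X_in], updates=updates)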
@@ -33,11 +34,6 @@ def fit(self, X, learning_rate=0.5, mu=0.99, epochs=1, batch_sz=100, show_fig=False):
         X_in = T.matrix('X_%s' % self.id)
         X_hat = self.forward_output(X_in)
 
-        forward_op = theano.function(
-            inputs=[X_in],
-            outputs=X_hat,
-        )
-
         # attach it to the object so it can be used later
         # must be sigmoidal because the output is also a sigmoid
         H = T.nnet.sigmoid(X_in.dot(self.W) + self.bh)
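The deleted forward_op was compiled but never called, so dropping it avoids a pointless Theano compilation. If a standalone forward pass is ever needed outside fit, one option is to compile it lazily on first use; a sketch, where get_forward_op and _forward_op are invented names for illustration:

    def get_forward_op(self):
        # compile only on the first request, then reuse
        if not hasattr(self, '_forward_op'):
            X_in = T.matrix('X_%s' % self.id)
            self._forward_op = theano.function(
                inputs=[X_in],
                outputs=self.forward_output(X_in),
            )
        return self._forward_op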
@@ -71,7 +67,7 @@ def fit(self, X, learning_rate=0.5, mu=0.99, epochs=1, batch_sz=100, show_fig=False):
             for j in xrange(n_batches):
                 batch = X[j*batch_sz:(j*batch_sz + batch_sz)]
                 train_op(batch)
-                the_cost = cost_op(X)
+                the_cost = cost_op(X) # technically we could also get the cost for Xtest here
                 print "j / n_batches:", j, "/", n_batches, "cost:", the_cost
                 costs.append(the_cost)
         if show_fig:
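Note that cost_op(X) re-evaluates the reconstruction cost on the full training set after every batch, which dominates the runtime for large X. Two cheaper alternatives, in the spirit of the new inline comment (a sketch; an Xtest would have to be passed into this fit for the second line to apply):

    the_cost = cost_op(batch)    # cost on the current batch only
    # the_cost = cost_op(Xtest)  # or monitor the held-out cost instead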
@@ -89,6 +85,16 @@ def forward_output(self, X):
         Y = T.nnet.sigmoid(Z.dot(self.W.T) + self.bo)
         return Y
 
+    @staticmethod
+    def createFromArrays(W, bh, bo, an_id):
+        ae = AutoEncoder(W.shape[1], an_id)
+        ae.W = theano.shared(W, 'W_%s' % ae.id)
+        ae.bh = theano.shared(bh, 'bh_%s' % ae.id)
+        ae.bo = theano.shared(bo, 'bo_%s' % ae.id)
+        ae.params = [ae.W, ae.bh, ae.bo]
+        ae.forward_params = [ae.W, ae.bh]
+        return ae
+
 
 class DNN(object):
     def __init__(self, hidden_layer_sizes, UnsupervisedModel=AutoEncoder):
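The new createFromArrays factory rebuilds an AutoEncoder from existing weight arrays; the constructor takes the hidden layer size, so W.shape[1] supplies M for a W of shape (D, M). A hypothetical usage, e.g. restoring a pretrained layer (the .npy file names are made up for illustration):

    W = np.load('W_layer0.npy')    # shape (D, M)
    bh = np.load('bh_layer0.npy')  # shape (M,)
    bo = np.load('bo_layer0.npy')  # shape (D,)
    ae = AutoEncoder.createFromArrays(W, bh, bo, 0)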
@@ -135,14 +141,9 @@ def fit(self, X, Y, Xtest, Ytest, pretrain=True, learning_rate=0.01, mu=0.99, re
         targets = T.ivector('Targets')
         pY = self.forward(X_in)
 
-        forward_op = theano.function(
-            inputs=[X_in],
-            outputs=pY,
-        )
-
         # squared_magnitude = [(p*p).sum() for p in self.params]
         # reg_cost = T.sum(squared_magnitude)
-        cost = -T.mean( T.log(pY)[T.arange(pY.shape[0]), targets] ) #+ reg*reg_cost
+        cost = -T.mean( T.log(pY[T.arange(pY.shape[0]), targets]) ) #+ reg*reg_cost
         prediction = self.predict(X_in)
         cost_predict_op = theano.function(
             inputs=[X_in, targets],
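The two cost expressions compute the same value; the fix just indexes before taking the log, so log is applied to the N selected probabilities rather than the whole N x K output matrix. A NumPy check of the equivalence:

    pY = np.array([[0.7, 0.3], [0.2, 0.8]])
    targets = np.array([0, 1])
    old = -np.mean(np.log(pY)[np.arange(2), targets])
    new = -np.mean(np.log(pY[np.arange(2), targets]))
    assert np.isclose(old, new)  # identical, but the new form logs only N entries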
@@ -171,7 +172,6 @@ def fit(self, X, Y, Xtest, Ytest, pretrain=True, learning_rate=0.01, mu=0.99, re
                 Ybatch = Y[j*batch_sz:(j*batch_sz + batch_sz)]
                 train_op(Xbatch, Ybatch)
                 the_cost, the_prediction = cost_predict_op(Xtest, Ytest)
-                # print "prediction:", the_prediction, "test:", Ytest
                 error = error_rate(the_prediction, Ytest)
                 print "j / n_batches:", j, "/", n_batches, "cost:", the_cost, "error:", error
                 costs.append(the_cost)
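For reference, error_rate is presumably the usual misclassification fraction; a minimal sketch of such a helper, assuming integer label arrays (the repo's util module may define it differently):

    def error_rate(p, t):
        # fraction of predictions that disagree with the targets
        return np.mean(p != t)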