Commit 62ae239

Merge commit (2 parents: 141b440 + a5f043f)
File tree: 5 files changed (+30 -29 lines)


NN/Basic/Layers.py

Lines changed: 6 additions & 6 deletions
@@ -570,14 +570,14 @@ def _derivative(self, y, *args):
 # Special Layer

 class Dropout(SubLayer):
-    def __init__(self, parent, shape, prob=0.5):
-        if prob < 0 or prob >= 1:
+    def __init__(self, parent, shape, keep_prob=0.5):
+        if keep_prob < 0 or keep_prob >= 1:
             raise BuildLayerError("Probability of Dropout should be a positive float smaller than 1")
         SubLayer.__init__(self, parent, shape)
         self._mask = None
-        self._prob = prob
-        self._prob_inv = 1 / (1 - prob)
-        self.description = "(Drop prob: {})".format(prob)
+        self._prob = keep_prob
+        self._prob_inv = 1 / keep_prob
+        self.description = "(Keep prob: {})".format(keep_prob)

     def get_params(self):
         return self._prob,
@@ -586,7 +586,7 @@ def _activate(self, x, predict):
         if not predict:
             # noinspection PyTypeChecker
             self._mask = np.random.binomial(
-                [np.ones(x.shape)], 1 - self._prob
+                [np.ones(x.shape)], self._prob
             )[0].astype(np.float32) * self._prob_inv
             return x * self._mask
         return x
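
The NumPy backend now implements standard inverted dropout: each unit survives with probability keep_prob, and survivors are scaled by 1 / keep_prob so activations keep the same expectation at train and test time. A minimal self-contained sketch of that semantics (the function name and shapes are illustrative, not from the repo):

import numpy as np

def inverted_dropout(x, keep_prob=0.8, training=True):
    # Zero each unit with probability (1 - keep_prob); scale survivors
    # by 1 / keep_prob so E[output] == E[input].
    if not training:
        return x  # inference is the identity, no rescaling needed
    mask = np.random.binomial(1, keep_prob, size=x.shape).astype(np.float32) / keep_prob
    return x * mask

x = np.ones((1000, 100), dtype=np.float32)
print(inverted_dropout(x).mean())  # ~= 1.0: expectation preserved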

NN/PyTorch/Auto/Layers.py

Lines changed: 3 additions & 3 deletions
@@ -182,16 +182,16 @@ def _activate(self, x, predict):
 class Dropout(SubLayer):
     def __init__(self, parent, shape, prob=0.5):
         if prob < 0 or prob >= 1:
-            raise BuildLayerError("Probability of Dropout should be a positive float smaller than 1")
+            raise BuildLayerError("Keep probability of Dropout should be a positive float smaller than 1")
         SubLayer.__init__(self, parent, shape)
         self._prob = prob
-        self.description = "(Drop prob: {})".format(prob)
+        self.description = "(Keep prob: {})".format(prob)

     def get_params(self):
         return self._prob,

     def _activate(self, x, predict):
-        return F.dropout(x, self._prob, not predict)
+        return F.dropout(x, 1 - self._prob, not predict)


 class Normalize(SubLayer):
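
torch.nn.functional.dropout takes the drop probability as its p argument, which is why the call site now passes 1 - self._prob once self._prob stores the keep probability. A small sketch of that convention (tensor sizes are illustrative):

import torch
import torch.nn.functional as F

keep_prob = 0.8
x = torch.ones(1000, 100)

# F.dropout expects the DROP probability, so convert from the keep probability.
out = F.dropout(x, p=1 - keep_prob, training=True)
print(out.mean().item())  # ~= 1.0; surviving entries equal 1 / keep_prob = 1.25

# With training=False the input passes through unchanged.
print(F.dropout(x, p=1 - keep_prob, training=False).mean().item())  # exactly 1.0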

NN/PyTorch/Basic/Layers.py

Lines changed: 10 additions & 10 deletions
@@ -199,26 +199,26 @@ def _derivative(self, y, delta=None):
 # Special Layer

 class Dropout(SubLayer):
-    def __init__(self, parent, shape, prob=0.5):
-        if prob < 0 or prob >= 1:
-            raise BuildLayerError("Probability of Dropout should be a positive float smaller than 1")
+    def __init__(self, parent, shape, keep_prob=0.5):
+        if keep_prob < 0 or keep_prob >= 1:
+            raise BuildLayerError("Keep probability of Dropout should be a positive float smaller than 1")
         SubLayer.__init__(self, parent, shape)
-        self._prob = prob
-        self._prob_inv = 1 / (1 - prob)
-        self.description = "(Drop prob: {})".format(prob)
+        self._mask = None
+        self._prob = keep_prob
+        self._prob_inv = 1 / keep_prob
+        self.description = "(Keep prob: {})".format(keep_prob)

     def get_params(self):
         return self._prob,

     def _activate(self, x, predict):
         if not predict:
-            return x.mm(torch.diag(
-                (torch.rand(x.size()[1]) >= self._prob).float() * self._prob_inv
-            ))
+            self._mask = (torch.rand(x.size()) < self._prob).float() * self._prob_inv
+            return x * self._mask
         return x

     def _derivative(self, y, delta=None):
-        return self._prob_inv * delta
+        return delta * self._mask


 class Normalize(SubLayer):
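
Beyond the rename, this backend's dropout changes in two ways: the mask is now sampled elementwise over the whole batch (the old torch.rand(x.size()[1]) drew one value per feature and shared it across the batch through a diagonal matrix), and the sampled mask is cached so the backward pass multiplies delta by the exact mask applied in the forward pass instead of the uniform self._prob_inv approximation. A sketch of that forward/backward pairing (variable names are illustrative):

import torch

keep_prob = 0.8
prob_inv = 1 / keep_prob

x = torch.ones(4, 3)
delta = torch.full((4, 3), 0.5)  # stand-in for the upstream gradient

# Forward: keep each entry with probability keep_prob, scale survivors.
mask = (torch.rand(x.size()) < keep_prob).float() * prob_inv
out = x * mask

# Backward: gradients may only flow through entries that survived,
# so reuse the cached mask rather than rescaling uniformly.
grad = delta * mask
print((grad != 0).float().mean().item())  # ~= keep_prob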

NN/TF/Layers.py

Lines changed: 7 additions & 8 deletions
@@ -350,20 +350,19 @@ def _activate(self, x, *args):
 # Special Layers

 class Dropout(SubLayer):
-    def __init__(self, parent, shape, drop_prob=0.5, **kwargs):
-        if drop_prob < 0 or drop_prob >= 1:
-            raise BuildLayerError("(Dropout) Probability of Dropout should be a positive float smaller than 1")
+    def __init__(self, parent, shape, keep_prob=0.5, **kwargs):
+        if keep_prob < 0 or keep_prob >= 1:
+            raise BuildLayerError("(Dropout) Keep probability of Dropout should be a positive float smaller than 1")
         SubLayer.__init__(self, parent, shape, **kwargs)
-        self._drop_prob = drop_prob
-        self._prob = 1 - tf.constant(self._drop_prob, dtype=tf.float32)
-        self.description = "(Drop prob: {})".format(drop_prob)
+        self._keep_prob = keep_prob
+        self.description = "(Keep prob: {})".format(keep_prob)

     def get_params(self):
-        return self._drop_prob,
+        return self._keep_prob,

     def _activate(self, x, predict):
         if not predict:
-            return tf.nn.dropout(x, self._prob)
+            return tf.nn.dropout(x, self._keep_prob)
         return x
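
In the TensorFlow 1.x API this code targets, tf.nn.dropout takes the keep probability directly and does the 1 / keep_prob rescaling itself, so storing keep_prob removes the old 1 - drop_prob conversion entirely. A minimal sketch, assuming a TF 1.x install (session-style code):

import tensorflow as tf  # assumes TensorFlow 1.x

keep_prob = 0.8
x = tf.ones([1000, 100])

# TF 1.x dropout: zeroes entries with probability 1 - keep_prob
# and scales the survivors by 1 / keep_prob.
dropped = tf.nn.dropout(x, keep_prob)

with tf.Session() as sess:
    print(sess.run(dropped).mean())  # ~= 1.0 in expectation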

NN/TF/Networks.py

Lines changed: 4 additions & 2 deletions
@@ -71,6 +71,8 @@ def name(self):

     @NNTiming.timeit(level=4)
     def _get_w(self, shape):
+        if self._w_stds[-1] is None:
+            self._w_stds[-1] = sqrt(2 / sum(shape))
         initial = tf.truncated_normal(shape, stddev=self._w_stds[-1])
         return tf.Variable(initial, name="w")

@@ -223,8 +225,8 @@ def add(self, layer, *args, **kwargs):
         kwargs["apply_bias"] = kwargs.get("apply_bias", True)
         kwargs["position"] = kwargs.get("position", len(self._layers) + 1)

-        self._w_stds.append(Util.get_and_pop(kwargs, "std", 0.1))
-        self._b_inits.append(Util.get_and_pop(kwargs, "init", 0.1))
+        self._w_stds.append(Util.get_and_pop(kwargs, "w_std", None))
+        self._b_inits.append(Util.get_and_pop(kwargs, "b_init", 0.1))
         if Util.get_and_pop(kwargs, "pop_last_init", False):
             self._w_stds.pop()
             self._b_inits.pop()
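
The default weight std changes from a fixed 0.1 to None, and _get_w now falls back to sqrt(2 / sum(shape)), a He/Glorot-flavored standard deviation computed from fan-in plus fan-out. A standalone sketch of that fallback (weight_std is a hypothetical helper; shape is assumed to be (fan_in, fan_out)):

from math import sqrt

import numpy as np

def weight_std(shape, w_std=None):
    # Fall back to sqrt(2 / (fan_in + fan_out)) when no explicit std is given;
    # this keeps activation variance roughly stable as layer widths change.
    return w_std if w_std is not None else sqrt(2 / sum(shape))

shape = (784, 128)
std = weight_std(shape)                     # sqrt(2 / 912) ~= 0.0468
w = np.random.normal(0.0, std, size=shape)  # stand-in for tf.truncated_normal
print(round(std, 4))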
