Commit d419c60: Update Algorithms
1 parent c43a37d, commit d419c60

2 files changed: +27 -23 lines changed

NN/TF/Layers.py (+8 -4)
@@ -117,7 +117,7 @@ def _activate(self, x, predict):
 class ConvLayer(Layer):
     LayerTiming = Timing()
 
-    def __init__(self, shape, stride=1, padding="SAME", parent=None, **kwargs):
+    def __init__(self, shape, stride=1, padding=None, parent=None, **kwargs):
         """
         :param shape: shape[0] = shape of previous layer c x h x w
                       shape[1] = shape of current layer's weight f x c x h x w
@@ -129,15 +129,19 @@ def __init__(self, shape, stride=1, padding="SAME", parent=None, **kwargs):
             shape = _parent.shape
         Layer.__init__(self, shape, **kwargs)
         self._stride = stride
+        if padding is None:
+            padding = "SAME"
         if isinstance(padding, str):
             if padding.upper() == "VALID":
                 self._padding = 0
                 self._pad_flag = "VALID"
             else:
                 self._padding = self._pad_flag = "SAME"
-        else:
-            self._padding = int(padding)
+        elif isinstance(padding, int):
+            self._padding = padding
             self._pad_flag = "VALID"
+        else:
+            raise BuildLayerError("Padding should be 'SAME' or 'VALID' or integer")
         self.parent = parent
         if len(shape) == 1:
             self.n_channels, self.n_filters, self.out_h, self.out_w = None, None, None, None
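
The constructor now normalizes padding up front: None (the new default) maps to "SAME", strings are matched case-insensitively, a plain integer means explicit zero-padding with a "VALID" convolution, and anything else raises instead of being silently coerced by the old int(padding) cast. A standalone sketch of that contract (plain Python mirroring the branches above, not the library API):

    def normalize_padding(padding):
        # Mirrors the constructor logic: returns (padding, pad_flag).
        if padding is None:
            padding = "SAME"
        if isinstance(padding, str):
            if padding.upper() == "VALID":
                return 0, "VALID"            # no zero-padding
            return "SAME", "SAME"            # any other string falls back to SAME
        if isinstance(padding, int):
            return padding, "VALID"          # explicit zero-padding, VALID conv
        raise ValueError("Padding should be 'SAME' or 'VALID' or integer")

    print(normalize_padding(None))     # ('SAME', 'SAME')
    print(normalize_padding("valid"))  # (0, 'VALID')
    print(normalize_padding(2))        # (2, 'VALID')
    # normalize_padding(1.5) now raises; previously int(1.5) silently truncated to 1.

One quirk to be aware of: isinstance(padding, int) also accepts bools (True would become a padding of 1), since bool is an int subclass in Python.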
@@ -215,7 +219,7 @@ def __init__(self, shape, stride=1, padding="SAME", **_kwargs):
         conv_layer.__init__(self, shape, stride, padding, **_kwargs)
 
     def _conv(self, x, w):
-        return tf.nn.conv2d(x, w, strides=[self._stride] * 4, padding=self._pad_flag)
+        return tf.nn.conv2d(x, w, strides=[1, self._stride, self._stride, 1], padding=self._pad_flag)
 
     def _activate(self, x, w, bias, predict):
         res = self._conv(x, w) + bias if self.apply_bias else self._conv(x, w)
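
The strides fix is a real bug fix, not cosmetics: tf.nn.conv2d takes strides in NHWC order, [batch, height, width, channels], and TensorFlow rejects strides on the batch and depth dimensions, so [stride] * 4 fails outright for any stride > 1. A minimal check against the TF 1.x API used throughout this file:

    import numpy as np
    import tensorflow as tf

    x = tf.constant(np.ones((1, 4, 4, 1)), dtype=tf.float32)   # NHWC input
    w = tf.constant(np.ones((3, 3, 1, 2)), dtype=tf.float32)   # h x w x in x out
    y = tf.nn.conv2d(x, w, strides=[1, 2, 2, 1], padding="SAME")
    with tf.Session() as sess:
        print(sess.run(y).shape)   # (1, 2, 2, 2): "SAME" gives ceil(4 / 2) = 2
    # tf.nn.conv2d(x, w, strides=[2, 2, 2, 2], ...) is rejected: strides in
    # the batch and depth dimensions are not supported.

(Note the new line is written with self._stride here, matching the self._stride attribute this file sets; the raw diff used self.stride, which this file never defines.)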

g_CNN/Layers.py (+19 -19)
@@ -66,7 +66,7 @@ def info(self):
 class ConvLayer(Layer):
     LayerTiming = Timing()
 
-    def __init__(self, shape, stride=1, padding="SAME", parent=None):
+    def __init__(self, shape, stride=1, padding=None, parent=None):
         """
         :param shape: shape[0] = shape of previous layer c x h x w
                       shape[1] = shape of current layer's weight f x h x w
@@ -79,15 +79,19 @@ def __init__(self, shape, stride=1, padding="SAME", parent=None):
             shape = _parent.shape
         Layer.__init__(self, shape)
         self.stride = stride
+        if padding is None:
+            padding = "SAME"
         if isinstance(padding, str):
             if padding.upper() == "VALID":
                 self.padding = 0
                 self.pad_flag = "VALID"
             else:
                 self.padding = self.pad_flag = "SAME"
-        else:
-            self.padding = int(padding)
+        elif isinstance(padding, int):
+            self.padding = padding
             self.pad_flag = "VALID"
+        else:
+            raise ValueError("Padding should be 'SAME' or 'VALID' or integer")
         self.parent = parent
         if len(shape) == 1:
             self.n_channels = self.n_filters = self.out_h = self.out_w = None
@@ -136,7 +140,7 @@ def __init__(self, shape, stride=1, padding="SAME"):
         conv_layer.__init__(self, shape, stride, padding)
 
     def _conv(self, x, w):
-        return tf.nn.conv2d(x, w, strides=[self.stride] * 4, padding=self.pad_flag)
+        return tf.nn.conv2d(x, w, strides=[1, self.stride, self.stride, 1], padding=self.pad_flag)
 
     def _activate(self, x, w, bias, predict):
         res = self._conv(x, w) + bias
@@ -380,25 +384,21 @@ def get_layer_by_name(self, name, parent, current_dimension, *args, **kwargs):
         return _layer, (_current, _next)
 
 if __name__ == '__main__':
-    with tf.Session().as_default():
+    with tf.Session().as_default() as sess:
         # NN Process
         nn_x = np.array([
             [ 0, 1, 2, 1, 0],
             [-1, -2, 0, 2, 1],
             [ 0, 1, -2, -1, 2],
             [ 1, 2, -1, 0, -2]
-        ])
+        ], dtype=np.float32)
         nn_w = np.array([
             [-2, -1, 0, 1, 2],
             [ 2, 1, 0, -1, -2]
-        ]).T
-        nn_b = 1
-        nn_id = Identical([nn_x.shape[1], 1])
-        nn_r1 = nn_id.activate(nn_x, nn_w, nn_b)
-        # nn_norm = Normalize(nn_id, [None, 2])
-        # nn_norm.activate(nn_r1, None)
-        print(nn_r1.eval())
-
+        ], dtype=np.float32).T
+        nn_b = 1.
+        nn_id = Identical([nn_x.shape[1], 2])
+        print(nn_id.activate(nn_x, nn_w, nn_b).eval())
         # CNN Process
         conv_x = np.array([
             [
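
Two things changed in the NN demo: the arrays are now explicitly float32 (TF's default float dtype, avoiding int/float mismatches inside the graph), and the layer's declared output width now matches the weight matrix, which is (5, 2) after the transpose. A quick numpy shape check, assuming Identical simply computes x @ w + b with an identity activation:

    import numpy as np

    nn_x = np.zeros((4, 5), dtype=np.float32)      # 4 samples, 5 features
    nn_w = np.zeros((2, 5), dtype=np.float32).T    # transposed -> (5, 2)
    nn_b = 1.
    print((nn_x @ nn_w + nn_b).shape)              # (4, 2) -> Identical([5, 2]), not [5, 1]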
@@ -408,17 +408,17 @@ def get_layer_by_name(self, name, parent, current_dimension, *args, **kwargs):
                 [-2, 1, -1, 0]
             ]
         ], dtype=np.float32).reshape(1, 4, 4, 1)
-        # Using "VALID" Padding -> out_h = out_w = 2
-        conv_id = ConvIdentical([(conv_x.shape[1:], [2, 3, 3])], padding="VALID")
         conv_w = np.array([
             [[ 1, 0, 1],
              [-1, 0, 1],
              [ 1, 0, -1]],
             [[0, 1, 0],
              [1, 0, -1],
              [0, -1, 1]]
-        ]).transpose([1, 2, 0])[..., None, :]
-        conv_b = np.array([1, -1])
+        ], dtype=np.float32).transpose([1, 2, 0])[..., None, :]
+        conv_b = np.array([1, -1], dtype=np.float32)
+        # Using "VALID" Padding -> out_h = out_w = 2
+        conv_id = ConvIdentical([(conv_x.shape[1:], [2, 3, 3])], padding="VALID")
         print(conv_id.activate(conv_x, conv_w, conv_b).eval())
         conv_x = np.array([
             [
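
In the CNN demo only dtypes and statement order changed, but the weight transform is worth unpacking: tf.nn.conv2d expects filters shaped (filter_h, filter_w, in_channels, out_channels), while the demo writes them filter-major as (n_filters, h, w). A small shape trace:

    import numpy as np

    w = np.zeros((2, 3, 3), dtype=np.float32)     # 2 filters of 3x3
    w1 = w.transpose([1, 2, 0])                   # -> (3, 3, 2): filter axis last
    w2 = w1[..., None, :]                         # -> (3, 3, 1, 2): in_channels = 1
    print(w2.shape)                               # matches conv2d's filter layout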
@@ -435,5 +435,5 @@ def get_layer_by_name(self, name, parent, current_dimension, *args, **kwargs):
           [ 0  1 -1  2  0 ]
           [ 0  0  0  0  0 ] ]
         """
-        conv_id = ConvIdentical([(conv_x.shape[1:], [2, 3, 3])], padding="SAME")
+        conv_id = ConvIdentical([(conv_x.shape[1:], [2, 3, 3])], padding=1, stride=2)
         print(conv_id.activate(conv_x, conv_w, conv_b).eval())
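
The final call swaps "SAME" padding for explicit padding=1 with stride=2. Assuming integer padding zero-pads by that amount before a VALID convolution (as the pad_flag logic above suggests), the output size follows the usual formula; a small helper to sanity-check such calls (the second set of numbers is illustrative, not read off the demo's input):

    def conv_out_size(n, f, p, s):
        # out = floor((n + 2p - f) / s) + 1 for an n-wide input, f-wide filter,
        # zero-padding p, stride s (VALID semantics on the padded input).
        return (n + 2 * p - f) // s + 1

    print(conv_out_size(4, 3, 0, 1))   # 2: matches the "VALID" comment above
    print(conv_out_size(5, 3, 1, 2))   # 3: e.g. a 5x5 input with padding=1, stride=2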
