# -*- coding: utf-8 -*-
from __future__ import division, print_function
from keras import backend as K
from keras.engine.topology import Layer, InputSpec
from keras.layers.core import Dropout, Reshape
from keras.layers.convolutional import ZeroPadding2D
from keras.models import Sequential
import numpy as np

# test harness
# creates a Sequential model out of a single layer and passes the
# input through it to produce the corresponding output
def test_layer(layer, x):
    # rebuild the layer from its config with an explicit input_shape
    # so that it can serve as the first layer of a Sequential model
    layer_config = layer.get_config()
    layer_config["input_shape"] = x.shape
    layer = layer.__class__.from_config(layer_config)
    model = Sequential()
    model.add(layer)
    model.compile("rmsprop", "mse")
    # add a batch dimension before predicting, then strip it again
    x_ = np.expand_dims(x, axis=0)
    return model.predict(x_)[0]

# custom layer: Local Response Normalization (LRN) expressed entirely
# in terms of Keras backend (K) primitives
class LocalResponseNormalization(Layer):

    def __init__(self, n=5, alpha=0.0005, beta=0.75, k=2, **kwargs):
        self.n = n
        self.alpha = alpha
        self.beta = beta
        self.k = k
        super(LocalResponseNormalization, self).__init__(**kwargs)

    def build(self, input_shape):
        self.shape = input_shape
        super(LocalResponseNormalization, self).build(input_shape)

    def call(self, x, mask=None):
        # image_dim_ordering is a function and must be called;
        # "th" means channels-first, anything else is treated as channels-last
        if K.image_dim_ordering() == "th":
            _, f, r, c = self.shape
        else:
            _, r, c, f = self.shape
        half_n = self.n // 2
        # average the squared activations over a local spatial window,
        # sum across channels, and use the result to scale each activation
        squared = K.square(x)
        pooled = K.pool2d(squared, (half_n, half_n), strides=(1, 1),
                          padding="same", pool_mode="avg")
        if K.image_dim_ordering() == "th":
            summed = K.sum(pooled, axis=1, keepdims=True)
            averaged = (self.alpha / self.n) * K.repeat_elements(summed, f, axis=1)
        else:
            summed = K.sum(pooled, axis=3, keepdims=True)
            averaged = (self.alpha / self.n) * K.repeat_elements(summed, f, axis=3)
        denom = K.pow(self.k + averaged, self.beta)
        return x / denom

    def compute_output_shape(self, input_shape):
        return input_shape
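
# For reference, the transformation implemented in call() above amounts to
#     output = x / (k + (alpha / n) * sum_over_channels(avg_pool(x**2)))**beta
# with a (n//2, n//2) "same" average-pooling window, which roughly follows
# the local response normalization scheme popularized by AlexNet.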

# test the test harness with some standard layers

# Dropout should preserve shape (it is a no-op at prediction time)
x = np.random.randn(10, 10)
layer = Dropout(0.5)
y = test_layer(layer, x)
assert x.shape == y.shape

# ZeroPadding2D should add one row and one column of zeros on each side
x = np.random.randn(10, 10, 3)
layer = ZeroPadding2D(padding=(1, 1))
y = test_layer(layer, x)
assert x.shape[0] + 2 == y.shape[0]
assert x.shape[1] + 2 == y.shape[1]

# Reshape should return the requested target shape
x = np.random.randn(10, 10)
layer = Reshape((5, 20))
y = test_layer(layer, x)
assert y.shape == (5, 20)

# test the custom layer: output shape must match input shape
x = np.random.randn(225, 225, 3)
layer = LocalResponseNormalization()
y = test_layer(layer, x)
assert x.shape == y.shape
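
# Minimal usage sketch (not part of the original tests, included for
# illustration only): the custom layer can be stacked in a model like any
# built-in layer. The architecture and sizes below are arbitrary choices.
from keras.layers import Conv2D, Activation

model = Sequential()
model.add(Conv2D(32, (3, 3), padding="same", input_shape=(225, 225, 3)))
model.add(Activation("relu"))
model.add(LocalResponseNormalization())
model.compile(optimizer="rmsprop", loss="mse")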