Moved test out of run_in_graph_and_eager_mode in wrapper. (#1442)
See #1328
gabrieldemarmiesse authored Mar 27, 2020
1 parent 44ea32a commit 53f26d1
Showing 1 changed file with 21 additions and 25 deletions:
tensorflow_addons/layers/wrappers_test.py
@@ -150,32 +150,28 @@ def test_forward_pass(self, base_layer, input_shape):
         self.evaluate(tf.compat.v1.global_variables_initializer())
         self.assertAllClose(self.evaluate(base_output), self.evaluate(wn_output))
 
-    @parameterized.named_parameters(
-        ["Dense", lambda: tf.keras.layers.Dense(1), [1]],
-        ["SimpleRNN", lambda: tf.keras.layers.SimpleRNN(1), [10, 10]],
-        ["Conv2D", lambda: tf.keras.layers.Conv2D(3, 1), [3, 3, 1]],
-        ["LSTM", lambda: tf.keras.layers.LSTM(1), [10, 10]],
-    )
-    def test_removal(self, base_layer_fn, input_shape):
-        sample_data = np.ones([1] + input_shape, dtype=np.float32)
-
-        for data_init in [True, False]:
-            with self.subTest(data_init=data_init):
-                base_layer = base_layer_fn()
-                wn_layer = wrappers.WeightNormalization(base_layer, data_init)
-                wn_output = wn_layer(sample_data)
-                self.evaluate(tf.compat.v1.global_variables_initializer())
-                with tf.control_dependencies([wn_output]):
-                    wn_removed_layer = wn_layer.remove()
-                    wn_removed_output = wn_removed_layer(sample_data)
-
-                self.evaluate(
-                    tf.compat.v1.initialize_variables(wn_removed_layer.variables)
-                )
-                self.assertAllClose(
-                    self.evaluate(wn_removed_output), self.evaluate(wn_output)
-                )
-                self.assertTrue(isinstance(wn_removed_layer, base_layer.__class__))
+@pytest.mark.usefixtures("maybe_run_functions_eagerly")
+@pytest.mark.parametrize("data_init", [True, False])
+@pytest.mark.parametrize(
+    "base_layer_fn, input_shape",
+    [
+        (lambda: tf.keras.layers.Dense(1), [1]),
+        (lambda: tf.keras.layers.SimpleRNN(1), [10, 10]),
+        (lambda: tf.keras.layers.Conv2D(3, 1), [3, 3, 1]),
+        (lambda: tf.keras.layers.LSTM(1), [10, 10]),
+    ],
+)
+def test_removal(base_layer_fn, input_shape, data_init):
+    sample_data = np.ones([1] + input_shape, dtype=np.float32)
+
+    base_layer = base_layer_fn()
+    wn_layer = wrappers.WeightNormalization(base_layer, data_init)
+    wn_output = wn_layer(sample_data)
+    wn_removed_layer = wn_layer.remove()
+    wn_removed_output = wn_removed_layer(sample_data)
+    np.testing.assert_allclose(wn_removed_output.numpy(), wn_output.numpy())
+    assert isinstance(wn_removed_layer, base_layer.__class__)
 
 
 if __name__ == "__main__":
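
Note: with the move off the class-based test, the graph-versus-eager coverage that run_in_graph_and_eager_mode used to provide now comes from the maybe_run_functions_eagerly fixture requested above the test. That fixture lives in the project's shared test utilities; the sketch below only illustrates the general shape such a fixture can take, and the parameter names and the TF 2.1-era tf.config.experimental_run_functions_eagerly call are assumptions, not the repository's actual code.

import pytest
import tensorflow as tf


@pytest.fixture(params=["eager_functions", "tf_function"])
def maybe_run_functions_eagerly(request):
    # Sketch only: run the test body twice, once with tf.function-decorated
    # code forced to execute eagerly and once with normal graph compilation.
    tf.config.experimental_run_functions_eagerly(request.param == "eager_functions")
    yield
    # Restore the default (compiled) behavior so later tests are unaffected.
    tf.config.experimental_run_functions_eagerly(False)

Each parametrized case of test_removal then runs under both settings, for example via python -m pytest tensorflow_addons/layers/wrappers_test.py -k test_removal.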
