
Commit

Moved test out of run_all_in_graph_and_eager_mode in softshrink. (tensorflow#1405)

* Moved test out of run_all_in_graph_and_eager_mode in softshrink.

See tensorflow#1328

* Small fix.
gabrieldemarmiesse authored Mar 26, 2020
1 parent b48483f commit ea07906
Showing 1 changed file with 13 additions and 16 deletions.
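Context for the change: tensorflow#1328 tracks migrating this repository's tests away from absl-style tf.test.TestCase classes, which the test_utils.run_all_in_graph_and_eager_modes decorator runs once in graph mode and once eagerly, toward plain pytest functions that run under TF2's default eager execution. A minimal sketch of the target pattern (illustrative only; test_example is a hypothetical name, not part of this commit):

import numpy as np
import pytest


# dtype variants come from pytest's parametrize rather than
# absl's parameterized.named_parameters on a TestCase class.
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_example(dtype):
    # Each dtype runs as a separate pytest case.
    assert np.zeros(3, dtype=dtype).sum() == 0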
29 changes: 13 additions & 16 deletions tensorflow_addons/activations/softshrink_test.py
@@ -16,7 +16,6 @@
 import sys
 
 import pytest
-from absl.testing import parameterized
 
 import numpy as np
 import tensorflow as tf
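(The absl.testing.parameterized import becomes unused by this change, since the surviving test is parametrized with @pytest.mark.parametrize, as shown in the hunks below.)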
@@ -48,21 +47,6 @@ def test_softshrink(dtype):
     )
 
 
-@test_utils.run_all_in_graph_and_eager_modes
-class SoftshrinkTest(tf.test.TestCase, parameterized.TestCase):
-    @parameterized.named_parameters(("float32", np.float32), ("float64", np.float64))
-    def test_theoretical_gradients(self, dtype):
-        # Only test theoretical gradients for float32 and float64
-        # because of the instability of float16 while computing jacobian
-
-        # Softshrink is not continuous at `lower` and `upper`.
-        # Avoid these two points to make gradients smooth.
-        x = tf.constant([-2.0, -1.5, 0.0, 1.5, 2.0], dtype=dtype)
-
-        theoretical, numerical = tf.test.compute_gradient(softshrink, [x])
-        self.assertAllCloseAccordingToType(theoretical, numerical, atol=1e-4)
-
-
 @pytest.mark.parametrize("dtype", [np.float16, np.float32])
 def test_same_as_py_func(dtype):
     np.random.seed(1234)
@@ -89,5 +73,18 @@ def verify_funcs_are_equivalent(dtype):
     test_utils.assert_allclose_according_to_type(grad_native, grad_py)
 
 
+@pytest.mark.parametrize("dtype", [np.float32, np.float64])
+def test_theoretical_gradients(dtype):
+    # Only test theoretical gradients for float32 and float64
+    # because of the instability of float16 while computing jacobian
+
+    # Softshrink is not continuous at `lower` and `upper`.
+    # Avoid these two points to make gradients smooth.
+    x = tf.constant([-2.0, -1.5, 0.0, 1.5, 2.0], dtype=dtype)
+
+    theoretical, numerical = tf.test.compute_gradient(softshrink, [x])
+    test_utils.assert_allclose_according_to_type(theoretical, numerical, atol=1e-4)
+
+
 if __name__ == "__main__":
     sys.exit(pytest.main([__file__]))
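The moved test is built on tf.test.compute_gradient, which returns the analytic (theoretical) and finite-difference (numerical) Jacobians of a function, one per input tensor; the test asserts they agree. A standalone sketch of the same check (assumes tensorflow and tensorflow_addons are installed):

import numpy as np
import tensorflow as tf
import tensorflow_addons as tfa

# Sample away from softshrink's `lower`/`upper` thresholds (±0.5 by
# default), where the gradient is discontinuous.
x = tf.constant([-2.0, -1.5, 0.0, 1.5, 2.0], dtype=tf.float64)

# Each return value is a list with one Jacobian per input tensor.
theoretical, numerical = tf.test.compute_gradient(tfa.activations.softshrink, [x])
np.testing.assert_allclose(theoretical[0], numerical[0], atol=1e-4)

Because the file ends with sys.exit(pytest.main([__file__])), running python softshrink_test.py is equivalent to invoking pytest on tensorflow_addons/activations/softshrink_test.py.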
