1 change: 1 addition & 0 deletions keras/api/_tf_keras/keras/activations/__init__.py
@@ -18,6 +18,7 @@
from keras.src.activations.activations import hard_tanh
from keras.src.activations.activations import leaky_relu
from keras.src.activations.activations import linear
from keras.src.activations.activations import log_sigmoid
from keras.src.activations.activations import log_softmax
from keras.src.activations.activations import mish
from keras.src.activations.activations import relu
1 change: 1 addition & 0 deletions keras/api/activations/__init__.py
@@ -18,6 +18,7 @@
from keras.src.activations.activations import hard_tanh
from keras.src.activations.activations import leaky_relu
from keras.src.activations.activations import linear
from keras.src.activations.activations import log_sigmoid
from keras.src.activations.activations import log_softmax
from keras.src.activations.activations import mish
from keras.src.activations.activations import relu
2 changes: 2 additions & 0 deletions keras/src/activations/__init__.py
@@ -10,6 +10,7 @@
from keras.src.activations.activations import hard_tanh
from keras.src.activations.activations import leaky_relu
from keras.src.activations.activations import linear
from keras.src.activations.activations import log_sigmoid
from keras.src.activations.activations import log_softmax
from keras.src.activations.activations import mish
from keras.src.activations.activations import relu
@@ -47,6 +48,7 @@
    linear,
    mish,
    log_softmax,
    log_sigmoid,
}

ALL_OBJECTS_DICT = {fn.__name__: fn for fn in ALL_OBJECTS}
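Because the new function is added to `ALL_OBJECTS`, it is picked up by `ALL_OBJECTS_DICT` and can therefore be resolved from its string name. A small usage sketch (assuming a Keras build that includes this change):

```python
import keras

# Look up the activation by its registered name ...
log_sigmoid = keras.activations.get("log_sigmoid")

# ... or pass the name directly where an activation is accepted.
layer = keras.layers.Dense(4, activation="log_sigmoid")
```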
13 changes: 13 additions & 0 deletions keras/src/activations/activations.py
@@ -433,6 +433,19 @@ def hard_sigmoid(x):
    return ops.hard_sigmoid(x)


@keras_export("keras.activations.log_sigmoid")
def log_sigmoid(x):
    """Logarithm of the sigmoid activation function.

    It is defined as `f(x) = log(1 / (1 + exp(-x)))`.

    Args:
        x: Input tensor.

    """
    return ops.log_sigmoid(x)


@keras_export(["keras.activations.hard_silu", "keras.activations.hard_swish"])
def hard_silu(x):
    """Hard SiLU activation function, also known as Hard Swish.
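A quick check of the new export against the formula stated in its docstring (a sketch, not part of the PR; printed values are approximate and depend on backend and dtype):

```python
import numpy as np
from keras import activations, ops

x = np.array([-1.0, 0.0, 1.0], dtype="float32")
y = ops.convert_to_numpy(activations.log_sigmoid(x))

print(y)                                 # ≈ [-1.3133, -0.6931, -0.3133]
print(np.log(1.0 / (1.0 + np.exp(-x))))  # ≈ [-1.3133, -0.6931, -0.3133]
```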
43 changes: 43 additions & 0 deletions keras/src/activations/activations_test.py
@@ -40,6 +40,10 @@ def _ref_hard_sigmoid(x):
    return z


def _ref_log_sigmoid(x):
    return -1 * _ref_softplus(-x)

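The reference helper above leans on the identity `log(1 / (1 + exp(-x))) = -softplus(-x)`, which is the standard numerically stable way to compute a log-sigmoid. A minimal NumPy sketch (not part of this PR) of why that form is used instead of the literal docstring formula:

```python
import numpy as np

x = np.array([-1000.0, -1.0, 0.0, 1.0])

# Literal formula: exp(-x) overflows for very negative x, giving -inf.
naive = np.log(1.0 / (1.0 + np.exp(-x)))  # [-inf, -1.3133, -0.6931, -0.3133]

# -softplus(-x), written with logaddexp, stays finite.
stable = -np.logaddexp(0.0, -x)           # [-1000.0, -1.3133, -0.6931, -0.3133]
```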

def _ref_hard_silu(x):
    return x * np.minimum(np.maximum(0.0, x + 3.0), 6.0) * (1.0 / 6.0)

@@ -337,6 +341,45 @@ def test_hard_sigmoid(self):
            result_positive_above_1, expected_positive_above_1, rtol=1e-05
        )

    def test_log_sigmoid(self):
        # Basic test for random values between 0 and 1
        x = np.random.uniform(0, 1, (2, 5))
        result = activations.log_sigmoid(x[np.newaxis, :])[0]
        expected = np.vectorize(_ref_log_sigmoid)(x)
        self.assertAllClose(result, expected, rtol=1e-05)

        # Test with 1D array
        x_1d = np.random.uniform(-10, 10, 5)
        result_1d = activations.log_sigmoid(x_1d)
        expected_1d = np.vectorize(_ref_log_sigmoid)(x_1d)
        self.assertAllClose(result_1d, expected_1d, rtol=1e-05)

        # Test with 3D array
        x_3d = np.random.uniform(-10, 10, (3, 3, 3))
        result_3d = activations.log_sigmoid(x_3d)
        expected_3d = np.vectorize(_ref_log_sigmoid)(x_3d)
        self.assertAllClose(result_3d, expected_3d, rtol=1e-05)

        # Test large positive values
        x_large_positive = np.random.uniform(10, 100, (2, 5))
        result_large_positive = activations.log_sigmoid(x_large_positive)
        expected_large_positive = np.vectorize(_ref_log_sigmoid)(
            x_large_positive
        )
        self.assertAllClose(
            result_large_positive, expected_large_positive, rtol=1e-05
        )

        # Test large negative values
        x_large_negative = np.random.uniform(-100, -10, (2, 5))
        result_large_negative = activations.log_sigmoid(x_large_negative)
        expected_large_negative = np.vectorize(_ref_log_sigmoid)(
            x_large_negative
        )
        self.assertAllClose(
            result_large_negative, expected_large_negative, rtol=1e-05
        )

    def test_hard_silu(self):
        # Basic test for random values between -3 and 3
        x = np.random.uniform(-3, 3, (2, 5)).astype("float32")