Added Leaky ReLU activation function (1D & 3D) #123

Merged
merged 8 commits on Mar 17, 2023
79 changes: 59 additions & 20 deletions src/nf/nf_activation_1d.f90
@@ -12,15 +12,17 @@ module nf_activation_1d
public :: gaussian, gaussian_prime
public :: linear, linear_prime
public :: relu, relu_prime
public :: leaky_relu, leaky_relu_prime
public :: sigmoid, sigmoid_prime
public :: softmax, softmax_prime
public :: softplus, softplus_prime
public :: step, step_prime
public :: tanhf, tanh_prime

interface
pure function activation_function(x)
pure function activation_function(x, alpha)
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: activation_function(size(x))
end function activation_function
end interface
@@ -30,7 +32,7 @@ end function activation_function
pure function elu(x, alpha) result(res)
! Exponential Linear Unit (ELU) activation function.
real, intent(in) :: x(:)
real, intent(in) :: alpha
real, intent(in), optional :: alpha
real :: res(size(x))
where (x >= 0)
res = x
@@ -43,7 +45,7 @@ pure function elu_prime(x, alpha) result(res)
! First derivative of the Exponential Linear Unit (ELU)
! activation function.
real, intent(in) :: x(:)
real, intent(in) :: alpha
real, intent(in), optional :: alpha
real :: res(size(x))
where (x >= 0)
res = 1
@@ -52,51 +54,58 @@ pure function elu_prime(x, alpha) result(res)
end where
end function elu_prime

pure function exponential(x) result(res)
pure function exponential(x, alpha) result(res)
! Exponential activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = exp(x)
end function exponential

pure function gaussian(x) result(res)
pure function gaussian(x, alpha) result(res)
! Gaussian activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = exp(-x**2)
end function gaussian

pure function gaussian_prime(x) result(res)
pure function gaussian_prime(x, alpha) result(res)
! First derivative of the Gaussian activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = -2 * x * gaussian(x)
end function gaussian_prime

pure function linear(x) result(res)
pure function linear(x, alpha) result(res)
! Linear activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = x
end function linear

pure function linear_prime(x) result(res)
pure function linear_prime(x, alpha) result(res)
! First derivative of the linear activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = 1
end function linear_prime

pure function relu(x) result(res)
pure function relu(x, alpha) result(res)
!! Rectified Linear Unit (ReLU) activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = max(0., x)
end function relu

pure function relu_prime(x) result(res)
pure function relu_prime(x, alpha) result(res)
! First derivative of the Rectified Linear Unit (ReLU) activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
where (x > 0)
res = 1
@@ -105,52 +114,79 @@ pure function relu_prime(x) result(res)
end where
end function relu_prime

pure function sigmoid(x) result(res)
pure function leaky_relu(x, alpha) result(res)
!! Leaky Rectified Linear Unit (Leaky ReLU) activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = max(alpha*x, x)
end function leaky_relu

pure function leaky_relu_prime(x, alpha) result(res)
! First derivative of the Leaky Rectified Linear Unit (Leaky ReLU) activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
where (x > 0)
res = 1
elsewhere
res = alpha
end where
end function leaky_relu_prime

pure function sigmoid(x, alpha) result(res)
! Sigmoid activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = 1 / (1 + exp(-x))
end function sigmoid

pure function sigmoid_prime(x) result(res)
pure function sigmoid_prime(x, alpha) result(res)
! First derivative of the sigmoid activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = sigmoid(x) * (1 - sigmoid(x))
end function sigmoid_prime

pure function softmax(x) result(res)
pure function softmax(x, alpha) result(res)
!! Softmax activation function
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = exp(x - maxval(x))
res = res / sum(res)
end function softmax

pure function softmax_prime(x) result(res)
pure function softmax_prime(x, alpha) result(res)
!! Derivative of the softmax activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = softmax(x) * (1 - softmax(x))
end function softmax_prime

pure function softplus(x) result(res)
pure function softplus(x, alpha) result(res)
! Softplus activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = log(exp(x) + 1)
end function softplus

pure function softplus_prime(x) result(res)
pure function softplus_prime(x, alpha) result(res)
! First derivative of the softplus activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = exp(x) / (exp(x) + 1)
end function softplus_prime

pure function step(x) result(res)
pure function step(x, alpha) result(res)
! Step activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
where (x > 0)
res = 1
@@ -159,26 +195,29 @@ pure function step(x) result(res)
end where
end function step

pure function step_prime(x) result(res)
pure function step_prime(x, alpha) result(res)
! First derivative of the step activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = 0
end function step_prime

pure function tanhf(x) result(res)
pure function tanhf(x, alpha) result(res)
! Tangent hyperbolic activation function.
! Same as the intrinsic tanh, but must be
! defined here so that we can use procedure
! pointer with it.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = tanh(x)
end function tanhf

pure function tanh_prime(x) result(res)
pure function tanh_prime(x, alpha) result(res)
! First derivative of the tanh activation function.
real, intent(in) :: x(:)
real, intent(in), optional :: alpha
real :: res(size(x))
res = 1 - tanh(x)**2
end function tanh_prime
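
For reference, a minimal usage sketch of the new 1D routines. This is a hypothetical demo program, not part of the PR; it assumes the nf_activation_1d module builds as part of neural-fortran and that alpha is always passed explicitly, since leaky_relu and leaky_relu_prime read the optional dummy argument without a present() check.

program leaky_relu_demo
  ! Minimal sketch: exercise the new Leaky ReLU routines from nf_activation_1d.
  ! The program name and the test values are illustrative only.
  use nf_activation_1d, only: leaky_relu, leaky_relu_prime
  implicit none
  real :: x(5), y(5), dy(5)
  real, parameter :: alpha = 0.3

  x = [-2., -1., 0., 1., 2.]

  ! Pass alpha explicitly; the implementation reads it unconditionally.
  y  = leaky_relu(x, alpha)
  dy = leaky_relu_prime(x, alpha)

  ! Expected: y  = [-0.6, -0.3, 0.0, 1.0, 2.0]
  !           dy = [ 0.3,  0.3, 0.3, 1.0, 1.0]
  print *, 'leaky_relu:       ', y
  print *, 'leaky_relu_prime: ', dy
end program leaky_relu_demo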