Commit 52b9c39

Merge branch 'master' into logcosh
2 parents: 02bbbde + 18c2f77

8 files changed: 49 additions & 21 deletions

.codecov.yml

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+comment: false

.travis.yml

Lines changed: 10 additions & 3 deletions
@@ -4,7 +4,12 @@ os:
   - linux
   - osx
 julia:
-  - 0.7
+  - 1.0
+  - 1.1
+  - nightly
+matrix:
+  allow_failures:
+    - julia: nightly
 notifications:
   email: false
 git:
@@ -13,6 +18,8 @@ env:
   # Disable test fuzzing for the moment, as we're a little too slow for Travis
   - NNLIB_TEST_FUZZING=false
 
-# Submit to Codecov
+# Submit to Codecov
 after_success:
-  - julia -e 'using Pkg; Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())'
+  - if [[ $TRAVIS_JULIA_VERSION = 1.1 ]] && [[ $TRAVIS_OS_NAME = linux ]]; then
+      julia -e 'using Pkg; Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())';
+    fi

Manifest.toml

Lines changed: 2 additions & 2 deletions
@@ -5,9 +5,9 @@ uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
 
 [[Crayons]]
 deps = ["Test"]
-git-tree-sha1 = "3017c662a988bcb8a3f43306a793617c6524d476"
+git-tree-sha1 = "f621b8ef51fd2004c7cf157ea47f027fdeac5523"
 uuid = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f"
-version = "1.0.0"
+version = "4.0.0"
 
 [[Distributed]]
 deps = ["Random", "Serialization", "Sockets"]

Project.toml

Lines changed: 7 additions & 1 deletion
@@ -1,10 +1,16 @@
 name = "NNlib"
 uuid = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
+version = "0.6.0"
 
 [deps]
 Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 Requires = "ae029012-a4dd-5104-9daa-d747884805df"
 Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
-Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
+
+[extras]
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[targets]
+test = ["Test"]
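
Moving Test out of [deps] and into [extras] with a matching [targets] entry is the standard Pkg ≥ 1.0 way to declare a test-only dependency: it is installed only when the test suite runs. For context (not part of the diff), the suite would typically be invoked with:

    using Pkg
    Pkg.test("NNlib")  # resolves the test = ["Test"] target, then runs test/runtests.jl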

REQUIRE

Lines changed: 1 addition & 0 deletions
@@ -1,2 +1,3 @@
 julia 1.0
 Requires
+TimerOutputs

src/NNlib.jl

Lines changed: 3 additions & 3 deletions
@@ -1,6 +1,8 @@
 module NNlib
 using Requires, TimerOutputs
 
+const to = TimerOutput()
+
 # Include APIs
 include("dim_helpers.jl")
 include("activation.jl")
@@ -22,6 +24,4 @@ include("impl/depthwiseconv_im2col.jl")
 # Direct implementations of pooling
 include("impl/pooling_direct.jl")
 
-to = TimerOutput()
-
-end # module NNlib
+end # module NNlib
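
The timer is now a `const` bound at the top of the module, so it exists before the included files are loaded and, being a typed constant, avoids the overhead of an untyped global. The call sites that record into `to` are not shown in this diff; the following is only a minimal sketch of how a shared TimerOutputs.jl timer is typically used (the function name is hypothetical):

    using TimerOutputs

    const to = TimerOutput()

    # Hypothetical helper, not from the diff: record a labeled section on the shared timer.
    function timed_sumsq(x)
        @timeit to "timed_sumsq" begin
            sum(abs2, x)
        end
    end

    timed_sumsq(rand(1_000))
    print_timer(to)  # prints accumulated time and allocation stats per labeled section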

src/activation.jl

Lines changed: 18 additions & 12 deletions
@@ -7,11 +7,11 @@ export σ, sigmoid, relu, leakyrelu, elu, gelu, swish, selu, softplus, softsign,
 Classic [sigmoid](https://en.wikipedia.org/wiki/Sigmoid_function) activation
 function.
 """
-σ(x) = one(x) / (one(x) + exp(-x))
+σ(x::Real) = one(x) / (one(x) + exp(-x))
 const sigmoid = σ
 
 # ForwardDiff numerical stability hack
-σ_stable(x) = ifelse(x < -80, zero(x), one(x) / (one(x) + exp(-x)))
+σ_stable(x::Real) = ifelse(x < -80, zero(x), one(x) / (one(x) + exp(-x)))
 σ(x::Float32) = σ_stable(x)
 @init @require ForwardDiff="f6369f11-7733-5829-9624-2563aa707210" begin
   σ(x::ForwardDiff.Dual{T,Float32}) where T = σ_stable(x)
@@ -27,11 +27,11 @@ Return `log(σ(x))` which is computed in a numerically stable way.
 -0.6931471805599453
 julia> logσ.([-100, -10, 100])
 3-element Array{Float64,1}:
--100.0
--10.000045398899218
+ -100.0
+ -10.000045398899218
  -3.720075976020836e-44
 """
-logσ(x) = -softplus(-x)
+logσ(x::Real) = -softplus(-x)
 const logsigmoid = logσ
 
 
@@ -41,7 +41,7 @@ const logsigmoid = logσ
 [Rectified Linear Unit](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))
 activation function.
 """
-relu(x) = max(zero(x), x)
+relu(x::Real) = max(zero(x), x)
 
 
 """
@@ -51,7 +51,7 @@ Leaky [Rectified Linear Unit](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))
 activation function.
 You can also specify the coefficient explicitly, e.g. `leakyrelu(x, 0.01)`.
 """
-leakyrelu(x, a = oftype(x/1, 0.01)) = max(a*x, x/1)
+leakyrelu(x::Real, a = oftype(x/1, 0.01)) = max(a*x, x/1)
 
 
 """
@@ -71,7 +71,7 @@ elu(x, α = one(x)) = ifelse(x ≥ 0, x/1, α * (exp(x) - one(x)))
 [Gaussian Error Linear Unit](https://arxiv.org/pdf/1606.08415.pdf)
 activation function.
 """
-function gelu(x)
+function gelu(x::Real)
     λ = oftype(x/1, √(2/π))
     α = oftype(x/1, 0.044715)
     h = oftype(x/1, 0.5)
@@ -85,7 +85,7 @@ end
 Self-gated actvation function.
 See [Swish: a Self-Gated Activation Function](https://arxiv.org/pdf/1710.05941.pdf).
 """
-swish(x) = x * σ(x)
+swish(x::Real) = x * σ(x)
 
 """
     selu(x) = λ * (x ≥ 0 ? x : α * (exp(x) - 1))
@@ -96,7 +96,7 @@ swish(x) = x * σ(x)
 Scaled exponential linear units.
 See [Self-Normalizing Neural Networks](https://arxiv.org/pdf/1706.02515.pdf).
 """
-function selu(x)
+function selu(x::Real)
     λ = oftype(x/1, 1.0507009873554804934193349852946)
     α = oftype(x/1, 1.6732632423543772848170429916717)
     λ * ifelse(x > 0, x/1, α * (exp(x) - 1))
@@ -108,15 +108,15 @@ end
 
 See [Quadratic Polynomials Learn Better Image Features](http://www.iro.umontreal.ca/~lisa/publications2/index.php/attachments/single/205).
 """
-softsign(x) = x / (one(x) + abs(x))
+softsign(x::Real) = x / (one(x) + abs(x))
 
 
 """
     softplus(x) = log(exp(x) + 1)
 
 See [Deep Sparse Rectifier Neural Networks](http://proceedings.mlr.press/v15/glorot11a/glorot11a.pdf).
 """
-softplus(x) = ifelse(x > 0, x + log1p(exp(-x)), log1p(exp(x)))
+softplus(x::Real) = ifelse(x > 0, x + log1p(exp(-x)), log1p(exp(x)))
 
 
 """
@@ -125,3 +125,9 @@ softplus(x) = ifelse(x > 0, x + log1p(exp(-x)), log1p(exp(x)))
 Return `log(cosh(x))` which is computed in a numerically stable way.
 """
 logcosh(x::T) where T = x + softplus(-2x) - log(convert(T, 2))
+
+# Provide an informative error message if activation functions are called with an array
+for f in (:σ, :σ_stable, :logσ, :relu, :leakyrelu, :elu, :gelu, :swish, :selu, :softsign, :softplus, :logcosh)
+    @eval $(f)(x::AbstractArray, args...) =
+        error("Use broadcasting (`", $(string(f)), ".(x)`) to apply activation functions to arrays.")
+end
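
Restricting the scalar methods to `::Real` and adding the generated `AbstractArray` fallback means elementwise application must now go through broadcasting. A small usage sketch of the new behaviour (illustrative only; the input array is not from the diff):

    using NNlib

    x = randn(Float32, 3)

    relu.(x)    # elementwise via broadcasting: works and preserves Float32
    # relu(x)   # now throws an ErrorException telling the user to write relu.(x)

    # The logcosh added on this branch uses the stable identity
    # log(cosh(x)) = x + log(1 + exp(-2x)) - log(2) = x + softplus(-2x) - log(2),
    # which avoids overflow of cosh for large |x|.
    NNlib.logcosh.(x)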

test/activation.jl

Lines changed: 7 additions & 0 deletions
@@ -64,6 +64,13 @@ end
     test_value_float_precision_preserving.(ACTIVATION_FUNCTIONS)
 end
 
+@testset "Array input" begin
+    x = rand(5)
+    for a in ACTIVATION_FUNCTIONS
+        @test_throws ErrorException a(x)
+    end
+end
+
 @testset "Test Integer64 and Integer32 inputs will force Float64 outputs" begin
     test_value_int_input_forces_float64.(filter(x -> x != relu, ACTIVATION_FUNCTIONS))
 

0 commit comments