un-revert the removal of the active=true method
mcabbott committed Jan 8, 2023
1 parent f42f475 commit d7cc49d
Showing 2 changed files with 2 additions and 13 deletions.
11 changes: 0 additions & 11 deletions src/deprecations.jl
@@ -186,17 +186,6 @@ function update!(opt::Optimise.AbstractOptimiser, ::Params, grads::Union{Tuple,
 end
 
 
-function dropout(rng, x, p; dims=:, active::Bool=true)
-  if active
-    NNlib.dropout(rng, x, p; dims)
-  else
-    Base.depwarn("Flux.dropout(...; active=false) is deprecated. Please branch outside the function, or call dropout(x, 0) if you must.", :dropout)
-    return x
-  end
-end
-dropout(x, p; kwargs...) = dropout(NNlib._rng_from_array(x), x, p; kwargs...)
-
-
 # v0.14 deprecations
 
 # Enable these when 0.14 is released, and delete const ClipGrad = Optimise.ClipValue etc:
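
The shim removed above accepted an `active` keyword and warned when callers passed `active=false`; its warning text already names the replacement. A minimal sketch of that migration, assuming `Flux.dropout` forwards to NNlib's `dropout(x, p; dims)` as on this branch:

using Flux

x = rand(Float32, 100)
active = false  # whatever condition used to be passed as active=...

# Branch outside the function, as the depwarn suggests:
y = active ? Flux.dropout(x, 0.9) : x

# Or keep a single call: dropout with p == 0 zeroes nothing and scales by 1.
y = Flux.dropout(x, active ? 0.9 : 0.0)
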
4 changes: 2 additions & 2 deletions test/layers/normalisation.jl
@@ -56,10 +56,10 @@ evalwgrad(f, x...) = pullback(f, x...)[1]
 y = m(x)
 @test count(a->a == 0, y) > 50
 
-y = Flux.dropout(values(rng_kwargs)..., x, 0.9, active=true)
+y = Flux.dropout(values(rng_kwargs)..., x, 0.9) # , active=true)
 @test count(a->a == 0, y) > 50
 
-y = Flux.dropout(values(rng_kwargs)..., x, 0.9, active=false)
+y = Flux.dropout(values(rng_kwargs)..., x, 0.9 * 0) # , active=false)
 @test count(a->a == 0, y) == 0
 
 # CPU RNGs map onto CPU ok
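
The test now encodes both modes through the probability alone: dropping the keyword covers the old `active=true` path, and `0.9 * 0` turns the `active=false` case into dropout with zero probability, which zeroes nothing. A standalone sketch of the same equivalence, with an explicit RNG standing in for the suite's `rng_kwargs` (an assumption for illustration):

using Flux, Random, Test

rng = Random.MersenneTwister(0)  # stand-in for values(rng_kwargs)...
x = rand(Float32, 100)

y = Flux.dropout(rng, x, 0.9)      # formerly dropout(..., active=true)
@test count(iszero, y) > 50

y = Flux.dropout(rng, x, 0.9 * 0)  # formerly dropout(..., active=false)
@test count(iszero, y) == 0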
