Skip to content

Commit

Permalink
Merge pull request #847 from sethaxen/rmnorm
Browse files Browse the repository at this point in the history
Remove adjoint for norm
  • Loading branch information
oxinabox authored Dec 7, 2020
2 parents f8b038c + b9b77fe commit bfba50c
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 8 deletions.
4 changes: 2 additions & 2 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
name = "Zygote"
uuid = "e88e6eb3-aa80-5325-afca-941959d7151f"
version = "0.5.15"
version = "0.5.16"

[deps]
AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c"
Expand All @@ -26,7 +26,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444"
[compat]
AbstractFFTs = "0.5"
ArrayLayouts = "0.1, 0.2, 0.3, 0.4"
ChainRules = "0.7.33"
ChainRules = "0.7.34"
DiffRules = "1.0"
FillArrays = "0.8, 0.9, 0.10"
ForwardDiff = "0.10"
Expand Down
5 changes: 0 additions & 5 deletions src/lib/array.jl
Original file line number Diff line number Diff line change
Expand Up @@ -421,11 +421,6 @@ end
end
end

# Custom Zygote pullback for `LinearAlgebra.norm`: instead of differentiating
# the library routine, differentiate through a hand-written p-norm so the AD
# trace is well-defined everywhere.
# NOTE(review): the `eps(0f0)` shift keeps the derivative finite at x == 0
# (per the inline comment), but presumably also promotes Float32 inputs to a
# wider type — see FluxML/Zygote.jl#663; this adjoint was removed upstream in
# favor of the `rrule` defined in ChainRules.
function _pullback(cx::AContext, ::typeof(norm), x::AbstractArray, p::Real = 2)
# Fallback closure re-expresses norm(x, p) as sum(|x|^p)^(1/p); differentiated
# recursively via `_pullback` rather than given a hand-written adjoint.
fallback = (x, p) -> sum(abs.(x).^p .+ eps(0f0)) ^ (one(eltype(x)) / p) # avoid d(sqrt(x))/dx == Inf at 0
_pullback(cx, fallback, x, p)
end

# LinAlg Matrix Types
# ===================

Expand Down
19 changes: 18 additions & 1 deletion test/gradcheck.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1643,4 +1643,21 @@ end
end
end

# The trivial lambda `x -> norm(x)` is redundant — pass `norm` itself.
# Checks the gradient preserves the Float32 element type (no promotion).
@test gradient(norm, rand(Float32, 2, 2))[1] isa Matrix{Float32}
@testset "norm" begin
    # The rrule for `norm` now lives in ChainRules; these tests only guard
    # against regressions of previously reported norm-related issues.

    # Gradients must keep the input's element type rather than promoting it.
    # https://github.com/FluxML/Zygote.jl/issues/663
    @test gradient(norm, randn(Float32, 2, 2)) isa Tuple{Matrix{Float32}}
    @test gradient(norm, randn(Float32, 2, 2), 3) isa Tuple{Matrix{Float32},Float32}
    @test gradient(norm, randn(Float32, 2, 2), 3f0) isa Tuple{Matrix{Float32},Float32}
    @test gradient(norm, randn(ComplexF32, 2, 2), 3.5f0) isa Tuple{Matrix{ComplexF32},Float32}

    # Smoke tests: differentiating scalar * (real/complex, vector/matrix)
    # through `norm` must not error.
    # https://github.com/FluxML/Zygote.jl/issues/331
    for a in ([1, 1], [1 1], [1im, 1], [1im 1])
        gradient(x -> norm(x * a), 1.23)
    end
end

2 comments on commit bfba50c

@oxinabox
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@JuliaRegistrator
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Registration pull request created: JuliaRegistries/General/26002

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v0.5.16 -m "<description of version>" bfba50c485b4b62a6e083b7592da58fa87e271c7
git push origin v0.5.16

Please sign in to comment.