Merge pull request #408 from SciML/forward_tests
Forward tests
ChrisRackauckas authored Oct 6, 2021
2 parents 2b99f6c + e8e62ff commit 23253b9
Showing 3 changed files with 85 additions and 0 deletions.
1 change: 1 addition & 0 deletions .github/workflows/CI.yml
@@ -20,6 +20,7 @@ jobs:
- IntegroDiff
- NNSTOPPINGTIME
- NNRODE
- Forward
steps:
- uses: actions/checkout@v2
- uses: julia-actions/setup-julia@v1
81 changes: 81 additions & 0 deletions test/forward_tests.jl
@@ -0,0 +1,81 @@
using Flux
println("forward_tests")
using DiffEqFlux
println("Starting Soon!")
using ModelingToolkit
using DiffEqBase
using Test, NeuralPDE
println("Starting Soon!")
using SciMLBase
import ModelingToolkit: Interval
import Zygote # Zygote.gradient / Zygote.hessian are called below for reference derivatives

@testset "ODE" begin
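# With the trivial PDE Dx(u(x)) ~ 0 and a fixed, parameter-free trial function phi(x) = x^2,
# the pointwise PDE residual should match the analytic derivative dudx(x) = 2x.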
@parameters x
@variables u(..)

Dx = Differential(x)
eq = Dx(u(x)) ~ 0.
bcs = [u(0.) ~ u(0.)]
domains = [x ∈ Interval(0.0,1.0)]
chain = FastChain((x,p) -> x.^2)

chain([1],Float64[])
strategy_ = NeuralPDE.GridTraining(0.1)
discretization = NeuralPDE.PhysicsInformedNN(chain,strategy_;init_params = Float64[])
@named pde_system = PDESystem(eq,bcs,domains,[x],[u(x)])
prob = NeuralPDE.discretize(pde_system,discretization)

train_data = prob.f.f.loss_function.pde_loss_function.pde_loss_functions.contents[1].train_set
inner_loss = prob.f.f.loss_function.pde_loss_function.pde_loss_functions.contents[1].loss_function

dudx(x) = @. 2*x
@test inner_loss(train_data, Float64[]) ≈ dudx(train_data) rtol = 1e-8
end

@testset "derivatives" begin
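# Check NeuralPDE's numeric derivatives of the trial function phi against Zygote's
# AD gradient and Hessian at the point [1, 2].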
chain = FastChain(FastDense(2,16,Flux.σ),FastDense(16,16,Flux.σ),FastDense(16,1))
initθ = Float64.(DiffEqFlux.initial_params(chain))

eltypeθ = eltype(initθ)
parameterless_type_θ = DiffEqBase.parameterless_type(initθ)
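# phi(cord, θ) evaluates the network at `cord`; `derivative` differentiates phi numerically
# using the supplied ε perturbation vectors (nested for higher-order derivatives).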
phi = NeuralPDE.get_phi(chain,parameterless_type_θ)
derivative = NeuralPDE.get_numeric_derivative()

u_ = (cord, θ, phi)->sum(phi(cord, θ))

phi([1,2], initθ)

phi_ = (p) -> phi(p, initθ)[1]
dphi = Zygote.gradient(phi_,[1.,2.])

function get_ε(dim, der_num,eltypeθ)
epsilon = cbrt(eps(eltypeθ))
ε = zeros(eltypeθ, dim)
ε[der_num] = epsilon
ε
end

eps_x = get_ε(2, 1,Float64)
eps_y = get_ε(2, 2,Float64)

dphi_x = derivative(phi,u_,[1.,2.],[eps_x],1,initθ)
dphi_y = derivative(phi,u_,[1.,2.],[eps_y],1,initθ)

#first order derivatives
@test isapprox(dphi[1][1], dphi_x, atol=1e-8)
@test isapprox(dphi[1][2], dphi_y, atol=1e-8)

dphi_x = derivative(phi,u_,[1.,2.],[[ 0.0049215667, 0.0]],1,initθ)
dphi_y = derivative(phi,u_,[1.,2.],[[0.0, 0.0049215667]],1,initθ)

hess_phi = Zygote.hessian(phi_,[1,2])

dphi_xx = derivative(phi,u_,[1.,2.],[eps_x,eps_x],2,initθ)
dphi_xy = derivative(phi,u_,[1.,2.],[eps_x,eps_y],2,initθ)
dphi_yy = derivative(phi,u_,[1.,2.],[eps_y,eps_y],2,initθ)

#second order derivatives
@test isapprox(hess_phi[1], dphi_xx, atol=1e-5)
@test isapprox(hess_phi[2], dphi_xy, atol=1e-5)
@test isapprox(hess_phi[4], dphi_yy, atol=1e-5)
end
3 changes: 3 additions & 0 deletions test/runtests.jl
@@ -38,6 +38,9 @@ const is_CI = haskey(ENV,"CI")
@time @safetestset "NNRODE" begin include("NNRODE_tests.jl") end
@time @safetestset "NNParamKolmogorov" begin include("NNParamKolmogorov_tests.jl") end
end
if GROUP == "All" || GROUP == "Forward"
@time @safetestset "Forward" begin include("forward_tests.jl") end
end

if !is_APPVEYOR && GROUP == "GPU"
@safetestset "NNPDE_gpu" begin include("NNPDE_tests_gpu.jl") end
