Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/ADNLPModels.jl
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ include("reverse.jl")
include("enzyme.jl")
include("sparse_diff_tools.jl")
include("zygote.jl")
include("predefined_backend.jl")
include("nlp.jl")
include("nls.jl")

Expand Down
75 changes: 40 additions & 35 deletions src/ad.jl
Original file line number Diff line number Diff line change
Expand Up @@ -69,10 +69,12 @@ end
function ADModelBackend(
nvar::Integer,
f;
backend::Symbol = :default,
matrix_free::Bool = false,
show_time::Bool = false,
gradient_backend::Type{GB} = ForwardDiffADGradient,
hprod_backend::Type{HvB} = ForwardDiffADHvprod,
hessian_backend::Type{HB} = SparseADHessian,
gradient_backend::Type{GB} = get_default_backend(:gradient_backend, backend),
hprod_backend::Type{HvB} = get_default_backend(:hprod_backend, backend),
hessian_backend::Type{HB} = get_default_backend(:hessian_backend, backend, matrix_free),
kwargs...,
) where {GB, HvB, HB}
c! = (args...) -> []
Expand Down Expand Up @@ -112,14 +114,16 @@ function ADModelBackend(
f,
ncon::Integer,
c!;
backend::Symbol = :default,
matrix_free::Bool = false,
show_time::Bool = false,
gradient_backend::Type{GB} = ForwardDiffADGradient,
hprod_backend::Type{HvB} = ForwardDiffADHvprod,
jprod_backend::Type{JvB} = ForwardDiffADJprod,
jtprod_backend::Type{JtvB} = ForwardDiffADJtprod,
jacobian_backend::Type{JB} = SparseADJacobian,
hessian_backend::Type{HB} = SparseADHessian,
ghjvprod_backend::Type{GHJ} = ForwardDiffADGHjvprod,
gradient_backend::Type{GB} = get_default_backend(:gradient_backend, backend),
hprod_backend::Type{HvB} = get_default_backend(:hprod_backend, backend),
jprod_backend::Type{JvB} = get_default_backend(:jprod_backend, backend),
jtprod_backend::Type{JtvB} = get_default_backend(:jtprod_backend, backend),
jacobian_backend::Type{JB} = get_default_backend(:jacobian_backend, backend, matrix_free),
hessian_backend::Type{HB} = get_default_backend(:hessian_backend, backend, matrix_free),
ghjvprod_backend::Type{GHJ} = get_default_backend(:ghjvprod_backend, backend),
kwargs...,
) where {GB, HvB, JvB, JtvB, JB, HB, GHJ}
b = @elapsed begin
Expand Down Expand Up @@ -170,21 +174,20 @@ function ADModelNLSBackend(
nvar::Integer,
F!,
nequ::Integer;
backend::Symbol = :default,
matrix_free::Bool = false,
show_time::Bool = false,
gradient_backend::Type{GB} = ForwardDiffADGradient,
hprod_backend::Type{HvB} = ForwardDiffADHvprod,
jprod_backend::Type{JvB} = ForwardDiffADJprod,
jtprod_backend::Type{JtvB} = ForwardDiffADJtprod,
jacobian_backend::Type{JB} = SparseADJacobian,
hessian_backend::Type{HB} = SparseADHessian,
ghjvprod_backend::Type{GHJ} = ForwardDiffADGHjvprod,
hprod_residual_backend::Type{HvBLS} = ForwardDiffADHvprod,
jprod_residual_backend::Type{JvBLS} = ForwardDiffADJprod,
jtprod_residual_backend::Type{JtvBLS} = ForwardDiffADJtprod,
jacobian_residual_backend::Type{JBLS} = SparseADJacobian,
hessian_residual_backend::Type{HBLS} = ForwardDiffADHessian,
gradient_backend::Type{GB} = get_default_backend(:gradient_backend, backend),
hprod_backend::Type{HvB} = get_default_backend(:hprod_backend, backend),
hessian_backend::Type{HB} = get_default_backend(:hessian_backend, backend, matrix_free),
ghjvprod_backend::Type{GHJ} = get_default_backend(:ghjvprod_backend, backend),
hprod_residual_backend::Type{HvBLS} = get_default_backend(:hprod_residual_backend, backend),
jprod_residual_backend::Type{JvBLS} = get_default_backend(:jprod_residual_backend, backend),
jtprod_residual_backend::Type{JtvBLS} = get_default_backend(:jtprod_residual_backend, backend),
jacobian_residual_backend::Type{JBLS} = get_default_backend(:jacobian_residual_backend, backend, matrix_free),
hessian_residual_backend::Type{HBLS} = get_default_backend(:hessian_residual_backend, backend, matrix_free),
kwargs...,
) where {GB, HvB, JvB, JtvB, JB, HB, GHJ, HvBLS, JvBLS, JtvBLS, JBLS, HBLS}
) where {GB, HvB, HB, GHJ, HvBLS, JvBLS, JtvBLS, JBLS, HBLS}
function F(x; nequ = nequ)
Fx = similar(x, nequ)
F!(Fx, x)
Expand Down Expand Up @@ -251,19 +254,21 @@ function ADModelNLSBackend(
nequ::Integer,
ncon::Integer,
c!;
backend::Symbol = :default,
matrix_free::Bool = false,
show_time::Bool = false,
gradient_backend::Type{GB} = ForwardDiffADGradient,
hprod_backend::Type{HvB} = ForwardDiffADHvprod,
jprod_backend::Type{JvB} = ForwardDiffADJprod,
jtprod_backend::Type{JtvB} = ForwardDiffADJtprod,
jacobian_backend::Type{JB} = SparseADJacobian,
hessian_backend::Type{HB} = SparseADHessian,
ghjvprod_backend::Type{GHJ} = ForwardDiffADGHjvprod,
hprod_residual_backend::Type{HvBLS} = ForwardDiffADHvprod,
jprod_residual_backend::Type{JvBLS} = ForwardDiffADJprod,
jtprod_residual_backend::Type{JtvBLS} = ForwardDiffADJtprod,
jacobian_residual_backend::Type{JBLS} = SparseADJacobian,
hessian_residual_backend::Type{HBLS} = ForwardDiffADHessian,
gradient_backend::Type{GB} = get_default_backend(:gradient_backend, backend),
hprod_backend::Type{HvB} = get_default_backend(:hprod_backend, backend),
jprod_backend::Type{JvB} = get_default_backend(:jprod_backend, backend),
jtprod_backend::Type{JtvB} = get_default_backend(:jtprod_backend, backend),
jacobian_backend::Type{JB} = get_default_backend(:jacobian_backend, backend, matrix_free),
hessian_backend::Type{HB} = get_default_backend(:hessian_backend, backend, matrix_free),
ghjvprod_backend::Type{GHJ} = get_default_backend(:ghjvprod_backend, backend),
hprod_residual_backend::Type{HvBLS} = get_default_backend(:hprod_residual_backend, backend),
jprod_residual_backend::Type{JvBLS} = get_default_backend(:jprod_residual_backend, backend),
jtprod_residual_backend::Type{JtvBLS} = get_default_backend(:jtprod_residual_backend, backend),
jacobian_residual_backend::Type{JBLS} = get_default_backend(:jacobian_residual_backend, backend, matrix_free),
hessian_residual_backend::Type{HBLS} = get_default_backend(:hessian_residual_backend, backend, matrix_free),
kwargs...,
) where {GB, HvB, JvB, JtvB, JB, HB, GHJ, HvBLS, JvBLS, JtvBLS, JBLS, HBLS}
function F(x; nequ = nequ)
Expand Down
2 changes: 1 addition & 1 deletion src/forward.jl
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ function ADNLPModels.Jprod!(b::ForwardDiffADJprod{T, Tag}, Jv, c!, x, v) where {
end

struct GenericForwardDiffADJtprod <: ADBackend end
function GeneriForwardDiffADJtprod(
function GenericForwardDiffADJtprod(
nvar::Integer,
f,
ncon::Integer = 0,
Expand Down
82 changes: 82 additions & 0 deletions src/predefined_backend.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
# Default AD backend selection: sparse AD for matrix-valued derivatives
# (Jacobian/Hessian) and ForwardDiff-based operators for everything else.
# `const` so the module-level binding is type-stable (non-const globals are
# untyped in Julia and slow down every lookup).
const default_backend = Dict(
  :gradient_backend => ForwardDiffADGradient,
  :hprod_backend => ForwardDiffADHvprod,
  :jprod_backend => ForwardDiffADJprod,
  :jtprod_backend => ForwardDiffADJtprod,
  :jacobian_backend => SparseADJacobian,
  :hessian_backend => SparseADHessian,
  :ghjvprod_backend => ForwardDiffADGHjvprod,
  :hprod_residual_backend => ForwardDiffADHvprod,
  :jprod_residual_backend => ForwardDiffADJprod,
  :jtprod_residual_backend => ForwardDiffADJtprod,
  :jacobian_residual_backend => SparseADJacobian,
  :hessian_residual_backend => ForwardDiffADHessian,
)

# "Optimized" preset: prefers reverse-mode AD for gradient/Jtprod where it is
# typically faster, keeping sparse AD for Jacobians/Hessians.
# `const` for a type-stable module-level binding.
const optimized = Dict(
  :gradient_backend => ReverseDiffADGradient, # EnzymeADGradient
  :hprod_backend => ForwardDiffADHvprod,
  :jprod_backend => ForwardDiffADJprod,
  :jtprod_backend => ReverseDiffADJtprod,
  :jacobian_backend => SparseADJacobian,
  :hessian_backend => SparseADHessian,
  :ghjvprod_backend => ForwardDiffADGHjvprod,
  :hprod_residual_backend => ForwardDiffADHvprod,
  :jprod_residual_backend => ForwardDiffADJprod,
  :jtprod_residual_backend => ReverseDiffADJtprod,
  :jacobian_residual_backend => SparseADJacobian,
  :hessian_residual_backend => ForwardDiffADHessian,
)

# "Generic" preset: uses the Generic* ForwardDiff variants and dense
# ForwardDiff Jacobians/Hessians (no sparsity exploitation).
# `const` for a type-stable module-level binding.
const generic = Dict(
  :gradient_backend => GenericForwardDiffADGradient,
  :hprod_backend => GenericForwardDiffADHvprod,
  :jprod_backend => GenericForwardDiffADJprod,
  :jtprod_backend => GenericForwardDiffADJtprod,
  :jacobian_backend => ForwardDiffADJacobian,
  :hessian_backend => ForwardDiffADHessian,
  :ghjvprod_backend => ForwardDiffADGHjvprod,
  :hprod_residual_backend => GenericForwardDiffADHvprod,
  :jprod_residual_backend => GenericForwardDiffADJprod,
  :jtprod_residual_backend => GenericForwardDiffADJtprod,
  :jacobian_residual_backend => ForwardDiffADJacobian,
  :hessian_residual_backend => ForwardDiffADHessian,
)

# Registry of all backend presets, keyed by the `backend` Symbol accepted by
# the ADModelBackend constructors. `const` for a type-stable binding.
const predefined_backend = Dict(
  :default => default_backend,
  :optimized => optimized,
  :generic => generic,
)

"""
    get_default_backend(meth::Symbol, backend::Symbol, args...; kwargs...)
    get_default_backend(::Val{sym}, backend::Symbol, args...; kwargs...) where {sym}

Return a type `<: ADBackend` that corresponds to the default backend used for
the method `meth` in the preset `backend`.
See `keys(ADNLPModels.predefined_backend)` for the list of available presets.

For methods that assemble a matrix (`:jacobian_backend`, `:hessian_backend`,
`:jacobian_residual_backend`, `:hessian_residual_backend`), an optional trailing
positional argument `matrix_free::Bool = false` is accepted; if `true`, an
`EmptyADbackend` is returned instead.
"""
function get_default_backend(meth::Symbol, args...; kwargs...)
  # Lift the method symbol to the type domain so the Val-specialized methods
  # below can dispatch on it.
  return get_default_backend(Val(meth), args...; kwargs...)
end

# Fallback for any method symbol without a dedicated specialization:
# look the symbol up in the chosen preset table. Extra positional and keyword
# arguments are accepted and ignored so all call sites share one signature.
function get_default_backend(::Val{sym}, backend, args...; kwargs...) where {sym}
  preset = predefined_backend[backend]
  return preset[sym]
end

# Specializations for the methods that assemble a matrix: when the caller asks
# for a matrix-free model, these return `EmptyADbackend` instead of the
# preset's Jacobian/Hessian backend.
function get_default_backend(::Val{:jacobian_backend}, backend, matrix_free::Bool = false)
  matrix_free && return EmptyADbackend
  return predefined_backend[backend][:jacobian_backend]
end
function get_default_backend(::Val{:hessian_backend}, backend, matrix_free::Bool = false)
  matrix_free && return EmptyADbackend
  return predefined_backend[backend][:hessian_backend]
end
function get_default_backend(::Val{:jacobian_residual_backend}, backend, matrix_free::Bool = false)
  matrix_free && return EmptyADbackend
  return predefined_backend[backend][:jacobian_residual_backend]
end
function get_default_backend(::Val{:hessian_residual_backend}, backend, matrix_free::Bool = false)
  matrix_free && return EmptyADbackend
  return predefined_backend[backend][:hessian_residual_backend]
end
2 changes: 1 addition & 1 deletion src/sparse_diff_tools.jl
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@
return SDTForwardDiffADHvprod(tmp_in, tmp_out)
end

function Hvprod!(b::SDTForwardDiffADHvprod, ::Val{Smbl}, Hv, f, x, v) where {Smbl}
function Hvprod!(b::SDTForwardDiffADHvprod, Hv, x, v, f, args...)
ϕ!(dy, x; f = f) = ForwardDiff.gradient!(dy, f, x)
SparseDiffTools.auto_hesvecgrad!(Hv, (dy, x) -> ϕ!(dy, x), x, v, b.tmp_in, b.tmp_out)
return Hv
Expand Down
77 changes: 3 additions & 74 deletions test/nlp/basic.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,47 +8,6 @@ function (regr::LinearRegression)(beta)
return dot(r, r) / 2
end

ReverseDiffAD() = ADNLPModels.ADModelBackend(
ADNLPModels.ReverseDiffADGradient(nothing),
ADNLPModels.ReverseDiffADHvprod(),
ADNLPModels.ReverseDiffADJprod(),
ADNLPModels.ReverseDiffADJtprod(),
ADNLPModels.ReverseDiffADJacobian(0),
ADNLPModels.ReverseDiffADHessian(0),
ADNLPModels.ForwardDiffADGHjvprod(),
ADNLPModels.EmptyADbackend(),
ADNLPModels.EmptyADbackend(),
ADNLPModels.EmptyADbackend(),
ADNLPModels.EmptyADbackend(),
ADNLPModels.EmptyADbackend(),
)
ZygoteAD() = ADNLPModels.ADModelBackend(
ADNLPModels.ZygoteADGradient(),
ADNLPModels.GenericForwardDiffADHvprod(),
ADNLPModels.ZygoteADJprod(),
ADNLPModels.ZygoteADJtprod(),
ADNLPModels.ZygoteADJacobian(0),
ADNLPModels.ZygoteADHessian(0),
ADNLPModels.ForwardDiffADGHjvprod(),
ADNLPModels.EmptyADbackend(),
ADNLPModels.EmptyADbackend(),
ADNLPModels.EmptyADbackend(),
ADNLPModels.EmptyADbackend(),
ADNLPModels.EmptyADbackend(),
)

function test_autodiff_backend_error()
@testset "Error without loading package - $backend" for backend in [:ZygoteAD]
adbackend = eval(backend)()
@test_throws ArgumentError gradient(adbackend.gradient_backend, sum, [1.0])
@test_throws ArgumentError gradient!(adbackend.gradient_backend, [1.0], sum, [1.0])
@test_throws ArgumentError jacobian(adbackend.jacobian_backend, identity, [1.0])
@test_throws ArgumentError hessian(adbackend.hessian_backend, sum, [1.0])
@test_throws ArgumentError Jprod!(adbackend.jprod_backend, [1.0], [1.0], identity, [1.0])
@test_throws ArgumentError Jtprod!(adbackend.jtprod_backend, [1.0], [1.0], identity, [1.0])
end
end

function test_autodiff_model(name; kwargs...)
x0 = zeros(2)
f(x) = dot(x, x)
Expand Down Expand Up @@ -385,36 +344,6 @@ function test_autodiff_model(name; kwargs...)
end
end

# Test the argument error without loading the packages
test_autodiff_backend_error()

# Automatically loads the code for Zygote with Requires
import Zygote

test_autodiff_model("OptimizedAD")
test_autodiff_model(
"ForwardDiff",
gradient_backend = ADNLPModels.GenericForwardDiffADGradient,
hprod_backend = ADNLPModels.GenericForwardDiffADHvprod,
jprod_backend = ADNLPModels.GenericForwardDiffADJprod,
jtprod_backend = ADNLPModels.ForwardDiffADJtprod,
jacobian_backend = ADNLPModels.ForwardDiffADJacobian,
hessian_backend = ADNLPModels.ForwardDiffADHessian,
)
test_autodiff_model(
"ReverseDiff",
gradient_backend = ADNLPModels.ReverseDiffADGradient,
hprod_backend = ADNLPModels.ReverseDiffADHvprod,
jprod_backend = ADNLPModels.ReverseDiffADJprod,
jtprod_backend = ADNLPModels.ReverseDiffADJtprod,
jacobian_backend = ADNLPModels.ReverseDiffADJacobian,
hessian_backend = ADNLPModels.ReverseDiffADHessian,
)
test_autodiff_model(
"Zygote",
gradient_backend = ADNLPModels.ZygoteADGradient,
jprod_backend = ADNLPModels.ZygoteADJprod,
jtprod_backend = ADNLPModels.ZygoteADJtprod,
jacobian_backend = ADNLPModels.ZygoteADJacobian,
hessian_backend = ADNLPModels.ZygoteADHessian,
)
# Run the basic NLP model tests once per predefined backend preset
# (the keys of ADNLPModels.predefined_backend: :default, :optimized, :generic).
@testset "Basic tests using $backend " for backend in keys(ADNLPModels.predefined_backend)
test_autodiff_model("$backend", backend = backend)
end
37 changes: 3 additions & 34 deletions test/nlp/nlpmodelstest.jl
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@

function nlpmodelstest_autodiff(name; kwargs...)
for problem in NLPModelsTest.nlp_problems
@testset "Checking NLPModelsTest tests on problem $problem with $name" begin
@testset "Checking NLPModelsTest tests with $backend" for backend in keys(ADNLPModels.predefined_backend)
@testset "Checking NLPModelsTest tests on problem $problem" for problem in NLPModelsTest.nlp_problems
nlp_from_T = eval(Meta.parse(lowercase(problem) * "_autodiff"))
nlp_ad = nlp_from_T(; kwargs...)
nlp_ad = nlp_from_T(; backend = backend)
nlp_man = eval(Meta.parse(problem))()

show(IOBuffer(), nlp_ad)
Expand All @@ -24,34 +22,5 @@ function nlpmodelstest_autodiff(name; kwargs...)
@testset "Check coordinate memory" begin
coord_memory_nlp(nlp_ad, exclude = [], linear_api = true)
end
end
end
end

nlpmodelstest_autodiff("OptimizedAD")
nlpmodelstest_autodiff(
"ForwardDiff",
gradient_backend = ADNLPModels.GenericForwardDiffADGradient,
hprod_backend = ADNLPModels.GenericForwardDiffADHvprod,
jprod_backend = ADNLPModels.GenericForwardDiffADJprod,
jtprod_backend = ADNLPModels.ForwardDiffADJtprod,
jacobian_backend = ADNLPModels.ForwardDiffADJacobian,
hessian_backend = ADNLPModels.ForwardDiffADHessian,
)
nlpmodelstest_autodiff(
"ReverseDiff",
gradient_backend = ADNLPModels.ReverseDiffADGradient,
hprod_backend = ADNLPModels.ReverseDiffADHvprod,
jprod_backend = ADNLPModels.ReverseDiffADJprod,
jtprod_backend = ADNLPModels.ReverseDiffADJtprod,
jacobian_backend = ADNLPModels.ReverseDiffADJacobian,
hessian_backend = ADNLPModels.ReverseDiffADHessian,
)
nlpmodelstest_autodiff(
"Zygote",
gradient_backend = ADNLPModels.ZygoteADGradient,
jprod_backend = ADNLPModels.ZygoteADJprod,
jtprod_backend = ADNLPModels.ZygoteADJtprod,
jacobian_backend = ADNLPModels.ZygoteADJacobian,
hessian_backend = ADNLPModels.ZygoteADHessian,
)
30 changes: 3 additions & 27 deletions test/nls/basic.jl
Original file line number Diff line number Diff line change
Expand Up @@ -361,30 +361,6 @@ function autodiff_nls_test(name; kwargs...)
end
end

autodiff_nls_test("OptimizedAD")
autodiff_nls_test(
"ForwardDiff",
gradient_backend = ADNLPModels.GenericForwardDiffADGradient,
hprod_backend = ADNLPModels.GenericForwardDiffADHvprod,
jprod_backend = ADNLPModels.GenericForwardDiffADJprod,
jtprod_backend = ADNLPModels.ForwardDiffADJtprod,
jacobian_backend = ADNLPModels.ForwardDiffADJacobian,
hessian_backend = ADNLPModels.ForwardDiffADHessian,
)
autodiff_nls_test(
"ReverseDiff",
gradient_backend = ADNLPModels.ReverseDiffADGradient,
hprod_backend = ADNLPModels.ReverseDiffADHvprod,
jprod_backend = ADNLPModels.ReverseDiffADJprod,
jtprod_backend = ADNLPModels.ReverseDiffADJtprod,
jacobian_backend = ADNLPModels.ReverseDiffADJacobian,
hessian_backend = ADNLPModels.ReverseDiffADHessian,
)
autodiff_nls_test(
"Zygote",
gradient_backend = ADNLPModels.ZygoteADGradient,
jprod_backend = ADNLPModels.ZygoteADJprod,
jtprod_backend = ADNLPModels.ZygoteADJtprod,
jacobian_backend = ADNLPModels.ZygoteADJacobian,
hessian_backend = ADNLPModels.ZygoteADHessian,
)
# Run the basic NLS (nonlinear least-squares) model tests once per predefined
# backend preset (keys of ADNLPModels.predefined_backend).
@testset "Basic NLS tests using $backend " for backend in keys(ADNLPModels.predefined_backend)
autodiff_nls_test("$backend", backend = backend)
end
Loading