diff --git a/src/quasi-newton.jl b/src/quasi-newton.jl
index 060d7dc..f78ffa9 100644
--- a/src/quasi-newton.jl
+++ b/src/quasi-newton.jl
@@ -107,6 +107,7 @@ NLPModels.jac_nln_structure!(
 # the following methods are affected by the Hessian approximation
 NLPModels.hess_op(nlp::QuasiNewtonModel, x::AbstractVector; kwargs...) = nlp.op
 NLPModels.hprod(nlp::QuasiNewtonModel, x::AbstractVector, v::AbstractVector; kwargs...) = nlp.op * v
+
 function NLPModels.hprod!(
   nlp::QuasiNewtonModel,
   x::AbstractVector,
@@ -129,6 +130,9 @@ function NLPModels.hprod!(
   return Hv
 end
 
+NLPModels.neval_hprod(nlp::LBFGSModel) = nlp.op.nprod
+NLPModels.neval_hprod(nlp::LSR1Model) = nlp.op.nprod
+
 function Base.push!(nlp::QuasiNewtonModel, args...)
   push!(nlp.op, args...)
   return nlp
diff --git a/test/nlp/quasi-newton.jl b/test/nlp/quasi-newton.jl
index 0c0c5b7..72ac7ee 100644
--- a/test/nlp/quasi-newton.jl
+++ b/test/nlp/quasi-newton.jl
@@ -37,6 +37,9 @@
   @test obj(nlp, x) ≈ f(x)
   @test grad(nlp, x) ≈ ∇f(x)
   @test hprod(nlp, x, v) ≈ H(x) * v
+  @test neval_hprod(nlp.model) == 0
+  (QNM == LSR1Model) && (@test neval_hprod(nlp) == 2)
+  (QNM == LBFGSModel) && (@test neval_hprod(nlp) == 1)
   @test cons(nlp, x) ≈ c(x)
   @test jac(nlp, x) ≈ J(x)
   @test jprod(nlp, x, v) ≈ J(x) * v