From fffb8cae815b696e5dbe9c3b33fffbe3ff044e09 Mon Sep 17 00:00:00 2001
From: Patrick Kofod Mogensen
Date: Tue, 16 Aug 2016 09:37:10 +0200
Subject: [PATCH] Fix AD for NewtonTrustRegion. (#264)

---
 src/newton.jl               |  7 -------
 src/optimize.jl             |  6 +++---
 test/newton_trust_region.jl | 15 +++++++++------
 3 files changed, 12 insertions(+), 16 deletions(-)

diff --git a/src/newton.jl b/src/newton.jl
index b6065deed..976783423 100644
--- a/src/newton.jl
+++ b/src/newton.jl
@@ -28,13 +28,6 @@ end
 
 Newton(; linesearch!::Function = hz_linesearch!) = Newton(linesearch!)
 
-function optimize(d::Function,
-                  initial_x::Array,
-                  method::Newton,
-                  options::OptimizationOptions)
-    throw(ArgumentError("Newton's method without a user supplied gradient and hessian is currently not supported."))
-end
-
 function optimize{T}(d::TwiceDifferentiableFunction,
                      initial_x::Vector{T},
                      mo::Newton,
diff --git a/src/optimize.jl b/src/optimize.jl
index a034c1949..472f7b12f 100644
--- a/src/optimize.jl
+++ b/src/optimize.jl
@@ -150,7 +150,7 @@ end
 
 function optimize{T}(f::Function,
                      initial_x::Array{T},
-                     method::Newton,
+                     method::Union{Newton, NewtonTrustRegion},
                      options::OptimizationOptions)
     if !options.autodiff
         error("No gradient or Hessian was provided. Either provide a gradient and Hessian, set autodiff = true in the OptimizationOptions if applicable, or choose a solver that doesn't require a Hessian.")
@@ -172,7 +172,7 @@ end
 function optimize(f::Function,
                   g!::Function,
                   initial_x::Array,
-                  method::Newton,
+                  method::Union{Newton, NewtonTrustRegion},
                   options::OptimizationOptions)
     if !options.autodiff
         error("No Hessian was provided. Either provide a Hessian, set autodiff = true in the OptimizationOptions if applicable, or choose a solver that doesn't require a Hessian.")
@@ -190,7 +190,7 @@ end
 
 function optimize(d::DifferentiableFunction,
                   initial_x::Array,
-                  method::Newton,
+                  method::Union{Newton, NewtonTrustRegion},
                   options::OptimizationOptions)
     if !options.autodiff
         error("No Hessian was provided. Either provide a Hessian, set autodiff = true in the OptimizationOptions if applicable, or choose a solver that doesn't require a Hessian.")
diff --git a/test/newton_trust_region.jl b/test/newton_trust_region.jl
index c66f304fe..9ac52e98b 100644
--- a/test/newton_trust_region.jl
+++ b/test/newton_trust_region.jl
@@ -140,11 +140,14 @@ let
     # Test Optim.newton for all twice differentiable functions in
     # Optim.UnconstrainedProblems.examples
     for (name, prob) in Optim.UnconstrainedProblems.examples
-        if prob.istwicedifferentiable
-            ddf = TwiceDifferentiableFunction(prob.f, prob.g!,prob.h!)
-            res = Optim.optimize(ddf, prob.initial_x, method=NewtonTrustRegion())
-            @assert norm(res.minimum - prob.solutions) < 1e-2
-            @assert res.f_converged || res.x_converged || res.g_converged
-        end
+        if prob.istwicedifferentiable
+            ddf = DifferentiableFunction(prob.f, prob.g!)
+            res = Optim.optimize(ddf, prob.initial_x, NewtonTrustRegion(), OptimizationOptions(autodiff = true))
+            @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
+            res = Optim.optimize(ddf.f, prob.initial_x, NewtonTrustRegion(), OptimizationOptions(autodiff = true))
+            @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
+            res = Optim.optimize(ddf.f, ddf.g!, prob.initial_x, NewtonTrustRegion(), OptimizationOptions(autodiff = true))
+            @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
+        end
     end
 end
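
Note (commentary, not part of the patch): the change above deletes the method
that rejected Newton-type solvers without user-supplied derivatives, and widens
the autodiff entry points from `method::Newton` to
`Union{Newton, NewtonTrustRegion}`, so NewtonTrustRegion now accepts an
objective alone, or an objective plus gradient, when autodiff is enabled. A
minimal usage sketch against the era's API as it appears in this patch; the
`rosenbrock` objective and its gradient are illustrative stand-ins, not part of
the change:

    using Optim

    # Illustrative objective (the test suite instead iterates over
    # Optim.UnconstrainedProblems.examples).
    rosenbrock(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2

    # Objective only: with autodiff = true the gradient and Hessian are
    # derived automatically, a call NewtonTrustRegion previously rejected.
    res = Optim.optimize(rosenbrock, [0.0, 0.0], NewtonTrustRegion(),
                         OptimizationOptions(autodiff = true))

    # Objective plus in-place gradient (point first, storage second in this
    # era's convention); only the Hessian is then derived automatically.
    function rosenbrock_g!(x, storage)
        storage[1] = -2.0 * (1.0 - x[1]) - 400.0 * x[1] * (x[2] - x[1]^2)
        storage[2] = 200.0 * (x[2] - x[1]^2)
    end

    res = Optim.optimize(rosenbrock, rosenbrock_g!, [0.0, 0.0],
                         NewtonTrustRegion(), OptimizationOptions(autodiff = true))

    Optim.minimizer(res)  # accessor exercised by the updated test

These three call forms mirror exactly the three cases added to
test/newton_trust_region.jl above, so the test now covers every autodiff path
the widened signatures expose.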