Fix AD for NewtonTrustRegion. (#264)
pkofod committed Aug 16, 2016
1 parent 6d37caa · commit fffb8ca
Showing 3 changed files with 12 additions and 16 deletions.
7 changes: 0 additions & 7 deletions src/newton.jl
@@ -28,13 +28,6 @@ end
 Newton(; linesearch!::Function = hz_linesearch!) =
     Newton(linesearch!)
 
-function optimize(d::Function,
-                  initial_x::Array,
-                  method::Newton,
-                  options::OptimizationOptions)
-    throw(ArgumentError("Newton's method without a user supplied gradient and hessian is currently not supported."))
-end
-
 function optimize{T}(d::TwiceDifferentiableFunction,
                      initial_x::Vector{T},
                      mo::Newton,
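With the throwing stub removed, a gradient- and Hessian-free call falls through to the shared autodiff methods in src/optimize.jl below. A minimal sketch of such a call against the 2016-era API shown in this diff; the rosenbrock objective is an illustrative stand-in, not part of the commit:

rosenbrock(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2
# With autodiff = true, the gradient and Hessian are generated automatically:
res = Optim.optimize(rosenbrock, [0.0, 0.0], Newton(),
                     OptimizationOptions(autodiff = true))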
6 changes: 3 additions & 3 deletions src/optimize.jl
@@ -150,7 +150,7 @@ end

 function optimize{T}(f::Function,
                      initial_x::Array{T},
-                     method::Newton,
+                     method::Union{Newton, NewtonTrustRegion},
                      options::OptimizationOptions)
     if !options.autodiff
         error("No gradient or Hessian was provided. Either provide a gradient and Hessian, set autodiff = true in the OptimizationOptions if applicable, or choose a solver that doesn't require a Hessian.")
@@ -172,7 +172,7 @@ end
 function optimize(f::Function,
                   g!::Function,
                   initial_x::Array,
-                  method::Newton,
+                  method::Union{Newton, NewtonTrustRegion},
                   options::OptimizationOptions)
     if !options.autodiff
         error("No Hessian was provided. Either provide a Hessian, set autodiff = true in the OptimizationOptions if applicable, or choose a solver that doesn't require a Hessian.")
@@ -190,7 +190,7 @@ end

 function optimize(d::DifferentiableFunction,
                   initial_x::Array,
-                  method::Newton,
+                  method::Union{Newton, NewtonTrustRegion},
                   options::OptimizationOptions)
     if !options.autodiff
         error("No Hessian was provided. Either provide a Hessian, set autodiff = true in the OptimizationOptions if applicable, or choose a solver that doesn't require a Hessian.")
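Widening method::Newton to method::Union{Newton, NewtonTrustRegion} is the whole fix: all three autodiff fallbacks now accept either solver, so optimize(f, x0, NewtonTrustRegion(), OptimizationOptions(autodiff = true)) dispatches here instead of failing with a method error. An illustrative, standalone sketch of the Union-dispatch pattern (toy types in 2016-era immutable syntax to match the diff, not Optim.jl's own code):

immutable SolverA end
immutable SolverB end
# One method body serves both types, exactly as the fallbacks above do:
fallback(method::Union{SolverA, SolverB}) = "shared autodiff path"
fallback(SolverA())  # -> "shared autodiff path"
fallback(SolverB())  # -> "shared autodiff path"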
15 changes: 9 additions & 6 deletions test/newton_trust_region.jl
@@ -140,11 +140,14 @@ let
 # Test Optim.newton for all twice differentiable functions in
 # Optim.UnconstrainedProblems.examples
 for (name, prob) in Optim.UnconstrainedProblems.examples
-    if prob.istwicedifferentiable
-        ddf = TwiceDifferentiableFunction(prob.f, prob.g!, prob.h!)
-        res = Optim.optimize(ddf, prob.initial_x, method=NewtonTrustRegion())
-        @assert norm(res.minimum - prob.solutions) < 1e-2
-        @assert res.f_converged || res.x_converged || res.g_converged
-    end
+    if prob.istwicedifferentiable
+        ddf = DifferentiableFunction(prob.f, prob.g!)
+        res = Optim.optimize(ddf, prob.initial_x, NewtonTrustRegion(), OptimizationOptions(autodiff = true))
+        @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
+        res = Optim.optimize(ddf.f, prob.initial_x, NewtonTrustRegion(), OptimizationOptions(autodiff = true))
+        @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
+        res = Optim.optimize(ddf.f, ddf.g!, prob.initial_x, NewtonTrustRegion(), OptimizationOptions(autodiff = true))
+        @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
+    end
 end
 end
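Taken together, the updated test exercises the three entry points that the optimize.jl changes open up for NewtonTrustRegion. In general form, with f, g!, and x0 as placeholders for any objective, in-place gradient, and starting point:

opts = OptimizationOptions(autodiff = true)
Optim.optimize(DifferentiableFunction(f, g!), x0, NewtonTrustRegion(), opts)  # AD supplies the Hessian
Optim.optimize(f, x0, NewtonTrustRegion(), opts)      # AD supplies gradient and Hessian
Optim.optimize(f, g!, x0, NewtonTrustRegion(), opts)  # AD supplies the Hessian only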
