diff --git a/docs/src/optimization_packages/multistartoptimization.md b/docs/src/optimization_packages/multistartoptimization.md
index 4f801e64f..15e14625b 100644
--- a/docs/src/optimization_packages/multistartoptimization.md
+++ b/docs/src/optimization_packages/multistartoptimization.md
@@ -36,7 +36,7 @@ using Optimization, OptimizationMultistartOptimization, OptimizationNLopt
 rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
 x0 = zeros(2)
 p = [1.0, 100.0]
-f = OptimizationFunction(rosenbrock)
+f = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
 prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0])
 sol = solve(prob, MultistartOptimization.TikTak(100), NLopt.LD_LBFGS())
 ```
diff --git a/lib/OptimizationNLopt/src/OptimizationNLopt.jl b/lib/OptimizationNLopt/src/OptimizationNLopt.jl
index ee9640024..aae8548e8 100644
--- a/lib/OptimizationNLopt/src/OptimizationNLopt.jl
+++ b/lib/OptimizationNLopt/src/OptimizationNLopt.jl
@@ -12,7 +12,8 @@
     SciMLBase.allowsbounds(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
     SciMLBase.supports_opt_cache_interface(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
 end
 @static if isdefined(OptimizationBase, :supports_opt_cache_interface)
-    OptimizationBase.supports_opt_cache_interface(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
+    OptimizationBase.supports_opt_cache_interface(opt::Union{
+        NLopt.Algorithm, NLopt.Opt}) = true
 end
 function SciMLBase.requiresgradient(opt::Union{NLopt.Algorithm, NLopt.Opt})
@@ -70,7 +71,8 @@ function __map_optimizer_args!(cache::OptimizationBase.OptimizationCache, opt::N
         kwargs...)
     # Check if AUGLAG algorithm requires local_method
-    if opt.algorithm ∈ (NLopt.LN_AUGLAG, NLopt.LD_AUGLAG, NLopt.AUGLAG) && local_method === nothing
+    if opt.algorithm ∈ (NLopt.LN_AUGLAG, NLopt.LD_AUGLAG, NLopt.AUGLAG) &&
+       local_method === nothing
         error("NLopt.$(opt.algorithm) requires a local optimization method. " *
               "Please specify a local_method, e.g., solve(prob, NLopt.$(opt.algorithm)(); " *
               "local_method = NLopt.LN_NELDERMEAD())")
     end
@@ -167,6 +169,15 @@ function SciMLBase.__solve(cache::OptimizationBase.OptimizationCache{
 }
     local x
 
+    # Check if algorithm requires gradients but none are provided
+    opt = cache.opt isa NLopt.Opt ? cache.opt.algorithm : cache.opt
+    if SciMLBase.requiresgradient(opt) && isnothing(cache.f.grad)
+        throw(OptimizationBase.IncompatibleOptimizerError(
+            "The NLopt algorithm $(opt) requires gradients, but no gradient function is available. " *
+            "Please use `OptimizationFunction` with an automatic differentiation backend, " *
+            "e.g., `OptimizationFunction(f, AutoForwardDiff())`, or provide gradients manually via the `grad` kwarg."))
+    end
+
     _loss = function (θ)
         x = cache.f(θ, cache.p)
         opt_state = OptimizationBase.OptimizationState(u = θ, p = cache.p, objective = x[1])
diff --git a/lib/OptimizationNLopt/test/runtests.jl b/lib/OptimizationNLopt/test/runtests.jl
index 12d86b21d..8517849e6 100644
--- a/lib/OptimizationNLopt/test/runtests.jl
+++ b/lib/OptimizationNLopt/test/runtests.jl
@@ -174,4 +174,35 @@ using Test, Random
         @test sol.retcode == ReturnCode.MaxIters
         @test sol.objective < l1
     end
+
+    @testset "gradient-based algorithm without AD backend" begin
+        # Test that gradient-based algorithms throw a helpful error when no AD backend is specified
+        # This reproduces the issue from https://discourse.julialang.org/t/error-when-using-multistart-optimization/133174
+        rosenbrock_test(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
+        x0_test = zeros(2)
+        p_test = [1.0, 100.0]
+
+        # Create OptimizationFunction WITHOUT specifying an AD backend
+        f_no_ad = OptimizationFunction(rosenbrock_test)
+        prob_no_ad = OptimizationProblem(
+            f_no_ad, x0_test, p_test, lb = [-1.0, -1.0], ub = [1.5, 1.5])
+
+        # Test with LD_LBFGS (gradient-based algorithm) - should throw IncompatibleOptimizerError
+        @test_throws OptimizationBase.IncompatibleOptimizerError solve(prob_no_ad, NLopt.LD_LBFGS())
+
+        # Test with NLopt.Opt interface - should also throw IncompatibleOptimizerError
+        @test_throws OptimizationBase.IncompatibleOptimizerError solve(prob_no_ad, NLopt.Opt(:LD_LBFGS, 2))
+
+        # Test that gradient-free algorithms still work without AD backend
+        sol = solve(prob_no_ad, NLopt.LN_NELDERMEAD())
+        @test sol.retcode == ReturnCode.Success
+
+        # Test that with AD backend, gradient-based algorithms work correctly
+        f_with_ad = OptimizationFunction(rosenbrock_test, OptimizationBase.AutoZygote())
+        prob_with_ad = OptimizationProblem(
+            f_with_ad, x0_test, p_test, lb = [-1.0, -1.0], ub = [1.5, 1.5])
+        sol = solve(prob_with_ad, NLopt.LD_LBFGS())
+        @test sol.retcode == ReturnCode.Success
+        @test sol.objective < 1.0
+    end
 end