From 4a47199820ef2489b8052855dd3979cb4fe74cfc Mon Sep 17 00:00:00 2001 From: Olivier Cots Date: Sun, 19 Oct 2025 14:48:53 +0200 Subject: [PATCH 1/4] Update CTDirect.jl From 01aa921f4222a85d9d0f318a0558f1ea5bebbf43 Mon Sep 17 00:00:00 2001 From: Olivier Cots Date: Fri, 24 Oct 2025 17:37:19 +0200 Subject: [PATCH 2/4] Refine solver backend integration and update tests --- Project.toml | 19 ++++++---- ext/CTDirectExtADNLP.jl | 10 +++--- ext/CTDirectExtExa.jl | 28 +++++++++++---- ext/CTDirectExtIpopt.jl | 8 ++--- ext/CTDirectExtKnitro.jl | 8 ++--- ext/CTDirectExtMadNLP.jl | 15 ++++---- src/CTDirect.jl | 2 ++ src/docp.jl | 14 ++++---- src/solution.jl | 58 ++++++++++++++----------------- src/solve.jl | 44 +++++++++++------------ test/runtests.jl | 5 +-- test/suite/test_exa.jl | 28 +++++++++++++-- test/tmp/Project.toml | 8 +++++ test/tmp/test_madnlp.jl | 75 ++++++++++++++++++++++++++++++++++++++++ 14 files changed, 226 insertions(+), 96 deletions(-) create mode 100644 test/tmp/Project.toml create mode 100644 test/tmp/test_madnlp.jl diff --git a/Project.toml b/Project.toml index 3c981aaf..8c572649 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,10 @@ name = "CTDirect" uuid = "790bbbee-bee9-49ee-8912-a9de031322d5" +version = "0.17.4" authors = ["Pierre Martinon "] -version = "0.17.3" + +[workspace] +projects = ["test", "docs"] [deps] CTBase = "54762871-cc72-4466-b8e8-f6c8b58076cd" @@ -9,6 +12,8 @@ CTModels = "34c4fa32-2049-4079-8329-de33c2a22e2d" DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" HSL = "34c5aeac-e683-54a6-a0e9-6e0fdc586c50" MKL = "33e6dc65-8f57-5167-99aa-e5a354878fb2" +NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6" +SolverCore = "ff4d7338-4cf1-434d-91df-b86cb86fb843" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" [weakdeps] @@ -18,6 +23,9 @@ MadNLP = "2621e9c9-9eb4-46b1-8089-e8c72242dfb6" NLPModelsIpopt = "f4238b75-b362-5c4c-b852-0801c9a21d71" NLPModelsKnitro = "bec4dd0d-7755-52d5-9a02-22f0ffc7efcb" +[sources] +CTParser = {rev = "167-bug-max-objective-for-exa", url = "https://github.com/control-toolbox/CTParser.jl.git"} + [extensions] CTDirectExtADNLP = ["ADNLPModels"] CTDirectExtExa = ["ExaModels"] @@ -29,7 +37,6 @@ CTDirectExtMadNLP = ["MadNLP"] ADNLPModels = "0.8" CTBase = "0.16" CTModels = "0.6" -CTParser = "0.7" CUDA = "5" DocStringExtensions = "0.9" ExaModels = "0.9" @@ -38,8 +45,10 @@ MKL = "0.9" MadNLP = "0.8" MadNLPGPU = "0.7" MadNLPMumps = "0.5" +NLPModels = "0.21" NLPModelsIpopt = "0.11" NLPModelsKnitro = "0.9" +SolverCore = "0.3.8" SplitApplyCombine = "1" julia = "1.10" @@ -49,11 +58,9 @@ CTParser = "32681960-a1b1-40db-9bff-a1ca817385d1" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" MadNLPGPU = "d72a61cc-809d-412f-99be-fd81f4b8a598" MadNLPMumps = "3b83494e-c0a4-4895-918b-9157a7a085a1" +NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6" SplitApplyCombine = "03a91e81-4c3e-53e1-a0a4-9c0c8f19dd66" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["Test", "SplitApplyCombine", "CTParser", "ADNLPModels", "ExaModels", "NLPModelsIpopt", "MadNLPMumps", "MadNLPGPU", "CUDA", "AMDGPU"] - -[workspace] -projects = ["test", "docs"] +test = ["Test", "SplitApplyCombine", "CTParser", "ADNLPModels", "ExaModels", "NLPModels", "NLPModelsIpopt", "MadNLPMumps", "MadNLPGPU", "CUDA", "AMDGPU"] diff --git a/ext/CTDirectExtADNLP.jl b/ext/CTDirectExtADNLP.jl index 32123abb..03083238 100644 --- a/ext/CTDirectExtADNLP.jl +++ b/ext/CTDirectExtADNLP.jl @@ -1,10 +1,9 @@ module CTDirectExtADNLP using CTDirect - using DocStringExtensions - using ADNLPModels 
+using CTModels """ $(TYPEDSIGNATURES) @@ -16,8 +15,11 @@ Build the NLP model for the DOCP (ADNLPModels version) * `adnlp_backend`: backend for ADNLPModels ([`:optimized`], `:manual`, `:default`) """ function CTDirect.build_nlp!( - docp::CTDirect.DOCP, - nlp_model::CTDirect.ADNLPBackend, + docp::CTDirect.DOCP{ + <:CTDirect.Discretization, + <:CTModels.Model, + <:CTDirect.ADNLPBackend, + }, x0; adnlp_backend=CTDirect.__adnlp_backend(), show_time=false, #+default diff --git a/ext/CTDirectExtExa.jl b/ext/CTDirectExtExa.jl index 806065b8..ba5529d3 100644 --- a/ext/CTDirectExtExa.jl +++ b/ext/CTDirectExtExa.jl @@ -2,10 +2,9 @@ module CTDirectExtExa using CTDirect using CTModels: CTModels - using DocStringExtensions - using ExaModels +using SolverCore """ $(TYPEDSIGNATURES) @@ -18,8 +17,11 @@ Build the NLP model for the DOCP (ExaModels version) * `exa_backend`: backend for ExaModels ([`nothing`]) """ function CTDirect.build_nlp!( - docp::CTDirect.DOCP, - nlp_model::CTDirect.ExaBackend, + docp::CTDirect.DOCP{ + <:CTDirect.Discretization, + <:CTModels.Model, + <:CTDirect.ExaBackend, + }, x0; grid_size=CTDirect.__grid_size(), disc_method=CTDirect.__disc_method(), @@ -60,12 +62,26 @@ function CTDirect.build_nlp!( return nothing end -function CTDirect.get_time_grid_exa(docp_solution, docp) +""" +$(TYPEDSIGNATURES) + +Retrieve the time grid from the given DOCP solution. + +# Arguments + +- `nlp_solution`: The DOCP solution. +- `docp`: The DOCP. + +# Returns + +- `::Vector{Float64}`: The time grid. +""" +function CTDirect.get_time_grid_exa(nlp_solution::SolverCore.AbstractExecutionStats, docp::CTDirect.DOCP) grid = zeros(docp.time.steps+1) ocp = docp.ocp if docp.flags.freet0 || docp.flags.freetf - v = docp.exa_getter(docp_solution; val=:variable) + v = docp.exa_getter(nlp_solution; val=:variable) end if docp.flags.freet0 diff --git a/ext/CTDirectExtIpopt.jl b/ext/CTDirectExtIpopt.jl index 97612859..412e35ba 100644 --- a/ext/CTDirectExtIpopt.jl +++ b/ext/CTDirectExtIpopt.jl @@ -35,7 +35,7 @@ $(TYPEDSIGNATURES) Solve a discretized optimal control problem (Ipopt version). 
""" function CTDirect.solve_docp( - solver_backend::CTDirect.IpoptBackend, + ::CTDirect.IpoptBackend, docp::CTDirect.DOCP; display::Bool=CTDirect.__display(), max_iter::Integer=CTDirect.__max_iterations(), @@ -75,7 +75,7 @@ function CTDirect.solve_docp( solver = IpoptSolver(nlp) # solve discretized problem with NLP solver - docp_solution = solve!( + nlp_solution = solve!( solver, nlp; print_level=print_level, @@ -88,8 +88,8 @@ function CTDirect.solve_docp( kwargs..., ) - # return DOCP solution - return docp_solution + # return NLP solution + return nlp_solution end end diff --git a/ext/CTDirectExtKnitro.jl b/ext/CTDirectExtKnitro.jl index 56932188..70681d2b 100644 --- a/ext/CTDirectExtKnitro.jl +++ b/ext/CTDirectExtKnitro.jl @@ -21,7 +21,7 @@ $(TYPEDSIGNATURES) Solve a discretized optimal control problem with Ipopt """ function CTDirect.solve_docp( - solver_backend::CTDirect.KnitroBackend, + ::CTDirect.KnitroBackend, docp::CTDirect.DOCP; display::Bool=CTDirect.__display(), max_iter::Integer=CTDirect.__max_iterations(), @@ -44,10 +44,10 @@ function CTDirect.solve_docp( ) # solve discretized problem with NLP solver - docp_solution = solve!(solver, nlp) + nlp_solution = solve!(solver, nlp) - # return DOCP solution - return docp_solution + # return NLP solution + return nlp_solution end end diff --git a/ext/CTDirectExtMadNLP.jl b/ext/CTDirectExtMadNLP.jl index e138f25d..0da06ab0 100644 --- a/ext/CTDirectExtMadNLP.jl +++ b/ext/CTDirectExtMadNLP.jl @@ -7,6 +7,7 @@ using DocStringExtensions using MadNLP using HSL using MKL +using NLPModels """ $(TYPEDSIGNATURES) @@ -22,7 +23,7 @@ $(TYPEDSIGNATURES) Solve a discretized optimal control problem DOCP """ function CTDirect.solve_docp( - solver_backend::CTDirect.MadNLPBackend, + ::CTDirect.MadNLPBackend, docp::CTDirect.DOCP; display::Bool=CTDirect.__display(), max_iter::Integer=CTDirect.__max_iterations(), @@ -43,19 +44,19 @@ function CTDirect.solve_docp( ) # solve discretized problem with NLP solver - docp_solution = solve!(solver) + nlp_solution = solve!(solver) - # return DOCP solution - return docp_solution + # return NLP solution + return nlp_solution end -function CTDirect.SolverInfos(nlp_solution::MadNLP.MadNLPExecutionStats) - objective = nlp_solution.objective # NB sign is incorrect for max problems ! +function CTDirect.SolverInfos(nlp_solution::MadNLP.MadNLPExecutionStats, nlp::NLPModels.AbstractNLPModel) + minimize = NLPModels.get_minimize(nlp) + objective = minimize ? nlp_solution.objective : -nlp_solution.objective # sign depends on minimization for MadNLP iterations = nlp_solution.iter constraints_violation = nlp_solution.primal_feas status = Symbol(nlp_solution.status) successful = (status == :SOLVE_SUCCEEDED) || (status == :SOLVED_TO_ACCEPTABLE_LEVEL) - return objective, iterations, constraints_violation, "MadNLP", status, successful end diff --git a/src/CTDirect.jl b/src/CTDirect.jl index 89d9977b..444b4580 100644 --- a/src/CTDirect.jl +++ b/src/CTDirect.jl @@ -4,6 +4,8 @@ using CTBase using CTModels: CTModels using DocStringExtensions using SparseArrays +using SolverCore: SolverCore, AbstractExecutionStats +using NLPModels: NLPModels, AbstractNLPModel # ---------------------------------------------------------------------- # EXTENSIONS diff --git a/src/docp.jl b/src/docp.jl index 73b92fbd..7eb5efff 100644 --- a/src/docp.jl +++ b/src/docp.jl @@ -277,7 +277,7 @@ Struct representing a discretized optimal control problem (DOCP). - `discretization::D`: The discretization scheme. - `ocp::O`: The original OCP model. 
-- `nlp_model::N`: The NLP model backend. +- `nlp_model_backend::N`: The NLP model backend. - `nlp`: The constructed NLP instance. - `exa_getter::Union{Nothing,Function}`: Getter for ExaModels if used. - `flags::DOCPFlags`: Boolean flags describing problem structure. @@ -290,11 +290,11 @@ Struct representing a discretized optimal control problem (DOCP). # Example ```julia-repl -julia> DOCP(ocp, nlp_model) +julia> DOCP(ocp, nlp_model_backend) DOCP{...}(...) ``` """ -mutable struct DOCP{D<:Discretization,O<:CTModels.Model,N<:CTDirect.AbstractNLPModelBackend} +mutable struct DOCP{D<:CTDirect.Discretization,O<:CTModels.Model,N<:CTDirect.AbstractNLPModelBackend} # discretization scheme discretization::D @@ -303,7 +303,7 @@ mutable struct DOCP{D<:Discretization,O<:CTModels.Model,N<:CTDirect.AbstractNLPM ocp::O # parametric instead of just qualifying reduces allocations (but not time). Specialization ? # NLP - nlp_model::N + nlp_model_backend::N nlp exa_getter::Union{Nothing,Function} # getter for ExaModels (if used) @@ -326,7 +326,7 @@ mutable struct DOCP{D<:Discretization,O<:CTModels.Model,N<:CTDirect.AbstractNLPM # constructor function DOCP( ocp::CTModels.Model, - nlp_model; + nlp_model_backend::CTDirect.AbstractNLPModelBackend; grid_size=__grid_size(), time_grid=__time_grid(), disc_method=__disc_method(), @@ -423,10 +423,10 @@ mutable struct DOCP{D<:Discretization,O<:CTModels.Model,N<:CTDirect.AbstractNLPM ) # call constructor with const fields - docp = new{typeof(discretization),typeof(ocp),typeof(nlp_model)}( + docp = new{typeof(discretization),typeof(ocp),typeof(nlp_model_backend)}( discretization, ocp, - nlp_model, + nlp_model_backend, nothing, # nlp nothing, # exa_getter flags, diff --git a/src/solution.jl b/src/solution.jl index c9b09e6c..3b9f13ee 100644 --- a/src/solution.jl +++ b/src/solution.jl @@ -32,14 +32,12 @@ is_empty(t) = (isnothing(t) || length(t) == 0) $(TYPEDSIGNATURES) Build an OCP functional solution from a DOCP discrete solution given as -a `SolverCore.GenericExecutionStats` object. +a `SolverCore.AbstractExecutionStats` object. # Arguments - `docp`: The discretized optimal control problem (`DOCP`). - `nlp_solution`: A solver execution statistics object. -- `nlp_model`: The NLP model backend (default: `ADNLPBackend()`). -- `nlp_solver`: The NLP solver backend (default: `IpoptBackend()`). # Returns @@ -53,20 +51,17 @@ julia> build_OCP_solution(docp, nlp_solution) CTModels.Solution(...) ``` """ -function build_OCP_solution( - docp, nlp_solution; nlp_model=ADNLPBackend(), nlp_solver=IpoptBackend() -) +function build_OCP_solution(docp::DOCP, nlp_solution::SolverCore.AbstractExecutionStats) + + # retrieve NLP model and OCP model + nlp = nlp_model(docp) ocp = ocp_model(docp) + # retrieve NLP model backend + nlp_model_backend = docp.nlp_model_backend + # retrieve data from NLP solver - objective, iterations, constraints_violation, message, status, successful = SolverInfos( - nlp_solution - ) - # fix objective sign for maximization problems with MadNLP - # should be in Solverinfos but needs max info. can we retrieve it from nlp solution ? 
- if docp.flags.max && nlp_solver isa MadNLPBackend - objective = - objective - end + objective, iterations, constraints_violation, message, status, successful = SolverInfos(nlp_solution, nlp) # arrays (explicit conversion for GPU case) solution = Array(nlp_solution.solution) @@ -75,7 +70,7 @@ function build_OCP_solution( multipliers_U = Array(nlp_solution.multipliers_U) # time grid - if nlp_model isa ADNLPBackend + if nlp_model_backend isa ADNLPBackend T = get_time_grid(solution, docp) else T = get_time_grid_exa(nlp_solution, docp) @@ -90,13 +85,13 @@ function build_OCP_solution( solution; multipliers_L=multipliers_L, multipliers_U=multipliers_U, - nlp_model=nlp_model, + nlp_model_backend=nlp_model_backend, nlp_solution=nlp_solution, ) # costate and constraints multipliers P, path_constraints_dual, boundary_constraints_dual = parse_DOCP_solution_dual( - docp, multipliers; nlp_model=nlp_model, nlp_solution=nlp_solution + docp, multipliers; nlp_model_backend=nlp_model_backend, nlp_solution=nlp_solution ) return CTModels.build_solution( @@ -166,13 +161,12 @@ julia> SolverInfos(nlp_solution) (1.23, 15, 1.0e-6, "Ipopt/generic", :first_order, true) ``` """ -function SolverInfos(nlp_solution) +function SolverInfos(nlp_solution::SolverCore.AbstractExecutionStats, ::NLPModels.AbstractNLPModel) objective = nlp_solution.objective iterations = nlp_solution.iter constraints_violation = nlp_solution.primal_feas status = nlp_solution.status successful = (status == :first_order) || (status == :acceptable) - return objective, iterations, constraints_violation, "Ipopt/generic", status, successful end @@ -190,7 +184,7 @@ multipliers. - `dual`: Array of dual variables (default: `nothing`). - `multipliers_L`: Lower bound multipliers (default: `nothing`). - `multipliers_U`: Upper bound multipliers (default: `nothing`). -- `nlp_model`: The NLP model backend (default: `ADNLPBackend()`). +- `nlp_model_backend`: The NLP model backend (default: `ADNLPBackend()`). - `nlp_solution`: A solver execution statistics object. # Returns @@ -211,7 +205,7 @@ function build_OCP_solution( dual=nothing, multipliers_L=nothing, multipliers_U=nothing, - nlp_model=ADNLPBackend(), + nlp_model_backend=ADNLPBackend(), nlp_solution, ) ocp = ocp_model(docp) @@ -224,7 +218,7 @@ function build_OCP_solution( objective = DOCP_objective(solution, docp) # time grid - if nlp_model isa ADNLPBackend + if nlp_model_backend isa ADNLPBackend T = get_time_grid(solution, docp) else T = get_time_grid_exa(nlp_solution, docp) @@ -236,13 +230,13 @@ function build_OCP_solution( solution; multipliers_L=multipliers_L, multipliers_U=multipliers_U, - nlp_model=nlp_model, + nlp_model_backend=nlp_model_backend, nlp_solution=nlp_solution, ) # costate and constraints multipliers P, path_constraints_dual, boundary_constraints_dual = parse_DOCP_solution_dual( - docp, dual; nlp_model=nlp_model, nlp_solution=nlp_solution + docp, dual; nlp_model_backend=nlp_model_backend, nlp_solution=nlp_solution ) return CTModels.build_solution( @@ -281,7 +275,7 @@ variables. Bound multipliers are also parsed if available. - `solution`: Array of primal decision variables. - `multipliers_L`: Lower bound multipliers. - `multipliers_U`: Upper bound multipliers. -- `nlp_model`: The NLP model backend. +- `nlp_model_backend`: The NLP model backend. - `nlp_solution`: A solver execution statistics object. # Returns @@ -296,12 +290,12 @@ variables. Bound multipliers are also parsed if available. 
```julia-repl julia> X, U, v, box_mults = parse_DOCP_solution_primal(docp, primal; - multipliers_L=mL, multipliers_U=mU, nlp_model=nlp_model, nlp_solution=nlp_solution) + multipliers_L=mL, multipliers_U=mU, nlp_model_backend=nlp_model_backend, nlp_solution=nlp_solution) ([...] , [...], [...], (...)) ``` """ function parse_DOCP_solution_primal( - docp, solution; multipliers_L, multipliers_U, nlp_model, nlp_solution + docp, solution; multipliers_L, multipliers_U, nlp_model_backend, nlp_solution ) # state and control variables @@ -318,7 +312,7 @@ function parse_DOCP_solution_primal( mult_variable_box_lower = zeros(size(v)) mult_variable_box_upper = zeros(size(v)) - if nlp_model isa ExaBackend # Exa + if nlp_model_backend isa ExaBackend # Exa getter = docp.exa_getter X[:] = getter(nlp_solution; val=:state)' # transpose to match choice below for ADNLP U[:] = getter(nlp_solution; val=:control)' @@ -387,7 +381,7 @@ variables. - `docp`: The discretized optimal control problem (`DOCP`). - `multipliers`: Array of dual variables (may be `nothing`). -- `nlp_model`: The NLP model backend (default: `ADNLPBackend()`). +- `nlp_model_backend`: The NLP model backend (default: `ADNLPBackend()`). - `nlp_solution`: A solver execution statistics object. # Returns @@ -400,17 +394,17 @@ variables. # Example ```julia-repl -julia> P, path_dual, bound_dual = parse_DOCP_solution_dual(docp, duals; nlp_model=nlp_model, nlp_solution=nlp_solution) +julia> P, path_dual, bound_dual = parse_DOCP_solution_dual(docp, duals; nlp_model_backend=nlp_model_backend, nlp_solution=nlp_solution) ([...] , [...], [...]) ``` """ -function parse_DOCP_solution_dual(docp, multipliers; nlp_model=ADNLPBackend(), nlp_solution) +function parse_DOCP_solution_dual(docp, multipliers; nlp_model_backend=ADNLPBackend(), nlp_solution) # costate N = docp.time.steps P = zeros(N, docp.dims.NLP_x) - if nlp_model isa ExaBackend # Exa + if nlp_model_backend isa ExaBackend # Exa getter = docp.exa_getter P[:] = getter(nlp_solution; val=:costate)' # transpose to match choice below for ADNLP dpc = docp.dims.path_cons diff --git a/src/solve.jl b/src/solve.jl index 409b583a..558ab76c 100644 --- a/src/solve.jl +++ b/src/solve.jl @@ -65,7 +65,7 @@ Build the NLP model for a discretized optimal control problem using the specifie # Arguments - `docp::CTDirect.DOCP`: The discretized optimal control problem. -- `nlp_model::T`: The NLP model backend (subtype of `AbstractNLPModelBackend`). +- `nlp_model_backend::T`: The NLP model backend (subtype of `AbstractNLPModelBackend`). - `x0`: Initial guess for decision variables. # Returns @@ -80,7 +80,11 @@ ERROR: ExtensionError(...) ``` """ function build_nlp!( - docp::CTDirect.DOCP, nlp_model::T, x0; kwargs... + docp::CTDirect.DOCP{ + <:CTDirect.Discretization, + <:CTModels.Model, + T, + }, x0; kwargs... ) where {T<:AbstractNLPModelBackend} throw(CTBase.ExtensionError(WEAKDEPS[T]...)) end @@ -100,7 +104,7 @@ Parse the method description to determine the NLP solver or model. # Returns -- `nlp_solver` or `nlp_model`: The corresponding backend instance. +- `nlp_solver_backend` or `nlp_model_backend`: The corresponding backend instance. 
# Example @@ -120,32 +124,32 @@ function parse_description(description, info) if info == :solver # get NLP solver choice if :ipopt ∈ method - nlp_solver = CTDirect.IpoptBackend() + nlp_solver_backend = CTDirect.IpoptBackend() elseif :madnlp ∈ method - nlp_solver = CTDirect.MadNLPBackend() + nlp_solver_backend = CTDirect.MadNLPBackend() elseif :knitro ∈ method - nlp_solver = CTDirect.KnitroBackend() + nlp_solver_backend = CTDirect.KnitroBackend() else error("no known solver (:ipopt, :madnlp, :knitro) in method", method) end # patch: replaces ipopt by madnlp for :exa as long as the issue with getters for a posteriori treatment is not fixed #=if (:exa ∈ method) && (:ipopt ∈ method) - nlp_solver = CTDirect.MadNLPBackend() + nlp_solver_backend = CTDirect.MadNLPBackend() @warn "currently replacing Ipopt with MadNLP for :exa" end=# - return nlp_solver + return nlp_solver_backend elseif info == :model # get NLP model choice if :adnlp ∈ method - nlp_model = CTDirect.ADNLPBackend() + nlp_model_backend = CTDirect.ADNLPBackend() elseif :exa ∈ method - nlp_model = CTDirect.ExaBackend() + nlp_model_backend = CTDirect.ExaBackend() else error("no known model (:adnlp, :exa) in method", method) end - return nlp_model + return nlp_model_backend else error("parse_description info should be either :solver or :model, got ", info) return nothing @@ -232,14 +236,11 @@ function solve( ) # get NLP solver choice and solve DOCP - nlp_solver = parse_description(description, :solver) - nlp_model = parse_description(description, :model) - docp_solution = CTDirect.solve_docp(nlp_solver, docp; display=display, kwargs...) + nlp_solver_backend = parse_description(description, :solver) + nlp_solution = CTDirect.solve_docp(nlp_solver_backend, docp; display=display, kwargs...) # build and return OCP solution - return build_OCP_solution( - docp, docp_solution; nlp_model=nlp_model, nlp_solver=nlp_solver - ) + return build_OCP_solution(docp, nlp_solution) end """ @@ -353,13 +354,13 @@ function direct_transcription( lagrange_to_mayer=__lagrange_to_mayer(), kwargs..., ) - nlp_model = parse_description(description, :model) + nlp_model_backend = parse_description(description, :model) # build DOCP - if nlp_model isa ExaBackend + if nlp_model_backend isa ExaBackend docp = DOCP( ocp, - nlp_model; + nlp_model_backend; grid_size=grid_size, time_grid=time_grid, disc_method=disc_method, @@ -368,7 +369,7 @@ function direct_transcription( else docp = DOCP( ocp, - nlp_model; + nlp_model_backend; grid_size=grid_size, time_grid=time_grid, disc_method=disc_method, @@ -392,7 +393,6 @@ function direct_transcription( # build nlp build_nlp!( docp, - nlp_model, # +++is now in docp, can be removed x0; grid_size=grid_size, disc_method=disc_method, diff --git a/test/runtests.jl b/test/runtests.jl index 3a267824..e3f18dda 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -5,14 +5,15 @@ using Test using CTBase using CTParser: CTParser, @def using CTModels: - CTModels, objective, state, control, variable, costate, time_grid, iterations -using CTDirect: CTDirect, solve, direct_transcription, set_initial_guess, build_OCP_solution + CTModels, objective, state, control, variable, costate, time_grid, iterations, criterion +using CTDirect: CTDirect, solve, direct_transcription, set_initial_guess, build_OCP_solution, nlp_model, ocp_model # activate NLP modelers using ADNLPModels # + using ExaModels (in test_exa for now) # activate NLP solvers +using NLPModels using NLPModelsIpopt using MadNLPMumps diff --git a/test/suite/test_exa.jl b/test/suite/test_exa.jl 
index 8d09e028..15d6814d 100644 --- a/test/suite/test_exa.jl +++ b/test/suite/test_exa.jl @@ -110,8 +110,9 @@ function test_exa(exa_backend, display) end # goddard2 - @testset verbose = true showtiming = true "goddard2 :examodel :trapeze :grid_size" begin + @testset verbose = true showtiming = true "goddard2 :examodel :trapeze :grid_size :objective" begin prob = goddard2() + # madnlp sol = solve( prob.ocp, :madnlp, @@ -123,14 +124,37 @@ function test_exa(exa_backend, display) ) @test time_grid(sol)[end] ≈ 0.201965 rtol = 1e-2 # check time grid @test objective(sol) ≈ prob.obj rtol = 1e-2 + # ipopt + sol = solve( + prob.ocp, + :ipopt, + :exa; + disc_method=:trapeze, + exa_backend=exa_backend, + display=display, + grid_size=1000, + ) + @test time_grid(sol)[end] ≈ 0.201965 rtol = 1e-2 # check time grid + @test objective(sol) ≈ prob.obj rtol = 1e-2 + end + + @testset verbose = true showtiming = true "goddard2 :examodel :transcription :max" begin + prob = goddard2() + docp = direct_transcription( + prob.ocp, :madnlp, :exa; display=display, disc_method=:trapeze, grid_size=100 + ) + @test NLPModels.get_minimize(nlp_model(docp)) == false + @test CTModels.criterion(ocp_model(docp)) == :max end - @testset verbose = true showtiming = true ":examodel :cpu :transcription :grid_size" begin + @testset verbose = true showtiming = true ":examodel :cpu :transcription :grid_size :min" begin prob = beam2() docp = direct_transcription( prob.ocp, :madnlp, :exa; display=display, disc_method=:trapeze, grid_size=100 ) @test docp.dim_NLP_variables == 303 + @test NLPModels.get_minimize(nlp_model(docp)) == true + @test CTModels.criterion(ocp_model(docp)) == :min end end diff --git a/test/tmp/Project.toml b/test/tmp/Project.toml new file mode 100644 index 00000000..59325a13 --- /dev/null +++ b/test/tmp/Project.toml @@ -0,0 +1,8 @@ +[deps] +ADNLPModels = "54578032-b7ea-4c30-94aa-7cbd1cce6c9a" +DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" +ExaModels = "1037b233-b668-4ce9-9b63-f9f681f55dd2" +Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9" +MadNLPMumps = "3b83494e-c0a4-4895-918b-9157a7a085a1" +NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6" +NLPModelsIpopt = "f4238b75-b362-5c4c-b852-0801c9a21d71" diff --git a/test/tmp/test_madnlp.jl b/test/tmp/test_madnlp.jl new file mode 100644 index 00000000..67eac2f5 --- /dev/null +++ b/test/tmp/test_madnlp.jl @@ -0,0 +1,75 @@ +using Pkg +Pkg.activate(joinpath(@__DIR__)) + +using NLPModels +using ExaModels +using MadNLPMumps +using ADNLPModels +using Ipopt +using NLPModelsIpopt +using DataFrames + +# min f(x) +f(x) = 1+x[1]^2 + +function build_problem(problem_type::Symbol, sense::Symbol) + minimize = sense == :min + if problem_type == :exa + c = ExaCore(; minimize=minimize) + x = variable(c, 1; start=2.0) + expr = minimize ? f(x) : -f(x) + objective(c, expr) + return ExaModel(c) + elseif problem_type == :adnlp + objective_function = minimize ? 
f : (x -> -f(x)) + return ADNLPModel(objective_function, [2.0]; minimize=minimize) + else + error("Unsupported problem type: $(problem_type)") + end +end + +function solve_problem(problem, solver_type::Symbol) + if solver_type == :madnlp + solver = MadNLPSolver(problem; print_level=MadNLP.ERROR) + results = solve!(solver) + println("typeof(results): $(typeof(results))") + return results.objective + elseif solver_type == :ipopt + results = ipopt(problem; print_level=0) + println("typeof(results): $(typeof(results))") + return results.objective + else + error("Unsupported solver type: $(solver_type)") + end +end + +senses = [:min, :max] +solver_types = [:madnlp, :ipopt] +problem_types = [:exa, :adnlp] + +results = DataFrame( + sense = String[], + solver = String[], + problem_type = String[], + objective_value = Float64[], +) + +for sense in senses + for solver_type in solver_types + for problem_type in problem_types + problem = build_problem(problem_type, sense) + objective_value = solve_problem(problem, solver_type) + push!( + results, + ( + sense = String(sense), + solver = String(solver_type), + problem_type = String(problem_type), + objective_value = objective_value, + ), + ) + end + end +end + +results From fb90db208f684fe8ea1c60c554a9ff9b40689126 Mon Sep 17 00:00:00 2001 From: Olivier Cots Date: Fri, 24 Oct 2025 17:52:47 +0200 Subject: [PATCH 3/4] ctparser version --- Project.toml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/Project.toml b/Project.toml index 8c572649..dc96b503 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "CTDirect" uuid = "790bbbee-bee9-49ee-8912-a9de031322d5" -version = "0.17.4" +version = "0.18.0" authors = ["Pierre Martinon "] [workspace] @@ -23,9 +23,6 @@ MadNLP = "2621e9c9-9eb4-46b1-8089-e8c72242dfb6" NLPModelsIpopt = "f4238b75-b362-5c4c-b852-0801c9a21d71" NLPModelsKnitro = "bec4dd0d-7755-52d5-9a02-22f0ffc7efcb" -[sources] -CTParser = {rev = "167-bug-max-objective-for-exa", url = "https://github.com/control-toolbox/CTParser.jl.git"} - [extensions] CTDirectExtADNLP = ["ADNLPModels"] CTDirectExtExa = ["ExaModels"] @@ -37,6 +34,7 @@ CTDirectExtMadNLP = ["MadNLP"] ADNLPModels = "0.8" CTBase = "0.16" CTModels = "0.6" +CTParser = "0.7" CUDA = "5" DocStringExtensions = "0.9" ExaModels = "0.9" From 24b1f7df2bb005bdcf5377b4810336494d4d095d Mon Sep 17 00:00:00 2001 From: Olivier Cots Date: Fri, 24 Oct 2025 21:21:49 +0200 Subject: [PATCH 4/4] foo --- src/CTDirect.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/CTDirect.jl b/src/CTDirect.jl index 444b4580..97a2b078 100644 --- a/src/CTDirect.jl +++ b/src/CTDirect.jl @@ -4,8 +4,8 @@ using CTBase using CTModels: CTModels using DocStringExtensions using SparseArrays -using SolverCore: SolverCore, AbstractExecutionStats -using NLPModels: NLPModels, AbstractNLPModel +using SolverCore: SolverCore +using NLPModels: NLPModels # ---------------------------------------------------------------------- # EXTENSIONS
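
The key behavioral change in PATCH 2/4 is that `CTDirect.SolverInfos` now receives the NLP model alongside the solver stats, so the MadNLP extension can recover the correct objective sign for maximization problems (replacing the removed `docp.flags.max` fix-up in `build_OCP_solution`). Below is a minimal sketch of that sign correction, using only the `NLPModels.get_minimize` accessor already relied on in the patch; the helper name `corrected_objective` is illustrative and not part of CTDirect.

```julia
using NLPModels

# MadNLP always minimizes internally, so when the NLP model was built with
# minimize = false (a :max OCP criterion), the objective it reports carries
# the wrong sign and must be negated before being stored in the OCP solution.
function corrected_objective(stats_objective::Real, nlp::NLPModels.AbstractNLPModel)
    return NLPModels.get_minimize(nlp) ? stats_objective : -stats_objective
end

# Usage sketch: for a maximization model whose solver stats report -1.0,
# corrected_objective(-1.0, nlp) returns +1.0 once get_minimize(nlp) == false.
```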