diff --git a/docs/Project.toml b/docs/Project.toml
index e1f4a80eb5..dedd39cee5 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -3,11 +3,12 @@ BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
 DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
-Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 ModelingToolkit = "961ee093-0014-501f-94e3-6117800e7a78"
 NonlinearSolve = "8913a72c-1f9b-4ce2-8d82-65094dcecaec"
 Optim = "429524aa-4258-5aef-a3af-852621145aeb"
+Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
+OptimizationOptimJL = "36348300-93cb-4f02-beb5-3c3902f8871e"
 OrdinaryDiffEq = "1dea7af3-3e70-54e6-95c3-0bf5283fa5ed"
 Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
 Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
diff --git a/docs/src/systems/OptimizationSystem.md b/docs/src/systems/OptimizationSystem.md
index 8839777999..bc443471d6 100644
--- a/docs/src/systems/OptimizationSystem.md
+++ b/docs/src/systems/OptimizationSystem.md
@@ -8,9 +8,10 @@ OptimizationSystem
 
 ## Composition and Accessor Functions
 
-- `get_eqs(sys)` or `equations(sys)`: The equation to be minimized.
+- `get_op(sys)` or `objective(sys)`: The objective to be minimized.
 - `get_states(sys)` or `states(sys)`: The set of states for the optimization.
 - `get_ps(sys)` or `parameters(sys)`: The parameters for the optimization.
+- `get_constraints(sys)` or `constraints(sys)`: The constraints for the optimization.
 
 ## Transformations
diff --git a/docs/src/tutorials/optimization.md b/docs/src/tutorials/optimization.md
index 40eae2f83f..d2f917ed32 100644
--- a/docs/src/tutorials/optimization.md
+++ b/docs/src/tutorials/optimization.md
@@ -1,26 +1,85 @@
 # Modeling Optimization Problems
-```julia
-using ModelingToolkit, Optimization, OptimizationOptimJL
+## Rosenbrock Function in 2D
+Let's minimize the classical _Rosenbrock function_ in two dimensions.
 
-@variables x y
-@parameters a b
+First, we need to import the required packages.
+```@example rosenbrock_2d
+using ModelingToolkit, Optimization, OptimizationOptimJL
+```
+Now we can define our optimization problem.
+```@example rosenbrock_2d
+@variables begin
+    x, [bounds = (-2.0, 2.0)]
+    y, [bounds = (-1.0, 3.0)]
+end
+@parameters a = 1 b = 1
 loss = (a - x)^2 + b * (y - x^2)^2
-@named sys = OptimizationSystem(loss,[x,y],[a,b])
+@named sys = OptimizationSystem(loss, [x, y], [a, b])
+```
+A visualization of the objective function is depicted below.
+```@eval
+using Plots
+x = -2:0.01:2
+y = -1:0.01:3
+contour(x, y, (x,y) -> (1 - x)^2 + 100 * (y - x^2)^2, fill=true, color=:viridis, ratio=:equal, xlims=(-2, 2))
+```
+
+### Explanation
+Every optimization problem consists of a set of _optimization variables_. In this case, we create two variables and assign _box constraints_ to each of them. In the next step, we create two parameters for the problem with `@parameters`. While this is not required, it makes it easier to `remake` the problem later with different parameter values. Finally, the _objective function_ is specified, and everything is combined into an `OptimizationSystem`.
+
+## Building and Solving the Optimization Problem
+Next, the actual `OptimizationProblem` can be created. At this stage, an initial guess `u0` for the optimization variables needs to be provided as a map, using the symbols from before. Concrete values for the parameters of the system can also be provided or changed. However, if the parameters have default values assigned, those are used automatically.
+```@example rosenbrock_2d
 u0 = [
-    x=>1.0
-    y=>2.0
+    x => 1.0
+    y => 2.0
 ]
 p = [
-    a => 6.0
-    b => 7.0
+    a => 1.0
+    b => 100.0
+]
+
+prob = OptimizationProblem(sys, u0, p, grad=true, hess=true)
+solve(prob, GradientDescent())
+```
+
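+Because the parameters were declared symbolically, the same problem can be reused with
+different parameter values. The snippet below is a minimal sketch of that workflow; it
+assumes the generic SciMLBase `remake` function is available in scope and passes plain
+numeric values in the order `[a, b]` (the values shown are only illustrative).
+```julia
+prob2 = remake(prob; p = [2.0, 50.0])  # hypothetical new values for a and b
+solve(prob2, GradientDescent())
+```
+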
+## Rosenbrock Function with Constraints
+```@example rosenbrock_2d_cstr
+using ModelingToolkit, Optimization, OptimizationOptimJL
+
+@variables begin
+    x, [bounds = (-2.0, 2.0)]
+    y, [bounds = (-1.0, 3.0)]
+end
+@parameters a = 1 b = 100
+loss = (a - x)^2 + b * (y - x^2)^2
+cons = [
+    x^2 + y^2 ≲ 1,
+]
+@named sys = OptimizationSystem(loss, [x, y], [a, b], constraints = cons)
+u0 = [
+    x => 1.0
+    y => 2.0
+]
+prob = OptimizationProblem(sys, u0, grad=true, hess=true)
+solve(prob, IPNewton())
+```
-prob = OptimizationProblem(sys,u0,p,grad=true,hess=true)
-solve(prob,Newton())
+A visualization of the objective function and the inequality constraint is depicted below.
+```@eval
+using Plots
+x = -2:0.01:2
+y = -1:0.01:3
+contour(x, y, (x,y) -> (1 - x)^2 + 100 * (y - x^2)^2, fill=true, color=:viridis, ratio=:equal, xlims=(-2, 2))
+contour!(x, y, (x, y) -> x^2 + y^2, levels=[1], color=:lightblue, line=4)
 ```
+### Explanation
+Equality and inequality constraints can be added to the `OptimizationSystem`. An equality constraint is specified via an `Equation`, e.g., `x^2 + y^2 ~ 1`, while an inequality constraint is specified via an `Inequality`, e.g., `x^2 + y^2 ≲ 1`. The symbols `≲` and `≳` are entered as `\lesssim` and `\gtrsim` followed by tab completion.
+
+## Nested Systems
 Needs more text but it's super cool and auto-parallelizes and sparsifies too. Plus you can hierarchically nest systems to have it generate huge optimization problems.
diff --git a/src/ModelingToolkit.jl b/src/ModelingToolkit.jl
index 9f4baee0bf..80eb652909 100644
--- a/src/ModelingToolkit.jl
+++ b/src/ModelingToolkit.jl
@@ -67,7 +67,15 @@ import Graphs: SimpleDiGraph, add_edge!, incidence_matrix
 for fun in [:toexpr]
     @eval begin
         function $fun(eq::Equation; kw...)
-            Expr(:(=), $fun(eq.lhs; kw...), $fun(eq.rhs; kw...))
+            Expr(:call, :(==), $fun(eq.lhs; kw...), $fun(eq.rhs; kw...))
+        end
+
+        function $fun(ineq::Inequality; kw...)
+            if ineq.relational_op == Symbolics.leq
+                Expr(:call, :(<=), $fun(ineq.lhs; kw...), $fun(ineq.rhs; kw...))
+            else
+                Expr(:call, :(>=), $fun(ineq.lhs; kw...), $fun(ineq.rhs; kw...))
+            end
         end
 
         $fun(eqs::AbstractArray; kw...)
= map(eq -> $fun(eq; kw...), eqs) @@ -86,6 +94,7 @@ abstract type AbstractTimeDependentSystem <: AbstractSystem end abstract type AbstractTimeIndependentSystem <: AbstractSystem end abstract type AbstractODESystem <: AbstractTimeDependentSystem end abstract type AbstractMultivariateSystem <: AbstractSystem end +abstract type AbstractOptimizationSystem <: AbstractTimeIndependentSystem end """ $(TYPEDSIGNATURES) @@ -139,6 +148,7 @@ include("systems/jumps/jumpsystem.jl") include("systems/nonlinear/nonlinearsystem.jl") include("systems/nonlinear/modelingtoolkitize.jl") +include("systems/optimization/constraints_system.jl") include("systems/optimization/optimizationsystem.jl") include("systems/pde/pdesystem.jl") @@ -179,7 +189,7 @@ export OptimizationProblem, OptimizationProblemExpr, constraints export AutoModelingToolkit export SteadyStateProblem, SteadyStateProblemExpr export JumpProblem, DiscreteProblem -export NonlinearSystem, OptimizationSystem +export NonlinearSystem, OptimizationSystem, ConstraintsSystem export alias_elimination, flatten export connect, @connector, Connection, Flow, Stream, instream export isinput, isoutput, getbounds, hasbounds, isdisturbance, istunable, getdist, hasdist, @@ -192,7 +202,7 @@ export Equation, ConstrainedEquation export Term, Sym export SymScope, LocalScope, ParentScope, GlobalScope export independent_variables, independent_variable, states, parameters, equations, controls, - observed, structure, full_equations + observed, structure, full_equations, objective export structural_simplify, expand_connections, linearize, linearization_function export DiscreteSystem, DiscreteProblem diff --git a/src/constants.jl b/src/constants.jl index 49cf716ef2..9c01a04030 100644 --- a/src/constants.jl +++ b/src/constants.jl @@ -32,3 +32,15 @@ macro constants(xs...) xs, toconstant) |> esc end + +""" +Substitute all `@constants` in the given expression +""" +function subs_constants(eqs) + consts = collect_constants(eqs) + if !isempty(consts) + csubs = Dict(c => getdefault(c) for c in consts) + eqs = substitute(eqs, csubs) + end + return eqs +end diff --git a/src/systems/optimization/constraints_system.jl b/src/systems/optimization/constraints_system.jl new file mode 100644 index 0000000000..d8901dd4e5 --- /dev/null +++ b/src/systems/optimization/constraints_system.jl @@ -0,0 +1,222 @@ +""" +$(TYPEDEF) + +A constraint system of equations. + +# Fields +$(FIELDS) + +# Examples + +```julia +@variables x y z +@parameters a b c + +cstr = [0 ~ a*(y-x), + 0 ~ x*(b-z)-y, + 0 ~ x*y - c*z + x^2 + y^2 ≲ 1] +@named ns = ConstraintsSystem(cstr, [x,y,z],[a,b,c]) +``` +""" +struct ConstraintsSystem <: AbstractTimeIndependentSystem + """ + tag: a tag for the system. If two system have the same tag, then they are + structurally identical. + """ + tag::UInt + """Vector of equations defining the system.""" + constraints::Vector{Union{Equation, Inequality}} + """Unknown variables.""" + states::Vector + """Parameters.""" + ps::Vector + """Array variables.""" + var_to_name::Any + """Observed variables.""" + observed::Vector{Equation} + """ + Jacobian matrix. Note: this field will not be defined until + [`calculate_jacobian`](@ref) is called on the system. + """ + jac::RefValue{Any} + """ + Name: the name of the system. These are required to have unique names. + """ + name::Symbol + """ + systems: The internal systems + """ + systems::Vector{ConstraintsSystem} + """ + defaults: The default values to use when initial conditions and/or + parameters are not supplied in `ODEProblem`. 
+ """ + defaults::Dict + """ + type: type of the system + """ + connector_type::Any + """ + metadata: metadata for the system, to be used by downstream packages. + """ + metadata::Any + """ + tearing_state: cache for intermediate tearing state + """ + tearing_state::Any + """ + substitutions: substitutions generated by tearing. + """ + substitutions::Any + + function ConstraintsSystem(tag, constraints, states, ps, var_to_name, observed, jac, + name, + systems, + defaults, connector_type, metadata = nothing, + tearing_state = nothing, substitutions = nothing; + checks::Union{Bool, Int} = true) + if checks == true || (checks & CheckUnits) > 0 + all_dimensionless([states; ps]) || check_units(constraints) + end + new(tag, constraints, states, ps, var_to_name, observed, jac, name, systems, + defaults, + connector_type, metadata, tearing_state, substitutions) + end +end + +equations(sys::ConstraintsSystem) = constraints(sys) # needed for Base.show + +function ConstraintsSystem(constraints, states, ps; + observed = [], + name = nothing, + default_u0 = Dict(), + default_p = Dict(), + defaults = _merge(Dict(default_u0), Dict(default_p)), + systems = ConstraintsSystem[], + connector_type = nothing, + continuous_events = nothing, # this argument is only required for ODESystems, but is added here for the constructor to accept it without error + discrete_events = nothing, # this argument is only required for ODESystems, but is added here for the constructor to accept it without error + checks = true, + metadata = nothing) + continuous_events === nothing || isempty(continuous_events) || + throw(ArgumentError("ConstraintsSystem does not accept `continuous_events`, you provided $continuous_events")) + discrete_events === nothing || isempty(discrete_events) || + throw(ArgumentError("ConstraintsSystem does not accept `discrete_events`, you provided $discrete_events")) + + name === nothing && + throw(ArgumentError("The `name` keyword must be provided. Please consider using the `@named` macro")) + + cstr = value.(Symbolics.canonical_form.(scalarize(constraints))) + states′ = value.(scalarize(states)) + ps′ = value.(scalarize(ps)) + + if !(isempty(default_u0) && isempty(default_p)) + Base.depwarn("`default_u0` and `default_p` are deprecated. 
Use `defaults` instead.", + :ConstraintsSystem, force = true) + end + sysnames = nameof.(systems) + if length(unique(sysnames)) != length(sysnames) + throw(ArgumentError("System names must be unique.")) + end + + jac = RefValue{Any}(EMPTY_JAC) + defaults = todict(defaults) + defaults = Dict(value(k) => value(v) for (k, v) in pairs(defaults)) + + var_to_name = Dict() + process_variables!(var_to_name, defaults, states′) + process_variables!(var_to_name, defaults, ps′) + isempty(observed) || collect_var_to_name!(var_to_name, (eq.lhs for eq in observed)) + + ConstraintsSystem(Threads.atomic_add!(SYSTEM_COUNT, UInt(1)), + cstr, states, ps, var_to_name, observed, jac, name, systems, + defaults, + connector_type, metadata, checks = checks) +end + +function calculate_jacobian(sys::ConstraintsSystem; sparse = false, simplify = false) + cache = get_jac(sys)[] + if cache isa Tuple && cache[2] == (sparse, simplify) + return cache[1] + end + + lhss = generate_canonical_form_lhss(sys) + vals = [dv for dv in states(sys)] + if sparse + jac = sparsejacobian(lhss, vals, simplify = simplify) + else + jac = jacobian(lhss, vals, simplify = simplify) + end + get_jac(sys)[] = jac, (sparse, simplify) + return jac +end + +function generate_jacobian(sys::ConstraintsSystem, vs = states(sys), ps = parameters(sys); + sparse = false, simplify = false, kwargs...) + jac = calculate_jacobian(sys, sparse = sparse, simplify = simplify) + return build_function(jac, vs, ps; kwargs...) +end + +function calculate_hessian(sys::ConstraintsSystem; sparse = false, simplify = false) + lhss = generate_canonical_form_lhss(sys) + vals = [dv for dv in states(sys)] + if sparse + hess = [sparsehessian(lhs, vals, simplify = simplify) for lhs in lhss] + else + hess = [hessian(lhs, vals, simplify = simplify) for lhs in lhss] + end + return hess +end + +function generate_hessian(sys::ConstraintsSystem, vs = states(sys), ps = parameters(sys); + sparse = false, simplify = false, kwargs...) + hess = calculate_hessian(sys, sparse = sparse, simplify = simplify) + return build_function(hess, vs, ps; kwargs...) +end + +function generate_function(sys::ConstraintsSystem, dvs = states(sys), ps = parameters(sys); + kwargs...) + lhss = generate_canonical_form_lhss(sys) + pre, sol_states = get_substitutions_and_solved_states(sys) + + func = build_function(lhss, value.(dvs), value.(ps); postprocess_fbody = pre, + states = sol_states, kwargs...) + + cstr = constraints(sys) + lcons = fill(-Inf, length(cstr)) + ucons = zeros(length(cstr)) + lcons[findall(Base.Fix2(isa, Equation), cstr)] .= 0.0 + + return func, lcons, ucons +end + +function jacobian_sparsity(sys::ConstraintsSystem) + lhss = generate_canonical_form_lhss(sys) + jacobian_sparsity(lhss, states(sys)) +end + +function hessian_sparsity(sys::ConstraintsSystem) + lhss = generate_canonical_form_lhss(sys) + [hessian_sparsity(eq, states(sys)) for eq in lhss] +end + +""" +Convert the system of equalities and inequalities into a canonical form: +h(x) = 0 +g(x) <= 0 +""" +function generate_canonical_form_lhss(sys) + lhss = subs_constants([Symbolics.canonical_form(eq).lhs for eq in constraints(sys)]) +end + +function get_cmap(sys::ConstraintsSystem) + #Inject substitutions for constants => values + cs = collect_constants([get_constraints(sys); get_observed(sys)]) #ctrls? what else? 
+ if !empty_substitutions(sys) + cs = [cs; collect_constants(get_substitutions(sys).subs)] + end + # Swap constants for their values + cmap = map(x -> x ~ getdefault(x), cs) + return cmap, cs +end diff --git a/src/systems/optimization/optimizationsystem.jl b/src/systems/optimization/optimizationsystem.jl index ea38bd1ee1..1673a9fe7d 100644 --- a/src/systems/optimization/optimizationsystem.jl +++ b/src/systems/optimization/optimizationsystem.jl @@ -12,11 +12,12 @@ $(FIELDS) @variables x y z @parameters a b c -op = a*(y-x) + x*(b-z)-y + x*y - c*z -@named os = OptimizationSystem(op, [x,y,z], [a,b,c]) +obj = a * (y - x) + x * (b - z) - y + x * y - c * z +cons = [x^2 + y^2 ≲ 1] +@named os = OptimizationSystem(obj, [x, y, z], [a, b, c]; constraints = cons) ``` """ -struct OptimizationSystem <: AbstractTimeIndependentSystem +struct OptimizationSystem <: AbstractOptimizationSystem """ tag: a tag for the system. If two system have the same tag, then they are structurally identical. @@ -30,9 +31,10 @@ struct OptimizationSystem <: AbstractTimeIndependentSystem ps::Vector """Array variables.""" var_to_name::Any + """Observed variables.""" observed::Vector{Equation} """List of constraint equations of the system.""" - constraints::Vector # {Union{Equation,Inequality}} + constraints::Vector{Union{Equation, Inequality}} """The unique name of the system.""" name::Symbol """The internal systems.""" @@ -64,6 +66,8 @@ struct OptimizationSystem <: AbstractTimeIndependentSystem end end +equations(sys::AbstractOptimizationSystem) = objective(sys) # needed for Base.show + function OptimizationSystem(op, states, ps; observed = [], constraints = [], @@ -80,6 +84,7 @@ function OptimizationSystem(op, states, ps; constraints = value.(scalarize(constraints)) states′ = value.(scalarize(states)) ps′ = value.(scalarize(ps)) + op′ = value(scalarize(op)) if !(isempty(default_u0) && isempty(default_p)) Base.depwarn("`default_u0` and `default_p` are deprecated. Use `defaults` instead.", @@ -98,14 +103,14 @@ function OptimizationSystem(op, states, ps; isempty(observed) || collect_var_to_name!(var_to_name, (eq.lhs for eq in observed)) OptimizationSystem(Threads.atomic_add!(SYSTEM_COUNT, UInt(1)), - value(op), states′, ps′, var_to_name, + op′, states′, ps′, var_to_name, observed, constraints, name, systems, defaults, metadata; checks = checks) end function calculate_gradient(sys::OptimizationSystem) - expand_derivatives.(gradient(equations(sys), states(sys))) + expand_derivatives.(gradient(objective(sys), states(sys))) end function generate_gradient(sys::OptimizationSystem, vs = states(sys), ps = parameters(sys); @@ -117,13 +122,13 @@ function generate_gradient(sys::OptimizationSystem, vs = states(sys), ps = param end function calculate_hessian(sys::OptimizationSystem) - expand_derivatives.(hessian(equations(sys), states(sys))) + expand_derivatives.(hessian(objective(sys), states(sys))) end function generate_hessian(sys::OptimizationSystem, vs = states(sys), ps = parameters(sys); sparse = false, kwargs...) if sparse - hess = sparsehessian(equations(sys), states(sys)) + hess = sparsehessian(objective(sys), states(sys)) else hess = calculate_hessian(sys) end @@ -134,17 +139,12 @@ end function generate_function(sys::OptimizationSystem, vs = states(sys), ps = parameters(sys); kwargs...) 
- eqs = equations(sys) - consts = collect_constants(eqs) - if !isempty(consts) # The SymbolicUtils._build_function method of this case doesn't support postprocess_fbody - csubs = Dict(c => getdefault(c) for c in consts) - eqs = substitute(eqs, csubs) - end + eqs = subs_constants(objective(sys)) return build_function(eqs, vs, ps; conv = AbstractSysToExpr(sys), kwargs...) end -function equations(sys::OptimizationSystem) +function objective(sys) op = get_op(sys) systems = get_systems(sys) if isempty(systems) @@ -156,23 +156,21 @@ end namespace_constraint(eq::Equation, sys) = namespace_equation(eq, sys) -# namespace_constraint(ineq::Inequality, sys) = namespace_inequality(ineq, sys) +namespace_constraint(ineq::Inequality, sys) = namespace_inequality(ineq, sys) -# function namespace_inequality(ineq::Inequality, sys, n = nameof(sys)) -# _lhs = namespace_expr(ineq.lhs, sys, n) -# _rhs = namespace_expr(ineq.rhs, sys, n) -# Inequality( -# namespace_expr(_lhs, sys, n), -# namespace_expr(_rhs, sys, n), -# ineq.relational_op, -# ) -# end +function namespace_inequality(ineq::Inequality, sys, n = nameof(sys)) + _lhs = namespace_expr(ineq.lhs, sys, n) + _rhs = namespace_expr(ineq.rhs, sys, n) + Inequality(_lhs, + _rhs, + ineq.relational_op) +end -function namespace_constraints(sys::OptimizationSystem) +function namespace_constraints(sys) namespace_constraint.(get_constraints(sys), Ref(sys)) end -function constraints(sys::OptimizationSystem) +function constraints(sys) cs = get_constraints(sys) systems = get_systems(sys) isempty(systems) ? cs : [cs; reduce(vcat, namespace_constraints.(systems))] @@ -191,7 +189,6 @@ function rep_pars_vals!(e, p) end ```julia function DiffEqBase.OptimizationProblem{iip}(sys::OptimizationSystem,u0map, parammap=DiffEqBase.NullParameters(); - lb=nothing, ub=nothing, grad = false, hess = false, sparse = false, checkbounds = false, @@ -214,34 +211,55 @@ function DiffEqBase.OptimizationProblem{iip}(sys::OptimizationSystem, u0map, linenumbers = true, parallel = SerialForm(), use_union = false, kwargs...) where {iip} + if haskey(kwargs, :lcons) || haskey(kwargs, :ucons) + Base.depwarn("`lcons` and `ucons` are deprecated. 
Specify constraints directly instead.", + :OptimizationProblem, force = true) + end + dvs = states(sys) ps = parameters(sys) cstr = constraints(sys) + if isnothing(lb) && isnothing(ub) # use the symbolically specified bounds + lb = first.(getbounds.(dvs)) + ub = last.(getbounds.(dvs)) + lb[isbinaryvar.(dvs)] .= 0 + ub[isbinaryvar.(dvs)] .= 1 + else # use the user supplied variable bounds + xor(isnothing(lb), isnothing(ub)) && + throw(ArgumentError("Expected both `lb` and `ub` to be supplied")) + !isnothing(lb) && length(lb) != length(dvs) && + throw(ArgumentError("Expected both `lb` to be of the same length as the vector of optimization variables")) + !isnothing(ub) && length(ub) != length(dvs) && + throw(ArgumentError("Expected both `ub` to be of the same length as the vector of optimization variables")) + end + + int = isintegervar.(dvs) .| isbinaryvar.(dvs) + defs = defaults(sys) defs = mergedefaults(defs, parammap, ps) defs = mergedefaults(defs, u0map, dvs) u0 = varmap_to_vars(u0map, dvs; defaults = defs, tofloat = false) p = varmap_to_vars(parammap, ps; defaults = defs, tofloat = false, use_union) - lb = varmap_to_vars(lb, dvs; check = false, tofloat = false, use_union) - ub = varmap_to_vars(ub, dvs; check = false, tofloat = false, use_union) + lb = varmap_to_vars(dvs .=> lb, dvs; defaults = defs, tofloat = false, use_union) + ub = varmap_to_vars(dvs .=> ub, dvs; defaults = defs, tofloat = false, use_union) + + if !isnothing(lb) && all(lb .== -Inf) && !isnothing(ub) && all(ub .== Inf) + lb = nothing + ub = nothing + end f = generate_function(sys, checkbounds = checkbounds, linenumbers = linenumbers, expression = Val{false}) - eqs = equations(sys) - cs = collect_constants(eqs) - if !isempty(cs) - cmap = map(x -> x => getdefault(x), cs) - eqs = substitute(eqs, cmap) + + obj_expr = toexpr(subs_constants(objective(sys))) + pairs_arr = if p isa SciMLBase.NullParameters + [Symbol(_s) => Expr(:ref, :x, i) for (i, _s) in enumerate(dvs)] + else + vcat([Symbol(_s) => Expr(:ref, :x, i) for (i, _s) in enumerate(dvs)], + [Symbol(_p) => p[i] for (i, _p) in enumerate(ps)]) end - obj_expr = toexpr(eqs) - pairs_arr = p isa SciMLBase.NullParameters ? 
- [Symbol(_s) => Expr(:ref, :x, i) for (i, _s) in enumerate(dvs)] : - [ - [Symbol(_s) => Expr(:ref, :x, i) for (i, _s) in enumerate(dvs)]..., - [Symbol(_p) => p[i] for (i, _p) in enumerate(ps)]..., - ] rep_pars_vals!(obj_expr, pairs_arr) if grad grad_oop, grad_iip = generate_gradient(sys, checkbounds = checkbounds, @@ -271,21 +289,30 @@ function DiffEqBase.OptimizationProblem{iip}(sys::OptimizationSystem, u0map, end if length(cstr) > 0 - @named cons_sys = NonlinearSystem(cstr, dvs, ps) - cons = generate_function(cons_sys, checkbounds = checkbounds, - linenumbers = linenumbers, - expression = Val{false})[2] + @named cons_sys = ConstraintsSystem(cstr, dvs, ps) + cons, lcons_, ucons_ = generate_function(cons_sys, checkbounds = checkbounds, + linenumbers = linenumbers, + expression = Val{false}) cons_j = generate_jacobian(cons_sys; expression = Val{false}, sparse = sparse)[2] cons_h = generate_hessian(cons_sys; expression = Val{false}, sparse = sparse)[2] - eqs = equations(cons_sys) - cs = collect_constants(eqs) - if !isempty(cs) - cmap = map(x -> x => getdefault(x), cs) - eqs = map(x -> x.lhs ~ substitute(x.rhs, cmap), eqs) - end - cons_expr = toexpr(eqs) + + cons_expr = toexpr.(subs_constants(constraints(cons_sys))) rep_pars_vals!.(cons_expr, Ref(pairs_arr)) + if !haskey(kwargs, :lcons) && !haskey(kwargs, :ucons) # use the symbolically specified bounds + lcons = lcons_ + ucons = ucons_ + else # use the user supplied constraints bounds + haskey(kwargs, :lcons) && haskey(kwargs, :ucons) && + throw(ArgumentError("Expected both `ucons` and `lcons` to be supplied")) + haskey(kwargs, :lcons) && length(kwargs[:lcons]) != length(cstr) && + throw(ArgumentError("Expected `lcons` to be of the same length as the vector of constraints")) + haskey(kwargs, :ucons) && length(kwargs[:ucons]) != length(cstr) && + throw(ArgumentError("Expected `ucons` to be of the same length as the vector of constraints")) + lcons = haskey(kwargs, :lcons) + ucons = haskey(kwargs, :ucons) + end + if sparse cons_jac_prototype = jacobian_sparsity(cons_sys) cons_hess_prototype = hessian_sparsity(cons_sys) @@ -301,13 +328,15 @@ function DiffEqBase.OptimizationProblem{iip}(sys::OptimizationSystem, u0map, hess_prototype = hess_prototype, syms = Symbol.(states(sys)), paramsyms = Symbol.(parameters(sys)), - cons = cons, + cons = cons[2], cons_j = cons_j, cons_h = cons_h, cons_jac_prototype = cons_jac_prototype, cons_hess_prototype = cons_hess_prototype, expr = obj_expr, cons_expr = cons_expr) + OptimizationProblem{iip}(_f, u0, p; lb = lb, ub = ub, int = int, + lcons = lcons, ucons = ucons, kwargs...) else _f = DiffEqBase.OptimizationFunction{iip}(f, sys = sys, @@ -318,16 +347,16 @@ function DiffEqBase.OptimizationProblem{iip}(sys::OptimizationSystem, u0map, paramsyms = Symbol.(parameters(sys)), hess_prototype = hess_prototype, expr = obj_expr) + OptimizationProblem{iip}(_f, u0, p; lb = lb, ub = ub, int = int, + kwargs...) end - - OptimizationProblem{iip}(_f, u0, p; lb = lb, ub = ub, kwargs...) end """ ```julia function DiffEqBase.OptimizationProblemExpr{iip}(sys::OptimizationSystem, parammap=DiffEqBase.NullParameters(); - u0=nothing, lb=nothing, ub=nothing, + u0=nothing, grad = false, hes = false, sparse = false, checkbounds = false, @@ -345,7 +374,7 @@ function OptimizationProblemExpr(sys::OptimizationSystem, args...; kwargs...) OptimizationProblemExpr{true}(sys::OptimizationSystem, args...; kwargs...) 
end -function OptimizationProblemExpr{iip}(sys::OptimizationSystem, u0map, +function OptimizationProblemExpr{iip}(sys::OptimizationSystem, u0, parammap = DiffEqBase.NullParameters(); lb = nothing, ub = nothing, grad = false, @@ -354,8 +383,45 @@ function OptimizationProblemExpr{iip}(sys::OptimizationSystem, u0map, linenumbers = false, parallel = SerialForm(), use_union = false, kwargs...) where {iip} + if haskey(kwargs, :lcons) || haskey(kwargs, :ucons) + Base.depwarn("`lcons` and `ucons` are deprecated. Specify constraints directly instead.", + :OptimizationProblem, force = true) + end + dvs = states(sys) ps = parameters(sys) + cstr = constraints(sys) + + if isnothing(lb) && isnothing(ub) # use the symbolically specified bounds + lb = first.(getbounds.(dvs)) + ub = last.(getbounds.(dvs)) + lb[isbinaryvar.(dvs)] .= 0 + ub[isbinaryvar.(dvs)] .= 1 + else # use the user supplied variable bounds + xor(isnothing(lb), isnothing(ub)) && + throw(ArgumentError("Expected both `lb` and `ub` to be supplied")) + !isnothing(lb) && length(lb) != length(dvs) && + throw(ArgumentError("Expected `lb` to be of the same length as the vector of optimization variables")) + !isnothing(ub) && length(ub) != length(dvs) && + throw(ArgumentError("Expected `ub` to be of the same length as the vector of optimization variables")) + end + + int = isintegervar.(dvs) .| isbinaryvar.(dvs) + + defs = defaults(sys) + defs = mergedefaults(defs, parammap, ps) + defs = mergedefaults(defs, u0map, dvs) + + u0 = varmap_to_vars(u0map, dvs; defaults = defs, tofloat = false) + p = varmap_to_vars(parammap, ps; defaults = defs, tofloat = false, use_union) + lb = varmap_to_vars(dvs .=> lb, dvs; defaults = defs, tofloat = false, use_union) + ub = varmap_to_vars(dvs .=> ub, dvs; defaults = defs, tofloat = false, use_union) + + if !isnothing(lb) && all(lb .== -Inf) && !isnothing(ub) && all(ub .== Inf) + lb = nothing + ub = nothing + end + idx = iip ? 2 : 1 f = generate_function(sys, checkbounds = checkbounds, linenumbers = linenumbers, expression = Val{true}) @@ -380,47 +446,40 @@ function OptimizationProblemExpr{iip}(sys::OptimizationSystem, u0map, hess_prototype = nothing end - defs = defaults(sys) - defs = mergedefaults(defs, parammap, ps) - defs = mergedefaults(defs, u0map, dvs) - - u0 = varmap_to_vars(u0map, dvs; defaults = defs, tofloat = false) - p = varmap_to_vars(parammap, ps; defaults = defs, tofloat = false, use_union) - lb = varmap_to_vars(lb, dvs; check = false, tofloat = false, use_union) - ub = varmap_to_vars(ub, dvs; check = false, tofloat = false, use_union) - - eqs = equations(sys) - cs = collect_constants(eqs) - if !isempty(cs) - cmap = map(x -> x => getdefault(x), cs) - eqs = substitute(eqs, cmap) + obj_expr = toexpr(subs_constants(objective(sys))) + pairs_arr = if p isa SciMLBase.NullParameters + [Symbol(_s) => Expr(:ref, :x, i) for (i, _s) in enumerate(dvs)] + else + vcat([Symbol(_s) => Expr(:ref, :x, i) for (i, _s) in enumerate(dvs)], + [Symbol(_p) => p[i] for (i, _p) in enumerate(ps)]) end - obj_expr = toexpr(eqs) - pairs_arr = p isa SciMLBase.NullParameters ? 
- [Symbol(_s) => Expr(:ref, :x, i) for (i, _s) in enumerate(dvs)] : - [ - [Symbol(_s) => Expr(:ref, :x, i) for (i, _s) in enumerate(dvs)]..., - [Symbol(_p) => p[i] for (i, _p) in enumerate(ps)]..., - ] rep_pars_vals!(obj_expr, pairs_arr) - if length(sys.constraints) > 0 - @named cons_sys = NonlinearSystem(sys.constraints, dvs, ps) - cons = generate_function(cons_sys, checkbounds = checkbounds, - linenumbers = linenumbers, - expression = Val{false})[1] - cons_j = generate_jacobian(cons_sys; expression = Val{false}, sparse = sparse)[2] + if length(cstr) > 0 + @named cons_sys = ConstraintsSystem(cstr, dvs, ps) + cons, lcons_, ucons_ = generate_function(cons_sys, checkbounds = checkbounds, + linenumbers = linenumbers, + expression = Val{false}) + cons_j = generate_jacobian(cons_sys; expression = Val{false}, sparse = sparse)[2] cons_h = generate_hessian(cons_sys; expression = Val{false}, sparse = sparse)[2] - eqs = equations(cons_sys) - cs = collect_constants(eqs) - if !isempty(cs) - cmap = map(x -> x => getdefault(x), cs) - eqs = map(x -> x.lhs ~ substitute(x.rhs, cmap), eqs) - end - cons_expr = toexpr(eqs) + cons_expr = toexpr.(subs_constants(constraints(cons_sys))) rep_pars_vals!.(cons_expr, Ref(pairs_arr)) + if !haskey(kwargs, :lcons) && !haskey(kwargs, :ucons) # use the symbolically specified bounds + lcons = lcons_ + ucons = ucons_ + else # use the user supplied constraints bounds + haskey(kwargs, :lcons) && haskey(kwargs, :ucons) && + throw(ArgumentError("Expected both `ucons` and `lcons` to be supplied")) + haskey(kwargs, :lcons) && length(kwargs[:lcons]) != length(cstr) && + throw(ArgumentError("Expected `lcons` to be of the same length as the vector of constraints")) + haskey(kwargs, :ucons) && length(kwargs[:ucons]) != length(cstr) && + throw(ArgumentError("Expected `ucons` to be of the same length as the vector of constraints")) + lcons = haskey(kwargs, :lcons) + ucons = haskey(kwargs, :ucons) + end + if sparse cons_jac_prototype = jacobian_sparsity(cons_sys) cons_hess_prototype = hessian_sparsity(cons_sys) @@ -428,6 +487,7 @@ function OptimizationProblemExpr{iip}(sys::OptimizationSystem, u0map, cons_jac_prototype = nothing cons_hess_prototype = nothing end + quote f = $f p = $p @@ -436,7 +496,10 @@ function OptimizationProblemExpr{iip}(sys::OptimizationSystem, u0map, hess = $_hess lb = $lb ub = $ub - cons = $cons + int = $int + cons = $cons[1] + lcons = $lcons + ucons = $ucons cons_j = $cons_j cons_h = $cons_h syms = $(Symbol.(states(sys))) @@ -454,7 +517,8 @@ function OptimizationProblemExpr{iip}(sys::OptimizationSystem, u0map, cons_hess_prototype = cons_hess_prototype, expr = obj_expr, cons_expr = cons_expr) - OptimizationProblem{$iip}(_f, u0, p; lb = lb, ub = ub, kwargs...) + OptimizationProblem{$iip}(_f, u0, p; lb = lb, ub = ub, int = int, lcons = lcons, + ucons = ucons, kwargs...) end else quote @@ -465,6 +529,7 @@ function OptimizationProblemExpr{iip}(sys::OptimizationSystem, u0map, hess = $_hess lb = $lb ub = $ub + int = $int syms = $(Symbol.(states(sys))) paramsyms = $(Symbol.(parameters(sys))) _f = OptimizationFunction{iip}(f, SciMLBase.NoAD(); @@ -474,7 +539,7 @@ function OptimizationProblemExpr{iip}(sys::OptimizationSystem, u0map, paramsyms = paramsyms, hess_prototype = hess_prototype, expr = obj_expr) - OptimizationProblem{$iip}(_f, u0, p; lb = lb, ub = ub, kwargs...) + OptimizationProblem{$iip}(_f, u0, p; lb = lb, ub = ub, int = int, kwargs...) 
end end end diff --git a/src/utils.jl b/src/utils.jl index 4ffd782249..25f8461f3d 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -522,6 +522,11 @@ function collect_constants!(constants, eq::Equation) collect_constants!(constants, eq.rhs) end +function collect_constants!(constants, eq::Inequality) + collect_constants!(constants, eq.lhs) + collect_constants!(constants, eq.rhs) +end + collect_constants!(constants, x::Num) = collect_constants!(constants, unwrap(x)) collect_constants!(constants, x::Real) = nothing collect_constants(n::Nothing) = Symbolics.Sym[] @@ -592,7 +597,7 @@ function empty_substitutions(sys) isnothing(subs) || isempty(subs.deps) end -function get_substitutions_and_solved_states(sys; no_postprocess = false) +function get_cmap(sys) #Inject substitutions for constants => values cs = collect_constants([get_eqs(sys); get_observed(sys)]) #ctrls? what else? if !empty_substitutions(sys) @@ -600,7 +605,11 @@ function get_substitutions_and_solved_states(sys; no_postprocess = false) end # Swap constants for their values cmap = map(x -> x ~ getdefault(x), cs) + return cmap, cs +end +function get_substitutions_and_solved_states(sys; no_postprocess = false) + cmap, cs = get_cmap(sys) if empty_substitutions(sys) && isempty(cs) sol_states = Code.LazyState() pre = no_postprocess ? (ex -> ex) : get_postprocess_fbody(sys) diff --git a/test/optimizationsystem.jl b/test/optimizationsystem.jl index 066eb34ad8..41bbac1f92 100644 --- a/test/optimizationsystem.jl +++ b/test/optimizationsystem.jl @@ -2,104 +2,128 @@ using ModelingToolkit, SparseArrays, Test, Optimization, OptimizationOptimJL, OptimizationMOI, Ipopt, AmplNLWriter, Ipopt_jll using ModelingToolkit: get_metadata -@variables x y -@constants h = 1 -@parameters a b -loss = (a - x)^2 + b * (y * h - x^2)^2 -sys1 = OptimizationSystem(loss, [x, y], [a, b], name = :sys1) - -cons2 = [x^2 + y^2 ~ 0, y * sin(x) - x * h ~ 0] -sys2 = OptimizationSystem(loss, [x, y], [a, b], name = :sys2, constraints = cons2) - -@variables z -@parameters β -loss2 = sys1.x - sys2.y + z * β * h -combinedsys = OptimizationSystem(loss2, [z], [β], systems = [sys1, sys2], - name = :combinedsys) - -equations(combinedsys) -states(combinedsys) -parameters(combinedsys) - -calculate_gradient(combinedsys) -calculate_hessian(combinedsys) -generate_function(combinedsys) -generate_gradient(combinedsys) -generate_hessian(combinedsys) -hess_sparsity = ModelingToolkit.hessian_sparsity(sys1) -sparse_prob = OptimizationProblem(sys1, [x, y], [a, b], grad = true, sparse = true) -OptimizationProblemExpr{true}(sys1, [x => 0, y => 0], [a => 0, b => 0];); -@test sparse_prob.f.hess_prototype.rowval == hess_sparsity.rowval -@test sparse_prob.f.hess_prototype.colptr == hess_sparsity.colptr - -u0 = [sys1.x => 1.0 - sys1.y => 2.0 - sys2.x => 3.0 - sys2.y => 4.0 - z => 5.0] -p = [sys1.a => 6.0 - sys1.b => 7.0 - sys2.a => 8.0 - sys2.b => 9.0 - β => 10.0] - -prob = OptimizationProblem(combinedsys, u0, p, grad = true, hess = true) -@test prob.f.sys === combinedsys -sol = solve(prob, Ipopt.Optimizer()) -@test sol.minimum < -1e5 - -#inequality constraint, the bounds for constraints lcons !== ucons -prob = OptimizationProblem(sys2, [x => 0.0, y => 0.0], [a => 1.0, b => 100.0], - lcons = [-1.0, -1.0], ucons = [500.0, 500.0], grad = true, - hess = true) -@test prob.f.sys === sys2 -sol = solve(prob, IPNewton()) -@test sol.minimum < 1.0 -sol = solve(prob, Ipopt.Optimizer()) -@test sol.minimum < 1.0 -sol = solve(prob, AmplNLWriter.Optimizer(Ipopt_jll.amplexe)) -@test sol.minimum < 1.0 - -#equality 
constraint, lcons == ucons -cons2 = [0.0 ~ h * x^2 + y^2] -out = zeros(1) -sys2 = OptimizationSystem(loss, [x, y], [a, b], name = :sys2, constraints = cons2) -prob = OptimizationProblem(sys2, [x => 0.0, y => 0.0], [a => 1.0, b => 1.0], lcons = [1.0], - ucons = [1.0], grad = true, hess = true) -sol = solve(prob, IPNewton()) -@test sol.minimum < 1.0 -prob.f.cons(out, sol.minimizer, [1.0, 1.0]) -@test out ≈ [1.0] -sol = solve(prob, Ipopt.Optimizer()) -@test sol.minimum < 1.0 -prob.f.cons(out, sol.minimizer, [1.0, 1.0]) -@test out ≈ [1.0] -sol = solve(prob, AmplNLWriter.Optimizer(Ipopt_jll.amplexe)) -@test sol.minimum < 1.0 -prob.f.cons(out, sol.minimizer, [1.0, 1.0]) -@test out ≈ [1.0] - -rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2 -x0 = zeros(2) -_p = [1.0, 100.0] - -f = OptimizationFunction(rosenbrock, Optimization.AutoModelingToolkit()) -prob = OptimizationProblem(f, x0, _p) -sol = solve(prob, Newton()) +@testset "basic" begin + @variables x y + @parameters a b + loss = (a - x)^2 + b * (y - x^2)^2 + sys1 = OptimizationSystem(loss, [x, y], [a, b], name = :sys1) + + cons2 = [x^2 + y^2 ~ 0, y * sin(x) - x ~ 0] + sys2 = OptimizationSystem(loss, [x, y], [a, b], name = :sys2, constraints = cons2) + + @variables z + @parameters β + loss2 = sys1.x - sys2.y + z * β + combinedsys = OptimizationSystem(loss2, [z], [β], systems = [sys1, sys2], + name = :combinedsys) + + equations(combinedsys) + states(combinedsys) + parameters(combinedsys) + + calculate_gradient(combinedsys) + calculate_hessian(combinedsys) + generate_function(combinedsys) + generate_gradient(combinedsys) + generate_hessian(combinedsys) + hess_sparsity = ModelingToolkit.hessian_sparsity(sys1) + sparse_prob = OptimizationProblem(sys1, [x, y], [a, b], grad = true, sparse = true) + @test sparse_prob.f.hess_prototype.rowval == hess_sparsity.rowval + @test sparse_prob.f.hess_prototype.colptr == hess_sparsity.colptr + + u0 = [sys1.x => 1.0 + sys1.y => 2.0 + sys2.x => 3.0 + sys2.y => 4.0 + z => 5.0] + p = [sys1.a => 6.0 + sys1.b => 7.0 + sys2.a => 8.0 + sys2.b => 9.0 + β => 10.0] + + prob = OptimizationProblem(combinedsys, u0, p, grad = true, hess = true) + @test prob.f.sys === combinedsys + sol = solve(prob, Ipopt.Optimizer(); print_level = 0) + @test sol.minimum < -1e5 +end + +@testset "inequality constraint" begin + @variables x y + @parameters a b + loss = (a - x)^2 + b * (y - x^2)^2 + cons = [ + x^2 + y^2 ≲ 1.0, + ] + @named sys = OptimizationSystem(loss, [x, y], [a, b], constraints = cons) + + prob = OptimizationProblem(sys, [x => 0.0, y => 0.0], [a => 1.0, b => 1.0], + grad = true, hess = true) + @test prob.f.sys === sys + sol = solve(prob, IPNewton()) + @test sol.minimum < 1.0 + sol = solve(prob, Ipopt.Optimizer(); print_level = 0) + @test sol.minimum < 1.0 + sol = solve(prob, AmplNLWriter.Optimizer(Ipopt_jll.amplexe)) + @test sol.minimum < 1.0 +end + +@testset "equality constraint" begin + @variables x y + @parameters a b + loss = (a - x)^2 + b * (y - x^2)^2 + cons = [1.0 ~ x^2 + y^2] + @named sys = OptimizationSystem(loss, [x, y], [a, b], constraints = cons) + prob = OptimizationProblem(sys, [x => 0.0, y => 0.0], [a => 1.0, b => 1.0], + grad = true, hess = true) + sol = solve(prob, IPNewton()) + @test sol.minimum < 1.0 + @test sol.u≈[0.808, 0.589] atol=1e-3 + @test sol[x]^2 + sol[y]^2 ≈ 1.0 + sol = solve(prob, Ipopt.Optimizer(); print_level = 0) + @test sol.minimum < 1.0 + @test sol.u≈[0.808, 0.589] atol=1e-3 + @test sol[x]^2 + sol[y]^2 ≈ 1.0 + sol = solve(prob, AmplNLWriter.Optimizer(Ipopt_jll.amplexe)) + @test 
sol.minimum < 1.0 + @test sol.u≈[0.808, 0.589] atol=1e-3 + @test sol[x]^2 + sol[y]^2 ≈ 1.0 +end + +@testset "rosenbrock" begin + rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2 + x0 = zeros(2) + p = [1.0, 100.0] + f = OptimizationFunction(rosenbrock, Optimization.AutoModelingToolkit()) + prob = OptimizationProblem(f, x0, p) + sol = solve(prob, Newton()) + @test sol.u ≈ [1.0, 1.0] +end # issue #819 @testset "Combined system name collisions" begin + @variables x y + @parameters a b + loss = (a - x)^2 + b * (y - x^2)^2 + sys1 = OptimizationSystem(loss, [x, y], [a, b], name = :sys1) sys2 = OptimizationSystem(loss, [x, y], [a, b], name = :sys1) + @variables z + @parameters β + loss2 = sys1.x - sys2.y + z * β @test_throws ArgumentError OptimizationSystem(loss2, [z], [β], systems = [sys1, sys2]) end -# observed variable handling -@variables OBS -@named sys2 = OptimizationSystem(loss, [x, y], [a, b]; observed = [OBS ~ x * h + y]) -OBS2 = OBS -@test isequal(OBS2, @nonamespace sys2.OBS) -@unpack OBS = sys2 -@test isequal(OBS2, OBS) +@testset "observed variable handling" begin + @variables x y + @parameters a b + loss = (a - x)^2 + b * (y - x^2)^2 + @variables OBS + @named sys2 = OptimizationSystem(loss, [x, y], [a, b]; observed = [OBS ~ x + y]) + OBS2 = OBS + @test isequal(OBS2, @nonamespace sys2.OBS) + @unpack OBS = sys2 + @test isequal(OBS2, OBS) +end # nested constraints @testset "nested systems" begin @@ -130,8 +154,10 @@ end loss = (a - x)^2 + b * (y - x^2)^2 sys1 = OptimizationSystem(loss, [x, y], [a, b], name = :sys1) - cons2 = [x^2 + y^2 ~ 0, y * sin(x) - x ~ 0] - sys2 = OptimizationSystem(loss, [x, y], [a, b], name = :sys2, constraints = cons2) + cons = [ + x^2 + y^2 ≲ 1.0, + ] + sys2 = OptimizationSystem(loss, [x, y], [a, b], name = :sys2, constraints = cons) @variables z @parameters β @@ -152,26 +178,54 @@ end prob = OptimizationProblem(combinedsys, u0, p, grad = true, hess = true) @test prob.f.sys === combinedsys - sol = solve(prob, Ipopt.Optimizer()) + sol = solve(prob, Ipopt.Optimizer(); print_level = 0) @test sol.minimum < -1e5 - #inequality constraint, the bounds for constraints lcons !== ucons prob = OptimizationProblem(sys2, [x => 0.0, y => 0.0], [a => 1.0, b => 100.0], - lcons = [-1.0, -1.0], ucons = [500.0, 500.0], grad = true, - hess = true) + grad = true, hess = true) @test prob.f.sys === sys2 sol = solve(prob, IPNewton()) @test sol.minimum < 1.0 - sol = solve(prob, Ipopt.Optimizer()) + sol = solve(prob, Ipopt.Optimizer(); print_level = 0) @test sol.minimum < 1.0 end -@variables x -o1 = (x - 1)^2 -c1 = [ - x ~ 1, -] -testdict = Dict(["test" => 1]) -sys1 = OptimizationSystem(o1, [x], [], name = :sys1, constraints = c1, - metadata = testdict) -@test get_metadata(sys1) == testdict +@testset "metadata" begin + @variables x + o1 = (x - 1)^2 + c1 = [ + x ~ 1, + ] + testdict = Dict(["test" => 1]) + sys1 = OptimizationSystem(o1, [x], [], name = :sys1, constraints = c1, + metadata = testdict) + @test get_metadata(sys1) == testdict +end + +@testset "non-convex problem with inequalities" begin + @variables x[1:2] [bounds = (0.0, Inf)] + @named sys = OptimizationSystem(x[1] + x[2], [x...], []; + constraints = [ + 1.0 ≲ x[1]^2 + x[2]^2, + x[1]^2 + x[2]^2 ≲ 2.0, + ]) + + prob = OptimizationProblem(sys, [x[1] => 2.0, x[2] => 0.0], [], grad = true, + hess = true) + sol = Optimization.solve(prob, Ipopt.Optimizer(); print_level = 0) + @test sol.u ≈ [1, 0] + @test prob.lb == [0.0, 0.0] + @test prob.ub == [Inf, Inf] +end + +@testset "parameter bounds" begin + @parameters c = 0.0 + 
@variables x y [bounds = (c, Inf)]
+    @parameters a b
+    loss = (a - x)^2 + b * (y - x^2)^2
+    @named sys = OptimizationSystem(loss, [x, y], [a, b, c])
+    prob = OptimizationProblem(sys, [x => 0.0, y => 0.0], [a => 1.0, b => 100.0],
+                               grad = true, hess = true)
+    @test prob.lb == [-Inf, 0.0]
+    @test prob.ub == [Inf, Inf]
+end
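+
+# Illustrative sketch (not part of the test suite): the accessor functions documented in
+# docs/src/systems/OptimizationSystem.md, shown on a small constrained system. The
+# commented outputs are expectations, not asserted values.
+let
+    @variables x y
+    @parameters a b
+    @named sys = OptimizationSystem((a - x)^2 + b * (y - x^2)^2, [x, y], [a, b],
+                                    constraints = [x^2 + y^2 ≲ 1])
+    objective(sys)    # the objective expression, also available via get_op(sys)
+    constraints(sys)  # the constraint vector, also available via get_constraints(sys)
+    states(sys)       # [x, y]
+    parameters(sys)   # [a, b]
+end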
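+
+# Illustrative sketch (not part of the test suite): constraints are rewritten into the
+# canonical form h(x) ~ 0 / g(x) ≲ 0 before code generation. The expected results in the
+# comments are assumptions about Symbolics.canonical_form, not asserted values.
+let
+    @variables x y
+    ModelingToolkit.Symbolics.canonical_form(x^2 + y^2 ≲ 1)  # expected: x^2 + y^2 - 1 ≲ 0
+    ModelingToolkit.Symbolics.canonical_form(x^2 + y^2 ~ 1)  # expected: x^2 + y^2 - 1 ~ 0
+    # The generated constraint bounds are lcons = -Inf for an inequality and 0 for an
+    # equality, with ucons = 0 in both cases.
+end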
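+
+# Illustrative sketch (not part of the test suite): the new subs_constants helper replaces
+# @constants by their default values in an expression. The commented output is an
+# expectation, not an asserted value.
+let
+    @variables x
+    @constants h = 2.0
+    ModelingToolkit.subs_constants(h * x^2)  # expected: 2.0x^2
+end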