diff --git a/REQUIRE b/REQUIRE
index 5327a49c352..17393aef058 100644
--- a/REQUIRE
+++ b/REQUIRE
@@ -1,5 +1,5 @@
 julia 0.6
-MathOptInterface 0.6 0.7
+MathOptInterface 0.6.3 0.7
 ForwardDiff 0.5 0.11
 Calculus
 DataStructures
diff --git a/docs/src/quickstart.md b/docs/src/quickstart.md
index 1145602b9df..bf898f71f50 100644
--- a/docs/src/quickstart.md
+++ b/docs/src/quickstart.md
@@ -93,7 +93,7 @@ julia> JuMP.optimize!(model)
 DocTestSetup = quote
     # Now we load in the solution. Using a caching optimizer removes the need to
     # load a solver such as GLPK for building the documentation.
-    mock = JuMP.caching_optimizer(model).optimizer
+    mock = JuMP.backend(model).optimizer.model
     MOI.set(mock, MOI.TerminationStatus(), MOI.Success)
     MOI.set(mock, MOI.PrimalStatus(), MOI.FeasiblePoint)
     MOI.set(mock, MOI.DualStatus(), MOI.FeasiblePoint)
diff --git a/src/JuMP.jl b/src/JuMP.jl
index 4d539700fcf..cab10ba5fe8 100644
--- a/src/JuMP.jl
+++ b/src/JuMP.jl
@@ -134,8 +134,8 @@ end
 # Model

 # Model has three modes:
-# 1) Automatic: moi_backend field holds a LazyBridgeOptimizer{CachingOptimizer} in Automatic mode.
-# 2) Manual: moi_backend field holds a LazyBridgeOptimizer{CachingOptimizer} in Manual mode.
+# 1) Automatic: moi_backend field holds a CachingOptimizer in Automatic mode.
+# 2) Manual: moi_backend field holds a CachingOptimizer in Manual mode.
 # 3) Direct: moi_backend field holds an AbstractOptimizer. No extra copy of the model is stored. The moi_backend must support add_constraint etc.
 # Methods to interact with the CachingOptimizer are defined in solverinterface.jl.
 @enum ModelMode Automatic Manual Direct
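To make the new backend layout concrete, here is a small sketch of the three modes after this change. It is written against the MOI 0.6-era API used in this PR and borrows `MOIU.MockOptimizer` and `JuMP.JuMPMOIModel` from the test suite as stand-ins for a real solver, so treat it as illustrative rather than a doctest.

```julia
using JuMP, MathOptInterface
const MOI = MathOptInterface
const MOIU = MOI.Utilities

# Automatic (default) and Manual: `moi_backend` is now a plain CachingOptimizer;
# bridges are only layered in once an optimizer is attached.
auto_model = Model()
manual_model = Model(caching_mode = MOIU.Manual)
JuMP.mode(auto_model)    # Automatic
JuMP.mode(manual_model)  # Manual

# Direct: `moi_backend` is the optimizer itself; no cache, no automatic bridging.
direct = JuMP.direct_model(MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}()))
JuMP.mode(direct)        # Direct
```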
""" function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, - bridge_constraints::Bool=true, solver=nothing) if solver !== nothing error("The solver= keyword is no longer available in JuMP 0.19 and " * @@ -198,13 +197,7 @@ function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, universal_fallback = MOIU.UniversalFallback(JuMPMOIModel{Float64}()) caching_opt = MOIU.CachingOptimizer(universal_fallback, caching_mode) - if bridge_constraints - backend = MOI.Bridges.fullbridgeoptimizer(caching_opt, - Float64) - else - backend = caching_opt - end - return direct_model(backend) + return direct_model(caching_opt) end """ @@ -224,10 +217,11 @@ The following creates a model using the optimizer model = JuMP.Model(with_optimizer(IpoptOptimizer, print_level=0)) ``` """ -function Model(optimizer_factory::OptimizerFactory; kwargs...) +function Model(optimizer_factory::OptimizerFactory; + bridge_constraints::Bool=true, kwargs...) model = Model(; kwargs...) - optimizer = optimizer_factory() - MOIU.resetoptimizer!(model, optimizer) + set_optimizer(model, optimizer_factory, + bridge_constraints=bridge_constraints) return model end @@ -269,22 +263,6 @@ if VERSION >= v"0.7-" end -# In Automatic and Manual mode, `backend(model)` is either directly the -# `CachingOptimizer` if `bridge_constraints=false` was passed in the constructor -# or it is a `LazyBridgeOptimizer` and the `CachingOptimizer` is stored in the -# `model` field -function caching_optimizer(model::Model) - if backend(model) isa MOIU.CachingOptimizer - return backend(model) - elseif (backend(model) isa - MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer}) - return backend(model).model - else - error("The function `caching_optimizer` cannot be called on a model " * - "in `Direct` mode.") - end -end - """ backend(model::Model) @@ -294,8 +272,7 @@ and whether there are any bridges in the model. If JuMP is in direct mode (i.e., the model was created using [`JuMP.direct_model`](@ref)), the backend with be the optimizer passed to `direct_model`. If JuMP is in manual -or automatic mode, the backend will either be a `MOI.Utilities.CachingOptimizer` -or a `MOI.Bridges.LazyBridgeOptimizer`. +or automatic mode, the backend is a `MOI.Utilities.CachingOptimizer`. This function should only be used by advanced users looking to access low-level MathOptInterface or solver-specific functionality. @@ -308,16 +285,34 @@ backend(model::Model) = model.moi_backend Return mode (Direct, Automatic, Manual) of model. """ function mode(model::Model) - if !(backend(model) isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer} || - backend(model) isa MOIU.CachingOptimizer) + if !(backend(model) isa MOIU.CachingOptimizer) return Direct - elseif caching_optimizer(model).mode == MOIU.Automatic + elseif backend(model).mode == MOIU.Automatic return Automatic else return Manual end end +""" + bridge_constraints(model::Model) + +Return a `Bool` indicating whether the model `model` is in manual or automatic +mode, the optimizer is set and unsupported constraints are automatically bridged +to equivalent supported constraints when an appropriate transformation is +available. 
+""" +function bridge_constraints(model::Model) + caching_optimizer = backend(model) + if caching_optimizer isa MOIU.CachingOptimizer + return caching_optimizer.optimizer isa MOI.Bridges.LazyBridgeOptimizer + else + # Direct mode + return false + end +end + + """ num_variables(model::Model) @@ -439,8 +434,8 @@ function optimizer_index(v::VariableRef) if mode(model) == Direct return index(v) else - @assert caching_optimizer(model).state == MOIU.AttachedOptimizer - return caching_optimizer(model).model_to_optimizer_map[index(v)] + @assert backend(model).state == MOIU.AttachedOptimizer + return backend(model).model_to_optimizer_map[index(v)] end end @@ -448,8 +443,8 @@ function optimizer_index(cr::ConstraintRef{Model}) if mode(cr.model) == Direct return index(cr) else - @assert caching_optimizer(cr.model).state == MOIU.AttachedOptimizer - return caching_optimizer(cr.model).model_to_optimizer_map[index(cr)] + @assert backend(cr.model).state == MOIU.AttachedOptimizer + return backend(cr.model).model_to_optimizer_map[index(cr)] end end diff --git a/src/constraints.jl b/src/constraints.jl index 7f220799291..f564b88dddf 100644 --- a/src/constraints.jl +++ b/src/constraints.jl @@ -218,23 +218,25 @@ function constraint_object(ref::ConstraintRef{Model, MOICON{FuncType, SetType}}) end """ - add_constraint(m::Model, c::AbstractConstraint, name::String="") + add_constraint(model::Model, c::AbstractConstraint, name::String="") -Add a constraint `c` to `Model m` and sets its name. +Add a constraint `c` to `Model model` and sets its name. """ -function add_constraint(m::Model, c::AbstractConstraint, name::String="") +function add_constraint(model::Model, c::AbstractConstraint, name::String="") f = moi_function(c) s = moi_set(c) - if !MOI.supports_constraint(backend(m), typeof(f), typeof(s)) - if backend(m) isa MOI.Bridges.LazyBridgeOptimizer + if !MOI.supports_constraint(backend(model), typeof(f), typeof(s)) + if mode(model) == Direct + bridge_message = "." + elseif bridge_constraints(model) bridge_message = " and there are no bridges that can reformulate it into supported constraints." else bridge_message = ", try using `bridge_constraints=true` in the `JuMP.Model` constructor if you believe the constraint can be reformulated to constraints supported by the solver." 
end error("Constraints of type $(typeof(f))-in-$(typeof(s)) are not supported by the solver" * bridge_message) end - cindex = MOI.add_constraint(backend(m), f, s) - cref = ConstraintRef(m, cindex, shape(c)) + cindex = MOI.add_constraint(backend(model), f, s) + cref = ConstraintRef(model, cindex, shape(c)) if !isempty(name) set_name(cref, name) end diff --git a/src/copy.jl b/src/copy.jl index 6113fd63476..4ab6091dbe8 100644 --- a/src/copy.jl +++ b/src/copy.jl @@ -97,12 +97,8 @@ function copy_model(model::Model) " instead of the `direct_model` constructor to be able to copy", " the constructed model.") end - caching_mode = caching_optimizer(model).mode - # TODO add bridges added to the bridge optimizer that are not part of the - # fullbridgeoptimizer - bridge_constraints = backend(model) isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer} - new_model = Model(caching_mode = caching_mode, - bridge_constraints = bridge_constraints) + caching_mode = backend(model).mode + new_model = Model(caching_mode = caching_mode) # Copy the MOI backend, note that variable and constraint indices may have # changed, the `index_map` gives the map between the indices of diff --git a/src/objective.jl b/src/objective.jl index 7f85dbb5725..68eef8f1e92 100644 --- a/src/objective.jl +++ b/src/objective.jl @@ -115,12 +115,12 @@ However, it is not convertible to a variable. julia> JuMP.objective_function(model, JuMP.VariableRef) ERROR: InexactError: convert(MathOptInterface.SingleVariable, MathOptInterface.ScalarAffineFunction{Float64}(MathOptInterface.ScalarAffineTerm{Float64}[ScalarAffineTerm{Float64}(2.0, VariableIndex(1))], 1.0)) Stacktrace: - [1] convert at /home/blegat/.julia/dev/MathOptInterface/src/functions.jl:393 [inlined] - [2] get(::JuMP.JuMPMOIModel{Float64}, ::MathOptInterface.ObjectiveFunction{MathOptInterface.SingleVariable}) at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/model.jl:259 - [3] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/universalfallback.jl:105 [inlined] - [4] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/cachingoptimizer.jl:436 [inlined] + [1] convert at /home/blegat/.julia/dev/MathOptInterface/src/functions.jl:398 [inlined] + [2] get(::JuMP.JuMPMOIModel{Float64}, ::MathOptInterface.ObjectiveFunction{MathOptInterface.SingleVariable}) at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/model.jl:290 + [3] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/universalfallback.jl:114 [inlined] + [4] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/cachingoptimizer.jl:439 [inlined] [5] get(::MathOptInterface.Bridges.LazyBridgeOptimizer{MathOptInterface.Utilities.CachingOptimizer{MathOptInterface.AbstractOptimizer,MathOptInterface.Utilities.UniversalFallback{JuMP.JuMPMOIModel{Float64}}},MathOptInterface.Bridges.AllBridgedConstraints{Float64}}, ::MathOptInterface.ObjectiveFunction{MathOptInterface.SingleVariable}) at /home/blegat/.julia/dev/MathOptInterface/src/Bridges/bridgeoptimizer.jl:172 - [6] objective_function(::Model, ::Type{VariableRef}) at /home/blegat/.julia/dev/JuMP/src/objective.jl:121 + [6] objective_function(::Model, ::Type{VariableRef}) at /home/blegat/.julia/dev/JuMP/src/objective.jl:129 [7] top-level scope at none:0 ``` """ diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl index 092e8c88de3..5d16b5a6e72 100644 --- a/src/optimizer_interface.jl +++ b/src/optimizer_interface.jl @@ -3,28 +3,57 @@ # License, v. 2.0. 
diff --git a/src/copy.jl b/src/copy.jl
index 6113fd63476..4ab6091dbe8 100644
--- a/src/copy.jl
+++ b/src/copy.jl
@@ -97,12 +97,8 @@ function copy_model(model::Model)
               " instead of the `direct_model` constructor to be able to copy",
               " the constructed model.")
     end
-    caching_mode = caching_optimizer(model).mode
-    # TODO add bridges added to the bridge optimizer that are not part of the
-    # fullbridgeoptimizer
-    bridge_constraints = backend(model) isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer}
-    new_model = Model(caching_mode = caching_mode,
-                      bridge_constraints = bridge_constraints)
+    caching_mode = backend(model).mode
+    new_model = Model(caching_mode = caching_mode)

     # Copy the MOI backend, note that variable and constraint indices may have
     # changed, the `index_map` gives the map between the indices of
diff --git a/src/objective.jl b/src/objective.jl
index 7f85dbb5725..68eef8f1e92 100644
--- a/src/objective.jl
+++ b/src/objective.jl
@@ -115,12 +115,12 @@ However, it is not convertible to a variable.
 julia> JuMP.objective_function(model, JuMP.VariableRef)
 ERROR: InexactError: convert(MathOptInterface.SingleVariable, MathOptInterface.ScalarAffineFunction{Float64}(MathOptInterface.ScalarAffineTerm{Float64}[ScalarAffineTerm{Float64}(2.0, VariableIndex(1))], 1.0))
 Stacktrace:
- [1] convert at /home/blegat/.julia/dev/MathOptInterface/src/functions.jl:393 [inlined]
- [2] get(::JuMP.JuMPMOIModel{Float64}, ::MathOptInterface.ObjectiveFunction{MathOptInterface.SingleVariable}) at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/model.jl:259
- [3] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/universalfallback.jl:105 [inlined]
- [4] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/cachingoptimizer.jl:436 [inlined]
+ [1] convert at /home/blegat/.julia/dev/MathOptInterface/src/functions.jl:398 [inlined]
+ [2] get(::JuMP.JuMPMOIModel{Float64}, ::MathOptInterface.ObjectiveFunction{MathOptInterface.SingleVariable}) at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/model.jl:290
+ [3] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/universalfallback.jl:114 [inlined]
+ [4] get at /home/blegat/.julia/dev/MathOptInterface/src/Utilities/cachingoptimizer.jl:439 [inlined]
  [5] get(::MathOptInterface.Bridges.LazyBridgeOptimizer{MathOptInterface.Utilities.CachingOptimizer{MathOptInterface.AbstractOptimizer,MathOptInterface.Utilities.UniversalFallback{JuMP.JuMPMOIModel{Float64}}},MathOptInterface.Bridges.AllBridgedConstraints{Float64}}, ::MathOptInterface.ObjectiveFunction{MathOptInterface.SingleVariable}) at /home/blegat/.julia/dev/MathOptInterface/src/Bridges/bridgeoptimizer.jl:172
- [6] objective_function(::Model, ::Type{VariableRef}) at /home/blegat/.julia/dev/JuMP/src/objective.jl:121
+ [6] objective_function(::Model, ::Type{VariableRef}) at /home/blegat/.julia/dev/JuMP/src/objective.jl:129
  [7] top-level scope at none:0
 ```
 """
diff --git a/src/optimizer_interface.jl b/src/optimizer_interface.jl
index 092e8c88de3..5d16b5a6e72 100644
--- a/src/optimizer_interface.jl
+++ b/src/optimizer_interface.jl
@@ -3,28 +3,57 @@
 #  License, v. 2.0. If a copy of the MPL was not distributed with this
 #  file, You can obtain one at http://mozilla.org/MPL/2.0/.

+function error_if_direct_mode(model::Model, func::Symbol)
+    if mode(model) == Direct
+        error("The `$func` function is not supported in Direct mode.")
+    end
+end
+
 # These methods directly map to CachingOptimizer methods.
 # They cannot be called in Direct mode.
-function MOIU.resetoptimizer!(model::Model, optimizer::MOI.AbstractOptimizer)
-    @assert mode(model) != Direct
-    MOIU.resetoptimizer!(caching_optimizer(model), optimizer)
+function MOIU.resetoptimizer!(model::Model, optimizer::MOI.AbstractOptimizer,
+                              bridge_constraints::Bool=true)
+    error_if_direct_mode(model, :resetoptimizer!)
+    MOIU.resetoptimizer!(backend(model), optimizer)
 end

 function MOIU.resetoptimizer!(model::Model)
-    @assert mode(model) != Direct
-    MOIU.resetoptimizer!(caching_optimizer(model))
+    error_if_direct_mode(model, :resetoptimizer!)
+    MOIU.resetoptimizer!(backend(model))
 end

 function MOIU.dropoptimizer!(model::Model)
+    error_if_direct_mode(model, :dropoptimizer!)
-    @assert mode(model) != Direct
-    MOIU.dropoptimizer!(caching_optimizer(model))
+    MOIU.dropoptimizer!(backend(model))
 end

 function MOIU.attachoptimizer!(model::Model)
+    error_if_direct_mode(model, :attachoptimizer!)
-    @assert mode(model) != Direct
-    MOIU.attachoptimizer!(caching_optimizer(model))
+    MOIU.attachoptimizer!(backend(model))
 end

+function set_optimizer(model::Model, optimizer_factory::OptimizerFactory;
+                       bridge_constraints::Bool=true)
+    error_if_direct_mode(model, :set_optimizer)
+    optimizer = optimizer_factory()
+    if bridge_constraints
+        # The names are handled by the first caching optimizer.
+        # If default_copy_to without names is supported, no need for a second
+        # cache.
+        if !MOIU.supports_default_copy_to(optimizer, false)
+            if mode(model) == Manual
+                # TODO figure out what to do in manual mode with the two caches
+                error("Bridges in Manual mode with an optimizer not supporting `default_copy_to` is not supported yet")
+            end
+            universal_fallback = MOIU.UniversalFallback(JuMPMOIModel{Float64}())
+            optimizer = MOIU.CachingOptimizer(universal_fallback, optimizer)
+        end
+        optimizer = MOI.Bridges.fullbridgeoptimizer(optimizer, Float64)
+    end
+    MOIU.resetoptimizer!(model, optimizer)
+end

 """
     optimize!(model::Model,
@@ -55,6 +84,7 @@ JuMP.optimize!(model, with_optimizer(GLPK.Optimizer))
 """
 function optimize!(model::Model,
                    optimizer_factory::Union{Nothing, OptimizerFactory}=nothing;
+                   bridge_constraints::Bool=true,
                    ignore_optimize_hook=(model.optimize_hook === nothing))
     # The nlp_data is not kept in sync, so re-set it here.
     # TODO: Consider how to handle incremental solves.
@@ -67,11 +97,11 @@ function optimize!(model::Model,
         if mode(model) == Direct
             error("An optimizer factory cannot be provided at the `optimize` call in Direct mode.")
         end
-        if MOIU.state(caching_optimizer(model)) != MOIU.NoOptimizer
+        if MOIU.state(backend(model)) != MOIU.NoOptimizer
             error("An optimizer factory cannot both be provided in the `Model` constructor and at the `optimize` call.")
         end
-        optimizer = optimizer_factory()
-        MOIU.resetoptimizer!(model, optimizer)
+        set_optimizer(model, optimizer_factory,
+                      bridge_constraints=bridge_constraints)
         MOIU.attachoptimizer!(model)
     end
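For reference, the backend layering that `set_optimizer` now builds depends on whether the optimizer supports `default_copy_to`; the `needs_allocate_load=true` keyword below is how the tests simulate one that does not. A sketch of inspecting the layers, using the same test-suite stand-ins as above:

```julia
# supports_default_copy_to == true:
#   CachingOptimizer (cache) -> LazyBridgeOptimizer -> optimizer
# supports_default_copy_to == false:
#   CachingOptimizer (cache) -> LazyBridgeOptimizer -> CachingOptimizer -> optimizer
model = Model(with_optimizer(MOIU.MockOptimizer, SimpleLPModel{Float64}(),
                             needs_allocate_load=true))
cache = JuMP.backend(model)      # outer cache; also holds the names
bridges = cache.optimizer        # LazyBridgeOptimizer
second_cache = bridges.model     # inner cache for the allocate-load optimizer
@assert second_cache.optimizer isa MOIU.MockOptimizer
```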
diff --git a/test/constraint.jl b/test/constraint.jl
index 2885d778cf0..13b6dfa4bfd 100644
--- a/test/constraint.jl
+++ b/test/constraint.jl
@@ -353,13 +353,13 @@ function test_shadow_price(model_string, constraint_dual, constraint_shadow)
                           JuMP.JuMPMOIModel{Float64}(),
                           eval_objective_value=false,
                           eval_variable_constraint_dual=false))
-    mock_optimizer = JuMP.caching_optimizer(model).optimizer
+    mock_optimizer = JuMP.backend(model).optimizer.model
     MOI.set(mock_optimizer, MOI.TerminationStatus(), MOI.Success)
     MOI.set(mock_optimizer, MOI.DualStatus(), MOI.FeasiblePoint)
     JuMP.optimize!(model)
     @testset "shadow price of $constraint_name" for constraint_name in keys(constraint_dual)
-        ci = MOI.get(JuMP.caching_optimizer(model), MOI.ConstraintIndex,
+        ci = MOI.get(JuMP.backend(model), MOI.ConstraintIndex,
                      constraint_name)
         constraint_ref = JuMP.ConstraintRef(model, ci, JuMP.ScalarShape())
         MOI.set(mock_optimizer, MOI.ConstraintDual(),
diff --git a/test/generate_and_solve.jl b/test/generate_and_solve.jl
index 006c3783197..a6f7b5277f8 100644
--- a/test/generate_and_solve.jl
+++ b/test/generate_and_solve.jl
@@ -37,13 +37,13 @@
         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c", "xub", "ylb"])
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model, ["x","y"], ["c", "xub", "ylb"])

         JuMP.optimize!(m, with_optimizer(MOIU.MockOptimizer,
                                          JuMP.JuMPMOIModel{Float64}(),
                                          eval_objective_value=false))

-        mockoptimizer = JuMP.caching_optimizer(m).optimizer
+        mockoptimizer = JuMP.backend(m).optimizer.model
         MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Success)
         MOI.set(mockoptimizer, MOI.ObjectiveValue(), -1.0)
         MOI.set(mockoptimizer, MOI.ResultCount(), 1)
@@ -138,11 +138,11 @@
         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["xfix", "xint", "ybin"])
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model, ["x","y"], ["xfix", "xint", "ybin"])

         MOIU.attachoptimizer!(m)

-        mockoptimizer = JuMP.caching_optimizer(m).optimizer
+        mockoptimizer = JuMP.backend(m).optimizer.model
         MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Success)
         MOI.set(mockoptimizer, MOI.ObjectiveValue(), 1.0)
         MOI.set(mockoptimizer, MOI.ResultCount(), 1)
@@ -186,13 +186,13 @@
         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c1", "c2", "c3"])
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model, ["x","y"], ["c1", "c2", "c3"])

         JuMP.optimize!(m, with_optimizer(MOIU.MockOptimizer,
                                          JuMP.JuMPMOIModel{Float64}(),
                                          eval_objective_value=false))

-        mockoptimizer = JuMP.caching_optimizer(m).optimizer
+        mockoptimizer = JuMP.backend(m).optimizer.model
         MOI.set(mockoptimizer, MOI.TerminationStatus(), MOI.Success)
        MOI.set(mockoptimizer, MOI.ObjectiveValue(), -1.0)
         MOI.set(mockoptimizer, MOI.ResultCount(), 1)
@@ -245,7 +245,7 @@
         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y","z"], ["varsoc", "affsoc", "rotsoc"])
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model, ["x","y","z"], ["varsoc", "affsoc", "rotsoc"])

         mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(),
                                            eval_objective_value=false,
@@ -308,7 +308,7 @@
         model = JuMP.JuMPMOIModel{Float64}()
         MOIU.loadfromstring!(model, modelstring)
-        MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model,
+        MOIU.test_models_equal(JuMP.backend(m).model_cache, model,
                                ["x11","x12","x22"],
                                ["var_psd", "sym_psd", "con_psd"])
diff --git a/test/model.jl b/test/model.jl
index 590653b9c1e..7f7f52033d2 100644
--- a/test/model.jl
+++ b/test/model.jl
@@ -64,6 +64,25 @@ function test_model()
         # optimizer not supporting Interval
         model = Model(with_optimizer(MOIU.MockOptimizer,
                                      SimpleLPModel{Float64}()))
+        @test JuMP.bridge_constraints(model)
+        @test JuMP.backend(model) isa MOIU.CachingOptimizer
+        @test JuMP.backend(model).optimizer isa MOI.Bridges.LazyBridgeOptimizer
+        @test JuMP.backend(model).optimizer.model isa MOIU.MockOptimizer
+        @variable model x
+        cref = @constraint model 0 <= x + 1 <= 1
+        @test cref isa JuMP.ConstraintRef{JuMP.Model,MOI.ConstraintIndex{MOI.ScalarAffineFunction{Float64},MOI.Interval{Float64}}}
+        JuMP.optimize!(model)
+    end
+    @testset "Automatic bridging with cache for bridged model" begin
+        # optimizer not supporting Interval and not supporting `default_copy_to`
+        model = Model(with_optimizer(MOIU.MockOptimizer,
+                                     SimpleLPModel{Float64}(),
+                                     needs_allocate_load=true))
+        @test JuMP.bridge_constraints(model)
+        @test JuMP.backend(model) isa MOIU.CachingOptimizer
+        @test JuMP.backend(model).optimizer isa MOI.Bridges.LazyBridgeOptimizer
+        @test JuMP.backend(model).optimizer.model isa MOIU.CachingOptimizer
+        @test JuMP.backend(model).optimizer.model.optimizer isa MOIU.MockOptimizer
         @variable model x
         cref = @constraint model 0 <= x + 1 <= 1
         @test cref isa JuMP.ConstraintRef{JuMP.Model,MOI.ConstraintIndex{MOI.ScalarAffineFunction{Float64},MOI.Interval{Float64}}}
@@ -73,16 +92,28 @@ function test_model()
         model = Model(with_optimizer(MOIU.MockOptimizer,
                                      SimpleLPModel{Float64}()),
                       bridge_constraints=false)
+        @test !JuMP.bridge_constraints(model)
         @test JuMP.backend(model) isa MOIU.CachingOptimizer
-        @test JuMP.backend(model) === JuMP.caching_optimizer(model)
+        @test !(JuMP.backend(model).optimizer isa MOI.Bridges.LazyBridgeOptimizer)
         @variable model x
-        @test_throws ErrorException @constraint model 0 <= x + 1 <= 1
+        if VERSION < v"0.7-"
+            @test_throws ErrorException @constraint model 0 <= x + 1 <= 1
+        else
+            err = ErrorException("Constraints of type MathOptInterface.ScalarAffineFunction{Float64}-in-MathOptInterface.Interval{Float64} are not supported by the solver, try using `bridge_constraints=true` in the `JuMP.Model` constructor if you believe the constraint can be reformulated to constraints supported by the solver.")
+            @test_throws err @constraint model 0 <= x + 1 <= 1
+        end
     end
     @testset "No bridge automatically added in Direct mode" begin
         optimizer = MOIU.MockOptimizer(SimpleLPModel{Float64}())
         model = JuMP.direct_model(optimizer)
+        @test !JuMP.bridge_constraints(model)
         @variable model x
-        @test_throws ErrorException @constraint model 0 <= x + 1 <= 1
+        if VERSION < v"0.7-"
+            @test_throws ErrorException @constraint model 0 <= x + 1 <= 1
+        else
+            err = ErrorException("Constraints of type MathOptInterface.ScalarAffineFunction{Float64}-in-MathOptInterface.Interval{Float64} are not supported by the solver.")
+            @test_throws err @constraint model 0 <= x + 1 <= 1
+        end
     end
 end
@@ -125,54 +156,50 @@ function dummy_optimizer_hook(::JuMP.AbstractModel) end
     @testset "Using $(copy_model ? "JuMP.copy_model" : "Base.copy")" begin
         for caching_mode in (MOIU.Automatic, MOIU.Manual)
             @testset "In $caching_mode mode" begin
-                for bridge_constraints in (false, true)
-                    model = Model(caching_mode = caching_mode,
-                                  bridge_constraints = bridge_constraints)
-                    model.optimize_hook = dummy_optimizer_hook
-                    data = DummyExtensionData(model)
-                    model.ext[:dummy] = data
-                    @variable(model, x ≥ 0, Bin)
-                    @variable(model, y ≤ 1, Int)
-                    @variable(model, z == 0)
-                    @constraint(model, cref, x + y == 1)
+                model = Model(caching_mode = caching_mode)
+                model.optimize_hook = dummy_optimizer_hook
+                data = DummyExtensionData(model)
+                model.ext[:dummy] = data
+                @variable(model, x ≥ 0, Bin)
+                @variable(model, y ≤ 1, Int)
+                @variable(model, z == 0)
+                @constraint(model, cref, x + y == 1)

-                    if copy_model
-                        new_model, reference_map = JuMP.copy_model(model)
-                    else
-                        new_model = copy(model)
-                        reference_map = Dict{Union{JuMP.VariableRef,
-                                                   JuMP.ConstraintRef},
-                                             Union{JuMP.VariableRef,
-                                                   JuMP.ConstraintRef}}()
-                        reference_map[x] = new_model[:x]
-                        reference_map[y] = new_model[:y]
-                        reference_map[z] = new_model[:z]
-                        reference_map[cref] = new_model[:cref]
-                    end
-                    @test MOIU.mode(JuMP.caching_optimizer(new_model)) == caching_mode
-                    @test bridge_constraints == (JuMP.backend(new_model) isa MOI.Bridges.LazyBridgeOptimizer)
-                    @test new_model.optimize_hook === dummy_optimizer_hook
-                    @test new_model.ext[:dummy].model === new_model
-                    x_new = reference_map[x]
-                    @test JuMP.owner_model(x_new) === new_model
-                    @test JuMP.name(x_new) == "x"
-                    y_new = reference_map[y]
-                    @test JuMP.owner_model(y_new) === new_model
-                    @test JuMP.name(y_new) == "y"
-                    z_new = reference_map[z]
-                    @test JuMP.owner_model(z_new) === new_model
-                    @test JuMP.name(z_new) == "z"
-                    if copy_model
-                        @test JuMP.LowerBoundRef(x_new) == reference_map[JuMP.LowerBoundRef(x)]
-                        @test JuMP.BinaryRef(x_new) == reference_map[JuMP.BinaryRef(x)]
-                        @test JuMP.UpperBoundRef(y_new) == reference_map[JuMP.UpperBoundRef(y)]
-                        @test JuMP.IntegerRef(y_new) == reference_map[JuMP.IntegerRef(y)]
-                        @test JuMP.FixRef(z_new) == reference_map[JuMP.FixRef(z)]
-                    end
-                    cref_new = reference_map[cref]
-                    @test cref_new.model === new_model
-                    @test JuMP.name(cref_new) == "cref"
+                if copy_model
+                    new_model, reference_map = JuMP.copy_model(model)
+                else
+                    new_model = copy(model)
+                    reference_map = Dict{Union{JuMP.VariableRef,
+                                               JuMP.ConstraintRef},
+                                         Union{JuMP.VariableRef,
+                                               JuMP.ConstraintRef}}()
+                    reference_map[x] = new_model[:x]
+                    reference_map[y] = new_model[:y]
+                    reference_map[z] = new_model[:z]
+                    reference_map[cref] = new_model[:cref]
+                end
+                @test MOIU.mode(JuMP.backend(new_model)) == caching_mode
+                @test new_model.optimize_hook === dummy_optimizer_hook
+                @test new_model.ext[:dummy].model === new_model
+                x_new = reference_map[x]
+                @test JuMP.owner_model(x_new) === new_model
+                @test JuMP.name(x_new) == "x"
+                y_new = reference_map[y]
+                @test JuMP.owner_model(y_new) === new_model
+                @test JuMP.name(y_new) == "y"
+                z_new = reference_map[z]
+                @test JuMP.owner_model(z_new) === new_model
+                @test JuMP.name(z_new) == "z"
+                if copy_model
+                    @test JuMP.LowerBoundRef(x_new) == reference_map[JuMP.LowerBoundRef(x)]
+                    @test JuMP.BinaryRef(x_new) == reference_map[JuMP.BinaryRef(x)]
+                    @test JuMP.UpperBoundRef(y_new) == reference_map[JuMP.UpperBoundRef(y)]
+                    @test JuMP.IntegerRef(y_new) == reference_map[JuMP.IntegerRef(y)]
+                    @test JuMP.FixRef(z_new) == reference_map[JuMP.FixRef(z)]
+                end
+                cref_new = reference_map[cref]
+                @test cref_new.model === new_model
+                @test JuMP.name(cref_new) == "cref"
             end
         end
     end
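Finally, a sketch of the simplified copy path exercised above: `copy_model` now only carries over the caching mode, since bridges are attached together with the optimizer rather than baked into the backend. Names here follow the test code in this diff.

```julia
model = Model(caching_mode = MOIU.Manual)
@variable(model, x >= 0)
new_model, reference_map = JuMP.copy_model(model)
@assert MOIU.mode(JuMP.backend(new_model)) == MOIU.Manual
@assert JuMP.name(reference_map[x]) == "x"
```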