From ae75f18fe213c4ab81d5356df6d444ec1fd15f31 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sat, 28 Jul 2018 12:14:43 +0200 Subject: [PATCH 01/22] First skeleton for factory --- src/JuMP.jl | 73 ++++++++++++++++++++++++++++++++--------------------- 1 file changed, 44 insertions(+), 29 deletions(-) diff --git a/src/JuMP.jl b/src/JuMP.jl index 45df94f9fe0..ea0618efe1b 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -77,6 +77,15 @@ const MOIBIN = MOICON{MOI.SingleVariable,MOI.ZeroOne} @MOIU.model JuMPMOIModel (ZeroOne, Integer) (EqualTo, GreaterThan, LessThan, Interval) (Zeros, Nonnegatives, Nonpositives, SecondOrderCone, RotatedSecondOrderCone, GeometricMeanCone, PositiveSemidefiniteConeTriangle, PositiveSemidefiniteConeSquare, RootDetConeTriangle, RootDetConeSquare, LogDetConeTriangle, LogDetConeSquare) () (SingleVariable,) (ScalarAffineFunction,ScalarQuadraticFunction) (VectorOfVariables,) (VectorAffineFunction,) +struct Factory + ModelType::DataType + args::Tuple + kwargs # type changes from Julia v0.6 to v0.7 +end +function create_model(factory::Factory) + return factory.ModelType(factory.args...; factory.kwargs...) +end + ############################################################################### # Model @@ -107,6 +116,7 @@ mutable struct Model <: AbstractModel customnames::Vector + factory::Factory # In Manual and Automatic modes, LazyBridgeOptimizer{CachingOptimizer}. # In Direct mode, will hold an AbstractOptimizer. moibackend::MOI.AbstractOptimizer @@ -124,12 +134,9 @@ mutable struct Model <: AbstractModel # using an extension-specific symbol as a key. ext::Dict{Symbol, Any} - # Default constructor. 
- function Model(; - mode::ModelMode=Automatic, - backend=nothing, - optimizer=nothing, - bridge_constraints=true) + # Inner constructor + function Model(factory::Factory, moibackend::MOI.ModelLike) + @assert MOI.isempty(moibackend) model = new() model.variabletolowerbound = Dict{MOIVAR, MOILB}() model.variabletoupperbound = Dict{MOIVAR, MOIUB}() @@ -137,29 +144,8 @@ mutable struct Model <: AbstractModel model.variabletointegrality = Dict{MOIVAR, MOIINT}() model.variabletozeroone = Dict{MOIVAR, MOIBIN}() model.customnames = VariableRef[] - if backend != nothing - # TODO: It would make more sense to not force users to specify - # Direct mode if they also provide a backend. - @assert mode == Direct - @assert optimizer === nothing - @assert MOI.isempty(backend) - model.moibackend = backend - else - @assert mode != Direct - universal_fallback = MOIU.UniversalFallback(JuMPMOIModel{Float64}()) - caching_mode = (mode == Automatic) ? MOIU.Automatic : MOIU.Manual - caching_opt = MOIU.CachingOptimizer(universal_fallback, - caching_mode) - if bridge_constraints - model.moibackend = MOI.Bridges.fullbridgeoptimizer(caching_opt, - Float64) - else - model.moibackend = caching_opt - end - if optimizer !== nothing - MOIU.resetoptimizer!(model, optimizer) - end - end + model.factory = factory + model.moibackend = moibackend model.optimizehook = nothing model.nlpdata = nothing model.objdict = Dict{Symbol, Any}() @@ -169,6 +155,35 @@ mutable struct Model <: AbstractModel end end +# TODO doc +function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, + bridge_constraints::Bool=true) + universal_fallback = MOIU.UniversalFallback(JuMPMOIModel{Float64}()) + caching_opt = MOIU.CachingOptimizer(universal_fallback, + caching_mode) + if bridge_constraints + backend = MOI.Bridges.fullbridgeoptimizer(caching_opt, + Float64) + else + backend = caching_opt + end + return Model(nothing, backend) +end + +# TODO doc +function Model(factory::Factory; kwargs...) 
+ model = Model(; kwargs...) + model.factory = factory # useful for implementing Base.copy + optimizer = create_model(factory) + MOIU.resetoptimizer!(model.moibackend, optimizer) + return model +end + +# TODO doc +function direct_model(backend::MOI.ModelLike) + return Model(nothing, backend) +end + # In Automatic and Manual mode, `model.moibackend` is either directly the # `CachingOptimizer` if `bridge_constraints=false` was passed in the constructor # or it is a `LazyBridgeOptimizer` and the `CachingOptimizer` is stored in the From 7650e39199075f34b0f8d0f9df46bdb57abc3fa0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sat, 28 Jul 2018 13:55:06 +0200 Subject: [PATCH 02/22] Update tests --- src/JuMP.jl | 10 +++++++--- test/model.jl | 10 ++++------ test/nlp_solver.jl | 34 ++++++++++++++++------------------ 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/src/JuMP.jl b/src/JuMP.jl index ea0618efe1b..d3162308f31 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -29,6 +29,7 @@ using .Derivatives export # Objects Model, VariableRef, Norm, AffExpr, QuadExpr, + with_optimizer, # LinearConstraint, QuadConstraint, SDConstraint, NonlinearConstraint, ConstraintRef, @@ -78,10 +79,13 @@ const MOIBIN = MOICON{MOI.SingleVariable,MOI.ZeroOne} @MOIU.model JuMPMOIModel (ZeroOne, Integer) (EqualTo, GreaterThan, LessThan, Interval) (Zeros, Nonnegatives, Nonpositives, SecondOrderCone, RotatedSecondOrderCone, GeometricMeanCone, PositiveSemidefiniteConeTriangle, PositiveSemidefiniteConeSquare, RootDetConeTriangle, RootDetConeSquare, LogDetConeTriangle, LogDetConeSquare) () (SingleVariable,) (ScalarAffineFunction,ScalarQuadraticFunction) (VectorOfVariables,) (VectorAffineFunction,) struct Factory - ModelType::DataType + ModelType args::Tuple kwargs # type changes from Julia v0.6 to v0.7 end +function with_optimizer(ModelType::Type, args...; kwargs...) 
+ return Factory(ModelType, args, kwargs) +end function create_model(factory::Factory) return factory.ModelType(factory.args...; factory.kwargs...) end @@ -116,7 +120,7 @@ mutable struct Model <: AbstractModel customnames::Vector - factory::Factory + factory::Union{Nothing, Factory} # In Manual and Automatic modes, LazyBridgeOptimizer{CachingOptimizer}. # In Direct mode, will hold an AbstractOptimizer. moibackend::MOI.AbstractOptimizer @@ -135,7 +139,7 @@ mutable struct Model <: AbstractModel ext::Dict{Symbol, Any} # Inner constructor - function Model(factory::Factory, moibackend::MOI.ModelLike) + function Model(factory::Union{Nothing, Factory}, moibackend::MOI.ModelLike) @assert MOI.isempty(moibackend) model = new() model.variabletolowerbound = Dict{MOIVAR, MOILB}() diff --git a/test/model.jl b/test/model.jl index de1977b7488..491b3f4efa3 100644 --- a/test/model.jl +++ b/test/model.jl @@ -20,16 +20,14 @@ end @testset "Bridges" begin @testset "Automatic bridging" begin # optimizer not supporting Interval - optimizer = MOIU.MockOptimizer(LPModel{Float64}()); - model = Model(optimizer=optimizer) + model = Model(with_optimizer(MOIU.MockOptimizer, LPModel{Float64}())) @variable model x cref = @constraint model 0 <= x + 1 <= 1 @test cref isa JuMP.ConstraintRef{JuMP.Model,MOI.ConstraintIndex{MOI.ScalarAffineFunction{Float64},MOI.Interval{Float64}}} JuMP.optimize(model) end @testset "Automatic bridging disabled with `bridge_constraints` keyword" begin - optimizer = MOIU.MockOptimizer(LPModel{Float64}()); - model = Model(optimizer=optimizer, bridge_constraints=false) + model = Model(with_optimizer(MOIU.MockOptimizer, LPModel{Float64}()), bridge_constraints=false) @test model.moibackend isa MOIU.CachingOptimizer @test model.moibackend === JuMP.caching_optimizer(model) @variable model x @@ -37,8 +35,8 @@ end @test_throws ErrorException JuMP.optimize(model) end @testset "No bridge automatically added in Direct mode" begin - optimizer = MOIU.MockOptimizer(LPModel{Float64}()); 
- model = Model(backend=optimizer, mode=JuMP.Direct) + optimizer = MOIU.MockOptimizer(LPModel{Float64}()) + model = JuMP.direct_model(optimizer) @variable model x @test_throws MethodError @constraint model 0 <= x + 1 <= 1 end diff --git a/test/nlp_solver.jl b/test/nlp_solver.jl index 70b598f12f1..5f871cfb12b 100644 --- a/test/nlp_solver.jl +++ b/test/nlp_solver.jl @@ -27,8 +27,6 @@ using Compat.Test using MathOptInterface const MOI = MathOptInterface -new_optimizer() = IpoptOptimizer(print_level=0) - @testset "NLP solver tests" begin @testset "HS071" begin @@ -40,7 +38,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) # 1 <= x1, x2, x3, x4 <= 5 # Start at (1,5,5,1) # End at (1.000..., 4.743..., 3.821..., 1.379...) - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) initval = [1,5,5,1] @variable(m, 1 <= x[i=1:4] <= 5, start=initval[i]) @NLobjective(m, Min, x[1]*x[4]*(x[1]+x[2]+x[3]) + x[3]) @@ -65,7 +63,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) # 1 <= x1, x2, x3, x4 <= 5 # Start at (1,5,5,1) # End at (1.000..., 4.743..., 3.821..., 1.379...) 
- m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) initval = [1,5,5,1] @variable(m, 1 <= x[i=1:4] <= 5, start=initval[i]) JuMP.setNLobjective(m, :Min, :($(x[1])*$(x[4])*($(x[1])+$(x[2])+$(x[3])) + $(x[3]))) @@ -91,7 +89,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) L = [0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400] U = [Inf, Inf, 0.55, 0.55, 252, 252, 252, 800, 800] - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, L[i] <= x[i=1:9] <= U[i], start = 0.0) @NLobjective(m, Min, 3 * x[1] + 1e-6 * x[1]^3 + 2 * x[2] + .522074e-6 * x[2]^3) @@ -131,7 +129,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) end @testset "HS110" begin - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, -2.001 <= x[1:10] <= 9.999, start = 9) @NLobjective(m, Min, @@ -152,7 +150,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) @testset "HS111" begin c = [-6.089, -17.164, -34.054, -5.914, -24.721, -14.986, -24.100, -10.708, -26.662, -22.179] - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, -100 <= x[1:10] <= 100, start = -2.3) @NLobjective(m, Min, @@ -174,7 +172,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) @testset "HS112" begin c = [-6.089, -17.164, -34.054, -5.914, -24.721, -14.986, -24.100, -10.708, -26.662, -22.179] - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, x[1:10] >= 1e-6, start = 0.1) @NLobjective(m, Min, sum(x[j]*(c[j] + log(x[j]/sum(x[k] for k=1:10))) for j=1:10)) @@ -201,7 +199,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) upper = [2000, 16000, 120, 5000, 2000, 93, 95, 12, 4, 162] start = [1745, 12000, 110, 3048, 1974, 89.2, 92.8, 8, 3.6, 145] - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, lower[i] <= 
x[i=1:n] <= upper[i], start = start[i]) @NLobjective(m, Min, 5.04*x[1] + .035*x[2] + 10*x[3] + 3.36*x[5] - .063*x[4]*x[7]) @@ -240,7 +238,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) upper = [1.0, 1.0, 1.0, 0.1, 0.9, 0.9, 1000, 1000, 1000, 500, 150, 150, 150, Inf, Inf, Inf] start = [0.5 2 0.8 3 0.9 4 0.1 5 0.14 6 0.5 7 489 8 80 9 650 0.5 2 0.8 3 0.9 4 0.1 5 0.14 6 0.5 7 489 8 80 9 650] - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, lower[i] <= x[i=1:N] <= upper[i], start = start[i]) @NLobjective(m, Min, x[11] + x[12] + x[13]) @@ -275,7 +273,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) end @testset "HS118" begin - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) L = zeros(15) L[1] = 8.0 @@ -343,7 +341,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) end @testset "Two-sided constraints" begin - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, x) @NLobjective(m, Max, x) l = -1 @@ -366,7 +364,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) end @testset "Two-sided constraints (no macros)" begin - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, x) JuMP.setNLobjective(m, :Max, x) l = -1 @@ -389,7 +387,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) end @testset "Duals" begin - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, x >= 0) @variable(m, y <= 5) @variable(m, 2 <= z <= 4) @@ -445,7 +443,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) end @testset "Quadratic inequality constraints, linear objective" begin - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, -2 <= x <= 2) @variable(m, -2 <= y <= 2) @objective(m, Min, x - y) @@ -460,7 +458,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) end 
@testset "Quadratic inequality constraints, NL objective" begin - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, -2 <= x <= 2) @variable(m, -2 <= y <= 2) @NLobjective(m, Min, x - y) @@ -475,7 +473,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) end @testset "Quadratic equality constraints" begin - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, 0 <= x[1:2] <= 1) @constraint(m, x[1]^2 + x[2]^2 == 1/2) @NLobjective(m, Max, x[1] - x[2]) @@ -489,7 +487,7 @@ new_optimizer() = IpoptOptimizer(print_level=0) end @testset "Fixed variables" begin - m = Model(optimizer=new_optimizer()) + m = Model(with_optimizer(IpoptOptimize, print_level=0)) @variable(m, x == 0) @variable(m, y ≥ 0) @objective(m, Min, y) From 553df03010aa9da277edb60f1ee5c76eae2f6024 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sat, 28 Jul 2018 13:56:31 +0200 Subject: [PATCH 03/22] Fix --- src/JuMP.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/JuMP.jl b/src/JuMP.jl index d3162308f31..b163883631c 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -179,7 +179,7 @@ function Model(factory::Factory; kwargs...) model = Model(; kwargs...) 
model.factory = factory # useful for implementing Base.copy optimizer = create_model(factory) - MOIU.resetoptimizer!(model.moibackend, optimizer) + MOIU.resetoptimizer!(model, optimizer) return model end From b46399c8c3aabbf088740186971fe878d1d00f70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sun, 29 Jul 2018 11:38:44 +0200 Subject: [PATCH 04/22] Fix tests --- test/generate_and_solve.jl | 132 ++++++++++++++++++------------------- test/nlp_solver.jl | 32 ++++----- 2 files changed, 82 insertions(+), 82 deletions(-) diff --git a/test/generate_and_solve.jl b/test/generate_and_solve.jl index 8440ac85813..1a50c3e3211 100644 --- a/test/generate_and_solve.jl +++ b/test/generate_and_solve.jl @@ -39,20 +39,20 @@ MOIU.loadfromstring!(model, modelstring) MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c", "xub", "ylb"]) - mocksolver = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) - MOIU.resetoptimizer!(m, mocksolver) + mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) + MOIU.resetoptimizer!(m, mockoptimizer) MOIU.attachoptimizer!(m) - MOI.set!(mocksolver, MOI.TerminationStatus(), MOI.Success) - MOI.set!(mocksolver, MOI.ObjectiveValue(), -1.0) - MOI.set!(mocksolver, MOI.ResultCount(), 1) - MOI.set!(mocksolver, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.DualStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(c), -1.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.UpperBoundRef(x)), 0.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.LowerBoundRef(y)), 1.0) + MOI.set!(mockoptimizer, MOI.TerminationStatus(), MOI.Success) + MOI.set!(mockoptimizer, MOI.ObjectiveValue(), -1.0) + MOI.set!(mockoptimizer, 
MOI.ResultCount(), 1) + MOI.set!(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(c), -1.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.UpperBoundRef(x)), 0.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.LowerBoundRef(y)), 1.0) JuMP.optimize(m) @@ -74,24 +74,24 @@ end @testset "LP (Direct mode)" begin - mocksolver = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) + mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) - m = Model(mode = JuMP.Direct, backend = mocksolver) + m = JuMP.direct_model(mockoptimizer) @variable(m, x <= 2.0) @variable(m, y >= 0.0) @objective(m, Min, -x) c = @constraint(m, x + y <= 1) - MOI.set!(mocksolver, MOI.TerminationStatus(), MOI.Success) - MOI.set!(mocksolver, MOI.ObjectiveValue(), -1.0) - MOI.set!(mocksolver, MOI.ResultCount(), 1) - MOI.set!(mocksolver, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.DualStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(c), -1.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.UpperBoundRef(x)), 0.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.LowerBoundRef(y)), 1.0) + MOI.set!(mockoptimizer, MOI.TerminationStatus(), MOI.Success) + MOI.set!(mockoptimizer, MOI.ObjectiveValue(), -1.0) + MOI.set!(mockoptimizer, MOI.ResultCount(), 1) + MOI.set!(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.DualStatus(), 
MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(c), -1.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.UpperBoundRef(x)), 0.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.LowerBoundRef(y)), 1.0) JuMP.optimize(m) @@ -115,9 +115,8 @@ # TODO: test Manual mode @testset "IP" begin - mocksolver = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) # Tests the solver= keyword. - m = Model(mode = JuMP.Automatic, optimizer = mocksolver) + m = Model(with_optimizer(MOIU.MockOptimizer, JuMP.JuMPMOIModel{Float64}(), evalobjective=false), caching_mode = MOIU.Automatic) @variable(m, x == 1.0, Int) @variable(m, y, Bin) @objective(m, Max, x) @@ -140,12 +139,13 @@ MOIU.attachoptimizer!(m) - MOI.set!(mocksolver, MOI.TerminationStatus(), MOI.Success) - MOI.set!(mocksolver, MOI.ObjectiveValue(), 1.0) - MOI.set!(mocksolver, MOI.ResultCount(), 1) - MOI.set!(mocksolver, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) + mockoptimizer = JuMP.caching_optimizer(m).optimizer + MOI.set!(mockoptimizer, MOI.TerminationStatus(), MOI.Success) + MOI.set!(mockoptimizer, MOI.ObjectiveValue(), 1.0) + MOI.set!(mockoptimizer, MOI.ResultCount(), 1) + MOI.set!(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) JuMP.optimize(m) @@ -186,20 +186,20 @@ MOIU.loadfromstring!(model, modelstring) MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c1", "c2", "c3"]) - mocksolver = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), 
evalobjective=false) - MOIU.resetoptimizer!(m, mocksolver) + mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) + MOIU.resetoptimizer!(m, mockoptimizer) MOIU.attachoptimizer!(m) - MOI.set!(mocksolver, MOI.TerminationStatus(), MOI.Success) - MOI.set!(mocksolver, MOI.ObjectiveValue(), -1.0) - MOI.set!(mocksolver, MOI.ResultCount(), 1) - MOI.set!(mocksolver, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.DualStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(c1), -1.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(c2), 2.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(c3), 3.0) + MOI.set!(mockoptimizer, MOI.TerminationStatus(), MOI.Success) + MOI.set!(mockoptimizer, MOI.ObjectiveValue(), -1.0) + MOI.set!(mockoptimizer, MOI.ResultCount(), 1) + MOI.set!(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(c1), -1.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(c2), 2.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(c3), 3.0) JuMP.optimize(m) @@ -244,19 +244,19 @@ MOIU.loadfromstring!(model, modelstring) MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y","z"], ["varsoc", "affsoc", "rotsoc"]) - mocksolver = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) - MOIU.resetoptimizer!(m, mocksolver) + mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) + MOIU.resetoptimizer!(m, mockoptimizer) 
MOIU.attachoptimizer!(m) - MOI.set!(mocksolver, MOI.TerminationStatus(), MOI.Success) - MOI.set!(mocksolver, MOI.ResultCount(), 1) - MOI.set!(mocksolver, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.DualStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(z), 0.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(varsoc), [-1.0,-2.0,-3.0]) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(affsoc), [1.0,2.0,3.0]) + MOI.set!(mockoptimizer, MOI.TerminationStatus(), MOI.Success) + MOI.set!(mockoptimizer, MOI.ResultCount(), 1) + MOI.set!(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(x), 1.0) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(y), 0.0) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(z), 0.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(varsoc), [-1.0,-2.0,-3.0]) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(affsoc), [1.0,2.0,3.0]) JuMP.optimize(m) @@ -300,19 +300,19 @@ MOIU.loadfromstring!(model, modelstring) MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x11","x12","x22"], ["varpsd", "conpsd"]) - mocksolver = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) - MOIU.resetoptimizer!(m, mocksolver) + mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) + MOIU.resetoptimizer!(m, mockoptimizer) MOIU.attachoptimizer!(m) - MOI.set!(mocksolver, MOI.TerminationStatus(), MOI.Success) - MOI.set!(mocksolver, MOI.ResultCount(), 1) - MOI.set!(mocksolver, MOI.PrimalStatus(), MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.DualStatus(), 
MOI.FeasiblePoint) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(x[1,1]), 1.0) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(x[1,2]), 2.0) - MOI.set!(mocksolver, MOI.VariablePrimal(), JuMP.optimizerindex(x[2,2]), 4.0) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(varpsd), [1.0,2.0,3.0]) - MOI.set!(mocksolver, MOI.ConstraintDual(), JuMP.optimizerindex(conpsd), [4.0,5.0,6.0]) + MOI.set!(mockoptimizer, MOI.TerminationStatus(), MOI.Success) + MOI.set!(mockoptimizer, MOI.ResultCount(), 1) + MOI.set!(mockoptimizer, MOI.PrimalStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.DualStatus(), MOI.FeasiblePoint) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(x[1,1]), 1.0) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(x[1,2]), 2.0) + MOI.set!(mockoptimizer, MOI.VariablePrimal(), JuMP.optimizerindex(x[2,2]), 4.0) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(varpsd), [1.0,2.0,3.0]) + MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(conpsd), [4.0,5.0,6.0]) JuMP.optimize(m) diff --git a/test/nlp_solver.jl b/test/nlp_solver.jl index 5f871cfb12b..012f87287d3 100644 --- a/test/nlp_solver.jl +++ b/test/nlp_solver.jl @@ -38,7 +38,7 @@ const MOI = MathOptInterface # 1 <= x1, x2, x3, x4 <= 5 # Start at (1,5,5,1) # End at (1.000..., 4.743..., 3.821..., 1.379...) - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) initval = [1,5,5,1] @variable(m, 1 <= x[i=1:4] <= 5, start=initval[i]) @NLobjective(m, Min, x[1]*x[4]*(x[1]+x[2]+x[3]) + x[3]) @@ -63,7 +63,7 @@ const MOI = MathOptInterface # 1 <= x1, x2, x3, x4 <= 5 # Start at (1,5,5,1) # End at (1.000..., 4.743..., 3.821..., 1.379...) 
- m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) initval = [1,5,5,1] @variable(m, 1 <= x[i=1:4] <= 5, start=initval[i]) JuMP.setNLobjective(m, :Min, :($(x[1])*$(x[4])*($(x[1])+$(x[2])+$(x[3])) + $(x[3]))) @@ -89,7 +89,7 @@ const MOI = MathOptInterface L = [0.0, 0.0, -0.55, -0.55, 196, 196, 196, -400, -400] U = [Inf, Inf, 0.55, 0.55, 252, 252, 252, 800, 800] - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, L[i] <= x[i=1:9] <= U[i], start = 0.0) @NLobjective(m, Min, 3 * x[1] + 1e-6 * x[1]^3 + 2 * x[2] + .522074e-6 * x[2]^3) @@ -129,7 +129,7 @@ const MOI = MathOptInterface end @testset "HS110" begin - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, -2.001 <= x[1:10] <= 9.999, start = 9) @NLobjective(m, Min, @@ -150,7 +150,7 @@ const MOI = MathOptInterface @testset "HS111" begin c = [-6.089, -17.164, -34.054, -5.914, -24.721, -14.986, -24.100, -10.708, -26.662, -22.179] - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, -100 <= x[1:10] <= 100, start = -2.3) @NLobjective(m, Min, @@ -172,7 +172,7 @@ const MOI = MathOptInterface @testset "HS112" begin c = [-6.089, -17.164, -34.054, -5.914, -24.721, -14.986, -24.100, -10.708, -26.662, -22.179] - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, x[1:10] >= 1e-6, start = 0.1) @NLobjective(m, Min, sum(x[j]*(c[j] + log(x[j]/sum(x[k] for k=1:10))) for j=1:10)) @@ -199,7 +199,7 @@ const MOI = MathOptInterface upper = [2000, 16000, 120, 5000, 2000, 93, 95, 12, 4, 162] start = [1745, 12000, 110, 3048, 1974, 89.2, 92.8, 8, 3.6, 145] - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) 
@variable(m, lower[i] <= x[i=1:n] <= upper[i], start = start[i]) @NLobjective(m, Min, 5.04*x[1] + .035*x[2] + 10*x[3] + 3.36*x[5] - .063*x[4]*x[7]) @@ -238,7 +238,7 @@ const MOI = MathOptInterface upper = [1.0, 1.0, 1.0, 0.1, 0.9, 0.9, 1000, 1000, 1000, 500, 150, 150, 150, Inf, Inf, Inf] start = [0.5 2 0.8 3 0.9 4 0.1 5 0.14 6 0.5 7 489 8 80 9 650 0.5 2 0.8 3 0.9 4 0.1 5 0.14 6 0.5 7 489 8 80 9 650] - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, lower[i] <= x[i=1:N] <= upper[i], start = start[i]) @NLobjective(m, Min, x[11] + x[12] + x[13]) @@ -273,7 +273,7 @@ const MOI = MathOptInterface end @testset "HS118" begin - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) L = zeros(15) L[1] = 8.0 @@ -341,7 +341,7 @@ const MOI = MathOptInterface end @testset "Two-sided constraints" begin - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, x) @NLobjective(m, Max, x) l = -1 @@ -364,7 +364,7 @@ const MOI = MathOptInterface end @testset "Two-sided constraints (no macros)" begin - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, x) JuMP.setNLobjective(m, :Max, x) l = -1 @@ -387,7 +387,7 @@ const MOI = MathOptInterface end @testset "Duals" begin - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, x >= 0) @variable(m, y <= 5) @variable(m, 2 <= z <= 4) @@ -443,7 +443,7 @@ const MOI = MathOptInterface end @testset "Quadratic inequality constraints, linear objective" begin - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, -2 <= x <= 2) @variable(m, -2 <= y <= 2) @objective(m, Min, x - y) @@ -458,7 +458,7 @@ const MOI = 
MathOptInterface end @testset "Quadratic inequality constraints, NL objective" begin - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, -2 <= x <= 2) @variable(m, -2 <= y <= 2) @NLobjective(m, Min, x - y) @@ -473,7 +473,7 @@ const MOI = MathOptInterface end @testset "Quadratic equality constraints" begin - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, 0 <= x[1:2] <= 1) @constraint(m, x[1]^2 + x[2]^2 == 1/2) @NLobjective(m, Max, x[1] - x[2]) @@ -487,7 +487,7 @@ const MOI = MathOptInterface end @testset "Fixed variables" begin - m = Model(with_optimizer(IpoptOptimize, print_level=0)) + m = Model(with_optimizer(IpoptOptimizer, print_level=0)) @variable(m, x == 0) @variable(m, y ≥ 0) @objective(m, Min, y) From 1991b42382dadc35fe105cf3d9442f293c2fe97f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sat, 28 Jul 2018 20:49:27 +0200 Subject: [PATCH 05/22] Add doc --- docs/src/solvers.md | 22 +++++++++ src/JuMP.jl | 96 +++++++++++++++++++++++++++++++++----- src/optimizerinterface.jl | 21 +++++++++ test/generate_and_solve.jl | 4 +- 4 files changed, 130 insertions(+), 13 deletions(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 67d1040c360..070cd7381a0 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -1,6 +1,28 @@ Interacting with solvers ======================== + +```@docs +with_optimizer +``` + +The solvers can be set with +```@docs +JuMP.setoptimizer +``` + +```@docs +Model +``` + +## Direct mode + +For advanced users, the model can be created without using a caching nor a +bridge optimizer using the [`JuMP.direct_model`](@ref) function: +```@docs +JuMP.direct_model +``` + TODO: Describe the connection between JuMP and solvers. Automatic vs. Manual mode. CachingOptimizer. How to set/change solvers. How to set parameters (solver specific and generic). Status codes. 
Accessing the result. diff --git a/src/JuMP.jl b/src/JuMP.jl index b163883631c..5d622dce28a 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -79,15 +79,40 @@ const MOIBIN = MOICON{MOI.SingleVariable,MOI.ZeroOne} @MOIU.model JuMPMOIModel (ZeroOne, Integer) (EqualTo, GreaterThan, LessThan, Interval) (Zeros, Nonnegatives, Nonpositives, SecondOrderCone, RotatedSecondOrderCone, GeometricMeanCone, PositiveSemidefiniteConeTriangle, PositiveSemidefiniteConeSquare, RootDetConeTriangle, RootDetConeSquare, LogDetConeTriangle, LogDetConeSquare) () (SingleVariable,) (ScalarAffineFunction,ScalarQuadraticFunction) (VectorOfVariables,) (VectorAffineFunction,) struct Factory - ModelType + # The constructor can be + # * `Function`: a function, or + # * `DataType`: a type, or + # * `UnionAll`: a type with missing parameters. + constructor::Union{Function, DataType, UnionAll} args::Tuple - kwargs # type changes from Julia v0.6 to v0.7 + kwargs # type changes from Julia v0.6 to v0.7 so we leave it untyped for now end -function with_optimizer(ModelType::Type, args...; kwargs...) - return Factory(ModelType, args, kwargs) + +""" + with_optimizer(constructor::Type, args...; kwargs...) + +Return a factory that creates optimizers using the constructor `constructor` +with positional arguments `args` and keyword arguments `kwargs`. + +## Examples + +The following returns a factory that creates `IpoptOptimizer`s using the +constructor call `IpoptOptimizer(print_level=0)`: +```julia +with_optimizer(IpoptOptimizer, print_level=0) +``` +""" +function with_optimizer(constructor::Type, args...; kwargs...) + return Factory(constructor, args, kwargs) end + +""" + create_model(factory::Factory) + +Creates a new model with the factory `factory`. +""" function create_model(factory::Factory) - return factory.ModelType(factory.args...; factory.kwargs...) + return factory.constructor(factory.args...; factory.kwargs...) 
end ############################################################################### @@ -159,7 +184,18 @@ mutable struct Model <: AbstractModel end end -# TODO doc +""" + Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, + bridge_constraints::Bool=true) + +Return a new JuMP model without any optimizer storing the model in a cache. +The mode of the `CachingOptimizer` storing this cache is `caching_mode`. +The optimizer can be set later with [`setoptimizer`](@ref). If +`bridge_constraints` is true, constraints that are not supported by the +optimizer are automatically bridged to equivalent supported constraints when +an appropriate is defined in the `MathOptInterface.Bridges` module or is +defined in another module and is explicitely added. +""" function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, bridge_constraints::Bool=true) universal_fallback = MOIU.UniversalFallback(JuMPMOIModel{Float64}()) @@ -174,16 +210,54 @@ function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, return Model(nothing, backend) end -# TODO doc +""" + Model(factory::Factory; + caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, + bridge_constraints::Bool=true) + +Return a new JuMP model using the factory `factory` to create the optimizer. +This is equivalent to calling `Model` with the same keyword arguments and then +calling [`setoptimizer`](@ref) on the created model with the `factory`. The +factory can be created by the [`with_optimizer`](@ref) function. + +## Examples + +The following creates a model using the optimizer +`IpoptOptimizer(print_level=0)`: +```julia +model = JuMP.Model(with_optimizer(IpoptOptimizer, print_level=0)) +``` +""" function Model(factory::Factory; kwargs...) model = Model(; kwargs...) 
- model.factory = factory # useful for implementing Base.copy - optimizer = create_model(factory) - MOIU.resetoptimizer!(model, optimizer) + setoptimizer(model, factory) return model end -# TODO doc +""" + direct_model(backend::MOI.ModelLike) + +Return a new JuMP model using `backend` to store the model and solve it. As +opposed to the [`Model`](@ref) constructor, no cache of the model is stored +outside of `backend` and no bridges are automatically applied to `backend`. +The absence of cache reduces the memory footprint but it is important to bear +in mind the following implications of creating models using this *direct* mode: + +* When `backend` does not support an operation such as adding + variables/constraints after solver or modifying constraints, an error is + thrown. With models created using the [`Model`](@ref) constructor, such + situations can be dealt with modifying the cache only and copying the model + cache once `JuMP.optimize` is called. +* When `backend` does not support a constraint type, the constraint is not + automatically bridged to constraints supported by `backend`. +* The optimizer used cannot be changed. With models created using the + [`Model`](@ref) constructor, the variable and constraint indices used + are the indices corresponding to the cached model so the optimizer can be + changed but in direct mode, changing the backend would render all variable + and constraint references invalid as their internal indices correspond to + the previous backend. +* The model created cannot be copied. +""" function direct_model(backend::MOI.ModelLike) return Model(nothing, backend) end diff --git a/src/optimizerinterface.jl b/src/optimizerinterface.jl index c7474b3d145..144073f6bfc 100644 --- a/src/optimizerinterface.jl +++ b/src/optimizerinterface.jl @@ -3,6 +3,27 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+""" + setoptimizer(model::Model, factory::Factory) + +Sets the optimizer of the model `model` as the optimizers created by the +factory `factory`. The factory can be created by the [`with_optimizer`](@ref) +function. + +## Examples + +The following sets the optimizer of `model` to be +`IpoptOptimizer(print_level=0)`: +```julia +setoptimizer(model, with_optimizer(IpoptOptimizer, print_level=0)) +``` +""" +function setoptimizer(model::Model, factory::Factory) + model.factory = factory # useful for implementing Base.copy + optimizer = create_model(factory) + MOIU.resetoptimizer!(model, optimizer) +end + # These methods directly map to CachingOptimizer methods. # They cannot be called in Direct mode. function MOIU.resetoptimizer!(model::Model, optimizer::MOI.AbstractOptimizer) diff --git a/test/generate_and_solve.jl b/test/generate_and_solve.jl index 1a50c3e3211..800c80b8705 100644 --- a/test/generate_and_solve.jl +++ b/test/generate_and_solve.jl @@ -39,10 +39,10 @@ MOIU.loadfromstring!(model, modelstring) MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c", "xub", "ylb"]) - mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) - MOIU.resetoptimizer!(m, mockoptimizer) + JuMP.setoptimizer(m, with_optimizer(MOIU.MockOptimizer, JuMP.JuMPMOIModel{Float64}(), evalobjective=false)) MOIU.attachoptimizer!(m) + mockoptimizer = JuMP.caching_optimizer(m).optimizer MOI.set!(mockoptimizer, MOI.TerminationStatus(), MOI.Success) MOI.set!(mockoptimizer, MOI.ObjectiveValue(), -1.0) MOI.set!(mockoptimizer, MOI.ResultCount(), 1) From 39be6af49129f4cf7a50847db4ca8a33d6002156 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sun, 29 Jul 2018 11:47:39 +0200 Subject: [PATCH 06/22] Document factory field --- src/JuMP.jl | 5 +++++ src/optimizerinterface.jl | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/JuMP.jl b/src/JuMP.jl index 5d622dce28a..769d3b72217 100644 --- a/src/JuMP.jl +++ 
b/src/JuMP.jl @@ -145,6 +145,11 @@ mutable struct Model <: AbstractModel customnames::Vector + # Factory used to create a new optimizer, it is kept as it might be needed + # again if the user requests a copy of the model using `Base.copy`. + # In Manual and Automatic mode: Factory used to create the optimizer or + # Nothing if it has not already been set + # In Direct mode: Nothing factory::Union{Nothing, Factory} # In Manual and Automatic modes, LazyBridgeOptimizer{CachingOptimizer}. # In Direct mode, will hold an AbstractOptimizer. diff --git a/src/optimizerinterface.jl b/src/optimizerinterface.jl index 144073f6bfc..c2265878490 100644 --- a/src/optimizerinterface.jl +++ b/src/optimizerinterface.jl @@ -19,7 +19,7 @@ setoptimizer(model, with_optimizer(IpoptOptimizer, print_level=0)) ``` """ function setoptimizer(model::Model, factory::Factory) - model.factory = factory # useful for implementing Base.copy + model.factory = factory optimizer = create_model(factory) MOIU.resetoptimizer!(model, optimizer) end From 7a94929b4ec8572fc7ab4b30c834387741db9295 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sun, 29 Jul 2018 13:20:26 +0200 Subject: [PATCH 07/22] Add doc --- docs/src/solvers.md | 65 +++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 60 insertions(+), 5 deletions(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 070cd7381a0..34fbe0d5df9 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -1,29 +1,84 @@ Interacting with solvers ======================== +A JuMP model stores a +[MathOptInterface (MOI)](https://github.com/JuliaOpt/MathOptInterface.jl) +backend internally that contains the optimization solver. The JuMP layer on top +of this MOI backend is aimed to be as lightweight as possible: +* JuMP does not maintain any copy of the model outside this MOI backend. +* JuMP variable (resp. 
constraint) references are simple structures containing + both a reference to the JuMP model and the MOI index of the variable (resp. + constraint). +* JuMP gives the constraints to the MOI backend in the form provided by the user + without doing any automatic reformulation. +* variables additions, constraints additions/modifications and objective + modifications are directly applied to the MOI backend thus expecting the + backend to support such modifications. + +While this allows JuMP API to to be a thin wrapper on top of the solver API, +as mentionned in the last point above, this seems rather demanding on the +solver. Indeed, while some solvers support incremental building of the model and +modifications before and after solve, other solvers only support the model being +copied at once before solve. Moreover it seems to require all solvers to +implement all possible reformulations independently which seems both very +ambitious and might generate a lot of duplicated code. + +These apparent limitations are in fact addressed at the MOI level in a manner +that is completely transparent to JuMP. While the MOI API may seem very +demanding, it allows MOI models to be a succession of lightweight MOI layers +that fill the gap between JuMP requirements and the solver capabilities. + +JuMP models can be created in three different modes: Automatic, Manual and +Direct. + +## Automatic and Manual modes + +In Automatic and Manual modes, two MOI layers are automatically applied to the +optimizer: + +* `CachingOptimizer`: it maintain a cache of the model so that when the + the optimizer does not support a modification of the model, the optimizer's + internal model can be discarded and restored from the cache just before + optimization. The `CachingOptimizer` has two different modes: Automatic + and Manual corresponding to the two JuMP modes with the same names. 
+* `LazyBridgeOptimizer`: when a constraint added is not supported by the + optimizer, it tries transform the constraint into an equivalent form, + possibly adding new variables and constraints that are supported by the + optimizer. The applied transformations are selected among known recipes + which are called bridges. A few default bridges are defined in MOI but new + ones can be defined and added to the `LazyBridgeOptimizer` used by JuMP. + +See the [MOI documentation](http://www.juliaopt.org/MathOptInterface.jl/stable/) +for more details on these two MOI layers. + +To create a fresh new JuMP model (or a fresh new copy of a JuMP model), JuMP +needs to create a new empty optimizer instance. New optimizer instances can +be obtained using a factory that can be created using the +[`with_optimizer`](@ref) function: ```@docs with_optimizer ``` -The solvers can be set with +The factory can be set to the JuMP model using the [`JuMP.setoptimizer`](@ref) +function: ```@docs JuMP.setoptimizer ``` +New JuMP models are created using the [`Model`](@ref) constructor: ```@docs Model ``` ## Direct mode -For advanced users, the model can be created without using a caching nor a -bridge optimizer using the [`JuMP.direct_model`](@ref) function: +JuMP models can be created in Direct mode using the [`JuMP.direct_model`](@ref) +function. ```@docs JuMP.direct_model ``` -TODO: Describe the connection between JuMP and solvers. Automatic vs. Manual -mode. CachingOptimizer. How to set/change solvers. How to set parameters (solver +TODO: How to set parameters (solver specific and generic). Status codes. Accessing the result. How to accurately measure the solve time. 
From ccf68e5d51e3d3a2a716c5273ef8e83c56351c8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sun, 29 Jul 2018 14:46:33 +0200 Subject: [PATCH 08/22] Fix constructor doc inclusion --- docs/src/solvers.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 34fbe0d5df9..1959c110a93 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -68,7 +68,8 @@ JuMP.setoptimizer New JuMP models are created using the [`Model`](@ref) constructor: ```@docs -Model +Model() +Model(::JuMP.Factory) ``` ## Direct mode From 811f07fb614b5cccb2d0972bcc5295eaf25622d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sun, 29 Jul 2018 15:11:17 +0200 Subject: [PATCH 09/22] Address comments --- docs/src/solvers.md | 18 +++++----- src/JuMP.jl | 73 ++++++++++++++++++++++---------------- src/optimizerinterface.jl | 6 ++-- test/generate_and_solve.jl | 2 +- 4 files changed, 56 insertions(+), 43 deletions(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 1959c110a93..1cbbd4a7504 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -1,10 +1,12 @@ Interacting with solvers ======================== -A JuMP model stores a +A JuMP model keeps a [MathOptInterface (MOI)](https://github.com/JuliaOpt/MathOptInterface.jl) -backend internally that contains the optimization solver. The JuMP layer on top -of this MOI backend is aimed to be as lightweight as possible: +backend internally that stores the optimization problem and act as the +optimization solver (the backend can also not support optimization, e.g. it can +simply store the model in a file). JuMP can be viewed as a lightweight +user-friendly layer on top of the MOI backend: * JuMP does not maintain any copy of the model outside this MOI backend. * JuMP variable (resp. 
constraint) references are simple structures containing @@ -17,7 +19,7 @@ of this MOI backend is aimed to be as lightweight as possible: backend to support such modifications. While this allows JuMP API to to be a thin wrapper on top of the solver API, -as mentionned in the last point above, this seems rather demanding on the +as mentioned in the last point above, this seems rather demanding on the solver. Indeed, while some solvers support incremental building of the model and modifications before and after solve, other solvers only support the model being copied at once before solve. Moreover it seems to require all solvers to @@ -38,9 +40,9 @@ In Automatic and Manual modes, two MOI layers are automatically applied to the optimizer: * `CachingOptimizer`: it maintain a cache of the model so that when the - the optimizer does not support a modification of the model, the optimizer's - internal model can be discarded and restored from the cache just before - optimization. The `CachingOptimizer` has two different modes: Automatic + the optimizer does not support an incremental change to the model, the + optimizer's internal model can be discarded and restored from the cache just + before optimization. The `CachingOptimizer` has two different modes: Automatic and Manual corresponding to the two JuMP modes with the same names. 
* `LazyBridgeOptimizer`: when a constraint added is not supported by the optimizer, it tries transform the constraint into an equivalent form, @@ -69,7 +71,7 @@ JuMP.setoptimizer New JuMP models are created using the [`Model`](@ref) constructor: ```@docs Model() -Model(::JuMP.Factory) +Model(::JuMP.OptimizerFactory) ``` ## Direct mode diff --git a/src/JuMP.jl b/src/JuMP.jl index 769d3b72217..2e11927fd17 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -78,7 +78,14 @@ const MOIBIN = MOICON{MOI.SingleVariable,MOI.ZeroOne} @MOIU.model JuMPMOIModel (ZeroOne, Integer) (EqualTo, GreaterThan, LessThan, Interval) (Zeros, Nonnegatives, Nonpositives, SecondOrderCone, RotatedSecondOrderCone, GeometricMeanCone, PositiveSemidefiniteConeTriangle, PositiveSemidefiniteConeSquare, RootDetConeTriangle, RootDetConeSquare, LogDetConeTriangle, LogDetConeSquare) () (SingleVariable,) (ScalarAffineFunction,ScalarQuadraticFunction) (VectorOfVariables,) (VectorAffineFunction,) -struct Factory +""" + OptimizerFactory + +User-friendly closure that creates new MOI models. New `OptimizerFactory`s are +created with [`with_optimizer`](@ref) and new models are created from the +factory with [`create_model`](@ref). +""" +struct OptimizerFactory # The constructor can be # * `Function`: a function, or # * `DataType`: a type, or @@ -103,15 +110,15 @@ with_optimizer(IpoptOptimizer, print_level=0) ``` """ function with_optimizer(constructor::Type, args...; kwargs...) - return Factory(constructor, args, kwargs) + return OptimizerFactory(constructor, args, kwargs) end """ - create_model(factory::Factory) + create_model(factory::OptimizerFactory) Creates a new model with the factory `factory`. """ -function create_model(factory::Factory) +function create_model(factory::OptimizerFactory) return factory.constructor(factory.args...; factory.kwargs...) 
end @@ -145,12 +152,12 @@ mutable struct Model <: AbstractModel customnames::Vector - # Factory used to create a new optimizer, it is kept as it might be needed + # OptimizerFactory used to create a new optimizer, it is kept as it might be needed # again if the user requests a copy of the model using `Base.copy`. - # In Manual and Automatic mode: Factory used to create the optimizer or + # In Manual and Automatic mode: OptimizerFactory used to create the optimizer or # Nothing if it has not already been set # In Direct mode: Nothing - factory::Union{Nothing, Factory} + factory::Union{Nothing, OptimizerFactory} # In Manual and Automatic modes, LazyBridgeOptimizer{CachingOptimizer}. # In Direct mode, will hold an AbstractOptimizer. moibackend::MOI.AbstractOptimizer @@ -167,26 +174,30 @@ mutable struct Model <: AbstractModel # Enable extensions to attach arbitrary information to a JuMP model by # using an extension-specific symbol as a key. ext::Dict{Symbol, Any} +end - # Inner constructor - function Model(factory::Union{Nothing, Factory}, moibackend::MOI.ModelLike) - @assert MOI.isempty(moibackend) - model = new() - model.variabletolowerbound = Dict{MOIVAR, MOILB}() - model.variabletoupperbound = Dict{MOIVAR, MOIUB}() - model.variabletofix = Dict{MOIVAR, MOIFIX}() - model.variabletointegrality = Dict{MOIVAR, MOIINT}() - model.variabletozeroone = Dict{MOIVAR, MOIBIN}() - model.customnames = VariableRef[] - model.factory = factory - model.moibackend = moibackend - model.optimizehook = nothing - model.nlpdata = nothing - model.objdict = Dict{Symbol, Any}() - model.operator_counter = 0 - model.ext = Dict{Symbol, Any}() - return model - end +""" + Model(factory::Union{Nothing, OptimizerFactory}, moibackend::MOI.ModelLike) + +Return a new JuMP model with factory `factory` and MOI backend `moibackend`. +This constructor is a low-level constructor used by [`Model()`](@ref), +[`Model(::OptimizerFactory)`](@ref) and [`direct_model`](@ref). 
+""" +function Model(factory::Union{Nothing, OptimizerFactory}, moibackend::MOI.ModelLike) + @assert MOI.isempty(moibackend) + return Model(Dict{MOIVAR, MOILB}(), + Dict{MOIVAR, MOIUB}(), + Dict{MOIVAR, MOIFIX}(), + Dict{MOIVAR, MOIINT}(), + Dict{MOIVAR, MOIBIN}(), + VariableRef[], + factory, + moibackend, + nothing, + nothing, + Dict{Symbol, Any}(), + 0, + Dict{Symbol, Any}()) end """ @@ -195,7 +206,7 @@ end Return a new JuMP model without any optimizer storing the model in a cache. The mode of the `CachingOptimizer` storing this cache is `caching_mode`. -The optimizer can be set later with [`setoptimizer`](@ref). If +The optimizer can be set later with [`set_optimizer`](@ref). If `bridge_constraints` is true, constraints that are not supported by the optimizer are automatically bridged to equivalent supported constraints when an appropriate is defined in the `MathOptInterface.Bridges` module or is @@ -216,13 +227,13 @@ function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, end """ - Model(factory::Factory; + Model(factory::OptimizerFactory; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, bridge_constraints::Bool=true) Return a new JuMP model using the factory `factory` to create the optimizer. This is equivalent to calling `Model` with the same keyword arguments and then -calling [`setoptimizer`](@ref) on the created model with the `factory`. The +calling [`set_optimizer`](@ref) on the created model with the `factory`. The factory can be created by the [`with_optimizer`](@ref) function. ## Examples @@ -233,9 +244,9 @@ The following creates a model using the optimizer model = JuMP.Model(with_optimizer(IpoptOptimizer, print_level=0)) ``` """ -function Model(factory::Factory; kwargs...) +function Model(factory::OptimizerFactory; kwargs...) model = Model(; kwargs...) 
- setoptimizer(model, factory) + set_optimizer(model, factory) return model end diff --git a/src/optimizerinterface.jl b/src/optimizerinterface.jl index c2265878490..6ba69cb3381 100644 --- a/src/optimizerinterface.jl +++ b/src/optimizerinterface.jl @@ -4,7 +4,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. """ - setoptimizer(model::Model, factory::Factory) + set_optimizer(model::Model, factory::OptimizerFactory) Sets the optimizer of the model `model` as the optimizers created by the factory `factory`. The factory can be created by the [`with_optimizer`](@ref) @@ -15,10 +15,10 @@ function. The following sets the optimizer of `model` to be `IpoptOptimizer(print_level=0)`: ```julia -setoptimizer(model, with_optimizer(IpoptOptimizer, print_level=0)) +set_optimizer(model, with_optimizer(IpoptOptimizer, print_level=0)) ``` """ -function setoptimizer(model::Model, factory::Factory) +function set_optimizer(model::Model, factory::OptimizerFactory) model.factory = factory optimizer = create_model(factory) MOIU.resetoptimizer!(model, optimizer) diff --git a/test/generate_and_solve.jl b/test/generate_and_solve.jl index 800c80b8705..f719955d541 100644 --- a/test/generate_and_solve.jl +++ b/test/generate_and_solve.jl @@ -39,7 +39,7 @@ MOIU.loadfromstring!(model, modelstring) MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c", "xub", "ylb"]) - JuMP.setoptimizer(m, with_optimizer(MOIU.MockOptimizer, JuMP.JuMPMOIModel{Float64}(), evalobjective=false)) + JuMP.set_optimizer(m, with_optimizer(MOIU.MockOptimizer, JuMP.JuMPMOIModel{Float64}(), evalobjective=false)) MOIU.attachoptimizer!(m) mockoptimizer = JuMP.caching_optimizer(m).optimizer From 93e92dc3239cf3905b37439eb87da19850fff0eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Mon, 30 Jul 2018 08:28:13 +0200 Subject: [PATCH 10/22] Merge url in previous line --- docs/src/solvers.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git 
a/docs/src/solvers.md b/docs/src/solvers.md index 1cbbd4a7504..3fb31e8a4d1 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -1,8 +1,7 @@ Interacting with solvers ======================== -A JuMP model keeps a -[MathOptInterface (MOI)](https://github.com/JuliaOpt/MathOptInterface.jl) +A JuMP model keeps a [MathOptInterface (MOI)](https://github.com/JuliaOpt/MathOptInterface.jl) backend internally that stores the optimization problem and act as the optimization solver (the backend can also not support optimization, e.g. it can simply store the model in a file). JuMP can be viewed as a lightweight From 8b52a3cd029cad984a38206bb93552a18d967a5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Mon, 30 Jul 2018 08:29:53 +0200 Subject: [PATCH 11/22] Check not in Direct mode for set_optimizer --- src/optimizerinterface.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/optimizerinterface.jl b/src/optimizerinterface.jl index 6ba69cb3381..76322d0e9a4 100644 --- a/src/optimizerinterface.jl +++ b/src/optimizerinterface.jl @@ -19,6 +19,7 @@ set_optimizer(model, with_optimizer(IpoptOptimizer, print_level=0)) ``` """ function set_optimizer(model::Model, factory::OptimizerFactory) + @assert mode(model) != Direct model.factory = factory optimizer = create_model(factory) MOIU.resetoptimizer!(model, optimizer) From 1b7b861a7c896d977646e96c6b9f8384768ec410 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Mon, 30 Jul 2018 08:32:11 +0200 Subject: [PATCH 12/22] act -> acts --- docs/src/solvers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 3fb31e8a4d1..84f2eacf759 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -2,7 +2,7 @@ Interacting with solvers ======================== A JuMP model keeps a [MathOptInterface (MOI)](https://github.com/JuliaOpt/MathOptInterface.jl) -backend internally that stores the optimization problem and act as the +backend 
internally that stores the optimization problem and acts as the optimization solver (the backend can also not support optimization, e.g. it can simply store the model in a file). JuMP can be viewed as a lightweight user-friendly layer on top of the MOI backend: From 8fe3c11f058df985178b86c51c9afbf81766a5cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Mon, 30 Jul 2018 08:32:24 +0200 Subject: [PATCH 13/22] it maintain -> maintains --- docs/src/solvers.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 84f2eacf759..8eab7b2338d 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -38,11 +38,11 @@ Direct. In Automatic and Manual modes, two MOI layers are automatically applied to the optimizer: -* `CachingOptimizer`: it maintain a cache of the model so that when the - the optimizer does not support an incremental change to the model, the - optimizer's internal model can be discarded and restored from the cache just - before optimization. The `CachingOptimizer` has two different modes: Automatic - and Manual corresponding to the two JuMP modes with the same names. +* `CachingOptimizer`: maintains a cache of the model so that when the optimizer + does not support an incremental change to the model, the optimizer's internal + model can be discarded and restored from the cache just before optimization. + The `CachingOptimizer` has two different modes: Automatic and Manual + corresponding to the two JuMP modes with the same names. 
* `LazyBridgeOptimizer`: when a constraint added is not supported by the optimizer, it tries transform the constraint into an equivalent form, possibly adding new variables and constraints that are supported by the From 3548801ad4be14e071d76a96004bf3853c86582f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Mon, 30 Jul 2018 08:40:36 +0200 Subject: [PATCH 14/22] Define backend --- docs/src/solvers.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 8eab7b2338d..f18f9f0c223 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -2,10 +2,11 @@ Interacting with solvers ======================== A JuMP model keeps a [MathOptInterface (MOI)](https://github.com/JuliaOpt/MathOptInterface.jl) -backend internally that stores the optimization problem and acts as the -optimization solver (the backend can also not support optimization, e.g. it can -simply store the model in a file). JuMP can be viewed as a lightweight -user-friendly layer on top of the MOI backend: +*backend* internally that stores the optimization problem and acts as the +optimization solver. We call it an MOI *backend* and not optimizer as it can +also be a wrapper around an optimization file format such as MPS that writes +the JuMP model in a file. JuMP can be viewed as a lightweight user-friendly +layer on top of the MOI backend: * JuMP does not maintain any copy of the model outside this MOI backend. * JuMP variable (resp. 
constraint) references are simple structures containing From 2f641068f837d87eee4bb1c26888dfdd70a75522 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 31 Jul 2018 19:48:59 +0200 Subject: [PATCH 15/22] Mention that backend is ModelLike --- docs/src/solvers.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index f18f9f0c223..535cd2ba7c4 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -2,11 +2,11 @@ Interacting with solvers ======================== A JuMP model keeps a [MathOptInterface (MOI)](https://github.com/JuliaOpt/MathOptInterface.jl) -*backend* internally that stores the optimization problem and acts as the -optimization solver. We call it an MOI *backend* and not optimizer as it can -also be a wrapper around an optimization file format such as MPS that writes -the JuMP model in a file. JuMP can be viewed as a lightweight user-friendly -layer on top of the MOI backend: +*backend* of type `MOI.ModelLike` internally that stores the optimization +problem and acts as the optimization solver. We call it an MOI *backend* and +not optimizer as it can also be a wrapper around an optimization file format +such as MPS that writes the JuMP model in a file. JuMP can be viewed as a +lightweight user-friendly layer on top of the MOI backend: * JuMP does not maintain any copy of the model outside this MOI backend. * JuMP variable (resp. 
constraint) references are simple structures containing From e3ed0738521949596b5091f49e80404849d128e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 31 Jul 2018 19:49:28 +0200 Subject: [PATCH 16/22] Mention that bridge can be disabled --- docs/src/solvers.md | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 535cd2ba7c4..a8ebaf94fe4 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -44,12 +44,14 @@ optimizer: model can be discarded and restored from the cache just before optimization. The `CachingOptimizer` has two different modes: Automatic and Manual corresponding to the two JuMP modes with the same names. -* `LazyBridgeOptimizer`: when a constraint added is not supported by the - optimizer, it tries transform the constraint into an equivalent form, - possibly adding new variables and constraints that are supported by the - optimizer. The applied transformations are selected among known recipes - which are called bridges. A few default bridges are defined in MOI but new - ones can be defined and added to the `LazyBridgeOptimizer` used by JuMP. +* `LazyBridgeOptimizer` (this can be disabled using the `bridge_constraints` + keyword argument to [`Model`](@ref) constructor): when a constraint added is + not supported by the optimizer, it tries to transform the constraint into an + equivalent form, possibly adding new variables and constraints that are + supported by the optimizer. The applied transformations are selected among + known recipes which are called bridges. A few default bridges are defined in + MOI but new ones can be defined and added to the `LazyBridgeOptimizer` used by + JuMP. See the [MOI documentation](http://www.juliaopt.org/MathOptInterface.jl/stable/) for more details on these two MOI layers. 
From f42fb0836fa2d349afd368d79f9f2c688c2ece82 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 31 Jul 2018 19:49:52 +0200 Subject: [PATCH 17/22] JuMP API -> JuMP --- docs/src/solvers.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index a8ebaf94fe4..5d0d36afdc4 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -18,9 +18,9 @@ lightweight user-friendly layer on top of the MOI backend: modifications are directly applied to the MOI backend thus expecting the backend to support such modifications. -While this allows JuMP API to to be a thin wrapper on top of the solver API, -as mentioned in the last point above, this seems rather demanding on the -solver. Indeed, while some solvers support incremental building of the model and +While this allows JuMP to be a thin wrapper on top of the solver API, as +mentioned in the last point above, this seems rather demanding on the solver. +Indeed, while some solvers support incremental building of the model and modifications before and after solve, other solvers only support the model being copied at once before solve. Moreover it seems to require all solvers to implement all possible reformulations independently which seems both very From 8be79fabccd7b1824ca8aa0d827a3d388308c2ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 31 Jul 2018 20:05:05 +0200 Subject: [PATCH 18/22] factory -> OptimizerFactory --- docs/src/solvers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index 5d0d36afdc4..a107b663cf0 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -58,7 +58,7 @@ for more details on these two MOI layers. To create a fresh new JuMP model (or a fresh new copy of a JuMP model), JuMP needs to create a new empty optimizer instance. 
New optimizer instances can -be obtained using a factory that can be created using the +be obtained using an [`OptimizerFactory`](@ref) that can be created using the [`with_optimizer`](@ref) function: ```@docs with_optimizer From 0afaf320ca80802ab562fcd5fecb67bc2df64e0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 31 Jul 2018 20:08:03 +0200 Subject: [PATCH 19/22] Add example for OptimizerFactory doc --- src/JuMP.jl | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/JuMP.jl b/src/JuMP.jl index 2e11927fd17..e61e4abdeaf 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -84,6 +84,16 @@ const MOIBIN = MOICON{MOI.SingleVariable,MOI.ZeroOne} User-friendly closure that creates new MOI models. New `OptimizerFactory`s are created with [`with_optimizer`](@ref) and new models are created from the factory with [`create_model`](@ref). + +## Examples + +The following construct a factory and then use it to create two independent +`IpoptOptimizer`s: +```julia +factory = with_optimizer(IpoptOptimizer, print_level=0) +optimizer1 = JuMP.create_model(factory) +optimizer2 = JuMP.create_model(factory) +``` """ struct OptimizerFactory # The constructor can be From f70303db96a146237b5f41a7a2d75f1eb063c04c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 31 Jul 2018 20:29:50 +0200 Subject: [PATCH 20/22] Address comments --- docs/src/solvers.md | 10 +++++----- src/JuMP.jl | 40 +++++++++++++------------------------ src/optimizerinterface.jl | 41 ++++++++++++++++---------------------- test/generate_and_solve.jl | 7 ++----- 4 files changed, 38 insertions(+), 60 deletions(-) diff --git a/docs/src/solvers.md b/docs/src/solvers.md index a107b663cf0..ad4edeeae0a 100644 --- a/docs/src/solvers.md +++ b/docs/src/solvers.md @@ -56,18 +56,18 @@ optimizer: See the [MOI documentation](http://www.juliaopt.org/MathOptInterface.jl/stable/) for more details on these two MOI layers. 
-To create a fresh new JuMP model (or a fresh new copy of a JuMP model), JuMP -needs to create a new empty optimizer instance. New optimizer instances can -be obtained using an [`OptimizerFactory`](@ref) that can be created using the +To create a fresh new JuMP model, JuMP needs to create a new empty optimizer +instance. New optimizer instances can be obtained using an +[`OptimizerFactory`](@ref) that can be created using the [`with_optimizer`](@ref) function: ```@docs with_optimizer ``` -The factory can be set to the JuMP model using the [`JuMP.setoptimizer`](@ref) +The factory can be set to the JuMP model in the [`JuMP.optimize`](@ref) function: ```@docs -JuMP.setoptimizer +JuMP.optimize ``` New JuMP models are created using the [`Model`](@ref) constructor: diff --git a/src/JuMP.jl b/src/JuMP.jl index e61e4abdeaf..a2e05979f50 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -162,12 +162,6 @@ mutable struct Model <: AbstractModel customnames::Vector - # OptimizerFactory used to create a new optimizer, it is kept as it might be needed - # again if the user requests a copy of the model using `Base.copy`. - # In Manual and Automatic mode: OptimizerFactory used to create the optimizer or - # Nothing if it has not already been set - # In Direct mode: Nothing - factory::Union{Nothing, OptimizerFactory} # In Manual and Automatic modes, LazyBridgeOptimizer{CachingOptimizer}. # In Direct mode, will hold an AbstractOptimizer. moibackend::MOI.AbstractOptimizer @@ -187,13 +181,13 @@ mutable struct Model <: AbstractModel end """ - Model(factory::Union{Nothing, OptimizerFactory}, moibackend::MOI.ModelLike) + Model(moibackend::MOI.ModelLike) -Return a new JuMP model with factory `factory` and MOI backend `moibackend`. -This constructor is a low-level constructor used by [`Model()`](@ref), +Return a new JuMP model with MOI backend `moibackend`. This constructor is a +low-level constructor used by [`Model()`](@ref), [`Model(::OptimizerFactory)`](@ref) and [`direct_model`](@ref). 
""" -function Model(factory::Union{Nothing, OptimizerFactory}, moibackend::MOI.ModelLike) +function Model(moibackend::MOI.ModelLike) @assert MOI.isempty(moibackend) return Model(Dict{MOIVAR, MOILB}(), Dict{MOIVAR, MOIUB}(), @@ -201,7 +195,6 @@ function Model(factory::Union{Nothing, OptimizerFactory}, moibackend::MOI.ModelL Dict{MOIVAR, MOIINT}(), Dict{MOIVAR, MOIBIN}(), VariableRef[], - factory, moibackend, nothing, nothing, @@ -214,9 +207,9 @@ end Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, bridge_constraints::Bool=true) -Return a new JuMP model without any optimizer storing the model in a cache. -The mode of the `CachingOptimizer` storing this cache is `caching_mode`. -The optimizer can be set later with [`set_optimizer`](@ref). If +Return a new JuMP model without any optimizer; the model is stored in +a cache. The mode of the `CachingOptimizer` storing this cache is +`caching_mode`. The optimizer can be set later with [`set_optimizer`](@ref). If `bridge_constraints` is true, constraints that are not supported by the optimizer are automatically bridged to equivalent supported constraints when an appropriate is defined in the `MathOptInterface.Bridges` module or is @@ -256,7 +249,8 @@ model = JuMP.Model(with_optimizer(IpoptOptimizer, print_level=0)) """ function Model(factory::OptimizerFactory; kwargs...) model = Model(; kwargs...) - set_optimizer(model, factory) + optimizer = create_model(factory) + MOIU.resetoptimizer!(model, optimizer) return model end @@ -266,22 +260,16 @@ end Return a new JuMP model using `backend` to store the model and solve it. As opposed to the [`Model`](@ref) constructor, no cache of the model is stored outside of `backend` and no bridges are automatically applied to `backend`. 
-The absence of cache reduces the memory footprint but it is importnat to bear +The absence of cache reduces the memory footprint but it is important to bear in mind the following implications of creating models using this *direct* mode: * When `backend` does not support an operation such as adding variables/constraints after solver or modifying constraints, an error is thrown. With models created using the [`Model`](@ref) constructor, such - situations can be dealt with modifying the cache only and copying the model - cache once `JuMP.optimize` is called. -* When `backend` does not support a constraint type, the constraint is not - automatically bridged to constraints supported by `backend`. -* The optimizer used cannot be changed. With models created using the - [`Model`](@ref) constuctor, the variable and constraint indices used - are the indices corresponding to the cached model so the optimizer can be - changed but in direct mode, changing the backend would render all variable - and constraint references invalid are their internal indices corresponds to - the previous backend. + situations can be dealt with by storing the modifications in a cache and + loading them into the optimizer when `JuMP.optimize` is called. +* No constraint bridging is supported by default. +* The optimizer used cannot be changed after the model is constructed. * The model created cannot be copied. """ function direct_model(backend::MOI.ModelLike) diff --git a/src/optimizerinterface.jl b/src/optimizerinterface.jl index 76322d0e9a4..9c316c69e2a 100644 --- a/src/optimizerinterface.jl +++ b/src/optimizerinterface.jl @@ -3,28 +3,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -""" - set_optimizer(model::Model, factory::OptimizerFactory) - -Sets the optimizer of the model `model` as the optimizers created by the -factory `factory`. The factory can be created by the [`with_optimizer`](@ref) -function. 
- -## Examples - -The following sets the optimizer of `model` to be -`IpoptOptimizer(print_level=0)`: -```julia -set_optimizer(model, with_optimizer(IpoptOptimizer, print_level=0)) -``` -""" -function set_optimizer(model::Model, factory::OptimizerFactory) - @assert mode(model) != Direct - model.factory = factory - optimizer = create_model(factory) - MOIU.resetoptimizer!(model, optimizer) -end - # These methods directly map to CachingOptimizer methods. # They cannot be called in Direct mode. function MOIU.resetoptimizer!(model::Model, optimizer::MOI.AbstractOptimizer) @@ -52,8 +30,17 @@ function MOIU.attachoptimizer!(model::Model) end -function optimize(model::Model; - ignore_optimize_hook=(model.optimizehook===nothing)) +""" + function optimize(model::Model, + factory::Union{Nothing, OptimizerFactory} = nothing; + ignore_optimize_hook=(model.optimizehook===nothing)) + +Optimize the model. If `factory` is not `nothing`, it first sets the optimizer +to a new one created using the factory. +""" +function optimize(model::Model, + factory::Union{Nothing, OptimizerFactory} = nothing; + ignore_optimize_hook=(model.optimizehook===nothing)) # The NLPData is not kept in sync, so re-set it here. # TODO: Consider how to handle incremental solves. 
if model.nlpdata !== nothing @@ -61,6 +48,12 @@ function optimize(model::Model; empty!(model.nlpdata.nlconstr_duals) end + if factory !== nothing + optimizer = create_model(factory) + MOIU.resetoptimizer!(model, optimizer) + MOIU.attachoptimizer!(model) + end + # If the user or an extension has provided an optimize hook, call # that instead of solving the model ourselves if !ignore_optimize_hook diff --git a/test/generate_and_solve.jl b/test/generate_and_solve.jl index f719955d541..e0ae828b8e0 100644 --- a/test/generate_and_solve.jl +++ b/test/generate_and_solve.jl @@ -186,10 +186,9 @@ MOIU.loadfromstring!(model, modelstring) MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c1", "c2", "c3"]) - mockoptimizer = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}(), evalobjective=false) - MOIU.resetoptimizer!(m, mockoptimizer) - MOIU.attachoptimizer!(m) + JuMP.optimize(m, with_optimizer(MOIU.MockOptimizer, JuMP.JuMPMOIModel{Float64}(), evalobjective=false)) + mockoptimizer = JuMP.caching_optimizer(m).optimizer MOI.set!(mockoptimizer, MOI.TerminationStatus(), MOI.Success) MOI.set!(mockoptimizer, MOI.ObjectiveValue(), -1.0) MOI.set!(mockoptimizer, MOI.ResultCount(), 1) @@ -201,8 +200,6 @@ MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(c2), 2.0) MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(c3), 3.0) - JuMP.optimize(m) - #@test JuMP.isattached(m) @test JuMP.hasresultvalues(m) From 2e3c6bdae23fb76be46389ea4cefa68405f6cfa6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 31 Jul 2018 21:49:24 +0200 Subject: [PATCH 21/22] Fix --- src/JuMP.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/JuMP.jl b/src/JuMP.jl index a2e05979f50..a0331c5707c 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -226,7 +226,7 @@ function Model(; caching_mode::MOIU.CachingOptimizerMode=MOIU.Automatic, else backend = caching_opt end - return Model(nothing, backend) + return Model(backend) 
end """ @@ -273,7 +273,7 @@ in mind the following implications of creating models using this *direct* mode: * The model created cannot be copied. """ function direct_model(backend::MOI.ModelLike) - return Model(nothing, backend) + return Model(backend) end # In Automatic and Manual mode, `model.moibackend` is either directly the From 16caf117978e1252ed8085efddd211b06bd559fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Wed, 1 Aug 2018 00:13:47 +0200 Subject: [PATCH 22/22] Fix test --- test/generate_and_solve.jl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/test/generate_and_solve.jl b/test/generate_and_solve.jl index e0ae828b8e0..0c9a535aa43 100644 --- a/test/generate_and_solve.jl +++ b/test/generate_and_solve.jl @@ -39,8 +39,7 @@ MOIU.loadfromstring!(model, modelstring) MOIU.test_models_equal(JuMP.caching_optimizer(m).model_cache, model, ["x","y"], ["c", "xub", "ylb"]) - JuMP.set_optimizer(m, with_optimizer(MOIU.MockOptimizer, JuMP.JuMPMOIModel{Float64}(), evalobjective=false)) - MOIU.attachoptimizer!(m) + JuMP.optimize(m, with_optimizer(MOIU.MockOptimizer, JuMP.JuMPMOIModel{Float64}(), evalobjective=false)) mockoptimizer = JuMP.caching_optimizer(m).optimizer MOI.set!(mockoptimizer, MOI.TerminationStatus(), MOI.Success) @@ -54,8 +53,6 @@ MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.UpperBoundRef(x)), 0.0) MOI.set!(mockoptimizer, MOI.ConstraintDual(), JuMP.optimizerindex(JuMP.LowerBoundRef(y)), 1.0) - JuMP.optimize(m) - #@test JuMP.isattached(m) @test JuMP.hasresultvalues(m)