Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Parametrize JuMP model in optimizer type #1348

Closed
wants to merge 8 commits into from
Closed
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
128 changes: 78 additions & 50 deletions src/JuMP.jl
Expand Up @@ -28,7 +28,7 @@ using .Derivatives

export
# Objects
Model, VariableRef, Norm, AffExpr, QuadExpr, SOCExpr,
Model, VariableRef,
# LinearConstraint, QuadConstraint, SDConstraint, SOCConstraint,
NonlinearConstraint,
ConstraintRef,
Expand Down Expand Up @@ -78,7 +78,8 @@ const MOIBIN = MOICON{MOI.SingleVariable,MOI.ZeroOne}
@MOIU.model JuMPMOIModel (ZeroOne, Integer) (EqualTo, GreaterThan, LessThan, Interval) (Zeros, Nonnegatives, Nonpositives, SecondOrderCone, RotatedSecondOrderCone, GeometricMeanCone, PositiveSemidefiniteConeTriangle, PositiveSemidefiniteConeSquare, RootDetConeTriangle, RootDetConeSquare, LogDetConeTriangle, LogDetConeSquare) () (SingleVariable,) (ScalarAffineFunction,ScalarQuadraticFunction) (VectorOfVariables,) (VectorAffineFunction,)

###############################################################################
# Model
# Model{BT}
# Model with an MOI backend of type `BT`

# Model has three modes:
# 1) Automatic: moibackend field holds a CachingOptimizer in Automatic mode.
Expand All @@ -88,7 +89,7 @@ const MOIBIN = MOICON{MOI.SingleVariable,MOI.ZeroOne}
@enum ModelMode Automatic Manual Direct

abstract type AbstractModel end
mutable struct Model <: AbstractModel
mutable struct Model{BT} <: AbstractModel

# special variablewise properties that we keep track of:
# lower bound, upper bound, fixed, integrality, binary
Expand Down Expand Up @@ -116,7 +117,7 @@ mutable struct Model <: AbstractModel
# # such that a symmetry-enforcing constraint has been created
# # between sdpconstr[c].terms[i,j] and sdpconstr[c].terms[j,i]
# sdpconstrSym::Vector{Vector{Tuple{Int,Int}}}
moibackend::Union{MOI.AbstractOptimizer,MOIU.CachingOptimizer}
moibackend::BT
# callbacks
callbacks
# lazycallback
Expand All @@ -141,49 +142,76 @@ mutable struct Model <: AbstractModel
# their functionality, and store an instance of the type in this
# dictionary keyed on an extension-specific symbol
ext::Dict{Symbol,Any}
# Default constructor
function Model(; mode::ModelMode=Automatic, backend=nothing, optimizer=nothing)
m = new()
# TODO make pretty
m.variabletolowerbound = Dict{MOIVAR,MOILB}()
m.variabletoupperbound = Dict{MOIVAR,MOIUB}()
m.variabletofix = Dict{MOIVAR,MOIFIX}()
m.variabletointegrality = Dict{MOIVAR,MOIINT}()
m.variabletozeroone = Dict{MOIVAR,MOIBIN}()
m.customnames = VariableRef[]
m.objbound = 0.0
m.objval = 0.0
if backend != nothing
# TODO: It would make more sense to not force users to specify Direct mode if they also provide a backend.
@assert mode == Direct
@assert optimizer === nothing
@assert MOI.isempty(backend)
m.moibackend = backend
else
@assert mode != Direct
m.moibackend = MOIU.CachingOptimizer(MOIU.UniversalFallback(JuMPMOIModel{Float64}()), mode == Automatic ? MOIU.Automatic : MOIU.Manual)
if optimizer !== nothing
MOIU.resetoptimizer!(m, optimizer)
end
end
m.callbacks = Any[]
m.optimizehook = nothing
# m.printhook = nothing
m.nlpdata = nothing
m.objdict = Dict{Symbol,Any}()
m.operator_counter = 0
m.ext = Dict{Symbol,Any}()

return m
end

"""
Model(backend::MOI.AbstractOptimizer)

Construct a JuMP model in direct mode with the optimizer `backend`.
The type of the model returned is `Model{typeof(backend)}`.
JuMP does not assume anything on the structure of `backend` and treat it as a
black-box optimizer. The user is responsible to handle the structure of the
`backend` optimizer. That is, even if `backend` is a caching optimizer,
the JuMP model does not support caching optimizer methods and the user is
expected to call them directly on the `moibackend`.
"""
function Model(backend::MOI.AbstractOptimizer)
@assert MOI.isempty(backend)
# TODO make pretty
m = Model(Dict{MOIVAR,MOILB}(), # variabletolowerbound
Dict{MOIVAR,MOIUB}(), # variabletoupperbound
Dict{MOIVAR,MOIFIX}(), # variabletofix
Dict{MOIVAR,MOIINT}(), # variabletointegrality
Dict{MOIVAR,MOIBIN}(), # variabletozeroone
VariableRef[], # customnames
0.0, # objbound
0.0, # objval
backend, # moibackend
Any[], # callbacks
nothing, # optimizehook
# nothing, # printhook
nothing, # nlpdata
Dict{Symbol,Any}(), # objdict
0, # operator_counter
Dict{Symbol,Any}()) # ext
end

"""
Model(; mode::ModelMode=Automatic, optimizer=nothing)

Construct a JuMP model in caching mode. The model stores a cache of the problem
data that is independent to the optimizer. The problem data is copied to the
optimizer when [`optimize`](@ref) is called. Then, further problem
modifications are applied on both the cache and the optimizer until an
operation that the optimizer does not support is applied or the solver is
explicitely reset using `MOIU.resetoptimizer!`.

* If the mode is `Automatic`, applying an operation not supported by the optimizer will be equivalent to explicitely calling `MOIU.resetoptimizer!` before its application.
* If the mode is `Manual`, this will throw an error.

The mode `Automatic` is more appropriate in applications where several
optimizers are used and you know some need to be reset for some operations and
you want this to be handled transparently whenever needed.
The mode `Manual` is more appropriate in applications where you know when
optimizers need to be reset and you want to be informed when an optimizers
unexpectedly reports that an operation is not supported.
"""
function Model(; mode::ModelMode=Automatic, optimizer=nothing)
@assert mode != Direct
backend = MOIU.CachingOptimizer(MOIU.UniversalFallback(JuMPMOIModel{Float64}()), mode == Automatic ? MOIU.Automatic : MOIU.Manual)
m = Model(backend)
if optimizer !== nothing
MOIU.resetoptimizer!(m, optimizer)
end
return m
end

# Getters/setters

function mode(m::Model)
if !(m.moibackend isa MOIU.CachingOptimizer)
return Direct
elseif m.moibackend.mode == MOIU.Automatic
const NonDirectBackendType = MOIU.CachingOptimizer{MOIU.UniversalFallback{JuMP.JuMPMOIModel{Float64}}}
mode(m::Model) = Direct
function mode(m::Model{NonDirectBackendType})
if m.moibackend.mode == MOIU.Automatic
return Automatic
else
return Manual
Expand Down Expand Up @@ -264,12 +292,12 @@ struct ConstraintRef{M<:AbstractModel,C}
end

# TODO: should model be a parameter here?
function MOI.delete!(m::Model, cr::ConstraintRef{Model})
# Delete the constraint referenced by `cr` from the MOI backend of `m`.
# The reference must belong to `m` (internal invariant, hence `@assert`).
function MOI.delete!(m::Model, cr::ConstraintRef{<:Model})
    @assert m === cr.m
    ci = index(cr)
    return MOI.delete!(m.moibackend, ci)
end

MOI.isvalid(m::Model, cr::ConstraintRef{Model}) = cr.m === m && MOI.isvalid(m.moibackend, cr.index)
# A constraint reference is valid only when it belongs to `m` AND its index is
# still valid in the MOI backend; the ownership check short-circuits so that
# the backend is not queried for a foreign reference.
function MOI.isvalid(m::Model, cr::ConstraintRef{<:Model})
    return cr.m === m && MOI.isvalid(m.moibackend, cr.index)
end

"""
addconstraint(m::Model, c::AbstractConstraint, name::String="")
Expand Down Expand Up @@ -322,7 +350,7 @@ function optimizerindex(v::VariableRef)
end
end

function optimizerindex(cr::ConstraintRef{Model})
function optimizerindex(cr::ConstraintRef{<:Model})
if mode(cr.m) == Direct
return index(cr)
else
Expand All @@ -333,7 +361,7 @@ end

# Return the raw MOI index wrapped by the constraint reference.
function index(cr::ConstraintRef)
    return cr.index
end

function hasresultdual(m::Model, REF::Type{<:ConstraintRef{Model, T}}) where {T <: MOICON}
# Return `true` if a dual value is available for constraint references of type
# `REF` in the result currently loaded in `m` — i.e. forward the question to
# `MOI.canget` for the `ConstraintDual` attribute.
function hasresultdual(m::Model, REF::Type{<:ConstraintRef{M, T}}) where {M <: Model, T <: MOICON}
    MOI.canget(m, MOI.ConstraintDual(), REF)
end

Expand All @@ -344,7 +372,7 @@ Get the dual value of this constraint in the result returned by a solver.
Use `hasresultdual` to check if a result exists before asking for values.
Replaces `getdual` for most use cases.
"""
function resultdual(cr::ConstraintRef{Model, <:MOICON})
function resultdual(cr::ConstraintRef{<:Model, <:MOICON})
    # Forward to the attribute interface on the owning model.
    return MOI.get(cr.m, MOI.ConstraintDual(), cr)
end

Expand All @@ -353,9 +381,9 @@ end

Get a constraint's name.
"""
name(cr::ConstraintRef{Model,<:MOICON}) = MOI.get(cr.m, MOI.ConstraintName(), cr)
function name(cr::ConstraintRef{<:Model,<:MOICON})
    # The name lives in the MOI backend as the `ConstraintName` attribute.
    return MOI.get(cr.m, MOI.ConstraintName(), cr)
end

setname(cr::ConstraintRef{Model,<:MOICON}, s::String) = MOI.set!(cr.m, MOI.ConstraintName(), cr, s)
# Set a constraint's name by writing the `ConstraintName` attribute on the
# owning model's backend.
function setname(cr::ConstraintRef{<:Model,<:MOICON}, s::String)
    return MOI.set!(cr.m, MOI.ConstraintName(), cr, s)
end

"""
canget(m::JuMP.Model, attr::MathOptInterface.AbstractModelAttribute)::Bool
Expand All @@ -365,7 +393,7 @@ false if not.
"""
# Model-level attribute queries are forwarded directly to the MOI backend.
MOI.canget(m::Model, attr::MOI.AbstractModelAttribute) = MOI.canget(m.moibackend, attr)
# Variable attributes are queried against the backend using the raw MOI
# variable index type rather than the JuMP `VariableRef` wrapper.
MOI.canget(m::Model, attr::MOI.AbstractVariableAttribute, ::Type{VariableRef}) = MOI.canget(m.moibackend, attr, MOIVAR)
MOI.canget(m::Model, attr::MOI.AbstractConstraintAttribute, ::Type{ConstraintRef{Model,T}}) where {T <: MOICON} = MOI.canget(m.moibackend, attr, T)
# Constraint attributes are queried against the backend using the raw MOI
# constraint index type `T` carried by the JuMP `ConstraintRef` wrapper.
MOI.canget(m::Model, attr::MOI.AbstractConstraintAttribute, ::Type{ConstraintRef{M,T}}) where {M <: Model, T <: MOICON} = MOI.canget(m.moibackend, attr, T)

"""
get(m::JuMP.Model, attr::MathOptInterface.AbstractModelAttribute)
Expand Down
40 changes: 18 additions & 22 deletions src/affexpr.jl
Expand Up @@ -10,7 +10,6 @@
# src/affexpr.jl
# Defines all types relating to affine expressions
# - GenericAffExpr ∑ aᵢ xᵢ + c
# - AffExpr Alias for (Float64, VariableRef)
# - AffExprConstraint AffExpr-in-set constraint
# Operator overloads in src/operators.jl
#############################################################################
Expand Down Expand Up @@ -206,11 +205,8 @@ end
Base.convert(::Type{GenericAffExpr{T,V}}, v::V) where {T,V} = GenericAffExpr(zero(T), v => one(T))
Base.convert(::Type{GenericAffExpr{T,V}}, v::Real) where {T,V} = GenericAffExpr{T,V}(convert(T, v))

# Alias for (Float64, VariableRef), the specific GenericAffExpr used by JuMP
const AffExpr = GenericAffExpr{Float64,VariableRef}

# Check all coefficients are finite, i.e. not NaN, not Inf, not -Inf
function assert_isfinite(a::AffExpr)
function assert_isfinite(a::GenericAffExpr)
for (coef, var) in linearterms(a)
isfinite(coef) || error("Invalid coefficient $coef on variable $var.")
end
Expand All @@ -224,16 +220,16 @@ Replaces `getvalue` for most use cases.
"""
function resultvalue(a::GenericAffExpr)
    # Evaluate the expression by mapping `resultvalue` over its variables.
    return value(a, resultvalue)
end

# Note: No validation is performed that the variables in the AffExpr belong to
# the same model.
function MOI.ScalarAffineFunction(a::AffExpr)
# Note: No validation is performed that the variables in the GenericAffExpr
# belong to the same model.
function MOI.ScalarAffineFunction(a::GenericAffExpr)
    # Reject NaN/Inf coefficients up front; MOI functions must be finite.
    assert_isfinite(a)
    # Translate each (coefficient, variable) pair into an MOI term, mapping
    # the JuMP variable to its raw MOI index.
    moiterms = [MOI.ScalarAffineTerm(coef, index(var)) for (coef, var) in linearterms(a)]
    return MOI.ScalarAffineFunction(moiterms, a.constant)
end

function AffExpr(m::Model, f::MOI.ScalarAffineFunction)
aff = AffExpr()
function GenericAffExpr{T, VariableRef{MT}}(m::MT, f::MOI.ScalarAffineFunction) where {T, MT}
aff = GenericAffExpr{T, VariableRef{MT}}()
for t in f.terms
add_to_expression!(aff, t.coefficient, VariableRef(m, t.variable_index))
end
Expand All @@ -242,12 +238,12 @@ function AffExpr(m::Model, f::MOI.ScalarAffineFunction)
end

"""
_fillvaf!(terms, offset::Int, oi::Int, aff::AffExpr)
_fillvaf!(terms, offset::Int, oi::Int, aff::GenericAffExpr)

Fills the vectors terms at indices starting at `offset+1` with the terms of `aff`.
The output index for all terms is `oi`.
"""
function _fillvaf!(terms, offset::Int, oi::Int, aff::AffExpr)
function _fillvaf!(terms, offset::Int, oi::Int, aff::GenericAffExpr)
i = 1
for (coef, var) in linearterms(aff)
terms[offset+i] = MOI.VectorAffineTerm(Int64(oi), MOI.ScalarAffineTerm(coef, index(var)))
Expand All @@ -256,7 +252,7 @@ function _fillvaf!(terms, offset::Int, oi::Int, aff::AffExpr)
offset + length(linearterms(aff))
end

function MOI.VectorAffineFunction(affs::Vector{AffExpr})
function MOI.VectorAffineFunction(affs::Vector{<:GenericAffExpr})
len = sum(aff -> length(linearterms(aff)), affs)
terms = Vector{MOI.VectorAffineTerm{Float64}}(len)
constant = Vector{Float64}(length(affs))
Expand All @@ -268,7 +264,7 @@ function MOI.VectorAffineFunction(affs::Vector{AffExpr})
MOI.VectorAffineFunction(terms, constant)
end

function setobjective(m::Model, sense::Symbol, a::AffExpr)
function setobjective(m::Model, sense::Symbol, a::GenericAffExpr)
if sense == :Min
moisense = MOI.MinSense
else
Expand All @@ -281,14 +277,14 @@ function setobjective(m::Model, sense::Symbol, a::AffExpr)
end

"""
objectivefunction(m::Model, ::Type{AffExpr})
objectivefunction(m::Model, ::Type{<:GenericAffExpr})

Return an `AffExpr` object representing the objective function.
Return an `GenericAffExpr` object representing the objective function.
Error if the objective is not linear.
"""
function objectivefunction(m::Model, ::Type{AffExpr})
# Return the backend's objective as an affine expression of type
# `AffExprType`; the `::MOI.ScalarAffineFunction` assertion errors if the
# stored objective is not linear.
function objectivefunction(m::Model, AffExprType::Type{<:GenericAffExpr})
    f = MOI.get(m.moibackend, MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}())::MOI.ScalarAffineFunction
    # NOTE(review): two consecutive `return` statements — the second is
    # unreachable and looks like diff residue from the `AffExpr` →
    # `AffExprType` rename; presumably only the `AffExprType` line is
    # intended. TODO confirm and delete the first.
    return AffExpr(m, f)
    return AffExprType(m, f)
end


Expand Down Expand Up @@ -323,15 +319,15 @@ end

moi_function_and_set(c::VectorAffExprConstraint) = (MOI.VectorAffineFunction(c.func), c.set)

function constraintobject(cr::ConstraintRef{Model}, ::Type{AffExpr}, ::Type{SetType}) where {SetType <: MOI.AbstractScalarSet}
# Rebuild a scalar affine constraint object from the backend: query the
# function and set for `cr`; the type assertions error if the constraint is
# not scalar-affine-in-`SetType`.
function constraintobject(cr::ConstraintRef{<:Model}, AffExprType::Type{<:GenericAffExpr}, ::Type{SetType}) where {SetType <: MOI.AbstractScalarSet}
    f = MOI.get(cr.m, MOI.ConstraintFunction(), cr)::MOI.ScalarAffineFunction
    s = MOI.get(cr.m, MOI.ConstraintSet(), cr)::SetType
    # NOTE(review): two consecutive `return` statements — the second is
    # unreachable and looks like diff residue from the `AffExpr` →
    # `AffExprType` rename; presumably only the `AffExprType` line is
    # intended. TODO confirm and delete the first.
    return AffExprConstraint(AffExpr(cr.m, f), s)
    return AffExprConstraint(AffExprType(cr.m, f), s)
end

function constraintobject(cr::ConstraintRef{Model}, ::Type{Vector{AffExpr}}, ::Type{SetType}) where {SetType <: MOI.AbstractVectorSet}
# Rebuild a vector affine constraint object from the backend: query the
# function and set for `cr`, then convert each scalar row of the
# `VectorAffineFunction` back into an affine expression.
function constraintobject(cr::ConstraintRef{<:Model}, ::Type{Vector{AffExprType}}, ::Type{SetType}) where {AffExprType <: GenericAffExpr, SetType <: MOI.AbstractVectorSet}
    m = cr.m
    f = MOI.get(m, MOI.ConstraintFunction(), cr)::MOI.VectorAffineFunction
    s = MOI.get(m, MOI.ConstraintSet(), cr)::SetType
    # NOTE(review): two consecutive `return` statements — the second is
    # unreachable and looks like diff residue from the `AffExpr` →
    # `AffExprType` rename; presumably only the `AffExprType` line is
    # intended. TODO confirm and delete the first.
    return VectorAffExprConstraint(map(f -> AffExpr(m, f), MOIU.eachscalar(f)), s)
    return VectorAffExprConstraint(map(f -> AffExprType(m, f), MOIU.eachscalar(f)), s)
end
13 changes: 7 additions & 6 deletions src/macros.jl
Expand Up @@ -76,7 +76,7 @@ Helper function for macros to transform expression objects containing kernel cod
3. `condition`: `Expr` that is evaluated immediately before kernel code in each iteration. If none, pass `:()`.
4. `idxvars`: Names for the index variables for each loop, e.g. `[:i, gensym(), :k]`
5. `idxsets`: Sets used to define iteration for each loop, e.g. `[1:3, [:red,:blue], S]`
6. `sym`: A `Symbol`/`Expr` containing the element type of the container that is being iterated over, e.g. `:AffExpr` or `:VariableRef`
6. `sym`: A `Symbol`/`Expr` containing the element type of the container that is being iterated over, e.g. `:GenericAffExpr` or `:VariableRef`
7. `requestedcontainer`: Argument that is passed through to `generatedcontainer`. Either `:Auto`, `:Array`, `:JuMPArray`, or `:Dict`.
8. `lowertri`: `Bool` keyword argument that is `true` if the iteration is over a cartesian array and should only iterate over the lower triangular entries, filling upper triangular entries with copies, e.g. `x[1,3] === x[3,1]`, and `false` otherwise.
"""
Expand Down Expand Up @@ -320,7 +320,8 @@ const ScalarPolyhedralSets = Union{MOI.LessThan,MOI.GreaterThan,MOI.EqualTo,MOI.
# A single variable in a scalar set builds a `SingleVariableConstraint`.
buildconstraint(_error::Function, v::AbstractVariableRef, set::MOI.AbstractScalarSet) = SingleVariableConstraint(v, set)
# A vector of variables in a vector set builds a `VectorOfVariablesConstraint`.
buildconstraint(_error::Function, v::Vector{<:AbstractVariableRef}, set::MOI.AbstractVectorSet) = VectorOfVariablesConstraint(v, set)

buildconstraint(_error::Function, α::Number, set::MOI.AbstractScalarSet) = buildconstraint(_error, convert(AffExpr, α), set)
# We cannot support a bare `Number` here: without knowing the model type there
# is no way to convert `α` into an affine expression, so report it through the
# macro's error function.
function buildconstraint(_error::Function, α::Number, set::MOI.AbstractScalarSet)
    return _error("Constraint with constant expression not depending on variables is not supported")
end
function buildconstraint(_error::Function, aff::GenericAffExpr, set::S) where S <: Union{MOI.LessThan,MOI.GreaterThan,MOI.EqualTo}
offset = aff.constant
aff.constant = 0.0
Expand Down Expand Up @@ -869,14 +870,14 @@ macro expression(args...)
if isa(c,Expr)
code = quote
$code
(isa($newaff,AffExpr) || isa($newaff,Number) || isa($newaff,VariableRef)) || error("Collection of expressions with @expression must be linear. For quadratic expressions, use your own array.")
(isa($newaff,GenericAffExpr) || isa($newaff,Number) || isa($newaff,VariableRef)) || error("Collection of expressions with @expression must be linear. For quadratic expressions, use your own array.")
end
end
code = quote
$code
$(refcall) = $newaff
end
code = getloopedcode(variable, code, condition, idxvars, idxsets, :AffExpr, requestedcontainer)
code = getloopedcode(variable, code, condition, idxvars, idxsets, :(GenericAffExpr{Float64, VariableRef{typeof(m)}}), requestedcontainer)
# don't do anything with the model, but check that it's valid anyway
return assert_validmodel(m, quote
$code
Expand Down Expand Up @@ -920,7 +921,7 @@ esc_nonconstant(x) = esc(x)
# Returns the type of what `addvariable(::Model, buildvariable(...))` would return where `...` represents the positional arguments.
# Example: `@variable m [1:3] foo` will allocate an vector of element type `variabletype(m, foo)`
# Note: it needs to be implemented by all `AbstractModel`s
variabletype(m::Model) = VariableRef
# The variable type is parametrized on the concrete model type so that a
# `VariableRef` carries (at the type level) which kind of model owns it.
variabletype(m::Model) = VariableRef{typeof(m)}
# Returns a new variable. Additional positional arguments can be used to dispatch the call to a different method.
# The return type should only depends on the positional arguments for `variabletype` to make sense. See the @variable macro doc for more details.
# Example: `@variable m x` foo will call `buildvariable(_error, info, foo)`
Expand Down Expand Up @@ -1286,7 +1287,7 @@ macro NLconstraint(m, x, extra...)
" expr1 <= expr2\n" * " expr1 >= expr2\n" *
" expr1 == expr2")
end
looped = getloopedcode(variable, code, condition, idxvars, idxsets, :(ConstraintRef{Model,NonlinearConstraintIndex}), requestedcontainer)
looped = getloopedcode(variable, code, condition, idxvars, idxsets, :(ConstraintRef{typeof($m),NonlinearConstraintIndex}), requestedcontainer)
return assert_validmodel(m, quote
initNLP($m)
$looped
Expand Down
2 changes: 1 addition & 1 deletion src/nlp.jl
Expand Up @@ -113,7 +113,7 @@ function initNLP(m::Model)
end
end

function resultdual(c::ConstraintRef{Model,NonlinearConstraintIndex})
function resultdual(c::ConstraintRef{<:Model,NonlinearConstraintIndex})
initNLP(c.m)
nldata::NLPData = c.m.nlpdata
if !MOI.canget(c.m, MOI.NLPBlockDual())
Expand Down