
Commit

Merge ddd7ed5 into 913866a
odow committed Nov 8, 2018
2 parents 913866a + ddd7ed5 commit 7c012eb
Showing 9 changed files with 68 additions and 44 deletions.
14 changes: 10 additions & 4 deletions docs/src/solvers.md
@@ -3,10 +3,11 @@ Interacting with solvers

A JuMP model keeps a [MathOptInterface (MOI)](https://github.com/JuliaOpt/MathOptInterface.jl)
*backend* of type `MOI.ModelLike` internally that stores the optimization
problem and acts as the optimization solver. We call it an MOI *backend* and
not optimizer as it can also be a wrapper around an optimization file format
such as MPS that writes the JuMP model in a file. JuMP can be viewed as a
lightweight user-friendly layer on top of the MOI backend:
problem and acts as the optimization solver. We call it an MOI *backend* and not
optimizer as it can also be a wrapper around an optimization file format such as
MPS that writes the JuMP model in a file. From JuMP, the MathOptInterface
backend can be accessed using the [`JuMP.backend`](@ref) function. JuMP can be
viewed as a lightweight user-friendly layer on top of the MOI backend:

* JuMP does not maintain any copy of the model outside this MOI backend.
* JuMP variable (resp. constraint) references are simple structures containing
@@ -86,6 +87,11 @@ function.
JuMP.direct_model
```

```@docs
JuMP.backend
```


TODO: How to set parameters (solver
specific and generic). Status codes. Accessing the result.
How to accurately measure the solve time.
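
As a quick illustration of the `JuMP.backend` accessor documented above, here is a minimal editorial sketch (not part of this commit, assuming JuMP 0.19-style syntax and no attached optimizer):

```julia
using JuMP

# In the default Automatic mode with bridge_constraints=true, the backend is a
# LazyBridgeOptimizer wrapping a CachingOptimizer; JuMP.backend returns it directly.
model = Model()
@variable(model, x >= 0)
@show typeof(JuMP.backend(model))
```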
44 changes: 31 additions & 13 deletions src/JuMP.jl
@@ -269,30 +269,48 @@ if VERSION >= v"0.7-"
end


# In Automatic and Manual mode, `model.moi_backend` is either directly the
# In Automatic and Manual mode, `backend(model)` is either directly the
# `CachingOptimizer` if `bridge_constraints=false` was passed in the constructor
# or it is a `LazyBridgeOptimizer` and the `CachingOptimizer` is stored in the
# `model` field
function caching_optimizer(model::Model)
if model.moi_backend isa MOIU.CachingOptimizer
return model.moi_backend
elseif (model.moi_backend isa
if backend(model) isa MOIU.CachingOptimizer
return backend(model)
elseif (backend(model) isa
MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer})
return model.moi_backend.model
return backend(model).model
else
error("The function `caching_optimizer` cannot be called on a model " *
"in `Direct` mode.")
end
end

"""
backend(model::Model)
Return the lower-level MathOptInterface model that sits underneath JuMP. This
model depends on which operating mode JuMP is in (manual, automatic, or direct),
and whether there are any bridges in the model.
If JuMP is in direct mode (i.e., the model was created using `direct_model`),
the backend will be the optimizer passed to `direct_model`. If JuMP is in manual
or automatic mode, the backend will either be a
[`MOI.Utilities.CachingOptimizer`](@ref) or a
[`MOI.Bridges.LazyBridgeOptimizer`](@ref).
This function should only be used by advanced users looking to access low-level
MathOptInterface or solver-specific functionality.
"""
backend(model::Model) = model.moi_backend

"""
mode(model::Model)
Return the mode (Direct, Automatic, or Manual) of `model`.
"""
function mode(model::Model)
if !(model.moi_backend isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer} ||
model.moi_backend isa MOIU.CachingOptimizer)
if !(backend(model) isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer} ||
backend(model) isa MOIU.CachingOptimizer)
return Direct
elseif caching_optimizer(model).mode == MOIU.Automatic
return Automatic
@@ -433,24 +451,24 @@ end
Return the value of the attribute `attr` from model's MOI backend.
"""
MOI.get(m::Model, attr::MOI.AbstractModelAttribute) = MOI.get(m.moi_backend, attr)
MOI.get(m::Model, attr::MOI.AbstractModelAttribute) = MOI.get(backend(m), attr)
function MOI.get(m::Model, attr::MOI.AbstractVariableAttribute, v::VariableRef)
@assert m === owner_model(v) # TODO: Improve the error message.
MOI.get(m.moi_backend, attr, index(v))
MOI.get(backend(m), attr, index(v))
end
function MOI.get(m::Model, attr::MOI.AbstractConstraintAttribute, cr::ConstraintRef)
@assert m === cr.model # TODO: Improve the error message.
MOI.get(m.moi_backend, attr, index(cr))
MOI.get(backend(m), attr, index(cr))
end

MOI.set(m::Model, attr::MOI.AbstractModelAttribute, value) = MOI.set(m.moi_backend, attr, value)
MOI.set(m::Model, attr::MOI.AbstractModelAttribute, value) = MOI.set(backend(m), attr, value)
function MOI.set(m::Model, attr::MOI.AbstractVariableAttribute, v::VariableRef, value)
@assert m === owner_model(v) # TODO: Improve the error message.
MOI.set(m.moi_backend, attr, index(v), value)
MOI.set(backend(m), attr, index(v), value)
end
function MOI.set(m::Model, attr::MOI.AbstractConstraintAttribute, cr::ConstraintRef, value)
@assert m === cr.model # TODO: Improve the error message.
MOI.set(m.moi_backend, attr, index(cr), value)
MOI.set(backend(m), attr, index(cr), value)
end

###############################################################################
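To make the `backend`, `mode`, and attribute-forwarding changes above concrete, here is a hedged editorial sketch (not part of the diff) that reuses the `MOIU.MockOptimizer` pattern from this commit's tests; any MOI optimizer would work in its place:

```julia
using JuMP, MathOptInterface
const MOI = MathOptInterface
const MOIU = MOI.Utilities

# Direct mode: the backend is exactly the optimizer passed to direct_model.
mock = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}())
direct = JuMP.direct_model(mock)
@show JuMP.backend(direct) === mock   # true
@show JuMP.mode(direct)               # Direct

# Automatic mode: model-level MOI attributes are forwarded to the backend by the
# MOI.get / MOI.set methods defined above.
model = Model()
MOI.set(model, MOI.Name(), "my_model")
@show MOI.get(model, MOI.Name())      # "my_model"
```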
12 changes: 6 additions & 6 deletions src/constraints.jl
@@ -129,7 +129,7 @@ function delete(model::Model, constraint_ref::ConstraintRef{Model})
error("The constraint reference you are trying to delete does not " *
"belong to the model.")
end
MOI.delete(model.moi_backend, index(constraint_ref))
MOI.delete(backend(model), index(constraint_ref))
end

"""
@@ -139,7 +139,7 @@ Return `true` if `constraint_ref` refers to a valid constraint in `model`.
"""
function is_valid(model::Model, constraint_ref::ConstraintRef{Model})
return (model === constraint_ref.model &&
MOI.is_valid(model.moi_backend, constraint_ref.index))
MOI.is_valid(backend(model), constraint_ref.index))
end

#############################################################################
@@ -225,15 +225,15 @@ Add a constraint `c` to `Model m` and sets its name.
function add_constraint(m::Model, c::AbstractConstraint, name::String="")
f = moi_function(c)
s = moi_set(c)
if !MOI.supports_constraint(m.moi_backend, typeof(f), typeof(s))
if m.moi_backend isa MOI.Bridges.LazyBridgeOptimizer
if !MOI.supports_constraint(backend(m), typeof(f), typeof(s))
if backend(m) isa MOI.Bridges.LazyBridgeOptimizer
bridge_message = " and there are no bridges that can reformulate it into supported constraints."
else
bridge_message = ", try using `bridge_constraints=true` in the `JuMP.Model` constructor if you believe the constraint can be reformulated to constraints supported by the solver."
end
error("Constraints of type $(typeof(f))-in-$(typeof(s)) are not supported by the solver" * bridge_message)
end
cindex = MOI.add_constraint(m.moi_backend, f, s)
cindex = MOI.add_constraint(backend(m), f, s)
cref = ConstraintRef(m, cindex, shape(c))
if !isempty(name)
set_name(cref, name)
@@ -267,7 +267,7 @@ function set_coefficient(constraint::ConstraintRef{Model, MOICON{F, S}},
variable, value) where {S, T, F <: Union{
MOI.ScalarAffineFunction{T},
MOI.ScalarQuadraticFunction{T}}}
MOI.modify(constraint.model.moi_backend, index(constraint),
MOI.modify(backend(constraint.model), index(constraint),
MOI.ScalarCoefficientChange(index(variable), convert(T, value)))
return
end
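For context, a small editorial usage sketch of the `set_coefficient` method shown above (the name defined in this commit):

```julia
using JuMP

model = Model()
@variable(model, x)
con = @constraint(model, 2x <= 1)

# Rewrites the coefficient of x in `con` via MOI.modify on the backend.
JuMP.set_coefficient(con, x, 3.0)   # constraint is now 3 x <= 1
```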
6 changes: 3 additions & 3 deletions src/copy.jl
@@ -100,14 +100,14 @@ function copy_model(model::Model)
caching_mode = caching_optimizer(model).mode
# TODO add bridges added to the bridge optimizer that are not part of the
# fullbridgeoptimizer
bridge_constraints = model.moi_backend isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer}
bridge_constraints = backend(model) isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer}
new_model = Model(caching_mode = caching_mode,
bridge_constraints = bridge_constraints)

# Copy the MOI backend, note that variable and constraint indices may have
# changed, the `index_map` gives the map between the indices of
# `model.moi_backend` and the indices of `new_model.moi_backend`.
index_map = MOI.copy_to(new_model.moi_backend, model.moi_backend,
# `backend(model)` and the indices of `backend(new_model)`.
index_map = MOI.copy_to(backend(new_model), backend(model),
copy_names = true)
# TODO copynames is needed because of https://github.com/JuliaOpt/MathOptInterface.jl/issues/494
# we can remove it when this is fixed and released
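A brief editorial sketch of `copy_model`, assuming (as the updated test/model.jl below suggests) that it returns the copied model together with a reference map from old to new references:

```julia
using JuMP

model = Model()
@variable(model, x >= 0)

# The MOI backend is copied with MOI.copy_to; the returned map translates
# references belonging to `model` into references belonging to `new_model`.
new_model, reference_map = JuMP.copy_model(model)
x_new = reference_map[x]
```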
4 changes: 2 additions & 2 deletions src/objective.jl
@@ -59,7 +59,7 @@ function set_objective_function end

function set_objective_function(model::Model, func::MOI.AbstractScalarFunction)
attr = MOI.ObjectiveFunction{typeof(func)}()
if !MOI.supports(model.moi_backend, attr)
if !MOI.supports(backend(model), attr)
error("The solver does not support an objective function of type ",
typeof(func), ".")
end
@@ -126,7 +126,7 @@ Stacktrace:
"""
function objective_function(model::Model, FunType::Type{<:AbstractJuMPScalar})
MOIFunType = moi_function_type(FunType)
func = MOI.get(model.moi_backend,
func = MOI.get(backend(model),
MOI.ObjectiveFunction{MOIFunType}())::MOIFunType
return jump_function(model, func)
end
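
An editorial sketch of the round trip through the backend that `set_objective_function` and `objective_function` above implement:

```julia
using JuMP

model = Model()
@variable(model, x)
@objective(model, Min, 2x + 1)

# The objective is stored in the backend as a ScalarAffineFunction and converted
# back to a JuMP expression when queried with a matching function type.
obj = JuMP.objective_function(model, JuMP.AffExpr)
```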
2 changes: 1 addition & 1 deletion src/optimizer_interface.jl
@@ -81,7 +81,7 @@ function optimize!(model::Model,
return model.optimize_hook(model)
end

MOI.optimize!(model.moi_backend)
MOI.optimize!(backend(model))

return
end
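
A hedged editorial sketch of the solve entry point, reusing the `MOIU.MockOptimizer` pattern from test/constraint.jl below in place of a real solver:

```julia
using JuMP, MathOptInterface
const MOI = MathOptInterface
const MOIU = MOI.Utilities

model = Model()
@variable(model, x >= 0)
@objective(model, Min, x)

# optimize! attaches the optimizer produced by the factory and then calls
# MOI.optimize! on the backend.
JuMP.optimize!(model, with_optimizer(MOIU.MockOptimizer, JuMP.JuMPMOIModel{Float64}(),
                                     eval_objective_value=false))
```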
22 changes: 11 additions & 11 deletions src/variables.jl
@@ -136,7 +136,7 @@ function delete(model::Model, variable_ref::VariableRef)
error("The variable reference you are trying to delete does not " *
"belong to the model.")
end
MOI.delete(model.moi_backend, variable_ref.index)
MOI.delete(backend(model), variable_ref.index)
end

"""
@@ -146,7 +146,7 @@ Return `true` if `variable` refers to a valid variable in `model`.
"""
function is_valid(model::Model, variable_ref::VariableRef)
return (model === owner_model(variable_ref) &&
MOI.is_valid(model.moi_backend, variable_ref.index))
MOI.is_valid(backend(model), variable_ref.index))
end

# The default hash is slow. It's important for the performance of AffExpr to
@@ -203,7 +203,7 @@ end
index(v::VariableRef) = v.index

function VariableRef(m::Model)
index = MOI.add_variable(m.moi_backend)
index = MOI.add_variable(backend(m))
return VariableRef(m, index)
end

@@ -276,10 +276,10 @@ function set_lower_bound(v::VariableRef,lower::Number)
# do we have a lower bound already?
if has_lower_bound(v)
cindex = lower_bound_index(v)
MOI.set(owner_model(v).moi_backend, MOI.ConstraintSet(), cindex, newset)
MOI.set(backend(owner_model(v)), MOI.ConstraintSet(), cindex, newset)
else
@assert !is_fixed(v)
cindex = MOI.add_constraint(owner_model(v).moi_backend,
cindex = MOI.add_constraint(backend(owner_model(v)),
MOI.SingleVariable(index(v)), newset)
set_lower_bound_index(v, cindex)
end
@@ -341,10 +341,10 @@ function set_upper_bound(v::VariableRef,upper::Number)
# do we have an upper bound already?
if has_upper_bound(v)
cindex = upper_bound_index(v)
MOI.set(owner_model(v).moi_backend, MOI.ConstraintSet(), cindex, newset)
MOI.set(backend(owner_model(v)), MOI.ConstraintSet(), cindex, newset)
else
@assert !is_fixed(v)
cindex = MOI.add_constraint(owner_model(v).moi_backend,
cindex = MOI.add_constraint(backend(owner_model(v)),
MOI.SingleVariable(index(v)), newset)
set_upper_bound_index(v, cindex)
end
@@ -402,10 +402,10 @@ function fix(v::VariableRef,upper::Number)
# are we already fixed?
if is_fixed(v)
cindex = fix_index(v)
MOI.set(owner_model(v).moi_backend, MOI.ConstraintSet(), cindex, newset)
MOI.set(backend(owner_model(v)), MOI.ConstraintSet(), cindex, newset)
else
@assert !has_upper_bound(v) && !has_lower_bound(v) # Do we want to remove these instead of throwing an error?
cindex = MOI.add_constraint(owner_model(v).moi_backend,
cindex = MOI.add_constraint(backend(owner_model(v)),
MOI.SingleVariable(index(v)), newset)
set_fix_index(v, cindex)
end
@@ -465,7 +465,7 @@ function set_integer(variable_ref::VariableRef)
error("Cannot set the variable_ref $(variable_ref) to integer as it " *
"is already binary.")
end
constraint_ref = MOI.add_constraint(owner_model(variable_ref).moi_backend,
constraint_ref = MOI.add_constraint(backend(owner_model(variable_ref)),
MOI.SingleVariable(index(variable_ref)),
MOI.Integer())
set_integer_index(variable_ref, constraint_ref)
@@ -509,7 +509,7 @@ function set_binary(variable_ref::VariableRef)
error("Cannot set the variable_ref $(variable_ref) to binary as it " *
"is already integer.")
end
constraint_ref = MOI.add_constraint(owner_model(variable_ref).moi_backend,
constraint_ref = MOI.add_constraint(backend(owner_model(variable_ref)),
MOI.SingleVariable(index(variable_ref)),
MOI.ZeroOne())
set_binary_index(variable_ref, constraint_ref)
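An editorial sketch of the variable modification functions touched above; each call adds or updates a `MOI.SingleVariable` constraint on the backend:

```julia
using JuMP

model = Model()
@variable(model, x)

JuMP.set_lower_bound(x, 0.0)   # adds SingleVariable-in-GreaterThan
JuMP.set_upper_bound(x, 2.0)   # adds SingleVariable-in-LessThan
JuMP.set_integer(x)            # adds SingleVariable-in-Integer
@show JuMP.has_lower_bound(x)  # true
```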
2 changes: 1 addition & 1 deletion test/constraint.jl
@@ -287,7 +287,7 @@ end

function test_shadow_price(model_string, constraint_dual, constraint_shadow)
model = JuMP.Model()
MOIU.loadfromstring!(model.moi_backend, model_string)
MOIU.loadfromstring!(JuMP.backend(model), model_string)
JuMP.optimize!(model, with_optimizer(MOIU.MockOptimizer,
JuMP.JuMPMOIModel{Float64}(),
eval_objective_value=false,
6 changes: 3 additions & 3 deletions test/model.jl
@@ -73,8 +73,8 @@ function test_model()
model = Model(with_optimizer(MOIU.MockOptimizer,
SimpleLPModel{Float64}()),
bridge_constraints=false)
@test model.moi_backend isa MOIU.CachingOptimizer
@test model.moi_backend === JuMP.caching_optimizer(model)
@test JuMP.backend(model) isa MOIU.CachingOptimizer
@test JuMP.backend(model) === JuMP.caching_optimizer(model)
@variable model x
@test_throws ErrorException @constraint model 0 <= x + 1 <= 1
end
@@ -150,7 +150,7 @@ function dummy_optimizer_hook(::JuMP.AbstractModel) end
reference_map[cref] = new_model[:cref]
end
@test MOIU.mode(JuMP.caching_optimizer(new_model)) == caching_mode
@test bridge_constraints == (new_model.moi_backend isa MOI.Bridges.LazyBridgeOptimizer)
@test bridge_constraints == (JuMP.backend(new_model) isa MOI.Bridges.LazyBridgeOptimizer)
@test new_model.optimize_hook === dummy_optimizer_hook
@test new_model.ext[:dummy].model === new_model
x_new = reference_map[x]
