Skip to content

Commit

Permalink
Merge pull request #1451 from JuliaOpt/bl/moiv0.6
Browse files Browse the repository at this point in the history
Updates to MOI v0.6
  • Loading branch information
mlubin committed Sep 1, 2018
2 parents 167ef4a + aa39a37 commit 4496a54
Show file tree
Hide file tree
Showing 16 changed files with 168 additions and 141 deletions.
4 changes: 2 additions & 2 deletions REQUIRE
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
julia 0.6
MathOptInterface 0.5 0.6
ForwardDiff 0.5 0.9
MathOptInterface 0.6 0.7
ForwardDiff 0.5 0.10
Calculus
DataStructures
NaNMath 0.2.1
Expand Down
57 changes: 29 additions & 28 deletions src/JuMP.jl
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,20 @@ const MOIFIX = MOICON{MOI.SingleVariable,MOI.EqualTo{Float64}}
const MOIINT = MOICON{MOI.SingleVariable,MOI.Integer}
const MOIBIN = MOICON{MOI.SingleVariable,MOI.ZeroOne}

@MOIU.model JuMPMOIModel (ZeroOne, Integer) (EqualTo, GreaterThan, LessThan, Interval) (Zeros, Nonnegatives, Nonpositives, SecondOrderCone, RotatedSecondOrderCone, GeometricMeanCone, PositiveSemidefiniteConeTriangle, PositiveSemidefiniteConeSquare, RootDetConeTriangle, RootDetConeSquare, LogDetConeTriangle, LogDetConeSquare) () (SingleVariable,) (ScalarAffineFunction,ScalarQuadraticFunction) (VectorOfVariables,) (VectorAffineFunction,)
@MOIU.model(JuMPMOIModel,
(MOI.ZeroOne, MOI.Integer),
(MOI.EqualTo, MOI.GreaterThan, MOI.LessThan, MOI.Interval),
(MOI.Zeros, MOI.Nonnegatives, MOI.Nonpositives, MOI.SecondOrderCone,
MOI.RotatedSecondOrderCone, MOI.GeometricMeanCone,
MOI.PositiveSemidefiniteConeTriangle,
MOI.PositiveSemidefiniteConeSquare,
MOI.RootDetConeTriangle, MOI.RootDetConeSquare,
MOI.LogDetConeTriangle, MOI.LogDetConeSquare),
(),
(MOI.SingleVariable,),
(MOI.ScalarAffineFunction, MOI.ScalarQuadraticFunction),
(MOI.VectorOfVariables,),
(MOI.VectorAffineFunction,))

"""
OptimizerFactory
Expand Down Expand Up @@ -123,7 +136,7 @@ end
# Model has three modes:
# 1) Automatic: moi_backend field holds a LazyBridgeOptimizer{CachingOptimizer} in Automatic mode.
# 2) Manual: moi_backend field holds a LazyBridgeOptimizer{CachingOptimizer} in Manual mode.
# 3) Direct: moi_backend field holds an AbstractOptimizer. No extra copy of the model is stored. The moi_backend must support addconstraint! etc.
# 3) Direct: moi_backend field holds an AbstractOptimizer. No extra copy of the model is stored. The moi_backend must support add_constraint etc.
# Methods to interact with the CachingOptimizer are defined in solverinterface.jl.
@enum ModelMode Automatic Manual Direct

Expand Down Expand Up @@ -231,7 +244,7 @@ in mind the following implications of creating models using this *direct* mode:
* The model created cannot be copied.
"""
function direct_model(backend::MOI.ModelLike)
@assert MOI.isempty(backend)
@assert MOI.is_empty(backend)
return Model(Dict{MOIVAR, MOILB}(),
Dict{MOIVAR, MOIUB}(),
Dict{MOIVAR, MOIFIX}(),
Expand Down Expand Up @@ -490,7 +503,7 @@ function delete(model::Model, constraint_ref::ConstraintRef{Model})
error("The constraint reference you are trying to delete does not " *
"belong to the model.")
end
MOI.delete!(model.moi_backend, index(constraint_ref))
MOI.delete(model.moi_backend, index(constraint_ref))
end

"""
Expand All @@ -500,7 +513,7 @@ Return `true` if `constraint_ref` refers to a valid constraint in `model`.
"""
function is_valid(model::Model, constraint_ref::ConstraintRef{Model})
return (model === constraint_ref.m &&
MOI.isvalid(model.moi_backend, constraint_ref.index))
MOI.is_valid(model.moi_backend, constraint_ref.index))
end

"""
Expand All @@ -510,15 +523,15 @@ Add a constraint `c` to `Model m` and set its name.
"""
function add_constraint(m::Model, c::AbstractConstraint, name::String="")
f, s = moi_function_and_set(c)
if !MOI.supportsconstraint(m.moi_backend, typeof(f), typeof(s))
if !MOI.supports_constraint(m.moi_backend, typeof(f), typeof(s))
if m.moi_backend isa MOI.Bridges.LazyBridgeOptimizer
bridge_message = " and there are no bridges that can reformulate it into supported constraints."
else
bridge_message = ", try using `bridge_constraints=true` in the `JuMP.Model` constructor if you believe the constraint can be reformulated to constraints supported by the solver."
end
error("Constraints of type $(typeof(f))-in-$(typeof(s)) are not supported by the solver" * bridge_message)
end
cindex = MOI.addconstraint!(m.moi_backend, f, s)
cindex = MOI.add_constraint(m.moi_backend, f, s)
cref = ConstraintRef(m, cindex, shape(c))
if !isempty(name)
set_name(cref, name)
Expand Down Expand Up @@ -573,8 +586,9 @@ end

index(cr::ConstraintRef) = cr.index

function has_result_dual(m::Model, REF::Type{<:ConstraintRef{Model, T}}) where {T <: MOICON}
MOI.canget(m, MOI.ConstraintDual(), REF)
function has_result_dual(model::Model,
REF::Type{<:ConstraintRef{Model, T}}) where {T <: MOICON}
MOI.get(model, MOI.DualStatus()) != MOI.NoSolution
end

"""
Expand All @@ -595,20 +609,7 @@ Get a constraint's name.
"""
name(cr::ConstraintRef{Model,<:MOICON}) = MOI.get(cr.m, MOI.ConstraintName(), cr)

set_name(cr::ConstraintRef{Model,<:MOICON}, s::String) = MOI.set!(cr.m, MOI.ConstraintName(), cr, s)

"""
canget(m::JuMP.Model, attr::MathOptInterface.AbstractModelAttribute)::Bool
Return `true` if one may query the attribute `attr` from the model's MOI backend.
false if not.
"""
MOI.canget(m::Model, attr::MOI.AbstractModelAttribute) = MOI.canget(m.moi_backend, attr)
MOI.canget(m::Model, attr::MOI.AbstractVariableAttribute, ::Type{VariableRef}) = MOI.canget(m.moi_backend, attr, MOIVAR)
function MOI.canget(model::Model, attr::MOI.AbstractConstraintAttribute,
::Type{<:ConstraintRef{Model, T}}) where {T <: MOICON}
return MOI.canget(model.moi_backend, attr, T)
end
set_name(cr::ConstraintRef{Model,<:MOICON}, s::String) = MOI.set(cr.m, MOI.ConstraintName(), cr, s)

"""
get(m::JuMP.Model, attr::MathOptInterface.AbstractModelAttribute)
Expand All @@ -625,14 +626,14 @@ function MOI.get(m::Model, attr::MOI.AbstractConstraintAttribute, cr::Constraint
MOI.get(m.moi_backend, attr, index(cr))
end

MOI.set!(m::Model, attr::MOI.AbstractModelAttribute, value) = MOI.set!(m.moi_backend, attr, value)
function MOI.set!(m::Model, attr::MOI.AbstractVariableAttribute, v::VariableRef, value)
MOI.set(m::Model, attr::MOI.AbstractModelAttribute, value) = MOI.set(m.moi_backend, attr, value)
function MOI.set(m::Model, attr::MOI.AbstractVariableAttribute, v::VariableRef, value)
@assert m === v.m
MOI.set!(m.moi_backend, attr, index(v), value)
MOI.set(m.moi_backend, attr, index(v), value)
end
function MOI.set!(m::Model, attr::MOI.AbstractConstraintAttribute, cr::ConstraintRef, value)
function MOI.set(m::Model, attr::MOI.AbstractConstraintAttribute, cr::ConstraintRef, value)
@assert m === cr.m
MOI.set!(m.moi_backend, attr, index(cr), value)
MOI.set(m.moi_backend, attr, index(cr), value)
end

###############################################################################
Expand Down
4 changes: 2 additions & 2 deletions src/affexpr.jl
Original file line number Diff line number Diff line change
Expand Up @@ -287,8 +287,8 @@ function set_objective(m::Model, sense::Symbol, a::AffExpr)
@assert sense == :Max
moisense = MOI.MaxSense
end
MOI.set!(m.moi_backend, MOI.ObjectiveSense(), moisense)
MOI.set!(m.moi_backend, MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), MOI.ScalarAffineFunction(a))
MOI.set(m.moi_backend, MOI.ObjectiveSense(), moisense)
MOI.set(m.moi_backend, MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), MOI.ScalarAffineFunction(a))
nothing
end

Expand Down
4 changes: 2 additions & 2 deletions src/copy.jl
Original file line number Diff line number Diff line change
Expand Up @@ -107,8 +107,8 @@ function copy_model(model::Model)
# Copy the MOI backend, note that variable and constraint indices may have
# changed, the `index_map` gives the map between the indices of
# `model.moi_backend` and the indices of `new_model.moi_backend`.
index_map = MOI.copy!(new_model.moi_backend, model.moi_backend,
copynames = true)
index_map = MOI.copy_to(new_model.moi_backend, model.moi_backend,
copy_names = true)
# TODO copy_names is needed because of https://github.com/JuliaOpt/MathOptInterface.jl/issues/494
# we can remove it when this is fixed and released

Expand Down
2 changes: 1 addition & 1 deletion src/macros.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1301,7 +1301,7 @@ macro variable(args...)
addkwargs!(buildcall, extra_kwargs)
variablecall = :( add_variable($model, $buildcall, $(namecall(basename, idxvars))) )
code = :( $(refcall) = $variablecall )
# Determine the return type of addvariable. This is needed to create the container holding them.
# Determine the return type of add_variable. This is needed to create the container holding them.
vartype = :( variabletype($model, $(extra...)) )

if symmetric
Expand Down
7 changes: 2 additions & 5 deletions src/nlp.jl
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ function set_objective(m::Model, sense::Symbol, ex::NonlinearExprData)
@assert sense == :Max
moisense = MOI.MaxSense
end
MOI.set!(m.moi_backend, MOI.ObjectiveSense(), moisense)
MOI.set(m.moi_backend, MOI.ObjectiveSense(), moisense)
m.nlp_data.nlobj = ex
# TODO: what do we do about existing objectives in the MOI backend?
return
Expand Down Expand Up @@ -120,9 +120,6 @@ end
function result_dual(c::ConstraintRef{Model,NonlinearConstraintIndex})
initNLP(c.m)
nldata::NLPData = c.m.nlp_data
if !MOI.canget(c.m, MOI.NLPBlockDual())
error("Duals not available.")
end
# The array is cleared on every solve.
if length(nldata.nlconstr_duals) != length(nldata.nlconstr)
nldata.nlconstr_duals = MOI.get(c.m, MOI.NLPBlockDual())
Expand Down Expand Up @@ -269,7 +266,7 @@ function SubexpressionStorage(nd::Vector{NodeData}, const_values, num_variables,

end

function MOI.initialize!(d::NLPEvaluator, requested_features::Vector{Symbol})
function MOI.initialize(d::NLPEvaluator, requested_features::Vector{Symbol})
nldata::NLPData = d.m.nlp_data

# Check if we have any user-defined operators, in which case we need to
Expand Down
2 changes: 1 addition & 1 deletion src/optimizerinterface.jl
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ function optimize!(model::Model,
# The nlp_data is not kept in sync, so re-set it here.
# TODO: Consider how to handle incremental solves.
if model.nlp_data !== nothing
MOI.set!(model, MOI.NLPBlock(), create_nlp_block_data(model))
MOI.set(model, MOI.NLPBlock(), create_nlp_block_data(model))
empty!(model.nlp_data.nlconstr_duals)
end

Expand Down
2 changes: 1 addition & 1 deletion src/parseexpr.jl
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ function destructive_add!(ex::GenericQuadExpr{C,V}, c::GenericAffExpr{C,V}, x::G
destructive_add!(ex, 1.0, c*x)
end

# Catch nonlinear expressions and parameters being used in addconstraint, etc.
# Catch nonlinear expressions and parameters being used in add_constraint, etc.

const _NLExpr = Union{NonlinearExpression,NonlinearParameter}
_nlexprerr() = error("""Cannot use nonlinear expression or parameter in @constraint or @objective.
Expand Down
4 changes: 2 additions & 2 deletions src/quadexpr.jl
Original file line number Diff line number Diff line change
Expand Up @@ -210,8 +210,8 @@ function set_objective(m::Model, sense::Symbol, a::QuadExpr)
@assert sense == :Max
moisense = MOI.MaxSense
end
MOI.set!(m.moi_backend, MOI.ObjectiveSense(), moisense)
MOI.set!(m.moi_backend, MOI.ObjectiveFunction{MOI.ScalarQuadraticFunction{Float64}}(), MOI.ScalarQuadraticFunction(a))
MOI.set(m.moi_backend, MOI.ObjectiveSense(), moisense)
MOI.set(m.moi_backend, MOI.ObjectiveFunction{MOI.ScalarQuadraticFunction{Float64}}(), MOI.ScalarQuadraticFunction(a))
nothing
end

Expand Down
34 changes: 18 additions & 16 deletions src/variables.jl
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ function delete(model::Model, variable_ref::VariableRef)
error("The variable reference you are trying to delete does not " *
"belong to the model.")
end
MOI.delete!(model.moi_backend, variable_ref.index)
MOI.delete(model.moi_backend, variable_ref.index)
end

"""
Expand All @@ -131,7 +131,7 @@ Return `true` if `variable` refers to a valid variable in `model`.
"""
function is_valid(model::Model, variable_ref::VariableRef)
return (model === variable_ref.m &&
MOI.isvalid(model.moi_backend, variable_ref.index))
MOI.is_valid(model.moi_backend, variable_ref.index))
end

# The default hash is slow. It's important for the performance of AffExpr to
Expand Down Expand Up @@ -182,7 +182,7 @@ Base.haskey(vm::VariableToValueMap, v::VariableRef) = (vm.m === v.m) && haskey(v
index(v::VariableRef) = v.index

function VariableRef(m::Model)
index = MOI.addvariable!(m.moi_backend)
index = MOI.add_variable(m.moi_backend)
return VariableRef(m, index)
end

Expand All @@ -200,7 +200,7 @@ name(v::VariableRef) = MOI.get(v.m, MOI.VariableName(), v)
Set a variable's name.
"""
set_name(v::VariableRef, s::String) = MOI.set!(v.m, MOI.VariableName(), v, s)
set_name(v::VariableRef, s::String) = MOI.set(v.m, MOI.VariableName(), v, s)

MOI.SingleVariable(v::VariableRef) = MOI.SingleVariable(index(v))

Expand All @@ -217,8 +217,8 @@ function set_objective(m::Model, sense::Symbol, x::VariableRef)
@assert sense == :Max
moisense = MOI.MaxSense
end
MOI.set!(m.moi_backend, MOI.ObjectiveSense(), moisense)
MOI.set!(m.moi_backend, MOI.ObjectiveFunction{MOI.SingleVariable}(),
MOI.set(m.moi_backend, MOI.ObjectiveSense(), moisense)
MOI.set(m.moi_backend, MOI.ObjectiveFunction{MOI.SingleVariable}(),
MOI.SingleVariable(x))
end

Expand Down Expand Up @@ -297,10 +297,10 @@ function set_lower_bound(v::VariableRef,lower::Number)
# do we have a lower bound already?
if has_lower_bound(v)
cindex = lower_bound_index(v)
MOI.set!(v.m.moi_backend, MOI.ConstraintSet(), cindex, newset)
MOI.set(v.m.moi_backend, MOI.ConstraintSet(), cindex, newset)
else
@assert !is_fixed(v)
cindex = MOI.addconstraint!(v.m.moi_backend, MOI.SingleVariable(index(v)), newset)
cindex = MOI.add_constraint(v.m.moi_backend, MOI.SingleVariable(index(v)), newset)
set_lower_bound_index(v, cindex)
end
nothing
Expand Down Expand Up @@ -354,10 +354,10 @@ function set_upper_bound(v::VariableRef,upper::Number)
# do we have an upper bound already?
if has_upper_bound(v)
cindex = upper_bound_index(v)
MOI.set!(v.m.moi_backend, MOI.ConstraintSet(), cindex, newset)
MOI.set(v.m.moi_backend, MOI.ConstraintSet(), cindex, newset)
else
@assert !is_fixed(v)
cindex = MOI.addconstraint!(v.m.moi_backend, MOI.SingleVariable(index(v)), newset)
cindex = MOI.add_constraint(v.m.moi_backend, MOI.SingleVariable(index(v)), newset)
set_upper_bound_index(v, cindex)
end
nothing
Expand Down Expand Up @@ -411,10 +411,10 @@ function fix(v::VariableRef,upper::Number)
# are we already fixed?
if is_fixed(v)
cindex = fix_index(v)
MOI.set!(v.m.moi_backend, MOI.ConstraintSet(), cindex, newset)
MOI.set(v.m.moi_backend, MOI.ConstraintSet(), cindex, newset)
else
@assert !has_upper_bound(v) && !has_lower_bound(v) # Do we want to remove these instead of throwing an error?
cindex = MOI.addconstraint!(v.m.moi_backend, MOI.SingleVariable(index(v)), newset)
cindex = MOI.add_constraint(v.m.moi_backend, MOI.SingleVariable(index(v)), newset)
set_fix_index(v, cindex)
end
nothing
Expand Down Expand Up @@ -470,7 +470,7 @@ function set_integer(variable_ref::VariableRef)
error("Cannot set the variable_ref $(variable_ref) to integer as it " *
"is already binary.")
end
constraint_ref = MOI.addconstraint!(variable_ref.m.moi_backend,
constraint_ref = MOI.add_constraint(variable_ref.m.moi_backend,
MOI.SingleVariable(index(variable_ref)),
MOI.Integer())
set_integer_index(variable_ref, constraint_ref)
Expand Down Expand Up @@ -510,7 +510,7 @@ function set_binary(variable_ref::VariableRef)
error("Cannot set the variable_ref $(variable_ref) to binary as it " *
"is already integer.")
end
constraint_ref = MOI.addconstraint!(variable_ref.m.moi_backend,
constraint_ref = MOI.add_constraint(variable_ref.m.moi_backend,
MOI.SingleVariable(index(variable_ref)),
MOI.ZeroOne())
set_binary_index(variable_ref, constraint_ref)
Expand All @@ -530,7 +530,7 @@ end


start_value(v::VariableRef) = MOI.get(v.m, MOI.VariablePrimalStart(), v)
set_start_value(v::VariableRef, val::Number) = MOI.set!(v.m, MOI.VariablePrimalStart(), v, val)
set_start_value(v::VariableRef, val::Number) = MOI.set(v.m, MOI.VariablePrimalStart(), v, val)

"""
result_value(v::VariableRef)
Expand All @@ -540,7 +540,9 @@ Use `has_result_values` to check if a result exists before asking for values.
Replaces `getvalue` for most use cases.
"""
result_value(v::VariableRef) = MOI.get(v.m, MOI.VariablePrimal(), v)
has_result_values(m::Model) = MOI.canget(m, MOI.VariablePrimal(), VariableRef)
function has_result_values(model::Model)
return MOI.get(model, MOI.PrimalStatus()) != MOI.NoSolution
end

@Base.deprecate setvalue(v::VariableRef, val::Number) set_start_value(v, val)

Expand Down
10 changes: 8 additions & 2 deletions test/JuMPExtension.jl
Original file line number Diff line number Diff line change
Expand Up @@ -152,8 +152,14 @@ function JuMP.set_objective(m::MyModel, sense::Symbol, f::JuMP.AbstractJuMPScala
end
JuMP.objective_sense(m::MyModel) = m.objectivesense
function JuMP.objective_function(m::MyModel, FT::Type)
# ErrorException should be thrown, this is needed in `objective.jl`
m.objective_function isa FT || error("The objective function is not of type $FT")
# InexactError should be thrown, this is needed in `objective.jl`
if !(m.objective_function isa FT)
if VERSION < v"0.7-"
throw(InexactError())
else
throw(InexactError(:objective_function, FT, typeof(m.objective_function)))
end
end
m.objective_function
end

Expand Down
Loading

0 comments on commit 4496a54

Please sign in to comment.