Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

MOI to MatOI #7

Merged
merged 18 commits into from Nov 2, 2020
3 changes: 3 additions & 0 deletions Project.toml
Expand Up @@ -4,8 +4,11 @@ authors = ["JuMP-dev"]
version = "0.1.0"

[deps]
COSMO = "1e616198-aa4e-51ec-90a2-23f7fbd31d8d"
akshay326 marked this conversation as resolved.
Show resolved Hide resolved
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
ProxSDP = "65e78d25-6039-50a4-9445-38022e3d2eb3"
SCS = "c946c3f1-0d1f-5ce8-9dea-7daa1f7e2d13"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

Expand Down
5 changes: 5 additions & 0 deletions src/MatrixOptInterface.jl
Expand Up @@ -5,6 +5,9 @@ using MathOptInterface
const MOI = MathOptInterface
const MOIU = MathOptInterface.Utilities

const CI = MOI.ConstraintIndex
const VI = MOI.VariableIndex

# export MatrixOptimizer

@enum ConstraintSense EQUAL_TO GREATER_THAN LESS_THAN INTERVAL
Expand Down Expand Up @@ -57,6 +60,8 @@ end

@enum VariableType CONTINUOUS INTEGER BINARY

include("sparse_matrix.jl")
include("conic_form.jl")
include("matrix_input.jl")
include("change_form.jl")

Expand Down
263 changes: 263 additions & 0 deletions src/conic_form.jl
@@ -0,0 +1,263 @@
"""
    ConicForm{T, AT, VT, C} <: MOI.ModelLike

Matrix representation of a conic program of the form

    sense ⟨c, x⟩ + objective_constant
    s.t.  b - Ax ∈ cones

The matrix `A` is built incrementally: it is `nothing` until
`_allocate_variables` installs it, which is also how `MOI.is_empty`
detects an empty model. `b` and `c` are only allocated by
`_load_variables`, so they are undefined before a copy.
"""
mutable struct ConicForm{T, AT, VT, C} <: MOI.ModelLike
num_rows::Vector{Int} # rows loaded so far for each cone in `cone_types` (same order)
dimension::Dict{Int, Int} # constraint row offset => number of rows of that constraint
sense::MOI.OptimizationSense
objective_constant::T # The objective
A::Union{Nothing, AT} # The constraints
b::VT # `b - Ax in cones`
c::VT # `sense c'x + objective_constant`
cone_types::C

# `sense`, `objective_constant`, `b` and `c` are left unassigned here;
# `MOI.empty!` / `MOI.copy_to` populate them.
function ConicForm{T, AT, VT}(cone_types) where {T, AT, VT}
model = new{T, AT, VT, typeof(cone_types)}()
model.cone_types = cone_types
model.num_rows = zeros(Int, length(cone_types))
model.dimension = Dict{Int, Int}()
model.A = nothing
return model
end
end

# Priority of each supported cone type; constraints are sorted by this value
# before building the conic form. The ordering of CONES matches SCS
const CONES = Dict(
MOI.Zeros => 1,
MOI.Nonnegatives => 2,
MOI.SecondOrderCone => 3,
MOI.PositiveSemidefiniteConeTriangle => 4,
MOI.ExponentialCone => 5,
MOI.DualExponentialCone => 6
)

# Extract the set type `S` from a `ConstraintIndex{F, S}`.
_set_type(::MOI.ConstraintIndex{F,S}) where {F,S} = S

# Number of rows a constraint in the given cone occupies in the stacked
# `b - Ax ∈ cones` representation.
cons_offset(conic::MOI.Zeros) = conic.dimension
cons_offset(conic::MOI.Nonnegatives) = conic.dimension
cons_offset(conic::MOI.SecondOrderCone) = conic.dimension
# `n * (n + 1)` is always even, so use exact integer division instead of the
# original `Int64((n*(n+1))/2)`, which went through `Float64` (needless
# rounding risk for large dimensions). Consistent with `sympackedlen`.
function cons_offset(conic::MOI.PositiveSemidefiniteConeTriangle)
    n = conic.side_dimension
    return div(n * (n + 1), 2)
end
# The exponential cone and its dual are fixed three-dimensional sets in MOI;
# define offsets for them so that every set listed in `CONES` is covered.
cons_offset(::MOI.ExponentialCone) = 3
cons_offset(::MOI.DualExponentialCone) = 3

"""
    get_conic_form(::Type{T}, model, con_idx) where {T}

Build a `ConicForm` over the constraints `con_idx` of `model`. Constraints are
ordered by the SCS cone ordering in `CONES`, and the objective vector is
negated for maximization models so the result is always a minimization.
"""
function get_conic_form(::Type{T}, model::M, con_idx) where {T, M <: MOI.AbstractOptimizer}
    # Order the constraint indices following the cone priorities in `CONES`.
    sorted_cis = sort(con_idx, by = ci -> CONES[_set_type(ci)])

    # Distinct cone types, in first-appearance (i.e. priority) order.
    cone_set = unique(_set_type.(sorted_cis))

    conic = ConicForm{T, SparseMatrixCSRtoCSC{Int64}, Vector{T}}(Tuple(cone_set))
    MOI.copy_to(conic, model)

    # Normalize to a minimization problem by flipping the objective vector.
    if MOI.get(model, MOI.ObjectiveSense()) == MOI.MAX_SENSE
        conic.c = -conic.c
    end

    return conic
end

# The model is empty exactly when no constraint matrix has been installed yet.
MOI.is_empty(model::ConicForm) = model.A === nothing

# Reset the model to its freshly-constructed state (cone types are kept).
function MOI.empty!(model::ConicForm{T}) where T
    model.A = nothing
    model.sense = MOI.FEASIBILITY_SENSE
    fill!(model.num_rows, 0)
    empty!(model.dimension)
    model.objective_constant = zero(T)
end

# Type-domain recursion: peel off the leading set type until `S` is found,
# carrying the current 1-based position along.
function _first(::Type{S}, position::Int, ::Type{S}, rest::Vararg{Type, N}) where {S, N}
    return position
end
function _first(::Type{S}, position::Int, ::Type, rest::Vararg{Type, N}) where {S, N}
    return _first(S, position + 1, rest...)
end
# All candidates exhausted without a match.
_first(::Type, position::Int) = nothing

# 1-based index of the first occurrence of type `S` in `sets`, or `nothing`.
_findfirst(::Type{S}, sets::Tuple) where {S} = _first(S, 1, sets...)

# A constraint is supported iff it is `VectorAffineFunction{Float64}`-in-`S`
# where `S` is one of the cone types this `ConicForm` was created with.
function MOI.supports_constraint(
model::ConicForm,
::Type{MOI.VectorAffineFunction{Float64}},
::Type{S}) where S <: MOI.AbstractVectorSet
return _findfirst(S, model.cone_types) !== nothing
end

# Install the (still empty) constraint matrix with one column per source
# variable and map each source variable index to its 1-based column position.
function _allocate_variables(model::ConicForm{T, AT, VT}, vis_src, idxmap) where {T, AT, VT}
model.A = AT(length(vis_src))
for (i, vi) in enumerate(vis_src)
idxmap[vi] = MOI.VariableIndex(i)
end
return
end

# Rows of `A`/`b` occupied by constraint `ci`: its index `value` is its global
# row offset (see `_load_constraints`) and `dimension` stores its row count.
function rows(model::ConicForm, ci::CI{MOI.VectorAffineFunction{Float64}})
return ci.value .+ (1:model.dimension[ci.value])
end

# Setting a variable start value to `nothing` is a deliberate no-op.
function MOI.set(::ConicForm, ::MOI.VariablePrimalStart,
::MOI.VariableIndex, ::Nothing)
end
# NOTE(review): `ConicForm` declares no `primal` field, so this method would
# throw on use — it looks copied from a solver wrapper; confirm intent.
function MOI.set(model::ConicForm, ::MOI.VariablePrimalStart,
vi::MOI.VariableIndex, value::Float64)
model.primal[vi.value] = value
end
# Setting a constraint primal start to `nothing` is a deliberate no-op.
function MOI.set(::ConicForm, ::MOI.ConstraintPrimalStart,
::MOI.ConstraintIndex, ::Nothing)
end
# NOTE(review): `constroffset` is not defined in this file and `offset` is
# never used; `ConicForm` also has no `slack` field. As written this method
# cannot run — likely dead code copied from a solver wrapper; confirm/remove.
function MOI.set(model::ConicForm, ::MOI.ConstraintPrimalStart,
ci::MOI.ConstraintIndex, value)
offset = constroffset(model, ci)
model.slack[rows(model, ci)] .= value
end
# Setting a constraint dual start to `nothing` is a deliberate no-op.
function MOI.set(::ConicForm, ::MOI.ConstraintDualStart,
::MOI.ConstraintIndex, ::Nothing)
end
# NOTE(review): same issue as above — `constroffset` is undefined and no
# `dual` field exists on `ConicForm`; confirm/remove.
function MOI.set(model::ConicForm, ::MOI.ConstraintDualStart,
ci::MOI.ConstraintIndex, value)
offset = constroffset(model, ci)
model.dual[rows(model, ci)] .= value
end
# Record the optimization sense (MIN/MAX/FEASIBILITY) of the source model.
function MOI.set(model::ConicForm, ::MOI.ObjectiveSense, sense::MOI.OptimizationSense)
model.sense = sense
end
# 1-based column index of the variable appearing in an affine term.
variable_index_value(t::MOI.ScalarAffineTerm) = t.variable_index.value
variable_index_value(t::MOI.VectorAffineTerm) = variable_index_value(t.scalar_term)
# Store the objective as a dense vector `c` of length `A.n` plus a constant.
# Duplicate terms for the same variable are summed by `sparsevec`.
function MOI.set(model::ConicForm, ::MOI.ObjectiveFunction,
f::MOI.ScalarAffineFunction{Float64})
c = Vector(sparsevec(variable_index_value.(f.terms), MOI.coefficient.(f.terms),
model.A.n))
model.objective_constant = f.constant
model.c = c
return nothing
end

# First pass for one constraint: canonicalize its function, count its terms
# into the matrix's per-column counters, and reserve its rows inside cone
# `cone_id`. Returns `(ci, offset_in_cone, func)` for the load pass.
function _allocate_constraint(model::ConicForm, src, indexmap, cone_id, ci)
# TODO use `CanonicalConstraintFunction`
func = MOI.get(src, MOI.ConstraintFunction(), ci)
func = MOIU.is_canonical(func) ? func : MOI.Utilities.canonical(func)
allocate_terms(model.A, indexmap, func)
# Reserve this constraint's rows within its cone block.
offset = model.num_rows[cone_id]
model.num_rows[cone_id] = offset + MOI.output_dimension(func)
return ci, offset, func
end

# Run the allocate pass over every `VectorAffineFunction`-in-`S` constraint of
# `src`, returning the per-constraint caches from `_allocate_constraint`.
function _allocate_constraints(model::ConicForm, src, indexmap, cone_id, ::Type{S}) where S
cis = MOI.get(src, MOI.ListOfConstraintIndices{MOI.VectorAffineFunction{Float64}, S}())
return map(cis) do ci
_allocate_constraint(model, src, indexmap, cone_id, ci)
end
end

# Size the matrix and allocate `b`, `c` and the nonzero storage once every
# constraint has been counted by the allocate pass.
# NOTE(review): `nvars` is unused — the column count comes from `model.A.n`,
# which `_allocate_variables` already set from the same source.
function _load_variables(model::ConicForm, nvars::Integer)
m = sum(model.num_rows)
model.A.m = m
model.b = zeros(m)
model.c = zeros(model.A.n)
allocate_nonzeros(model.A)
end

# Second pass: write each cached constraint's terms and constants at its final
# global row offset, and record the source-to-dest constraint index mapping.
function _load_constraints(model::ConicForm, src, indexmap, cone_offset, i, cache, preprocess)
for (ci_src, offset_in_cone, func) in cache
offset = cone_offset + offset_in_cone
set = MOI.get(src, MOI.ConstraintSet(), ci_src)
# `preprocess` may reorder/rescale rows (e.g. PSD triangle scaling).
new_func = preprocess(func, set)
load_terms(model.A, indexmap, new_func, offset)
copyto!(model.b, offset + 1, new_func.constants)
model.dimension[offset] = MOI.output_dimension(func)
# The dest constraint index's `value` is its global row offset (see `rows`).
indexmap[ci_src] = typeof(ci_src)(offset)
end
end

# Number of entries in the packed triangle of a symmetric n × n matrix.
sympackedlen(n) = div(n * (n + 1), 2)

# Side dimension of the symmetric matrix whose packed triangle has n entries.
sympackeddim(n) = div(isqrt(1 + 8n) - 1, 2)

# Permute the packed vector `x` of a symmetric n × n matrix from the layout
# addressed by `mapfrom(i, j)` into the layout addressed by `mapto(i, j)`.
function _sympackedto(x, n, mapfrom, mapto)
    @assert length(x) == sympackedlen(n)
    out = similar(x)
    for row in 1:n, col in 1:row
        out[mapto(row, col)] = x[mapfrom(row, col)]
    end
    return out
end

# Position of (i, j) in row-wise lower-triangle packing (equivalently,
# column-wise upper-triangle packing, as used by MOI's PSD triangle).
trimap(i::Integer, j::Integer) = i < j ? trimap(j, i) : div((i - 1) * i, 2) + j

# Position of (i, j) in column-wise lower-triangle (SCS-style) packing.
trimapL(i::Integer, j::Integer, n::Integer) = i < j ? trimapL(j, i, n) : i + div((2n - j) * (j - 1), 2)

# Convert a packed vector between the two layouts.
sympackedLtoU(x, n = sympackeddim(length(x))) = _sympackedto(x, n, (i, j) -> trimapL(i, j, n), trimap)
sympackedUtoL(x, n) = _sympackedto(x, n, trimap, (i, j) -> trimapL(i, j, n))
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why are these here? This is specific to SCS needing to scale off diagonal entries, do you need this too in DiffOpt?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

sorry for the late response.
yes, we need at least this part to scale diagonal entries for a PSD cone. my only source for cross-checking the actual values was https://github.com/cvxgrp/diffcp, which accepts/returns matrices in SCS format

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It doesn't make much sense to do it by default for the conic form. I would suggest DiffOpt to use something like
https://github.com/jump-dev/SCS.jl/blob/c1e3a8d5469e9f289dd0b3cdd87245792d266317/src/MOI_wrapper.jl#L147
In the future, if the scaled psd cone is brought back then SCS and DiffOpt could use this without any preprocessing function.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

See jump-dev/MathOptInterface.jl#531 for the scaled psd cone

Copy link
Contributor Author

@akshay326 akshay326 Oct 15, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ok got it now. will need to correct the tests accordingly!

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

should I check the presence of SCS/Mosek solver and edit the matrices accordingly - so that for every solver matrix A is same

I'm not sure I understand what you mean

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

forgot to edit the comment 😅 . saw the conic solver info at https://github.com/cvxgrp/cvxpy/tree/master/cvxpy/reductions/solvers/conic_solvers and understood this trimap thing is specific to SCS and shouldn't be a part of MatOI

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@blegat is this PR good to go? or is there anything specific to SCS that I'm missing


# Scale the coefficient at vectorized triangle index `i`: off-diagonal entries
# are multiplied by √2, diagonal entries are left untouched.
_scale(i, coef) = MOI.Utilities.is_diagonal_vectorized_index(i) ? coef : coef * √2

# Preprocess a PSD-triangle constraint: remap its rows between the two packed
# triangle orderings (via `sympackedLtoU`/`sympackedUtoL`) and apply the √2
# off-diagonal scaling of `_scale`, matching the SCS data format.
# NOTE(review): per the PR discussion this is SCS-specific and may belong in
# the solver wrapper rather than being the default `preprocess`.
function _preprocess_function(func, set::MOI.PositiveSemidefiniteConeTriangle)
n = set.side_dimension
LtoU_map = sympackedLtoU(1:sympackedlen(n), n)
# Remap a term's output row and scale its coefficient. The scaling index is
# the ORIGINAL `output_index`, matching `constant` below.
function map_term(t::MOI.VectorAffineTerm)
return MOI.VectorAffineTerm(
LtoU_map[t.output_index],
MOI.ScalarAffineTerm(
_scale(t.output_index, t.scalar_term.coefficient),
t.scalar_term.variable_index
)
)
end
UtoL_map = sympackedUtoL(1:sympackedlen(n), n)
# Constant for output row `row`: fetch (and scale) the original constant at
# the inverse-mapped position, so constants follow the same reordering.
function constant(row)
i = UtoL_map[row]
return _scale(i, func.constants[i])
end
new_func = MOI.VectorAffineFunction{Float64}(
MOI.VectorAffineTerm{Float64}[map_term(t) for t in func.terms],
constant.(eachindex(func.constants))
)
# The rows have been reordered in `map_term` so we need to re-canonicalize to reorder the rows.
MOI.Utilities.canonicalize!(new_func)
return new_func
end
# All other sets pass through unchanged.
_preprocess_function(func, set) = func

"""
    MOI.copy_to(dest::ConicForm, src::MOI.ModelLike; preprocess = _preprocess_function, copy_names::Bool = true)

Copy `src` into the matrix conic form `dest` with a two-pass
(allocate-then-load) scheme and return the resulting `MOIU.IndexMap`.
Throws `MOI.UnsupportedConstraint` if `src` has a constraint that is not
`VectorAffineFunction{Float64}`-in-one-of-`dest.cone_types`.
"""
function MOI.copy_to(dest::ConicForm, src::MOI.ModelLike; preprocess = _preprocess_function, copy_names::Bool=true)
MOI.empty!(dest)

vis_src = MOI.get(src, MOI.ListOfVariableIndices())
idxmap = MOIU.IndexMap()

# Validate every (F, S) pair and remember which cone slots are used.
has_constraints = BitSet()
for (F, S) in MOI.get(src, MOI.ListOfConstraints())
i = _findfirst(S, dest.cone_types)
if i === nothing || F != MOI.VectorAffineFunction{Float64}
throw(MOI.UnsupportedConstraint{F, S}())
end
push!(has_constraints, i)
end

_allocate_variables(dest, vis_src, idxmap)

# Allocate constraints (first pass: count rows and nonzeros per column)
caches = map(collect(has_constraints)) do i
_allocate_constraints(dest, src, idxmap, i, dest.cone_types[i])
end

# Load variables (sizes `A`, `b`, `c` now that the row counts are known)
_load_variables(dest, length(vis_src))

# Set variable attributes
MOIU.pass_attributes(dest, src, copy_names, idxmap, vis_src)

# Set model attributes
MOIU.pass_attributes(dest, src, copy_names, idxmap)

# Load constraints (second pass). Cone blocks are stacked in ascending
# `BitSet` order, each cone's constraints at consecutive row offsets.
offset = 0
for (i, cache) in zip(has_constraints, caches)
_load_constraints(dest, src, idxmap, offset, i, cache, preprocess)
offset += dest.num_rows[i]
end

return idxmap
end
44 changes: 44 additions & 0 deletions src/sparse_matrix.jl
@@ -0,0 +1,44 @@
# Builds a CSC (compressed sparse column) matrix incrementally from terms that
# arrive in row-major (CSR-like) order: count entries per column
# (`allocate_terms`), convert counts to cursors and allocate storage
# (`allocate_nonzeros`), fill (`load_terms`), then restore the standard
# 0-based column pointers (`final_touch`).
mutable struct SparseMatrixCSRtoCSC{T}
m::Int # Number of rows
n::Int # Number of columns
colptr::Vector{T}
rowval::Vector{T}
nzval::Vector{Float64}
# `m`, `rowval` and `nzval` stay unassigned until the load phase.
function SparseMatrixCSRtoCSC{T}(n) where T
A = new()
A.n = n
A.colptr = zeros(T, n + 1)
return A
end
end
# Turn the per-column counts stored at `colptr[col + 1]` into cumulative write
# cursors and allocate the entry storage. Afterwards `colptr[j]` is the number
# of entries strictly before column `j`, which `_load_terms` uses as a running
# 0-based cursor, and `colptr[end]` is the total number of nonzeros.
function allocate_nonzeros(A::SparseMatrixCSRtoCSC{T}) where T
# Prefix sum starting at index 3: `colptr[1]` stays 0 and `colptr[2]`
# (column 1's count) is already correct as a cumulative value.
for i in 3:length(A.colptr)
A.colptr[i] += A.colptr[i - 1]
end
A.rowval = Vector{T}(undef, A.colptr[end])
A.nzval = Vector{Float64}(undef, A.colptr[end])
end
# After loading, each `colptr[j]` has been advanced to the end of column `j`'s
# entries. Shift everything one slot right so `colptr[j]` again holds the
# number of entries before column `j` — the standard 0-based CSC convention.
function final_touch(A::SparseMatrixCSRtoCSC)
for i in length(A.colptr):-1:2
A.colptr[i] = A.colptr[i - 1]
end
A.colptr[1] = 0
end
# Count how many terms fall in each column: bump `colptr[col + 1]` for every
# term so a later prefix sum (`allocate_nonzeros`) can turn counts into
# column cursors. `indexmap` translates source variables to column indices.
function _allocate_terms(colptr, indexmap, terms)
    for t in terms
        col = indexmap[t.scalar_term.variable_index].value
        colptr[col + 1] += 1
    end
end

function allocate_terms(A::SparseMatrixCSRtoCSC, indexmap, func)
    return _allocate_terms(A.colptr, indexmap, func.terms)
end
# Write each term into the CSC storage, using `colptr[col]` as a running write
# cursor (pre-incremented, so cursors end at each column's last slot).
# Row indices are 0-based (`offset + output_index - 1`) and coefficients are
# negated: the stored matrix satisfies `b - Ax ∈ cones` while MOI provides
# functions in `Ax + b` form.
function _load_terms(colptr, rowval, nzval, indexmap, terms, offset)
    for t in terms
        cursor = colptr[indexmap[t.scalar_term.variable_index].value] += 1
        rowval[cursor] = offset + t.output_index - 1
        nzval[cursor] = -t.scalar_term.coefficient
    end
end

function load_terms(A::SparseMatrixCSRtoCSC, indexmap, func, offset)
    return _load_terms(A.colptr, A.rowval, A.nzval, indexmap, func.terms, offset)
end