Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
80 changes: 80 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,3 +56,83 @@ List of supported constraint types:
List of supported model attributes:

* [`MOI.ObjectiveSense()`](@ref)

## Attributes

The algorithm is parametrized by attributes that can be used with both `JuMP.set_attributes` and `JuMP.get_attributes`
and have the following types and default values:
```julia
rho_f::Cdouble = 1.0e-5
rho_c::Cdouble = 1.0e-1
sigmafac::Cdouble = 2.0
rankreduce::Csize_t = 0
timelim::Csize_t = 3600
printlevel::Csize_t = 1
dthresh_dim::Csize_t = 10
dthresh_dens::Cdouble = 0.75
numbfgsvecs::Csize_t = 4
rankredtol::Cdouble = 2.2204460492503131e-16
gaptol::Cdouble = 1.0e-3
checkbd::Cptrdiff_t = -1
typebd::Cptrdiff_t = 1
maxrank::Function = default_maxrank
```

The following attributes can also be used with both `JuMP.set_attributes` and `JuMP.get_attributes`, but they are additionally
modified by `optimize!`:
* `majiter`
* `iter`
* `lambdaupdate`
* `totaltime`
* `sigma`

When they are `set`, it provides the initial value of the algorithm.
With `get`, they provide the value at the end of the algorithm.
`totaltime` is the total time in seconds. For the other attributes,
their meaning is best described by the following pseudo-code.

Given values of `R`, `lambda` and `sigma`, let
`vio = [dot(A[i], R * R') - b[i] for i in 1:m]` (`vio[0]` is `dot(C, R * R')` in the C implementation, but we ignore this entry here),
`val = dot(C, R * R') - dot(vio, lambda) + sigma/2 * norm(vio)^2`,
`y = -lambda - sigma * vio`,
`S = C + sum(A[i] * y[i] for i in 1:m)` and
the gradient is `G = 2S * R`.
Note that the `norm(G)` that SDPLR compares against `rho_c` has a factor-of-2 scaling difference
from the `norm(S * R)` used in the paper.

The SDPLR solver implements the following algorithm.
```julia
sigma = inv(sum(size(A[i], 1) for i in 1:m))
origval = val
while majiter++ < 100_000
lambdaupdate = 0
localiter = 100
while localiter > 10
lambdaupdate += 1
localiter = 0
if norm(G) / (norm(C) + 1) <= rho_c / sigma
break
end
while norm(G) / (norm(C) + 1) - rho_c / sigma > eps()
localiter += 1
iter += 1
D = lbfgs(G)
R += linesearch(D) * D
if norm(vio) / (norm(b) + 1) <= rho_f || totaltime >= timelim || iter >= 10_000_000
return
end
end
lambda -= sigma * vio
end
if val - 1e10 * abs(origval) > eps()
return
end
if norm(vio) / (norm(b) + 1) <= rho_f || totaltime >= timelim || iter >= 10_000_000
return
end
while norm(G) / (norm(C) + 1) > rho_c / sigma
sigma *= 2
end
lambdaupdate = 0
end
```
149 changes: 99 additions & 50 deletions src/MOI_wrapper.jl
Original file line number Diff line number Diff line change
@@ -1,7 +1,19 @@
# This file modifies code from SDPAFamily.jl (https://github.com/ericphanson/SDPAFamily.jl/), which is available under an MIT license (see LICENSE).

import MathOptInterface as MOI

# Hard iteration caps; used by the `MOI.TerminationStatus` getter below to
# classify a stop as `MOI.ITERATION_LIMIT`.
const MAX_MAJITER = 100_000
const MAX_ITER = 10_000_000

# Maps each solver-state attribute name to its (1-based) index in the
# `pieces` vector ("totaltime" => 6 matches the former `pieces[6]` access in
# the `MOI.SolveTimeSec` getter).
const PIECES_MAP = Dict{String,Int}(
    "majiter" => 1,
    "iter" => 2,
    "lambdaupdate" => 3,
    "CG" => 4,
    "curr_CG" => 5,
    "totaltime" => 6,
    "sigma" => 7,
    "overallsc" => 8,
)

# Cone types accepted by `MOI.add_constrained_variables` for this optimizer.
const SupportedSets =
    Union{MOI.Nonnegatives,MOI.PositiveSemidefiniteConeTriangle}

Expand All @@ -24,11 +36,14 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
Ainfo_type::Vector{Vector{Cchar}}
silent::Bool
params::Parameters
# Solution
Rmap::Vector{Int}
R::Union{Nothing,Vector{Cdouble}}
lambda::Vector{Cdouble}
maxranks::Vector{Csize_t}
ranks::Vector{Csize_t}
R::Vector{Cdouble}
lambda::Vector{Cdouble}
pieces::Union{Nothing,Vector{Cdouble}}
set_pieces::Dict{Int,Cdouble}
function Optimizer()
return new(
0.0,
Expand All @@ -50,31 +65,54 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
false,
Parameters(),
Int[],
nothing,
Cdouble[],
Csize_t[],
Csize_t[],
Cdouble[],
Cdouble[],
nothing,
Dict{Int,Cdouble}(),
)
end
end

# A raw attribute is supported when it is either a solver-state entry
# (a key of `PIECES_MAP`) or a field of the `Parameters` struct.
# Fix: drop the leftover pre-refactor `return hasfield(...)` line that made
# the `PIECES_MAP` check unreachable.
function MOI.supports(::Optimizer, param::MOI.RawOptimizerAttribute)
    return haskey(PIECES_MAP, param.name) ||
           hasfield(Parameters, Symbol(param.name))
end
# Set a raw attribute: solver-state entries (keys of `PIECES_MAP`) are stored
# in `set_pieces` (and mirrored into `pieces` when a solve has populated it);
# everything else is written to the corresponding `Parameters` field.
# Setting a state entry to `nothing` clears any previously set initial value.
# Fix: remove the leftover pre-refactor `setfield!` lines that unconditionally
# wrote every attribute to `params`, even `PIECES_MAP` entries.
function MOI.set(optimizer::Optimizer, param::MOI.RawOptimizerAttribute, value)
    if !MOI.supports(optimizer, param)
        throw(MOI.UnsupportedAttribute(param))
    end
    if haskey(PIECES_MAP, param.name)
        idx = PIECES_MAP[param.name]
        if isnothing(value)
            delete!(optimizer.set_pieces, idx)
        else
            optimizer.set_pieces[idx] = value
            if !isnothing(optimizer.pieces)
                optimizer.pieces[idx] = value
            end
        end
    else
        s = Symbol(param.name)
        setfield!(optimizer.params, s, convert(fieldtype(Parameters, s), value))
    end
    return
end
# Get a raw attribute: solver-state entries come from `pieces` after a solve,
# or from `set_pieces` (defaulting to `nothing`) before one; parameters are
# read from the `Parameters` struct.
# Fix: remove the leftover pre-refactor lines that called the nonexistent
# `getfield!` and then returned `nothing` unconditionally.
function MOI.get(optimizer::Optimizer, param::MOI.RawOptimizerAttribute)
    if !MOI.supports(optimizer, param)
        throw(MOI.UnsupportedAttribute(param))
    end
    if haskey(PIECES_MAP, param.name)
        idx = PIECES_MAP[param.name]
        if isnothing(optimizer.pieces)
            # No solve yet: report a user-set initial value, if any.
            return get(optimizer.set_pieces, idx, nothing)
        else
            return optimizer.pieces[idx]
        end
    else
        return getfield(optimizer.params, Symbol(param.name))
    end
end

MOI.supports(::Optimizer, ::MOI.Silent) = true
Expand Down Expand Up @@ -121,6 +159,7 @@ function _new_block(model::Optimizer, set::MOI.PositiveSemidefiniteConeTriangle)
end

function MOI.add_constrained_variables(model::Optimizer, set::SupportedSets)
reset_solution!(model)
offset = length(model.varmap)
_new_block(model, set)
ci = MOI.ConstraintIndex{MOI.VectorOfVariables,typeof(set)}(offset + 1)
Expand Down Expand Up @@ -220,6 +259,7 @@ function MOI.add_constraint(
func::MOI.ScalarAffineFunction{Cdouble},
set::MOI.EqualTo{Cdouble},
)
reset_solution!(model)
push!(model.Ainfo_entptr, Csize_t[])
push!(model.Ainfo_type, Cchar[])
_fill!(
Expand Down Expand Up @@ -262,31 +302,27 @@ function MOI.optimize!(model::Optimizer)
push!(CAinfo_entptr, length(CAent))
CAinfo_type =
reduce(vcat, vcat([model.Cinfo_type], model.Ainfo_type), init = Cchar[])
maxranks = default_maxranks(
model.params.maxrank,
model.blktype,
model.blksz,
CAinfo_entptr,
length(model.b),
)
Rsizes = map(eachindex(model.blktype)) do k
if model.blktype[k] == Cchar('s')
return model.blksz[k] * maxranks[k]
else
@assert model.blktype[k] == Cchar('d')
return model.blksz[k]
end
end
model.Rmap = [0; cumsum(Rsizes)]
# In `main.c`, it does `(rand() / RAND_MAX) - (rand() - RAND_MAX)`` to take the difference between
# two numbers between 0 and 1. Here, Julia's `rand()`` is already between 0 and 1 so we don't have
# to divide by anything.
nr = last(model.Rmap)
params = deepcopy(model.params)
if model.silent
params.printlevel = 0
end
R = rand(nr) - rand(nr)
if isnothing(model.pieces)
model.maxranks = default_maxranks(
model.params.maxrank,
model.blktype,
model.blksz,
CAinfo_entptr,
length(model.b),
)
model.ranks = copy(model.maxranks)
model.Rmap, model.R =
default_R(model.blktype, model.blksz, model.maxranks)
model.lambda = zeros(Cdouble, length(model.b))
model.pieces = default_pieces(model.blksz)
for (idx, val) in model.set_pieces
model.pieces[idx] = val
end
end
_, model.R, model.lambda, model.ranks, model.pieces = solve(
model.blksz,
model.blktype,
Expand All @@ -296,9 +332,12 @@ function MOI.optimize!(model::Optimizer)
CAcol,
CAinfo_entptr,
CAinfo_type,
params = params,
maxranks = maxranks,
R = R,
params = model.params,
maxranks = model.maxranks,
ranks = model.ranks,
R = model.R,
lambda = model.lambda,
pieces = model.pieces,
)
return
end
Expand All @@ -309,13 +348,23 @@ function MOI.get(optimizer::Optimizer, ::MOI.RawStatusString)
return "majiter = $majiter, iter = $iter, λupdate = $λupdate, CG = $CG, curr_CG = $curr_CG, totaltime = $totaltime, σ = $σ, overallsc = $overallsc"
end
# Solve time is the "totaltime" solver-state attribute.
# Fix: drop the leftover pre-refactor `return optimizer.pieces[6]` line that
# made the delegation to the named attribute unreachable.
function MOI.get(optimizer::Optimizer, ::MOI.SolveTimeSec)
    return MOI.get(optimizer, MOI.RawOptimizerAttribute("totaltime"))
end

# The model is empty when neither constraint right-hand sides nor variables
# have been added.
function MOI.is_empty(optimizer::Optimizer)
    has_data = !isempty(optimizer.b) || !isempty(optimizer.varmap)
    return !has_data
end

# Discard all cached solution state (factor `R`, dual `lambda`, rank data and
# the `pieces` state vector) so the next `optimize!` starts afresh.
function reset_solution!(optimizer::Optimizer)
    for field in (:Rmap, :maxranks, :ranks, :R, :lambda)
        empty!(getfield(optimizer, field))
    end
    optimizer.pieces = nothing
    return
end

function MOI.empty!(optimizer::Optimizer)
optimizer.objective_constant = 0.0
optimizer.objective_sign = 1
Expand All @@ -333,11 +382,7 @@ function MOI.empty!(optimizer::Optimizer)
empty!(optimizer.Acol)
empty!(optimizer.Ainfo_entptr)
empty!(optimizer.Ainfo_type)
empty!(optimizer.Rmap)
optimizer.R = nothing
empty!(optimizer.lambda)
empty!(optimizer.ranks)
optimizer.pieces = nothing
reset_solution!(optimizer)
return
end

Expand All @@ -356,6 +401,7 @@ function MOI.set(
::MOI.ObjectiveSense,
sense::MOI.OptimizationSense,
)
reset_solution!(model)
sign = sense == MOI.MAX_SENSE ? -1 : 1
if model.objective_sign != sign
model.Cent .*= -1
Expand All @@ -369,6 +415,7 @@ function MOI.set(
::MOI.ObjectiveFunction,
func::MOI.ScalarAffineFunction{Cdouble},
)
reset_solution!(model)
model.objective_constant = MOI.constant(func)
empty!(model.Cent)
empty!(model.Crow)
Expand All @@ -393,23 +440,25 @@ end
# Classify how the solver stopped: not run at all, time limit, iteration
# limit, or (otherwise) a locally optimal point.
function MOI.get(model::Optimizer, ::MOI.TerminationStatus)
    # `pieces` is only populated once `optimize!` has run.
    isnothing(model.pieces) && return MOI.OPTIMIZE_NOT_CALLED
    elapsed = MOI.get(model, MOI.SolveTimeSec())
    if elapsed >= MOI.get(model, MOI.RawOptimizerAttribute("timelim"))
        return MOI.TIME_LIMIT
    end
    iters = MOI.get(model, MOI.RawOptimizerAttribute("iter"))
    majiters = MOI.get(model, MOI.RawOptimizerAttribute("majiter"))
    if iters >= MAX_ITER || majiters >= MAX_MAJITER
        return MOI.ITERATION_LIMIT
    end
    return MOI.LOCALLY_SOLVED
end

# Primal and dual status share the same logic: no solution before a solve (or
# for an out-of-range result index), unknown when the solver did not reach
# local optimality, feasible otherwise.
# Fix: remove the superseded `MOI.PrimalStatus`-only method, the leftover old
# `MOI.DualStatus` signature line, and the stray unreachable trailing
# `return MOI.FEASIBLE_POINT` — the combined `Union` method replaces them all.
function MOI.get(m::Optimizer, attr::Union{MOI.PrimalStatus,MOI.DualStatus})
    if attr.result_index > MOI.get(m, MOI.ResultCount())
        return MOI.NO_SOLUTION
    elseif MOI.get(m, MOI.TerminationStatus()) != MOI.LOCALLY_SOLVED
        return MOI.UNKNOWN_RESULT_STATUS
    else
        return MOI.FEASIBLE_POINT
    end
end

function MOI.get(model::Optimizer, ::MOI.ResultCount)
Expand Down
Loading