Commit
Merge 3c5331f into b61db89
blegat committed Sep 24, 2020
2 parents b61db89 + 3c5331f commit 9147d67
Showing 9 changed files with 246 additions and 4 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -2,3 +2,5 @@ Manifest.toml
*.jl.cov
*.jl.*.cov
*.jl.mem

generated
1 change: 0 additions & 1 deletion docs/.gitignore
@@ -1,2 +1 @@
build/
src/generated
58 changes: 58 additions & 0 deletions examples/Extended Formulation.jl
@@ -0,0 +1,58 @@
# # Extended formulation

#md # [![](https://mybinder.org/badge_logo.svg)](@__BINDER_ROOT_URL__/generated/Extended Formulation.ipynb)
#md # [![](https://img.shields.io/badge/show-nbviewer-579ACA.svg)](@__NBVIEWER_ROOT_URL__/generated/Extended Formulation.ipynb)

# In this notebook, we show how to work with the extended formulation of a polyhedron.
# The convex hull of a union of H-represented polyhedra can be obtained
# as the projection of an H-representation [Theorem 3.3, B85].
# To use the resulting polyhedron as a constraint set in an optimization problem,
# there is no need to compute the H-representation of this projection explicitly.
# Moreover, other operations, such as intersection, are also implemented between extended H-representations.
# We illustrate this with a simple example, starting from the H-representation of a square defined with JuMP.

# [B85] Balas, E., 1985.
# *Disjunctive programming and a hierarchy of relaxations for discrete optimization problems*.
# SIAM Journal on Algebraic Discrete Methods, 6(3), pp.466-486.

using JuMP
model = Model()
@variable(model, -1 <= x <= 1)
@variable(model, -1 <= y <= 1)
using Polyhedra
square = hrep(model)

# In the following, `diagonal` and `antidiag` are projections of extended H-representations of dimension 7,
# hence `diamond` is a projection of an extended H-representation of dimension 12.

diagonal = convexhull(translate(square, [-2, -2]), translate(square, [2, 2]))
@test fulldim(diagonal.set) == 7 #jl
antidiag = convexhull(translate(square, [-2, 2]), translate(square, [2, -2]))
@test fulldim(antidiag.set) == 7 #jl
diamond = diagonal ∩ antidiag
@test fulldim(diamond.set) == 12 #jl

# We don't need to compute the result of the projection to solve an optimization problem
# over `diamond`.

import GLPK
model = Model(GLPK.Optimizer)
@variable(model, x[1:2])
@constraint(model, x in diamond)
@objective(model, Max, x[2])
optimize!(model)
value.(x) #!jl
@test value.(x) ≈ [0.0, 2.0] #jl

# In the optimization problem above, the auxiliary variables of the extended formulation
# are added transparently inside a bridge.
# To manipulate the auxiliary variables, one can use the extended H-representation directly instead of its projection.

import GLPK
model = Model(GLPK.Optimizer)
@variable(model, x[1:12])
@constraint(model, x in diamond.set)
@objective(model, Max, x[2])
optimize!(model)
value.(x) #!jl
@test value.(x) ≈ [0.0, 2.0, -0.75, -0.25, 0.75, 2.25, 0.25, -0.75, 2.25, 0.75, -0.25, 0.75] #jl
6 changes: 5 additions & 1 deletion examples/test_examples.jl
@@ -1,9 +1,13 @@
using Test
import Literate

const EXAMPLES_DIR = @__DIR__
const OUTPUT_DIR = joinpath(@__DIR__, "generated")

const EXAMPLES = ["Minimal Robust Positively Invariant Set.jl"]
const EXAMPLES = [
"Extended Formulation.jl",
"Minimal Robust Positively Invariant Set.jl"
]

@testset "test_examples.jl" begin
@testset "$(example)" for example in EXAMPLES
2 changes: 2 additions & 0 deletions src/Polyhedra.jl
@@ -66,6 +66,7 @@ include("aff.jl")
include("linearity.jl")
include("redundancy.jl")
include("projection.jl")
include("extended.jl")

# Implementations of representations
include("vecrep.jl")
@@ -85,6 +86,7 @@ include("opt.jl")
include("polyhedra_to_lp_bridge.jl")
include("vrep_optimizer.jl")
include("default.jl")
include("projection_opt.jl")

# Visualization
include("show.jl")
3 changes: 3 additions & 0 deletions src/dimension.jl
@@ -55,6 +55,9 @@ neg_fulldim(N::Int) = -N
# FIXME May need generated function to make it type stable
neg_fulldim(::StaticArrays.Size{N}) where N = StaticArrays.Size{(-N[1],)}()

map_fulldim(f::Function, N::Integer) = f(N)
map_fulldim(f::Function, ::StaticArrays.Size{N}) where N = StaticArrays.Size{(f(N[1]),)}()
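
# Illustrative sketch (not part of this diff): `map_fulldim` applies `f` to a full
# dimension given either as an `Int` or as a `StaticArrays.Size`, preserving the representation:
#
#     map_fulldim(N -> 2N, 3)                         # 6
#     map_fulldim(N -> 2N, StaticArrays.Size{(3,)}()) # StaticArrays.Size{(6,)}()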

sum_fulldim(N1::Int, N2::Int) = N1 + N2
# FIXME May need generated function to make it type stable
sum_fulldim(::StaticArrays.Size{N1}, ::StaticArrays.Size{N2}) where {N1, N2} = StaticArrays.Size{(N1[1] + N2[1],)}()
34 changes: 32 additions & 2 deletions src/elements.jl
@@ -215,6 +215,18 @@ function Base.:*(P::AbstractMatrix, v::ElemT) where {T, ElemT<:VStruct{T}}
return ElemTout(P * v.a)
end

function zeropad(a::AbstractSparseVector{T}, n::Integer, start::Integer) where T
if iszero(n)
return a
elseif iszero(start)
return zeropad(a, -n)
elseif start == length(a)
return zeropad(a, n)
else
return [a[1:start]; spzeros(T, n); a[start+1:end]]::AbstractSparseVector{T}
end
end
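
# Illustrative sketch (not part of this diff): `zeropad(a, n, start)` inserts `n` zero
# entries after position `start` of the sparse vector `a`, for example:
#
#     a = sparsevec([1, 2], [1.0, 2.0])   # [1.0, 2.0]
#     zeropad(a, 3, 1)                    # [1.0, 0.0, 0.0, 0.0, 2.0]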

function zeropad(a::AbstractSparseVector{T}, n::Integer) where T
if iszero(n)
return a
@@ -247,6 +259,7 @@ function zeropad(a::Vector{T}, n::Integer) where T
return [a; zeros(T, n)]
end
end
zeropad(h::HRepElement, d, start) = constructor(h)(zeropad(h.a, d, start), h.β)
zeropad(h::HRepElement, d::FullDim) = constructor(h)(zeropad(h.a, d), h.β)
zeropad(v::VStruct, d::FullDim) = constructor(v)(zeropad(v.a, d))
# Called in the cartesian product of two polyhedra when only the second uses static arrays.
@@ -298,14 +311,31 @@ function pushbefore(a::StaticArrays.SVector, β)
StaticArrays.SVector(β, a...)
end

function push_after(a::AbstractSparseVector{T}, β::T) where T
b = spzeros(T, length(a)+1)
b[end] = β
b[1:end-1] = a
return b
end
push_after(a::AbstractVector, β) = [a; β]
function push_after(a::StaticArrays.SVector, β)
StaticArrays.SVector(a..., β)
end

constructor(::HyperPlane) = HyperPlane
constructor(::HalfSpace) = HalfSpace
constructor(::Ray) = Ray
constructor(::Line) = Line

function lift(h::HRepElement{T}) where {T}
constructor(h)(pushbefore(h.a, -h.β), zero(T))
function lift(h::HRepElement{T}, pre::Bool=true) where T
a = pre ? pushbefore(h.a, -h.β) : push_after(h.a, -h.β)
constructor(h)(a, zero(T))
end
function complement_lift(h::HRepElement{T}, pre::Bool=true) where T
a = pre ? pushbefore(h.a, h.β) : push_after(h.a, h.β)
constructor(h)(a, h.β)
end
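
# Illustrative sketch (not part of this diff): `lift` homogenizes an element, placing
# `-β` before or after the coefficient vector depending on `pre`:
#
#     lift(HalfSpace([1, 0], 2))        # HalfSpace([-2, 1, 0], 0)
#     lift(HalfSpace([1, 0], 2), false) # HalfSpace([1, 0, -2], 0)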

lift(v::VStruct{T}) where {T} = constructor(v)(pushbefore(v.a, zero(T)))
lift(v::AbstractVector{T}) where {T} = pushbefore(v, one(T))

79 changes: 79 additions & 0 deletions src/extended.jl
@@ -0,0 +1,79 @@
struct HSumSpace{T} <: HRepresentation{T}
d::Int
end
hvectortype(::Type{HSumSpace{T}}) where T = SparseVector{T, Int}
Base.length(idxs::HyperPlaneIndices{T, HSumSpace{T}}) where T = idxs.rep.d
Base.isempty(idxs::HyperPlaneIndices{T, HSumSpace{T}}) where T = idxs.rep.d <= 0
startindex(idxs::HyperPlaneIndices{T, HSumSpace{T}}) where T = isempty(idxs) ? nothing : eltype(idxs)(1)
function Base.get(h::HSumSpace{T}, idx::HyperPlaneIndex) where T
i = idx.value
a = sparsevec(i:h.d:i+2h.d, StaticArrays.SVector(one(T), -one(T), -one(T)), 3h.d)
return HyperPlane(a, zero(T))
end
function nextindex(h::HSumSpace, idx::HyperPlaneIndex)
if idx.value < h.d
return typeof(idx)(idx.value + 1)
else
return nothing
end
end
@norepelem HSumSpace HalfSpace
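
# Illustrative reading (not part of this diff): `HSumSpace{T}(d)` encodes, in a space of
# dimension `3d`, the `d` hyperplanes `x[i] - y[i] - z[i] == 0`, i.e. the coupling
# constraint `x == y + z` used by the Balas extended formulation in `convexhull` below.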

struct Projection{S, I}
set::S
dimensions::I
end
fulldim(p::Projection) = length(p.dimensions)

# TODO should be cartesian product with FullSpace
function zeropad(p::HRep{T}, padding...) where T
d = map_fulldim(N -> N + abs(padding[1]), FullDim(p))
f = (i, el) -> zeropad(el, padding...)
return similar(p, d, T, hmap(f, d, T, p)...)
end

function Base.intersect(p1::Projection{S1, <:Base.OneTo}, p2::Projection{S2, <:Base.OneTo}) where {S1, S2}
d = length(p1.dimensions)
d == length(p2.dimensions) || throw(DimensionMismatch())
p = intersect(zeropad(p1.set, fulldim(p2.set) - d),
zeropad(p2.set, fulldim(p1.set) - d, d))
return Projection(p, Base.OneTo(d))
end
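
# Illustrative sketch (not part of this diff): intersecting two projections pads each
# extended set with the auxiliary dimensions of the other. For the square example in
# `examples/Extended Formulation.jl`:
#
#     fulldim(diagonal.set)              # 7, projected onto Base.OneTo(2)
#     fulldim(antidiag.set)              # 7, projected onto Base.OneTo(2)
#     fulldim((diagonal ∩ antidiag).set) # 12 == 7 + (7 - 2)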

"""
convexhull(p1::HRepresentation, p2::HRepresentation)
Returns the Balas [Theorem 3.3, B85] extended H-representation of the convex hull
of `p1` and `p2`.
[B85] Balas, E., 1985.
*Disjunctive programming and a hierarchy of relaxations for discrete optimization problems*.
SIAM Journal on Algebraic Discrete Methods, 6(3), pp.466-486.
"""
function convexhull(p1::HRepresentation, p2::HRepresentation)
d = FullDim(p1)
T = promote_coefficient_type((p1, p2))
d_2 = map_fulldim(N -> 2N, d)
d_3 = map_fulldim(N -> 3N, d)
function f(i, el)
if i == 3
return zeropad(el, 1)
elseif i == 4
return zeropad(el, -d_3)
else
if i == 1
_padded = zeropad(el, neg_fulldim(d))
padded = zeropad(_padded, d)
return lift(padded, false)
else
padded = zeropad(el, neg_fulldim(d_2))
return complement_lift(padded, false)
end
end
end
ei = basis(hvectortype(p1), map_fulldim(N -> 1, d), 1)
λ = HalfSpace(-ei, zero(T)) ∩ HalfSpace(ei, one(T))
lifted = similar((p1, p2), map_fulldim(N -> 3N + 1, d), T,
hmap(f, d, T, p1, p2, HSumSpace{T}(fulldim(p1)), λ)...)
return Projection(lifted, Base.OneTo(fulldim(p1)))
end
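
# Illustrative usage (not part of this diff), mirroring `examples/Extended Formulation.jl`:
#
#     square   = hrep(model)             # H-representation of the square [-1, 1]^2
#     diagonal = convexhull(translate(square, [-2, -2]), translate(square, [2, 2]))
#     diagonal isa Projection            # true
#     fulldim(diagonal.set)              # 7 == 3 * 2 + 1
#     diagonal.dimensions                # Base.OneTo(2)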
65 changes: 65 additions & 0 deletions src/projection_opt.jl
@@ -0,0 +1,65 @@
struct ProjectionOptSet{T, RepT<:Rep{T}, I} <: MOI.AbstractVectorSet
p::Projection{RepT, I}
end
MOI.dimension(set::ProjectionOptSet) = fulldim(set.p)
Base.copy(set::ProjectionOptSet) = set

struct ProjectionBridge{T, F, RepT, I} <: MOI.Bridges.Constraint.AbstractBridge
variables::Vector{MOI.VariableIndex}
constraint::MOI.ConstraintIndex{F, PolyhedraOptSet{T, RepT}}
dimensions::I
end
function MOI.Bridges.Constraint.bridge_constraint(
::Type{ProjectionBridge{T, F, RepT, I}}, model::MOI.ModelLike,
f::MOI.AbstractVectorFunction, p::ProjectionOptSet) where {T, F, RepT, I}
vf = MOIU.eachscalar(f)
func = Vector{eltype(vf)}(undef, fulldim(p.p.set))
for (i, j) in enumerate(p.p.dimensions)
func[j] = vf[i]
end
N = fulldim(p.p.set)
variables = MOI.add_variables(model, N - fulldim(p.p))
for (i, j) in enumerate(setdiff(1:N, p.p.dimensions))
func[j] = MOI.SingleVariable(variables[i])
end
constraint = MOI.add_constraint(model, MOI.Utilities.vectorize(func), PolyhedraOptSet(p.p.set))
return ProjectionBridge{T, F, RepT, I}(variables, constraint, p.p.dimensions)
end

MOI.supports_constraint(::Type{ProjectionBridge{T}},
::Type{<:MOI.AbstractVectorFunction},
::Type{<:ProjectionOptSet{T}}) where {T} = true
function MOI.Bridges.added_constrained_variable_types(::Type{<:ProjectionBridge})
return Tuple{DataType}[]
end
function MOI.Bridges.added_constraint_types(::Type{ProjectionBridge{T, F, RepT, I}}) where {T, F, RepT, I}
return [(F, PolyhedraOptSet{T, RepT})]
end
function MOI.Bridges.Constraint.concrete_bridge_type(
::Type{<:ProjectionBridge{T}},
F::Type{<:MOI.AbstractVectorFunction},
::Type{ProjectionOptSet{T, RepT, I}}) where {T, RepT, I}
return ProjectionBridge{T, F, RepT, I}
end

# Attributes, Bridge acting as a model
function MOI.get(b::ProjectionBridge{T, F, RepT, I}, ::MOI.NumberOfConstraints{F, PolyhedraOptSet{T, RepT}}) where {T, F, RepT, I}
return 1
end
function MOI.get(b::ProjectionBridge{T, F, RepT, I}, ::MOI.ListOfConstraintIndices{F, PolyhedraOptSet{T, RepT}}) where {T, F, RepT, I}
return [b.constraint]
end

# Indices
function MOI.delete(model::MOI.ModelLike, b::ProjectionBridge)
MOI.delete(model, b.constraint)
for vi in b.variables
MOI.delete(model, vi)
end
end

function JuMP.build_constraint(error_fun::Function, func, set::Projection)
return JuMP.BridgeableConstraint(JuMP.BridgeableConstraint(
JuMP.build_constraint(error_fun, func, ProjectionOptSet(set)),
ProjectionBridge), PolyhedraToLPBridge)
end
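
# Illustrative usage (not part of this diff), mirroring `examples/Extended Formulation.jl`:
# constraining JuMP variables to lie in a `Projection` goes through `build_constraint`
# above, so the auxiliary variables of the extended formulation are added by
# `ProjectionBridge` and never appear in the user model.
#
#     model = Model(GLPK.Optimizer)
#     @variable(model, x[1:2])
#     @constraint(model, x in diamond)   # bridged via ProjectionBridge and PolyhedraToLPBridge
#     @objective(model, Max, x[2])
#     optimize!(model)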
