allow access to constraint bounds without going through MPB
mlubin committed Nov 8, 2015
1 parent 79b6685 commit b42ed18
Showing 4 changed files with 55 additions and 21 deletions.
5 changes: 4 additions & 1 deletion doc/nlp.rst
@@ -193,12 +193,15 @@ For example::

When querying derivatives, ``cons2`` will appear first, because it is the first linear constraint, then ``cons1``, because it is the first quadratic constraint, then ``cons3``, because it is the first nonlinear constraint. Note that for one-sided nonlinear constraints, JuMP subtracts any values on the right-hand side when computing expressions. In other words, one-sided nonlinear constraints are always transformed to have a right-hand side of zero.

The ``getConstraintBounds(m::Model)`` method returns the lower and upper bounds
of all the constraints in the model, concatenated in the order discussed above.
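
For example, a minimal sketch (the constraints themselves are illustrative; the bound values follow the ordering and right-hand-side conventions described above)::

    m = Model()
    @defVar(m, x)
    @defVar(m, y)
    @addConstraint(m, x + 5y <= 1)              # linear
    @addConstraint(m, x^2 + y^2 <= 2)           # quadratic
    @addNLConstraint(m, sin(x)*cos(y) == 0.5)   # nonlinear
    lb, ub = getConstraintBounds(m)
    # lb == [-Inf, -Inf, 0.0], ub == [1.0, 0.0, 0.0]
    # (linear bounds keep their right-hand sides; quadratic and nonlinear
    #  constraints are shifted to have right-hand sides of zero)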

This method of querying derivatives directly from a JuMP model is convenient for
interacting with the model in a structured way, e.g., for accessing derivatives with
respect to specific variables. For example, in statistical maximum likelihood estimation problems,
one is often interested in the Hessian matrix at the optimal solution,
which can be queried using the ``JuMPNLPEvaluator``.
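
For instance, a sketch of evaluating the Hessian of the Lagrangian at the solution (variable names are illustrative; this assumes ``solve(m)`` has been called, that ``m.colVal`` holds the primal solution vector, and that MathProgBase is loaded)::

    d = JuMPNLPEvaluator(m)
    MathProgBase.initialize(d, [:Hess])
    rows, cols = MathProgBase.hesslag_structure(d)   # sparsity pattern of the Hessian of the Lagrangian
    xstar = m.colVal                                 # primal solution after solve(m)
    H = zeros(length(rows))                          # storage for the structural nonzeros
    lb, ub = getConstraintBounds(m)
    mu = zeros(length(lb))                           # one multiplier per constraint, in the order above
    MathProgBase.eval_hesslag(d, H, xstar, 1.0, mu)  # zero multipliers leave only the objective term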

However, the examples above are *not* a convenient way to access the NLP `standard-form <http://mathprogbasejl.readthedocs.org/en/latest/nlp.html>`_ representation of a JuMP model, because there is no direct way to access the vector of constraint upper and lower bounds. In this case, you should implement an ``AbstractMathProgSolver`` and corresponding ``AbstractMathProgModel`` type following the MathProgBase nonlinear interface, collecting the problem data through the ``MathProgBase.loadnonlinearproblem!`` `method <http://mathprogbasejl.readthedocs.org/en/latest/nlp.html#loadnonlinearproblem!>`_. This approach has the advantage of being nominally independent of JuMP itself in terms of problem format. You may use the ``buildInternalModel`` method to ask JuMP to populate the "solver" without calling ``optimize!``.
If you are writing a "solver", we *highly encourage* use of the `MathProgBase nonlinear interface <http://mathprogbasejl.readthedocs.org/en/latest/nlp.html>`_ over querying derivatives with the methods above. Those methods are provided for convenience but do not fully integrate with JuMP's solver infrastructure. In particular, they do not allow users to pass your solver to the ``Model()`` constructor, to call it through ``solve()``, or to have the solution populated back into the model. The MathProgBase interface also has the advantage of being independent of JuMP itself; users of MathProgBase solvers are free to implement their own evaluation routines instead of expressing their models in JuMP. You may use the ``buildInternalModel`` method to ask JuMP to populate the "solver" without calling ``optimize!``.
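
For example, a sketch of this workflow, assuming a MathProgBase-compliant nonlinear solver (Ipopt is used here purely as an illustration)::

    using JuMP, Ipopt
    m = Model(solver=IpoptSolver())
    @defVar(m, x >= 0)
    @setNLObjective(m, Min, (x - 1)^2)
    buildInternalModel(m)          # populates the solver-side model without calling optimize!
    inner = getInternalModel(m)    # the underlying AbstractMathProgModel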

.. [1] Dunning, Huchette, and Lubin, "JuMP: A Modeling Language for Mathematical Optimization", `arXiv <http://arxiv.org/abs/1508.01982>`_.
1 change: 1 addition & 0 deletions src/JuMP.jl
@@ -33,6 +33,7 @@ export
writeLP, writeMPS, setObjective,
addConstraint, addSOS1, addSOS2, solve,
getInternalModel, buildInternalModel, setSolveHook, setPrintHook,
getConstraintBounds,
# Variable
setName, getName, setLower, setUpper, getLower, getUpper,
getValue, setValue, getDual, setCategory, getCategory,
63 changes: 43 additions & 20 deletions src/nlp.jl
@@ -498,32 +498,21 @@ function MathProgBase.constr_expr(d::JuMPNLPEvaluator,i::Integer)
end
end

function _buildInternalModel_nlp(m::Model, traits)

@assert isempty(m.sdpconstr) && isempty(m.socconstr)
getConstraintBounds(m::Model) = getConstraintBounds(m, ProblemTraits(m))

linobj, linrowlb, linrowub = prepProblemBounds(m)
function getConstraintBounds(m::Model,traits::ProblemTraits)

nldata::NLPData = m.nlpdata
if m.internalModelLoaded
@assert isa(nldata.evaluator, JuMPNLPEvaluator)
d = nldata.evaluator
else
d = JuMPNLPEvaluator(m)
nldata.evaluator = d
if traits.conic
error("Not implemented for conic problems")
elseif traits.sos
error("Not implemented for SOS constraints")
end

numConstr = length(m.linconstr) + length(m.quadconstr) + length(nldata.nlconstr)
linobj, linrowlb, linrowub = prepProblemBounds(m)

nlrowlb = Float64[]
nlrowub = Float64[]
for c in nldata.nlconstr
push!(nlrowlb, c.lb)
push!(nlrowub, c.ub)
end
quadrowlb = Float64[]
quadrowub = Float64[]
for c::QuadConstraint in d.m.quadconstr
for c::QuadConstraint in m.quadconstr
if c.sense == :(<=)
push!(quadrowlb, -Inf)
push!(quadrowub, 0.0)
@@ -535,9 +524,43 @@
end
end

nlrowlb = Float64[]
nlrowub = Float64[]

if traits.nlp
nldata::NLPData = m.nlpdata
for c in nldata.nlconstr
push!(nlrowlb, c.lb)
push!(nlrowub, c.ub)
end
end

lb = [linrowlb;quadrowlb;nlrowlb]
ub = [linrowub;quadrowub;nlrowub]

return lb, ub

end

function _buildInternalModel_nlp(m::Model, traits)

linobj, linrowlb, linrowub = prepProblemBounds(m)

nldata::NLPData = m.nlpdata
if m.internalModelLoaded
@assert isa(nldata.evaluator, JuMPNLPEvaluator)
d = nldata.evaluator
else
d = JuMPNLPEvaluator(m)
nldata.evaluator = d
end

nlp_lb, nlp_ub = getConstraintBounds(m)
numConstr = length(nlp_lb)

m.internalModel = MathProgBase.model(m.solver)

MathProgBase.loadnonlinearproblem!(m.internalModel, m.numCols, numConstr, m.colLower, m.colUpper, [linrowlb;quadrowlb;nlrowlb], [linrowub;quadrowub;nlrowub], m.objSense, d)
MathProgBase.loadnonlinearproblem!(m.internalModel, m.numCols, numConstr, m.colLower, m.colUpper, nlp_lb, nlp_ub, m.objSense, d)
if traits.int
if applicable(MathProgBase.setvartype!, m.internalModel, m.colCat)
MathProgBase.setvartype!(m.internalModel, vartypes_without_fixed(m))
7 changes: 7 additions & 0 deletions test/nonlinear.jl
@@ -435,6 +435,9 @@ function test_nl_mpb()
@addConstraint(m, 2x+y <= 0)
@addConstraint(m, -5 <= 2x+y <= 5)
#solve(m) # FIXME maybe?
lb,ub = getConstraintBounds(m)
@fact lb --> [-Inf,-Inf,-5.0]
@fact ub --> [1.0,-0.0,5.0]

@addConstraint(m, 2x^2+y >= 2)
@addNLConstraint(m, sin(x)*cos(y) == 5)
@@ -444,6 +447,10 @@

@setNLObjective(m, Min, x^y)
solve(m)

lb,ub = getConstraintBounds(m)
@fact lb --> [-Inf,-Inf,-5.0,0.0,0.0,0.0,0.0,-0.5]
@fact ub --> [1.0,-0.0,5.0,Inf,0.0,0.0,0.0,0.5]
end
test_nl_mpb()

