From b42ed1885716266ce321a7c8ef6795207b83a846 Mon Sep 17 00:00:00 2001
From: Miles Lubin
Date: Thu, 5 Nov 2015 11:57:01 -0500
Subject: [PATCH] allow access to constraint bounds without going through MPB

---
 doc/nlp.rst       |  5 +++-
 src/JuMP.jl       |  1 +
 src/nlp.jl        | 63 ++++++++++++++++++++++++++++++++---------------
 test/nonlinear.jl |  7 ++++++
 4 files changed, 55 insertions(+), 21 deletions(-)

diff --git a/doc/nlp.rst b/doc/nlp.rst
index f568400e098..90534575b5e 100644
--- a/doc/nlp.rst
+++ b/doc/nlp.rst
@@ -193,12 +193,15 @@ For example::
 
 When querying derivatives, ``cons2`` will appear first, because it is the first linear constraint, then ``cons1``, because it is the first quadratic constraint, then ``cons3``, because it is the first nonlinear constraint. Note that for one-sided nonlinear constraints, JuMP subtracts any values on the right-hand side when computing expressions. In other words, one-sided nonlinear constraints are always transformed to have a right-hand side of zero.
 
+The ``getConstraintBounds(m::Model)`` method returns the lower and upper bounds
+of all the constraints in the model, concatenated in the order discussed above.
+
 This method of querying derivatives directly from a JuMP model is convenient for interacting with the model in a structured way, e.g., for accessing derivatives of specific variables. For example, in statistical maximum likelihood estimation problems, one is often interested in the Hessian matrix at the optimal solution, which can be queried using the ``JuMPNLPEvaluator``.
 
-However, the examples above are *not* a convenient way to access the NLP `standard-form `_ representation of a JuMP model, because there is no direct way to access the vector of constraint upper and lower bounds. In this case, you should implement an ``AbstractMathProgSolver`` and corresponding ``AbstractMathProgModel`` type following the MathProgBase nonlinear interface, collecting the problem data through the ``MathProgBase.loadnonlinearproblem!`` `method `_. This approach has the advantage of being nominally independent of JuMP itself in terms of problem format. You may use the ``buildInternalModel`` method to ask JuMP to populate the "solver" without calling ``optimize!``.
+If you are writing a "solver", we *highly encourage* use of the `MathProgBase nonlinear interface `_ over querying derivatives using the above methods. These methods are provided for convenience but do not fully integrate with JuMP's solver infrastructure. In particular, they do not allow users to specify your solver to the ``Model()`` constructor, to call it using ``solve()``, or to populate the solution back into the model. Use of the MathProgBase interface also has the advantage of being independent of JuMP itself; users of MathProgBase solvers are free to implement their own evaluation routines instead of expressing their model in JuMP. You may use the ``buildInternalModel`` method to ask JuMP to populate the "solver" without calling ``optimize!``.
 
 .. [1] Dunning, Huchette, and Lubin, "JuMP: A Modeling Language for Mathematical Optimization", `arXiv `_.
diff --git a/src/JuMP.jl b/src/JuMP.jl
index 9dc04601f67..b626aae01ac 100644
--- a/src/JuMP.jl
+++ b/src/JuMP.jl
@@ -33,6 +33,7 @@ export
     writeLP, writeMPS, setObjective,
     addConstraint, addSOS1, addSOS2, solve,
     getInternalModel, buildInternalModel, setSolveHook, setPrintHook,
+    getConstraintBounds,
 # Variable
     setName, getName, setLower, setUpper, getLower, getUpper,
     getValue, setValue, getDual, setCategory, getCategory,
diff --git a/src/nlp.jl b/src/nlp.jl
index 67b7084860f..41c5d2e3803 100644
--- a/src/nlp.jl
+++ b/src/nlp.jl
@@ -498,32 +498,21 @@ function MathProgBase.constr_expr(d::JuMPNLPEvaluator,i::Integer)
     end
 end
 
-function _buildInternalModel_nlp(m::Model, traits)
-
-    @assert isempty(m.sdpconstr) && isempty(m.socconstr)
+getConstraintBounds(m::Model) = getConstraintBounds(m, ProblemTraits(m))
 
-    linobj, linrowlb, linrowub = prepProblemBounds(m)
+function getConstraintBounds(m::Model,traits::ProblemTraits)
 
-    nldata::NLPData = m.nlpdata
-    if m.internalModelLoaded
-        @assert isa(nldata.evaluator, JuMPNLPEvaluator)
-        d = nldata.evaluator
-    else
-        d = JuMPNLPEvaluator(m)
-        nldata.evaluator = d
+    if traits.conic
+        error("Not implemented for conic problems")
+    elseif traits.sos
+        error("Not implemented for SOS constraints")
     end
 
-    numConstr = length(m.linconstr) + length(m.quadconstr) + length(nldata.nlconstr)
+    linobj, linrowlb, linrowub = prepProblemBounds(m)
 
-    nlrowlb = Float64[]
-    nlrowub = Float64[]
-    for c in nldata.nlconstr
-        push!(nlrowlb, c.lb)
-        push!(nlrowub, c.ub)
-    end
     quadrowlb = Float64[]
     quadrowub = Float64[]
-    for c::QuadConstraint in d.m.quadconstr
+    for c::QuadConstraint in m.quadconstr
         if c.sense == :(<=)
             push!(quadrowlb, -Inf)
             push!(quadrowub, 0.0)
@@ -535,9 +524,43 @@ function _buildInternalModel_nlp(m::Model, traits)
         end
     end
 
+    nlrowlb = Float64[]
+    nlrowub = Float64[]
+
+    if traits.nlp
+        nldata::NLPData = m.nlpdata
+        for c in nldata.nlconstr
+            push!(nlrowlb, c.lb)
+            push!(nlrowub, c.ub)
+        end
+    end
+
+    lb = [linrowlb;quadrowlb;nlrowlb]
+    ub = [linrowub;quadrowub;nlrowub]
+
+    return lb, ub
+
+end
+
+function _buildInternalModel_nlp(m::Model, traits)
+
+    linobj, linrowlb, linrowub = prepProblemBounds(m)
+
+    nldata::NLPData = m.nlpdata
+    if m.internalModelLoaded
+        @assert isa(nldata.evaluator, JuMPNLPEvaluator)
+        d = nldata.evaluator
+    else
+        d = JuMPNLPEvaluator(m)
+        nldata.evaluator = d
+    end
+
+    nlp_lb, nlp_ub = getConstraintBounds(m)
+    numConstr = length(nlp_lb)
+
     m.internalModel = MathProgBase.model(m.solver)
-    MathProgBase.loadnonlinearproblem!(m.internalModel, m.numCols, numConstr, m.colLower, m.colUpper, [linrowlb;quadrowlb;nlrowlb], [linrowub;quadrowub;nlrowub], m.objSense, d)
+    MathProgBase.loadnonlinearproblem!(m.internalModel, m.numCols, numConstr, m.colLower, m.colUpper, nlp_lb, nlp_ub, m.objSense, d)
     if traits.int
         if applicable(MathProgBase.setvartype!, m.internalModel, m.colCat)
             MathProgBase.setvartype!(m.internalModel, vartypes_without_fixed(m))
diff --git a/test/nonlinear.jl b/test/nonlinear.jl
index e49c1026fad..a7fe1f71a0f 100644
--- a/test/nonlinear.jl
+++ b/test/nonlinear.jl
@@ -435,6 +435,9 @@ function test_nl_mpb()
     @addConstraint(m, 2x+y <= 0)
     @addConstraint(m, -5 <= 2x+y <= 5)
     #solve(m) # FIXME maybe?
+    lb,ub = getConstraintBounds(m)
+    @fact lb --> [-Inf,-Inf,-5.0]
+    @fact ub --> [1.0,-0.0,5.0]
 
     @addConstraint(m, 2x^2+y >= 2)
     @addNLConstraint(m, sin(x)*cos(y) == 5)
@@ -444,6 +447,10 @@ function test_nl_mpb()
     @setNLObjective(m, Min, x^y)
 
     solve(m)
+
+    lb,ub = getConstraintBounds(m)
+    @fact lb --> [-Inf,-Inf,-5.0,0.0,0.0,0.0,0.0,-0.5]
+    @fact ub --> [1.0,-0.0,5.0,Inf,0.0,0.0,0.0,0.5]
 end
 
 test_nl_mpb()
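
For reviewers, here is a minimal usage sketch of the ``getConstraintBounds`` method this patch adds. The model, variables, and expected values are illustrative only, inferred from the ordering rules described in doc/nlp.rst and the expectations in test/nonlinear.jl above; they are not part of the patch:

    using JuMP

    m = Model()
    @defVar(m, x)
    @defVar(m, y)

    # One constraint of each class. Bounds are reported in the order
    # linear, then quadratic, then nonlinear. Linear rows keep their
    # right-hand sides, while one-sided quadratic and nonlinear
    # constraints are shifted so their right-hand side becomes zero.
    @addConstraint(m, x + y <= 1)            # linear
    @addConstraint(m, 2x^2 + y >= 2)         # quadratic
    @addNLConstraint(m, sin(x)*cos(y) == 5)  # nonlinear

    lb, ub = getConstraintBounds(m)
    # Expected, per the rules above: lb == [-Inf, 0.0, 0.0]
    #                                ub == [ 1.0, Inf, 0.0]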