From eea5064f70f10188b695315074cb3af12d2c3d22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sat, 25 Aug 2018 11:42:08 +0200 Subject: [PATCH 1/8] Implement model copy --- src/JuMP.jl | 5 +- src/affexpr.jl | 11 +++-- src/copy.jl | 126 +++++++++++++++++++++++++++++++++++++++++++++++ src/macros.jl | 4 +- src/operators.jl | 38 +++++++------- src/parseexpr.jl | 8 +-- src/quadexpr.jl | 10 ++-- test/model.jl | 57 +++++++++++++++++++++ 8 files changed, 223 insertions(+), 36 deletions(-) create mode 100644 src/copy.jl diff --git a/src/JuMP.jl b/src/JuMP.jl index 6971a25b63b..b96a7632a06 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -245,6 +245,7 @@ function direct_model(backend::MOI.ModelLike) Dict{Symbol, Any}()) end + if VERSION >= v"0.7-" Base.broadcastable(model::Model) = Ref(model) end @@ -331,8 +332,6 @@ function objective_sense(model::Model) end # TODO(IainNZ): Document these too. -# TODO(#1381): Implement Base.copy for Model. -object_dictionary(model::Model) = model.obj_dict termination_status(model::Model) = MOI.get(model, MOI.TerminationStatus()) primal_status(model::Model) = MOI.get(model, MOI.PrimalStatus()) dual_status(model::Model) = MOI.get(model, MOI.DualStatus()) @@ -733,6 +732,7 @@ struct NonlinearParameter <: AbstractJuMPScalar end ########################################################################## +include("copy.jl") include("containers.jl") include("operators.jl") include("macros.jl") @@ -740,6 +740,5 @@ include("optimizerinterface.jl") include("nlp.jl") include("print.jl") - ########################################################################## end diff --git a/src/affexpr.jl b/src/affexpr.jl index cfd56dbf99c..cc83dbe9d2f 100644 --- a/src/affexpr.jl +++ b/src/affexpr.jl @@ -75,7 +75,7 @@ Base.zero(::Type{GenericAffExpr{C,V}}) where {C,V} = GenericAffExpr{C,V}(zero(C) Base.one(::Type{GenericAffExpr{C,V}}) where {C,V} = GenericAffExpr{C,V}(one(C), OrderedDict{V,C}()) Base.zero(a::GenericAffExpr) = zero(typeof(a)) Base.one( a::GenericAffExpr) = one(typeof(a)) -Base.copy(a::GenericAffExpr) = GenericAffExpr(copy(a.constant), copy(a.terms)) +Base.copy(a::GenericAffExpr) = GenericAffExpr(Base.copy(a.constant), Base.copy(a.terms)) if VERSION >= v"0.7-" Base.broadcastable(a::GenericAffExpr) = Ref(a) end @@ -92,7 +92,7 @@ function map_coefficients_inplace!(f::Function, a::GenericAffExpr) end function map_coefficients(f::Function, a::GenericAffExpr) - return map_coefficients_inplace!(f, copy(a)) + return map_coefficients_inplace!(f, Base.copy(a)) end Base.sizehint!(a::GenericAffExpr, n::Int) = sizehint!(a.terms, n) @@ -192,7 +192,7 @@ end Base.hash(aff::GenericAffExpr, h::UInt) = hash(aff.constant, hash(aff.terms, h)) function Compat.SparseArrays.dropzeros(aff::GenericAffExpr) - result = copy(aff) + result = Base.copy(aff) for (coef, var) in linear_terms(aff) if iszero(coef) delete!(result.terms, var) @@ -308,8 +308,13 @@ end # variables to the new model's variables function Base.copy(a::GenericAffExpr, new_model::Model) result = zero(a) +<<<<<<< HEAD for (coef, var) in linear_terms(a) add_to_expression!(result, coef, copy(var, new_model)) +======= + for (coef, var) in linearterms(a) + add_to_expression!(result, coef, Base.copy(var, new_model)) +>>>>>>> Implement model copy end result.constant = a.constant return result diff --git a/src/copy.jl b/src/copy.jl new file mode 100644 index 00000000000..edb014b8dc7 --- /dev/null +++ b/src/copy.jl @@ -0,0 +1,126 @@ +# Copyright 2017, Iain Dunning, Joey Huchette, Miles Lubin, and contributors +# This Source Code 
Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" + copy_extension_data(data, new_model::AbstractModel, model::AbstractModel) + +Return a copy of the extension data `data` of the model `model` to the extension +data of the new model `new_model`. A method should be added for any JuMP +extension storing data in the `ext` field. +""" +function copy_extension_data end + +""" + copy_variablewise_constraints(dest::Dict{MOIVAR, + MOICON{MOI.SingleVariable, S}}, + src::Dict{MOIVAR, + MOICON{MOI.SingleVariable, S}}, + index_map) where S + +Copy the variablewise constraint indices of `src` into `dest` mapping variable +and constraint indices using `index_map`. +""" +function copy_variablewise_constraints(dest::Dict{MOIVAR, + MOICON{MOI.SingleVariable, S}}, + src::Dict{MOIVAR, + MOICON{MOI.SingleVariable, S}}, + index_map) where S + for (variable_index, constraint_index) in src + dest[index_map[variable_index]] = index_map[constraint_index] + end +end + +""" + ReferenceMap + +Mapping between variable and constraint reference of a model and its copy. The +reference of the copied model can be obtained by indexing the map with the +reference of the corresponding reference of the original model. +""" +struct ReferenceMap + model::Model + index_map::MOIU.IndexMap +end +function Base.getindex(reference_map::ReferenceMap, vref::VariableRef) + return VariableRef(reference_map.model, + reference_map.index_map[index(vref)]) +end +function Base.getindex(reference_map::ReferenceMap, cref::ConstraintRef) + return ConstraintRef(reference_map.model, + reference_map.index_map[index(cref)], + cref.shape) +end + +""" + copy(model::Model) + +Return a copy of the model `model` and a [`ReferenceMap`](@ref) that can be used +to obtain the variable and constraint reference of the new model corresponding +to a given `model`'s reference. + +## Note + +Model copy is not supported in Direct mode, i.e. when a model is constructed +using the [`direct_model`](@ref) constructor instead of the [`Model`](@ref) +constructor. + +## Examples + +In the following example, a model `model` is constructed with a variable `x` and +a constraint `cref`. It is then copied into a model `new_model` with the new +references assigned to `x_new` and `cref_new`. +```julia +model = Model() +@variable(model, x) +@constraint(model, cref, x == 2) + +new_model, reference_map = JuMP.copy(model) +x_new = reference_map[x] +cref_new = reference_map[cref] +``` +""" +function copy(model::Model) + if mode(model) == Direct + error("Cannot copy a model in Direct mode. Use the `Model` constructor", + " instead of the `direct_model` constructor to be able to copy", + " the constructed model.") + end + caching_mode = caching_optimizer(model).mode + # TODO add bridges added to the bridge optimizer that are not part of the + # fullbridgeoptimizer + bridge_constraints = model.moibackend isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer} + new_model = Model(caching_mode = caching_mode, + bridge_constraints = bridge_constraints) + + # Copy the MOI backend, note that variable and constraint indices may have + # changed, the `index_map` gives the map between the indices of + # `model.moibackend` and the indices of `new_model.moibackend`. 
+ index_map = MOI.copy!(new_model.moibackend, model.moibackend, + copynames = true) + + copy_variablewise_constraints(new_model.variabletolowerbound, + model.variabletolowerbound, index_map) + copy_variablewise_constraints(new_model.variabletoupperbound, + model.variabletoupperbound, index_map) + copy_variablewise_constraints(new_model.variabletofix, + model.variabletofix, index_map) + copy_variablewise_constraints(new_model.variabletointegrality, + model.variabletointegrality, index_map) + copy_variablewise_constraints(new_model.variabletozeroone, + model.variabletozeroone, index_map) + + new_model.optimizehook = model.optimizehook + + # TODO copy NLP data + @assert model.nlpdata === nothing + + # TODO copy objdict + + for (key, data) in model.ext + new_model.ext[key] = copy_extension_data(data, new_model, model) + end + + return new_model, ReferenceMap(new_model, index_map) +end diff --git a/src/macros.jl b/src/macros.jl index 0b2e5a9d36e..355a219d351 100644 --- a/src/macros.jl +++ b/src/macros.jl @@ -15,7 +15,7 @@ end include("parseexpr.jl") function buildrefsets(expr::Expr, cname) - c = copy(expr) + c = Base.copy(expr) idxvars = Any[] idxsets = Any[] # Creating an indexed set of refs @@ -138,7 +138,7 @@ function getloopedcode(varname, code, condition, idxvars, idxsets, sym, requeste @assert !hasdependentsets(idxvars, idxsets) i, j = esc(idxvars[1]), esc(idxvars[2]) - expr = copy(code) + expr = Base.copy(code) vname = expr.args[1].args[1] tmp = gensym() expr.args[1] = tmp diff --git a/src/operators.jl b/src/operators.jl index 0ba9ade6df2..a814f730dde 100644 --- a/src/operators.jl +++ b/src/operators.jl @@ -26,7 +26,7 @@ Base.:-(lhs::Number, rhs::AbstractVariableRef) = GenericAffExpr(convert(Float64, Base.:*(lhs::Number, rhs::AbstractVariableRef) = GenericAffExpr(0.0, rhs => convert(Float64,lhs)) # Number--GenericAffExpr function Base.:+(lhs::Number, rhs::GenericAffExpr) - result = copy(rhs) + result = Base.copy(rhs) result.constant += lhs return result end @@ -37,7 +37,7 @@ function Base.:-(lhs::Number, rhs::GenericAffExpr) end Base.:*(lhs::Number, rhs::GenericAffExpr) = map_coefficients(c -> lhs * c, rhs) # Number--QuadExpr -Base.:+(lhs::Number, rhs::GenericQuadExpr) = GenericQuadExpr(lhs+rhs.aff, copy(rhs.terms)) +Base.:+(lhs::Number, rhs::GenericQuadExpr) = GenericQuadExpr(lhs+rhs.aff, Base.copy(rhs.terms)) function Base.:-(lhs::Number, rhs::GenericQuadExpr) result = -rhs result.aff.constant += lhs @@ -99,7 +99,7 @@ function Base.:*(lhs::V, rhs::GenericAffExpr{C,V}) where {C, V <: AbstractVariab end Base.:/(lhs::AbstractVariableRef, rhs::GenericAffExpr) = error("Cannot divide a variable by an affine expression") # AbstractVariableRef--GenericQuadExpr -Base.:+(v::AbstractVariableRef, q::GenericQuadExpr) = GenericQuadExpr(v+q.aff, copy(q.terms)) +Base.:+(v::AbstractVariableRef, q::GenericQuadExpr) = GenericQuadExpr(v+q.aff, Base.copy(q.terms)) function Base.:-(v::AbstractVariableRef, q::GenericQuadExpr) result = -q # This makes an unnecessary copy of aff, but it's important for v to appear @@ -130,10 +130,10 @@ end Base.:^(lhs::Union{AbstractVariableRef,GenericAffExpr}, rhs::Number) = error("Only exponents of 0, 1, or 2 are currently supported. Are you trying to build a nonlinear problem? 
Make sure you use @NLconstraint/@NLobjective.") # GenericAffExpr--AbstractVariableRef function Base.:+(lhs::GenericAffExpr{C,V}, rhs::V) where {C, V <: AbstractVariableRef} - return add_to_expression!(copy(lhs), one(C), rhs) + return add_to_expression!(Base.copy(lhs), one(C), rhs) end function Base.:-(lhs::GenericAffExpr{C,V}, rhs::V) where {C, V <: AbstractVariableRef} - return add_to_expression!(copy(lhs), -one(C), rhs) + return add_to_expression!(Base.copy(lhs), -one(C), rhs) end # Don't fall back on AbstractVariableRef*GenericAffExpr to preserve lhs/rhs # consistency (appears in printing). @@ -156,7 +156,7 @@ function Base.:+(lhs::GenericAffExpr{C,V}, rhs::GenericAffExpr{C,V}) where {C,V< operator_warn(owner_model(first(linear_terms(lhs))[2])) end end - result_terms = copy(lhs.terms) + result_terms = Base.copy(lhs.terms) # merge() returns a Dict(), so we need to call merge!() instead. # Note: merge!() doesn't appear to call sizehint!(). Is this important? merge!(+, result_terms, rhs.terms) @@ -164,7 +164,7 @@ function Base.:+(lhs::GenericAffExpr{C,V}, rhs::GenericAffExpr{C,V}) where {C,V< end function Base.:-(lhs::GenericAffExpr{C,V}, rhs::GenericAffExpr{C,V}) where {C,V<:JuMPTypes} - result = copy(lhs) + result = Base.copy(lhs) result.constant -= rhs.constant sizehint!(result, length(linear_terms(lhs)) + length(linear_terms(rhs))) for (coef, var) in linear_terms(rhs) @@ -216,7 +216,7 @@ function Base.:*(lhs::GenericAffExpr{C,V}, rhs::GenericAffExpr{C,V}) where {C,V< return result end # GenericAffExpr--GenericQuadExpr -Base.:+(a::GenericAffExpr, q::GenericQuadExpr) = GenericQuadExpr(a+q.aff, copy(q.terms)) +Base.:+(a::GenericAffExpr, q::GenericQuadExpr) = GenericQuadExpr(a+q.aff, Base.copy(q.terms)) function Base.:-(a::GenericAffExpr, q::GenericQuadExpr) result = -q # This makes an unnecessary copy of aff, but it's important for a to appear @@ -234,18 +234,18 @@ Base.:-(lhs::GenericQuadExpr, rhs::Number) = (+)(-rhs,lhs) Base.:*(lhs::GenericQuadExpr, rhs::Number) = (*)(rhs,lhs) Base.:/(lhs::GenericQuadExpr, rhs::Number) = (*)(inv(rhs),lhs) # GenericQuadExpr--AbstractVariableRef -Base.:+(q::GenericQuadExpr, v::AbstractVariableRef) = GenericQuadExpr(q.aff+v, copy(q.terms)) -Base.:-(q::GenericQuadExpr, v::AbstractVariableRef) = GenericQuadExpr(q.aff-v, copy(q.terms)) +Base.:+(q::GenericQuadExpr, v::AbstractVariableRef) = GenericQuadExpr(q.aff+v, Base.copy(q.terms)) +Base.:-(q::GenericQuadExpr, v::AbstractVariableRef) = GenericQuadExpr(q.aff-v, Base.copy(q.terms)) Base.:*(q::GenericQuadExpr, v::AbstractVariableRef) = error("Cannot multiply a quadratic expression by a variable") Base.:/(q::GenericQuadExpr, v::AbstractVariableRef) = error("Cannot divide a quadratic expression by a variable") # GenericQuadExpr--GenericAffExpr -Base.:+(q::GenericQuadExpr, a::GenericAffExpr) = GenericQuadExpr(q.aff+a, copy(q.terms)) -Base.:-(q::GenericQuadExpr, a::GenericAffExpr) = GenericQuadExpr(q.aff-a, copy(q.terms)) +Base.:+(q::GenericQuadExpr, a::GenericAffExpr) = GenericQuadExpr(q.aff+a, Base.copy(q.terms)) +Base.:-(q::GenericQuadExpr, a::GenericAffExpr) = GenericQuadExpr(q.aff-a, Base.copy(q.terms)) Base.:*(q::GenericQuadExpr, a::GenericAffExpr) = error("Cannot multiply a quadratic expression by an aff. expression") Base.:/(q::GenericQuadExpr, a::GenericAffExpr) = error("Cannot divide a quadratic expression by an aff. 
expression") # GenericQuadExpr--GenericQuadExpr function Base.:+(q1::GenericQuadExpr, q2::GenericQuadExpr) - result = copy(q1) + result = Base.copy(q1) for (coef, var1, var2) in quadterms(q2) add_to_expression!(result, coef, var1, var2) end @@ -256,7 +256,7 @@ function Base.:+(q1::GenericQuadExpr, q2::GenericQuadExpr) return result end function Base.:-(q1::GenericQuadExpr, q2::GenericQuadExpr) - result = copy(q1) + result = Base.copy(q1) for (coef, var1, var2) in quadterms(q2) add_to_expression!(result, -coef, var1, var2) end @@ -659,15 +659,15 @@ end # Special-case sparse matrix scalar multiplication/division Base.:*(lhs::Number, rhs::SparseMatrixCSC{T}) where {T<:JuMPTypes} = - SparseMatrixCSC(rhs.m, rhs.n, copy(rhs.colptr), copy(rhs.rowval), lhs .* rhs.nzval) + SparseMatrixCSC(rhs.m, rhs.n, Base.copy(rhs.colptr), Base.copy(rhs.rowval), lhs .* rhs.nzval) Base.:*(lhs::JuMPTypes, rhs::SparseMatrixCSC) = - SparseMatrixCSC(rhs.m, rhs.n, copy(rhs.colptr), copy(rhs.rowval), lhs .* rhs.nzval) + SparseMatrixCSC(rhs.m, rhs.n, Base.copy(rhs.colptr), Base.copy(rhs.rowval), lhs .* rhs.nzval) Base.:*(lhs::SparseMatrixCSC{T}, rhs::Number) where {T<:JuMPTypes} = - SparseMatrixCSC(lhs.m, lhs.n, copy(lhs.colptr), copy(lhs.rowval), lhs.nzval .* rhs) + SparseMatrixCSC(lhs.m, lhs.n, Base.copy(lhs.colptr), Base.copy(lhs.rowval), lhs.nzval .* rhs) Base.:*(lhs::SparseMatrixCSC, rhs::JuMPTypes) = - SparseMatrixCSC(lhs.m, lhs.n, copy(lhs.colptr), copy(lhs.rowval), lhs.nzval .* rhs) + SparseMatrixCSC(lhs.m, lhs.n, Base.copy(lhs.colptr), Base.copy(lhs.rowval), lhs.nzval .* rhs) Base.:/(lhs::SparseMatrixCSC{T}, rhs::Number) where {T<:JuMPTypes} = - SparseMatrixCSC(lhs.m, lhs.n, copy(lhs.colptr), copy(lhs.rowval), lhs.nzval ./ rhs) + SparseMatrixCSC(lhs.m, lhs.n, Base.copy(lhs.colptr), Base.copy(lhs.rowval), lhs.nzval ./ rhs) for (op,opsymbol) in [(+,:+), (-,:-), (*,:*), (/,:/)] diff --git a/src/parseexpr.jl b/src/parseexpr.jl index b577967c9ef..c211a6bede6 100644 --- a/src/parseexpr.jl +++ b/src/parseexpr.jl @@ -284,11 +284,11 @@ destructive_add!(ex, c, x) = ex .+ c * x destructive_add_with_reorder!(ex, arg) = destructive_add!(ex, 1.0, arg) # Special case because "Val{false}()" is used as the default empty expression. -destructive_add_with_reorder!(ex::Val{false}, arg) = copy(arg) -# Calling `copy` on the matrix will not copy the entries -destructive_add_with_reorder!(ex::Val{false}, arg::AbstractArray) = copy.(arg) +destructive_add_with_reorder!(ex::Val{false}, arg) = Base.copy(arg) +# Calling `Base.copy` on the matrix will not copy the entries +destructive_add_with_reorder!(ex::Val{false}, arg::AbstractArray) = Base.copy.(arg) function destructive_add_with_reorder!(ex::Val{false}, arg::Symmetric) - Symmetric(copy.(arg)) + Symmetric(Base.copy.(arg)) end destructive_add_with_reorder!(ex::Val{false}, args...) = (*)(args...) 
diff --git a/src/quadexpr.jl b/src/quadexpr.jl index 12498965c7d..930b8d16c9f 100644 --- a/src/quadexpr.jl +++ b/src/quadexpr.jl @@ -58,7 +58,7 @@ function Base.one(::Type{GenericQuadExpr{C,V}}) where {C,V} end Base.zero(q::GenericQuadExpr) = zero(typeof(q)) Base.one(q::GenericQuadExpr) = one(typeof(q)) -Base.copy(q::GenericQuadExpr) = GenericQuadExpr(copy(q.aff), copy(q.terms)) +Base.copy(q::GenericQuadExpr) = GenericQuadExpr(Base.copy(q.aff), Base.copy(q.terms)) if VERSION >= v"0.7-" Base.broadcastable(q::GenericQuadExpr) = Ref(q) end @@ -73,7 +73,7 @@ function map_coefficients_inplace!(f::Function, q::GenericQuadExpr) end function map_coefficients(f::Function, q::GenericQuadExpr) - return map_coefficients_inplace!(f, copy(q)) + return map_coefficients_inplace!(f, Base.copy(q)) end """ @@ -154,7 +154,7 @@ end Base.hash(quad::GenericQuadExpr, h::UInt) = hash(quad.aff, hash(quad.terms, h)) function Compat.SparseArrays.dropzeros(quad::GenericQuadExpr) - quad_terms = copy(quad.terms) + quad_terms = Base.copy(quad.terms) for (key, value) in quad.terms if iszero(value) delete!(quad_terms, key) @@ -229,8 +229,8 @@ end # Copy a quadratic expression to a new model by converting all the # variables to the new model's variables function Base.copy(q::GenericQuadExpr, new_model::Model) - GenericQuadExpr(copy(q.qvars1, new_model), copy(q.qvars2, new_model), - copy(q.qcoeffs), copy(q.aff, new_model)) + GenericQuadExpr(Base.copy(q.qvars1, new_model), Base.copy(q.qvars2, new_model), + Base.copy(q.qcoeffs), Base.copy(q.aff, new_model)) end # TODO: result_value for QuadExpr diff --git a/test/model.jl b/test/model.jl index 40b1f66cc59..96ff309f055 100644 --- a/test/model.jl +++ b/test/model.jl @@ -66,3 +66,60 @@ end @test optimizer.a == 1 @test optimizer.b == 2 end + +struct DummyExtensionData + model::JuMP.Model +end +function JuMP.copy_extension_data(data::DummyExtensionData, + new_model::JuMP.AbstractModel, + model::JuMP.AbstractModel) + @test data.model === model + return DummyExtensionData(new_model) +end +function dummy_optimizer_hook(::JuMP.AbstractModel) end + +@testset "Model copy" begin + for caching_mode in (MOIU.Automatic, MOIU.Manual) + @testset "In $caching_mode mode" begin + for bridge_constraints in (false, true) + model = Model(caching_mode = caching_mode, + bridge_constraints = bridge_constraints) + model.optimizehook = dummy_optimizer_hook + data = DummyExtensionData(model) + model.ext[:dummy] = data + @variable(model, x ≥ 0, Bin) + @variable(model, y ≤ 1, Int) + @variable(model, z == 0) + @constraint(model, cref, x + y == 1) + + new_model, reference_map = JuMP.copy(model) + @test MOIU.mode(JuMP.caching_optimizer(new_model)) == caching_mode + @test bridge_constraints == (new_model.moibackend isa MOI.Bridges.LazyBridgeOptimizer) + @test new_model.optimizehook === dummy_optimizer_hook + @test new_model.ext[:dummy].model === new_model + x_new = reference_map[x] + @test x_new.m === new_model + @test JuMP.name(x_new) == "x" + y_new = reference_map[y] + @test y_new.m === new_model + @test JuMP.name(y_new) == "y" + z_new = reference_map[z] + @test z_new.m === new_model + @test JuMP.name(z_new) == "z" + @test JuMP.LowerBoundRef(x_new) == reference_map[JuMP.LowerBoundRef(x)] + @test JuMP.BinaryRef(x_new) == reference_map[JuMP.BinaryRef(x)] + @test JuMP.UpperBoundRef(y_new) == reference_map[JuMP.UpperBoundRef(y)] + @test JuMP.IntegerRef(y_new) == reference_map[JuMP.IntegerRef(y)] + @test JuMP.FixRef(z_new) == reference_map[JuMP.FixRef(z)] + cref_new = reference_map[cref] + @test cref_new.m === 
new_model + @test JuMP.name(cref_new) == "cref" + end + end + end + @testset "In Direct mode" begin + mock = MOIU.MockOptimizer(JuMP.JuMPMOIModel{Float64}()) + model = JuMP.direct_model(mock) + @test_throws ErrorException JuMP.copy(model) + end +end From 80d2a959e6addabeef2482d33af38d7d78411143 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Sat, 25 Aug 2018 11:44:09 +0200 Subject: [PATCH 2/8] Add comment --- src/copy.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/copy.jl b/src/copy.jl index edb014b8dc7..f412a92f649 100644 --- a/src/copy.jl +++ b/src/copy.jl @@ -99,6 +99,8 @@ function copy(model::Model) # `model.moibackend` and the indices of `new_model.moibackend`. index_map = MOI.copy!(new_model.moibackend, model.moibackend, copynames = true) + # TODO copynames is needed because of https://github.com/JuliaOpt/MathOptInterface.jl/issues/494 + # we can remove it when this is fixed and released copy_variablewise_constraints(new_model.variabletolowerbound, model.variabletolowerbound, index_map) From ad70b160c86cc325296b3cd8022d3662af1b1445 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Wed, 29 Aug 2018 10:53:05 +0200 Subject: [PATCH 3/8] assert to error --- src/JuMP.jl | 1 + src/affexpr.jl | 5 ----- src/copy.jl | 5 ++++- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/src/JuMP.jl b/src/JuMP.jl index b96a7632a06..e61306aa52d 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -332,6 +332,7 @@ function objective_sense(model::Model) end # TODO(IainNZ): Document these too. +object_dictionary(model::Model) = model.obj_dict termination_status(model::Model) = MOI.get(model, MOI.TerminationStatus()) primal_status(model::Model) = MOI.get(model, MOI.PrimalStatus()) dual_status(model::Model) = MOI.get(model, MOI.DualStatus()) diff --git a/src/affexpr.jl b/src/affexpr.jl index cc83dbe9d2f..94c678268ce 100644 --- a/src/affexpr.jl +++ b/src/affexpr.jl @@ -308,13 +308,8 @@ end # variables to the new model's variables function Base.copy(a::GenericAffExpr, new_model::Model) result = zero(a) -<<<<<<< HEAD for (coef, var) in linear_terms(a) add_to_expression!(result, coef, copy(var, new_model)) -======= - for (coef, var) in linearterms(a) - add_to_expression!(result, coef, Base.copy(var, new_model)) ->>>>>>> Implement model copy end result.constant = a.constant return result diff --git a/src/copy.jl b/src/copy.jl index f412a92f649..ad106315970 100644 --- a/src/copy.jl +++ b/src/copy.jl @@ -116,7 +116,10 @@ function copy(model::Model) new_model.optimizehook = model.optimizehook # TODO copy NLP data - @assert model.nlpdata === nothing + if model.nlpdata !== nothing + error("copy is not supported yet for models with nonlinear constraints", + " and/or nonlinear objective function") + end # TODO copy objdict From cc70e61967633ccd57303e498da7bf7d48af7dff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Wed, 29 Aug 2018 11:32:57 +0200 Subject: [PATCH 4/8] Implements Base.copy --- src/copy.jl | 81 ++++++++++++++++++++++++++++++++++++------------ test/model.jl | 86 +++++++++++++++++++++++++++++++-------------------- 2 files changed, 113 insertions(+), 54 deletions(-) diff --git a/src/copy.jl b/src/copy.jl index ad106315970..10d46450282 100644 --- a/src/copy.jl +++ b/src/copy.jl @@ -52,9 +52,13 @@ function Base.getindex(reference_map::ReferenceMap, cref::ConstraintRef) reference_map.index_map[index(cref)], cref.shape) end +if VERSION >= v"0.7-" + Base.broadcastable(reference_map::ReferenceMap) = Ref(reference_map) +end + """ - 
copy(model::Model) + copy_model(model::Model) Return a copy of the model `model` and a [`ReferenceMap`](@ref) that can be used to obtain the variable and constraint reference of the new model corresponding @@ -76,12 +80,12 @@ model = Model() @variable(model, x) @constraint(model, cref, x == 2) -new_model, reference_map = JuMP.copy(model) +new_model, reference_map = JuMP.copy_model(model) x_new = reference_map[x] cref_new = reference_map[cref] ``` """ -function copy(model::Model) +function copy_model(model::Model) if mode(model) == Direct error("Cannot copy a model in Direct mode. Use the `Model` constructor", " instead of the `direct_model` constructor to be able to copy", @@ -90,42 +94,79 @@ function copy(model::Model) caching_mode = caching_optimizer(model).mode # TODO add bridges added to the bridge optimizer that are not part of the # fullbridgeoptimizer - bridge_constraints = model.moibackend isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer} + bridge_constraints = model.moi_backend isa MOI.Bridges.LazyBridgeOptimizer{<:MOIU.CachingOptimizer} new_model = Model(caching_mode = caching_mode, bridge_constraints = bridge_constraints) # Copy the MOI backend, note that variable and constraint indices may have # changed, the `index_map` gives the map between the indices of - # `model.moibackend` and the indices of `new_model.moibackend`. - index_map = MOI.copy!(new_model.moibackend, model.moibackend, + # `model.moi_backend` and the indices of `new_model.moi_backend`. + index_map = MOI.copy!(new_model.moi_backend, model.moi_backend, copynames = true) # TODO copynames is needed because of https://github.com/JuliaOpt/MathOptInterface.jl/issues/494 # we can remove it when this is fixed and released - copy_variablewise_constraints(new_model.variabletolowerbound, - model.variabletolowerbound, index_map) - copy_variablewise_constraints(new_model.variabletoupperbound, - model.variabletoupperbound, index_map) - copy_variablewise_constraints(new_model.variabletofix, - model.variabletofix, index_map) - copy_variablewise_constraints(new_model.variabletointegrality, - model.variabletointegrality, index_map) - copy_variablewise_constraints(new_model.variabletozeroone, - model.variabletozeroone, index_map) + copy_variablewise_constraints(new_model.variable_to_lower_bound, + model.variable_to_lower_bound, index_map) + copy_variablewise_constraints(new_model.variable_to_upper_bound, + model.variable_to_upper_bound, index_map) + copy_variablewise_constraints(new_model.variable_to_fix, + model.variable_to_fix, index_map) + copy_variablewise_constraints(new_model.variable_to_integrality, + model.variable_to_integrality, index_map) + copy_variablewise_constraints(new_model.variable_to_zero_one, + model.variable_to_zero_one, index_map) - new_model.optimizehook = model.optimizehook + new_model.optimize_hook = model.optimize_hook # TODO copy NLP data - if model.nlpdata !== nothing + if model.nlp_data !== nothing error("copy is not supported yet for models with nonlinear constraints", " and/or nonlinear objective function") end - # TODO copy objdict + reference_map = ReferenceMap(new_model, index_map) + + for (name, value) in object_dictionary(model) + new_model.obj_dict[name] = getindex.(reference_map, value) + end for (key, data) in model.ext new_model.ext[key] = copy_extension_data(data, new_model, model) end - return new_model, ReferenceMap(new_model, index_map) + return new_model, reference_map +end + +""" + copy(model::AbstractModel) + +Return a copy of the model `model`. 
It is similar to [`copy_model`](@ref) +except that it does not return the mapping between the references of `model` +and its copy. + +## Note + +Model copy is not supported in Direct mode, i.e. when a model is constructed +using the [`direct_model`](@ref) constructor instead of the [`Model`](@ref) +constructor. + +## Examples + +In the following example, a model `model` is constructed with a variable `x` and +a constraint `cref`. It is then copied into a model `new_model` with the new +references assigned to `x_new` and `cref_new`. +```julia +model = Model() +@variable(model, x) +@constraint(model, cref, x == 2) + +new_model = copy(model) +x_new = model[:x] +cref_new = model[:cref] +``` +""" +function Base.copy(model::AbstractModel) + new_model, _ = copy_model(model) + return new_model end diff --git a/test/model.jl b/test/model.jl index 96ff309f055..9a61b643dc3 100644 --- a/test/model.jl +++ b/test/model.jl @@ -79,41 +79,59 @@ end function dummy_optimizer_hook(::JuMP.AbstractModel) end @testset "Model copy" begin - for caching_mode in (MOIU.Automatic, MOIU.Manual) - @testset "In $caching_mode mode" begin - for bridge_constraints in (false, true) - model = Model(caching_mode = caching_mode, - bridge_constraints = bridge_constraints) - model.optimizehook = dummy_optimizer_hook - data = DummyExtensionData(model) - model.ext[:dummy] = data - @variable(model, x ≥ 0, Bin) - @variable(model, y ≤ 1, Int) - @variable(model, z == 0) - @constraint(model, cref, x + y == 1) + for copy_model in (true, true) + @testset "Using $(copy_model ? "JuMP.copy_model" : "Base.copy")" begin + for caching_mode in (MOIU.Automatic, MOIU.Manual) + @testset "In $caching_mode mode" begin + for bridge_constraints in (false, true) + model = Model(caching_mode = caching_mode, + bridge_constraints = bridge_constraints) + model.optimize_hook = dummy_optimizer_hook + data = DummyExtensionData(model) + model.ext[:dummy] = data + @variable(model, x ≥ 0, Bin) + @variable(model, y ≤ 1, Int) + @variable(model, z == 0) + @constraint(model, cref, x + y == 1) - new_model, reference_map = JuMP.copy(model) - @test MOIU.mode(JuMP.caching_optimizer(new_model)) == caching_mode - @test bridge_constraints == (new_model.moibackend isa MOI.Bridges.LazyBridgeOptimizer) - @test new_model.optimizehook === dummy_optimizer_hook - @test new_model.ext[:dummy].model === new_model - x_new = reference_map[x] - @test x_new.m === new_model - @test JuMP.name(x_new) == "x" - y_new = reference_map[y] - @test y_new.m === new_model - @test JuMP.name(y_new) == "y" - z_new = reference_map[z] - @test z_new.m === new_model - @test JuMP.name(z_new) == "z" - @test JuMP.LowerBoundRef(x_new) == reference_map[JuMP.LowerBoundRef(x)] - @test JuMP.BinaryRef(x_new) == reference_map[JuMP.BinaryRef(x)] - @test JuMP.UpperBoundRef(y_new) == reference_map[JuMP.UpperBoundRef(y)] - @test JuMP.IntegerRef(y_new) == reference_map[JuMP.IntegerRef(y)] - @test JuMP.FixRef(z_new) == reference_map[JuMP.FixRef(z)] - cref_new = reference_map[cref] - @test cref_new.m === new_model - @test JuMP.name(cref_new) == "cref" + if copy_model + new_model, reference_map = JuMP.copy_model(model) + else + new_model = copy(model) + reference_map = Dict{Union{JuMP.VariableRef, + JuMP.ConstraintRef}, + Union{JuMP.VariableRef, + JuMP.ConstraintRef}}() + reference_map[x] = new_model[:x] + reference_map[y] = new_model[:y] + reference_map[z] = new_model[:z] + reference_map[cref] = new_model[:cref] + end + @test MOIU.mode(JuMP.caching_optimizer(new_model)) == caching_mode + @test bridge_constraints == 
(new_model.moi_backend isa MOI.Bridges.LazyBridgeOptimizer) + @test new_model.optimize_hook === dummy_optimizer_hook + @test new_model.ext[:dummy].model === new_model + x_new = reference_map[x] + @test x_new.m === new_model + @test JuMP.name(x_new) == "x" + y_new = reference_map[y] + @test y_new.m === new_model + @test JuMP.name(y_new) == "y" + z_new = reference_map[z] + @test z_new.m === new_model + @test JuMP.name(z_new) == "z" + if copy_model + @test JuMP.LowerBoundRef(x_new) == reference_map[JuMP.LowerBoundRef(x)] + @test JuMP.BinaryRef(x_new) == reference_map[JuMP.BinaryRef(x)] + @test JuMP.UpperBoundRef(y_new) == reference_map[JuMP.UpperBoundRef(y)] + @test JuMP.IntegerRef(y_new) == reference_map[JuMP.IntegerRef(y)] + @test JuMP.FixRef(z_new) == reference_map[JuMP.FixRef(z)] + end + cref_new = reference_map[cref] + @test cref_new.m === new_model + @test JuMP.name(cref_new) == "cref" + end + end end end end From 8966fb1250565e38705e8d48a8e9598ab2a1f396 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Thu, 30 Aug 2018 10:20:08 +0200 Subject: [PATCH 5/8] variablewise -> single_variable --- src/copy.jl | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/src/copy.jl b/src/copy.jl index 10d46450282..92528f4c86f 100644 --- a/src/copy.jl +++ b/src/copy.jl @@ -13,20 +13,22 @@ extension storing data in the `ext` field. function copy_extension_data end """ - copy_variablewise_constraints(dest::Dict{MOIVAR, - MOICON{MOI.SingleVariable, S}}, - src::Dict{MOIVAR, - MOICON{MOI.SingleVariable, S}}, - index_map) where S - -Copy the variablewise constraint indices of `src` into `dest` mapping variable -and constraint indices using `index_map`. + copy_single_variable_constraints(dest::Dict{MOI.VariableIndex, + MOICON{MOI.SingleVariable, S}}, + src::Dict{MOI.VariableIndex, + MOICON{MOI.SingleVariable, S}}, + index_map) where S + +Copy the single variable constraint indices of `src` into `dest` mapping +variable and constraint indices using `index_map`. 
""" -function copy_variablewise_constraints(dest::Dict{MOIVAR, - MOICON{MOI.SingleVariable, S}}, - src::Dict{MOIVAR, - MOICON{MOI.SingleVariable, S}}, - index_map) where S +function copy_single_variable_constraints(dest::Dict{MOI.VariableIndex, + MOICON{MOI.SingleVariable, + S}}, + src::Dict{MOI.VariableIndex, + MOICON{MOI.SingleVariable, + S}}, + index_map) where S for (variable_index, constraint_index) in src dest[index_map[variable_index]] = index_map[constraint_index] end @@ -106,15 +108,15 @@ function copy_model(model::Model) # TODO copynames is needed because of https://github.com/JuliaOpt/MathOptInterface.jl/issues/494 # we can remove it when this is fixed and released - copy_variablewise_constraints(new_model.variable_to_lower_bound, + copy_single_variable_constraints(new_model.variable_to_lower_bound, model.variable_to_lower_bound, index_map) - copy_variablewise_constraints(new_model.variable_to_upper_bound, + copy_single_variable_constraints(new_model.variable_to_upper_bound, model.variable_to_upper_bound, index_map) - copy_variablewise_constraints(new_model.variable_to_fix, + copy_single_variable_constraints(new_model.variable_to_fix, model.variable_to_fix, index_map) - copy_variablewise_constraints(new_model.variable_to_integrality, + copy_single_variable_constraints(new_model.variable_to_integrality, model.variable_to_integrality, index_map) - copy_variablewise_constraints(new_model.variable_to_zero_one, + copy_single_variable_constraints(new_model.variable_to_zero_one, model.variable_to_zero_one, index_map) new_model.optimize_hook = model.optimize_hook From b4de4fd0d30e20baffd5bdde78e747e66fd398a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Thu, 30 Aug 2018 10:20:26 +0200 Subject: [PATCH 6/8] Comment optimizer being dropped --- src/copy.jl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/copy.jl b/src/copy.jl index 92528f4c86f..b68caae9bd1 100644 --- a/src/copy.jl +++ b/src/copy.jl @@ -70,7 +70,9 @@ to a given `model`'s reference. Model copy is not supported in Direct mode, i.e. when a model is constructed using the [`direct_model`](@ref) constructor instead of the [`Model`](@ref) -constructor. +constructor. Moreover, independently on whether an optimizer was provided at +model construction, the new model will have no optimizer, i.e., an optimizer +will have to be provided to the new model in the [`optimize!`](@ref) call. ## Examples @@ -151,7 +153,9 @@ and its copy. Model copy is not supported in Direct mode, i.e. when a model is constructed using the [`direct_model`](@ref) constructor instead of the [`Model`](@ref) -constructor. +constructor. Moreover, independently on whether an optimizer was provided at +model construction, the new model will have no optimizer, i.e., an optimizer +will have to be provided to the new model in the [`optimize!`](@ref) call. ## Examples From 6c5efafbd4f3e3ee4dda223c34c32d7899c1e811 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Thu, 30 Aug 2018 10:23:32 +0200 Subject: [PATCH 7/8] Mention Base.copy method --- src/copy.jl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/copy.jl b/src/copy.jl index b68caae9bd1..c2c9c734bd8 100644 --- a/src/copy.jl +++ b/src/copy.jl @@ -64,7 +64,9 @@ end Return a copy of the model `model` and a [`ReferenceMap`](@ref) that can be used to obtain the variable and constraint reference of the new model corresponding -to a given `model`'s reference. +to a given `model`'s reference. 
A [`Base.copy(::AbstractModel)`](@ref) method +has also been implemented, it is similar to `copy_model` but does not return +the reference map. ## Note From f53158eecc2857388697568a3a4c626da4b93f0d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Thu, 30 Aug 2018 10:30:19 +0200 Subject: [PATCH 8/8] Remove unnecessary Base. prefix --- src/JuMP.jl | 2 +- src/affexpr.jl | 6 +++--- src/macros.jl | 4 ++-- src/operators.jl | 38 +++++++++++++++++++------------------- src/parseexpr.jl | 8 ++++---- src/quadexpr.jl | 10 +++++----- 6 files changed, 34 insertions(+), 34 deletions(-) diff --git a/src/JuMP.jl b/src/JuMP.jl index e61306aa52d..5756fa87306 100644 --- a/src/JuMP.jl +++ b/src/JuMP.jl @@ -245,7 +245,6 @@ function direct_model(backend::MOI.ModelLike) Dict{Symbol, Any}()) end - if VERSION >= v"0.7-" Base.broadcastable(model::Model) = Ref(model) end @@ -741,5 +740,6 @@ include("optimizerinterface.jl") include("nlp.jl") include("print.jl") + ########################################################################## end diff --git a/src/affexpr.jl b/src/affexpr.jl index 94c678268ce..cfd56dbf99c 100644 --- a/src/affexpr.jl +++ b/src/affexpr.jl @@ -75,7 +75,7 @@ Base.zero(::Type{GenericAffExpr{C,V}}) where {C,V} = GenericAffExpr{C,V}(zero(C) Base.one(::Type{GenericAffExpr{C,V}}) where {C,V} = GenericAffExpr{C,V}(one(C), OrderedDict{V,C}()) Base.zero(a::GenericAffExpr) = zero(typeof(a)) Base.one( a::GenericAffExpr) = one(typeof(a)) -Base.copy(a::GenericAffExpr) = GenericAffExpr(Base.copy(a.constant), Base.copy(a.terms)) +Base.copy(a::GenericAffExpr) = GenericAffExpr(copy(a.constant), copy(a.terms)) if VERSION >= v"0.7-" Base.broadcastable(a::GenericAffExpr) = Ref(a) end @@ -92,7 +92,7 @@ function map_coefficients_inplace!(f::Function, a::GenericAffExpr) end function map_coefficients(f::Function, a::GenericAffExpr) - return map_coefficients_inplace!(f, Base.copy(a)) + return map_coefficients_inplace!(f, copy(a)) end Base.sizehint!(a::GenericAffExpr, n::Int) = sizehint!(a.terms, n) @@ -192,7 +192,7 @@ end Base.hash(aff::GenericAffExpr, h::UInt) = hash(aff.constant, hash(aff.terms, h)) function Compat.SparseArrays.dropzeros(aff::GenericAffExpr) - result = Base.copy(aff) + result = copy(aff) for (coef, var) in linear_terms(aff) if iszero(coef) delete!(result.terms, var) diff --git a/src/macros.jl b/src/macros.jl index 355a219d351..0b2e5a9d36e 100644 --- a/src/macros.jl +++ b/src/macros.jl @@ -15,7 +15,7 @@ end include("parseexpr.jl") function buildrefsets(expr::Expr, cname) - c = Base.copy(expr) + c = copy(expr) idxvars = Any[] idxsets = Any[] # Creating an indexed set of refs @@ -138,7 +138,7 @@ function getloopedcode(varname, code, condition, idxvars, idxsets, sym, requeste @assert !hasdependentsets(idxvars, idxsets) i, j = esc(idxvars[1]), esc(idxvars[2]) - expr = Base.copy(code) + expr = copy(code) vname = expr.args[1].args[1] tmp = gensym() expr.args[1] = tmp diff --git a/src/operators.jl b/src/operators.jl index a814f730dde..0ba9ade6df2 100644 --- a/src/operators.jl +++ b/src/operators.jl @@ -26,7 +26,7 @@ Base.:-(lhs::Number, rhs::AbstractVariableRef) = GenericAffExpr(convert(Float64, Base.:*(lhs::Number, rhs::AbstractVariableRef) = GenericAffExpr(0.0, rhs => convert(Float64,lhs)) # Number--GenericAffExpr function Base.:+(lhs::Number, rhs::GenericAffExpr) - result = Base.copy(rhs) + result = copy(rhs) result.constant += lhs return result end @@ -37,7 +37,7 @@ function Base.:-(lhs::Number, rhs::GenericAffExpr) end Base.:*(lhs::Number, rhs::GenericAffExpr) = map_coefficients(c -> 
lhs * c, rhs) # Number--QuadExpr -Base.:+(lhs::Number, rhs::GenericQuadExpr) = GenericQuadExpr(lhs+rhs.aff, Base.copy(rhs.terms)) +Base.:+(lhs::Number, rhs::GenericQuadExpr) = GenericQuadExpr(lhs+rhs.aff, copy(rhs.terms)) function Base.:-(lhs::Number, rhs::GenericQuadExpr) result = -rhs result.aff.constant += lhs @@ -99,7 +99,7 @@ function Base.:*(lhs::V, rhs::GenericAffExpr{C,V}) where {C, V <: AbstractVariab end Base.:/(lhs::AbstractVariableRef, rhs::GenericAffExpr) = error("Cannot divide a variable by an affine expression") # AbstractVariableRef--GenericQuadExpr -Base.:+(v::AbstractVariableRef, q::GenericQuadExpr) = GenericQuadExpr(v+q.aff, Base.copy(q.terms)) +Base.:+(v::AbstractVariableRef, q::GenericQuadExpr) = GenericQuadExpr(v+q.aff, copy(q.terms)) function Base.:-(v::AbstractVariableRef, q::GenericQuadExpr) result = -q # This makes an unnecessary copy of aff, but it's important for v to appear @@ -130,10 +130,10 @@ end Base.:^(lhs::Union{AbstractVariableRef,GenericAffExpr}, rhs::Number) = error("Only exponents of 0, 1, or 2 are currently supported. Are you trying to build a nonlinear problem? Make sure you use @NLconstraint/@NLobjective.") # GenericAffExpr--AbstractVariableRef function Base.:+(lhs::GenericAffExpr{C,V}, rhs::V) where {C, V <: AbstractVariableRef} - return add_to_expression!(Base.copy(lhs), one(C), rhs) + return add_to_expression!(copy(lhs), one(C), rhs) end function Base.:-(lhs::GenericAffExpr{C,V}, rhs::V) where {C, V <: AbstractVariableRef} - return add_to_expression!(Base.copy(lhs), -one(C), rhs) + return add_to_expression!(copy(lhs), -one(C), rhs) end # Don't fall back on AbstractVariableRef*GenericAffExpr to preserve lhs/rhs # consistency (appears in printing). @@ -156,7 +156,7 @@ function Base.:+(lhs::GenericAffExpr{C,V}, rhs::GenericAffExpr{C,V}) where {C,V< operator_warn(owner_model(first(linear_terms(lhs))[2])) end end - result_terms = Base.copy(lhs.terms) + result_terms = copy(lhs.terms) # merge() returns a Dict(), so we need to call merge!() instead. # Note: merge!() doesn't appear to call sizehint!(). Is this important? 
merge!(+, result_terms, rhs.terms) @@ -164,7 +164,7 @@ function Base.:+(lhs::GenericAffExpr{C,V}, rhs::GenericAffExpr{C,V}) where {C,V< end function Base.:-(lhs::GenericAffExpr{C,V}, rhs::GenericAffExpr{C,V}) where {C,V<:JuMPTypes} - result = Base.copy(lhs) + result = copy(lhs) result.constant -= rhs.constant sizehint!(result, length(linear_terms(lhs)) + length(linear_terms(rhs))) for (coef, var) in linear_terms(rhs) @@ -216,7 +216,7 @@ function Base.:*(lhs::GenericAffExpr{C,V}, rhs::GenericAffExpr{C,V}) where {C,V< return result end # GenericAffExpr--GenericQuadExpr -Base.:+(a::GenericAffExpr, q::GenericQuadExpr) = GenericQuadExpr(a+q.aff, Base.copy(q.terms)) +Base.:+(a::GenericAffExpr, q::GenericQuadExpr) = GenericQuadExpr(a+q.aff, copy(q.terms)) function Base.:-(a::GenericAffExpr, q::GenericQuadExpr) result = -q # This makes an unnecessary copy of aff, but it's important for a to appear @@ -234,18 +234,18 @@ Base.:-(lhs::GenericQuadExpr, rhs::Number) = (+)(-rhs,lhs) Base.:*(lhs::GenericQuadExpr, rhs::Number) = (*)(rhs,lhs) Base.:/(lhs::GenericQuadExpr, rhs::Number) = (*)(inv(rhs),lhs) # GenericQuadExpr--AbstractVariableRef -Base.:+(q::GenericQuadExpr, v::AbstractVariableRef) = GenericQuadExpr(q.aff+v, Base.copy(q.terms)) -Base.:-(q::GenericQuadExpr, v::AbstractVariableRef) = GenericQuadExpr(q.aff-v, Base.copy(q.terms)) +Base.:+(q::GenericQuadExpr, v::AbstractVariableRef) = GenericQuadExpr(q.aff+v, copy(q.terms)) +Base.:-(q::GenericQuadExpr, v::AbstractVariableRef) = GenericQuadExpr(q.aff-v, copy(q.terms)) Base.:*(q::GenericQuadExpr, v::AbstractVariableRef) = error("Cannot multiply a quadratic expression by a variable") Base.:/(q::GenericQuadExpr, v::AbstractVariableRef) = error("Cannot divide a quadratic expression by a variable") # GenericQuadExpr--GenericAffExpr -Base.:+(q::GenericQuadExpr, a::GenericAffExpr) = GenericQuadExpr(q.aff+a, Base.copy(q.terms)) -Base.:-(q::GenericQuadExpr, a::GenericAffExpr) = GenericQuadExpr(q.aff-a, Base.copy(q.terms)) +Base.:+(q::GenericQuadExpr, a::GenericAffExpr) = GenericQuadExpr(q.aff+a, copy(q.terms)) +Base.:-(q::GenericQuadExpr, a::GenericAffExpr) = GenericQuadExpr(q.aff-a, copy(q.terms)) Base.:*(q::GenericQuadExpr, a::GenericAffExpr) = error("Cannot multiply a quadratic expression by an aff. expression") Base.:/(q::GenericQuadExpr, a::GenericAffExpr) = error("Cannot divide a quadratic expression by an aff. 
expression") # GenericQuadExpr--GenericQuadExpr function Base.:+(q1::GenericQuadExpr, q2::GenericQuadExpr) - result = Base.copy(q1) + result = copy(q1) for (coef, var1, var2) in quadterms(q2) add_to_expression!(result, coef, var1, var2) end @@ -256,7 +256,7 @@ function Base.:+(q1::GenericQuadExpr, q2::GenericQuadExpr) return result end function Base.:-(q1::GenericQuadExpr, q2::GenericQuadExpr) - result = Base.copy(q1) + result = copy(q1) for (coef, var1, var2) in quadterms(q2) add_to_expression!(result, -coef, var1, var2) end @@ -659,15 +659,15 @@ end # Special-case sparse matrix scalar multiplication/division Base.:*(lhs::Number, rhs::SparseMatrixCSC{T}) where {T<:JuMPTypes} = - SparseMatrixCSC(rhs.m, rhs.n, Base.copy(rhs.colptr), Base.copy(rhs.rowval), lhs .* rhs.nzval) + SparseMatrixCSC(rhs.m, rhs.n, copy(rhs.colptr), copy(rhs.rowval), lhs .* rhs.nzval) Base.:*(lhs::JuMPTypes, rhs::SparseMatrixCSC) = - SparseMatrixCSC(rhs.m, rhs.n, Base.copy(rhs.colptr), Base.copy(rhs.rowval), lhs .* rhs.nzval) + SparseMatrixCSC(rhs.m, rhs.n, copy(rhs.colptr), copy(rhs.rowval), lhs .* rhs.nzval) Base.:*(lhs::SparseMatrixCSC{T}, rhs::Number) where {T<:JuMPTypes} = - SparseMatrixCSC(lhs.m, lhs.n, Base.copy(lhs.colptr), Base.copy(lhs.rowval), lhs.nzval .* rhs) + SparseMatrixCSC(lhs.m, lhs.n, copy(lhs.colptr), copy(lhs.rowval), lhs.nzval .* rhs) Base.:*(lhs::SparseMatrixCSC, rhs::JuMPTypes) = - SparseMatrixCSC(lhs.m, lhs.n, Base.copy(lhs.colptr), Base.copy(lhs.rowval), lhs.nzval .* rhs) + SparseMatrixCSC(lhs.m, lhs.n, copy(lhs.colptr), copy(lhs.rowval), lhs.nzval .* rhs) Base.:/(lhs::SparseMatrixCSC{T}, rhs::Number) where {T<:JuMPTypes} = - SparseMatrixCSC(lhs.m, lhs.n, Base.copy(lhs.colptr), Base.copy(lhs.rowval), lhs.nzval ./ rhs) + SparseMatrixCSC(lhs.m, lhs.n, copy(lhs.colptr), copy(lhs.rowval), lhs.nzval ./ rhs) for (op,opsymbol) in [(+,:+), (-,:-), (*,:*), (/,:/)] diff --git a/src/parseexpr.jl b/src/parseexpr.jl index c211a6bede6..b577967c9ef 100644 --- a/src/parseexpr.jl +++ b/src/parseexpr.jl @@ -284,11 +284,11 @@ destructive_add!(ex, c, x) = ex .+ c * x destructive_add_with_reorder!(ex, arg) = destructive_add!(ex, 1.0, arg) # Special case because "Val{false}()" is used as the default empty expression. -destructive_add_with_reorder!(ex::Val{false}, arg) = Base.copy(arg) -# Calling `Base.copy` on the matrix will not copy the entries -destructive_add_with_reorder!(ex::Val{false}, arg::AbstractArray) = Base.copy.(arg) +destructive_add_with_reorder!(ex::Val{false}, arg) = copy(arg) +# Calling `copy` on the matrix will not copy the entries +destructive_add_with_reorder!(ex::Val{false}, arg::AbstractArray) = copy.(arg) function destructive_add_with_reorder!(ex::Val{false}, arg::Symmetric) - Symmetric(Base.copy.(arg)) + Symmetric(copy.(arg)) end destructive_add_with_reorder!(ex::Val{false}, args...) = (*)(args...) 
diff --git a/src/quadexpr.jl b/src/quadexpr.jl index 930b8d16c9f..5a15458bf1b 100644 --- a/src/quadexpr.jl +++ b/src/quadexpr.jl @@ -58,7 +58,7 @@ function Base.one(::Type{GenericQuadExpr{C,V}}) where {C,V} end Base.zero(q::GenericQuadExpr) = zero(typeof(q)) Base.one(q::GenericQuadExpr) = one(typeof(q)) -Base.copy(q::GenericQuadExpr) = GenericQuadExpr(Base.copy(q.aff), Base.copy(q.terms)) +Base.copy(q::GenericQuadExpr) = GenericQuadExpr(copy(q.aff), copy(q.terms)) if VERSION >= v"0.7-" Base.broadcastable(q::GenericQuadExpr) = Ref(q) end @@ -73,7 +73,7 @@ function map_coefficients_inplace!(f::Function, q::GenericQuadExpr) end function map_coefficients(f::Function, q::GenericQuadExpr) - return map_coefficients_inplace!(f, Base.copy(q)) + return map_coefficients_inplace!(f, copy(q)) end """ @@ -154,7 +154,7 @@ end Base.hash(quad::GenericQuadExpr, h::UInt) = hash(quad.aff, hash(quad.terms, h)) function Compat.SparseArrays.dropzeros(quad::GenericQuadExpr) - quad_terms = Base.copy(quad.terms) + quad_terms = copy(quad.terms) for (key, value) in quad.terms if iszero(value) delete!(quad_terms, key) @@ -229,8 +229,8 @@ end # Copy a quadratic expression to a new model by converting all the # variables to the new model's variables function Base.copy(q::GenericQuadExpr, new_model::Model) - GenericQuadExpr(Base.copy(q.qvars1, new_model), Base.copy(q.qvars2, new_model), - Base.copy(q.qcoeffs), Base.copy(q.aff, new_model)) + GenericQuadExpr(copy(q.qvars1, new_model), copy(q.qvars2, new_model), + copy(q.qcoeffs), copy(q.aff, new_model)) end # TODO: result_value for QuadExpr
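
Taken together, this series adds `JuMP.copy_model`, a `Base.copy(::AbstractModel)` fallback, and the `copy_extension_data` hook for extensions that store data in `model.ext`. Below is a minimal usage sketch assembled from the docstrings and tests above; it targets the development version of JuMP that these patches apply to, and the extension type `MyExtData` is an illustrative assumption modelled on `DummyExtensionData` from the test file.

```julia
using JuMP

# Extensions that store data in `model.ext` opt into copying by adding a
# method to `JuMP.copy_extension_data`, as the DummyExtensionData test does.
struct MyExtData
    model::JuMP.Model
end
function JuMP.copy_extension_data(data::MyExtData,
                                  new_model::JuMP.AbstractModel,
                                  model::JuMP.AbstractModel)
    return MyExtData(new_model)
end

model = Model()
model.ext[:my_ext] = MyExtData(model)
@variable(model, x)
@constraint(model, cref, x == 2)

# `copy_model` returns the copy together with a `ReferenceMap` that maps
# the original variable and constraint references to their counterparts.
new_model, reference_map = JuMP.copy_model(model)
x_new = reference_map[x]
cref_new = reference_map[cref]

# `Base.copy` discards the reference map; named objects remain reachable
# through the copied model's object dictionary.
other_model = copy(model)
x_other = other_model[:x]
cref_other = other_model[:cref]
```

As the docstrings note, copying is not supported for models created with `direct_model` or for models carrying nonlinear data, and the copy is created without an optimizer, so one has to be supplied before `optimize!` is called on it.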