Commit

Merge 2f87485 into 42668d1
odow committed Jul 12, 2020
2 parents 42668d1 + 2f87485 commit 6d9bd2d
Showing 26 changed files with 663 additions and 589 deletions.
8 changes: 1 addition & 7 deletions Project.toml
@@ -22,16 +22,10 @@ ForwardDiff = "~0.5.0, ~0.6, ~0.7, ~0.8, ~0.9, ~0.10"
MathOptInterface = "~0.9.11"
MutableArithmetics = "0.2"
NaNMath = "0.3"
OffsetArrays = "≥ 0.2.13"
julia = "1"

[extras]
DualNumbers = "fa6b7ba4-c1ee-5f82-b5fc-ecf0adba8f74"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
OffsetArrays = "6fe1bfb0-de20-5000-8ca7-80f57d26f881"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["OffsetArrays", "LinearAlgebra", "DualNumbers", "Random", "SparseArrays", "Test"]
test = ["Test"]
20 changes: 11 additions & 9 deletions test/Containers/Containers.jl
@@ -1,13 +1,15 @@
using Test
using JuMP
using JuMP.Containers

@testset "Containers" begin
include("DenseAxisArray.jl")
include("SparseAxisArray.jl")
include("generate_container.jl")
include("vectorized_product_iterator.jl")
include("nested_iterator.jl")
include("no_duplicate_dict.jl")
include("macro.jl")
@testset "$(file)" for file in filter(f -> endswith(f, ".jl"), readdir(@__DIR__))
if file in [
"Containers.jl",
]
continue
end
filename = joinpath(@__DIR__, file)
t = time()
include(filename)
println("$(filename) took $(round(time() - t; digits = 1)) seconds.")
end
end
3 changes: 3 additions & 0 deletions test/Containers/DenseAxisArray.jl
@@ -1,3 +1,6 @@
using JuMP.Containers
using Test

@testset "DenseAxisArray" begin
@testset "undef constructor" begin
A = @inferred DenseAxisArray{Int}(undef, [:a, :b], 1:2)
3 changes: 3 additions & 0 deletions test/Containers/SparseAxisArray.jl
@@ -1,3 +1,6 @@
using JuMP.Containers
using Test

@testset "SparseAxisArray" begin
function sparse_test(d, sum_d, d2, d3, dsqr, d_bads)
sqr(x) = x^2
2 changes: 1 addition & 1 deletion test/Containers/generate_container.jl
@@ -3,9 +3,9 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

using Test
using JuMP
using JuMP.Containers
using Test

macro dummycontainer(expr, requestedtype)
name = gensym()
2 changes: 1 addition & 1 deletion test/Containers/macro.jl
@@ -1,6 +1,6 @@
using Test
using JuMP
using JuMP.Containers
using Test

@testset "Macro" begin
@testset "Array" begin
3 changes: 3 additions & 0 deletions test/Containers/nested_iterator.jl
@@ -1,3 +1,6 @@
using JuMP.Containers
using Test

@testset "Nested Iterator" begin
iterators = (() -> 1:3, i -> 1:i)
condition(i, j) = j > i
3 changes: 3 additions & 0 deletions test/Containers/no_duplicate_dict.jl
@@ -1,3 +1,6 @@
using JuMP.Containers
using Test

@testset "Iterator with constant eltype" begin
f(ij) = ij => sum(ij)
g = Base.Generator(f, Iterators.product(1:2, 1:2))
3 changes: 3 additions & 0 deletions test/Containers/vectorized_product_iterator.jl
@@ -1,3 +1,6 @@
using JuMP.Containers
using Test

@testset "Vectorized Product Iterator" begin
I = [1 2
3 4]
5 changes: 2 additions & 3 deletions test/JuMPExtension.jl
@@ -1,12 +1,11 @@
module JuMPExtension

# A simple example of a JuMP extension, used in the tests to check that JuMP works well with extensions.
# The main difference between `JuMP.Model` and `JuMPExtension.MyModel` is that in `add_variable` (resp. `add_constraint`),
# `JuMP.Model` applies the modification to its `moi_backend` field, while
# `JuMPExtension.MyModel` stores the `AbstractVariable` (resp. `AbstractConstraint`) in a list.

using MathOptInterface
const MOI = MathOptInterface
import JuMP
using JuMP

struct ConstraintIndex
value::Int # Index in `model.constraints`
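
The JuMPExtension.jl comment above describes the pattern this test extension relies on: a model type that participates in JuMP's macros but stores variables and constraints in its own lists rather than forwarding them to an MOI backend. A minimal, hypothetical sketch of that idea in Julia (the names `MyModel` and `MyVariableRef` and the field layout here are illustrative assumptions, not the contents of the file in this diff):

using JuMP

struct MyVariableRef <: JuMP.AbstractVariableRef
    model::Any
    index::Int
end

struct MyModel <: JuMP.AbstractModel
    variables::Vector{JuMP.AbstractVariable}   # stored directly; no `moi_backend`
end
MyModel() = MyModel(JuMP.AbstractVariable[])

function JuMP.add_variable(m::MyModel, v::JuMP.AbstractVariable, name::String = "")
    push!(m.variables, v)                       # record the variable in the list
    return MyVariableRef(m, length(m.variables))
end

# Usage: build a variable through JuMP's machinery and add it to the extension model.
m = MyModel()
info = JuMP.VariableInfo(false, NaN, false, NaN, false, NaN, false, NaN, false, false)
x = JuMP.add_variable(m, JuMP.build_variable(error, info))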
10 changes: 10 additions & 0 deletions test/constraint.jl
@@ -1,3 +1,13 @@
using JuMP
using LinearAlgebra
using Test

include(joinpath(@__DIR__, "utilities.jl"))

@static if !(:JuMPExtension in names(Main))
include(joinpath(@__DIR__, "JuMPExtension.jl"))
end

function test_constraint_name(constraint, name, F::Type, S::Type)
@test name == @inferred JuMP.name(constraint)
model = constraint.model
47 changes: 24 additions & 23 deletions test/derivatives.jl
@@ -1,8 +1,11 @@
using JuMP
using JuMP._Derivatives
using LinearAlgebra
using Test
using MathOptInterface

struct ΦEvaluator <: MathOptInterface.AbstractNLPEvaluator
const ForwardDiff = JuMP.ForwardDiff

struct ΦEvaluator <: MOI.AbstractNLPEvaluator
end

@testset "Derivatives" begin
@@ -282,18 +285,18 @@ test_linearity(:(1/ifelse(x[1] < 1, x[1],0)), NONLINEAR, Set([(1,1)]))
#Φ(x,y) = 1/3(y)^3 - 2x^2
# c(x) = cos(x)

function MathOptInterface.eval_objective(::ΦEvaluator,x)
function MOI.eval_objective(::ΦEvaluator,x)
@assert length(x) == 2
return (1/3)*x[2]^3-2x[1]^2
end
function MathOptInterface.eval_objective_gradient(::ΦEvaluator,grad,x)
function MOI.eval_objective_gradient(::ΦEvaluator,grad,x)
grad[1] = -4x[1]
grad[2] = x[2]^2
end
r = _Derivatives.UserOperatorRegistry()
register_multivariate_operator!(r,:Φ,ΦEvaluator())
register_univariate_operator!(r,:c,cos,x->-sin(x),x->-cos(x))
Φ(x,y) = MathOptInterface.eval_objective(ΦEvaluator(),[x,y])
Φ(x,y) = MOI.eval_objective(ΦEvaluator(),[x,y])
ex = :(Φ(x[2],x[1]-1)*c(x[3]))
nd,const_values = expr_to_nodedata(ex,r)
@test _Derivatives.has_user_multivariate_operators(nd)
@@ -313,11 +316,6 @@ reverse_extract(grad,reverse_storage,nd,adj,[],1.0)
true_grad = [cos(x[3])*(x[1]-1)^2, -4cos(x[3])*x[2], -sin(x[3])*Φ(x[2],x[1]-1)]
@test isapprox(grad,true_grad)



using DualNumbers
using ForwardDiff

# dual forward test
function dualforward(ex, x; ignore_nan=false)
nd,const_values = expr_to_nodedata(ex)
@@ -345,31 +343,34 @@ function dualforward(ex, x; ignore_nan=false)
@test isapprox(fval_ϵ[1], dot(grad,ones(length(x))))

# compare with running dual numbers
forward_dual_storage = zeros(DualNumbers.Dual{Float64},length(nd))
partials_dual_storage = zeros(DualNumbers.Dual{Float64},length(nd))
output_dual_storage = zeros(DualNumbers.Dual{Float64},length(x))
reverse_dual_storage = zeros(DualNumbers.Dual{Float64},length(nd))
x_dual = [DualNumbers.Dual(x[i],1.0) for i in 1:length(x)]
_epsilon(x::ForwardDiff.Dual{Nothing, Float64, 1}) = x.partials[1]

forward_dual_storage = zeros(ForwardDiff.Dual{Nothing, Float64, 1},length(nd))
partials_dual_storage = zeros(ForwardDiff.Dual{Nothing, Float64, 1},length(nd))
output_dual_storage = zeros(ForwardDiff.Dual{Nothing, Float64, 1},length(x))
reverse_dual_storage = zeros(ForwardDiff.Dual{Nothing, Float64, 1},length(nd))

x_dual = [ForwardDiff.Dual(x[i],1.0) for i in 1:length(x)]
fval = forward_eval(forward_dual_storage, partials_dual_storage, nd, adj,
const_values, [], x_dual, [], [], [], NO_USER_OPS)
reverse_eval(reverse_dual_storage,partials_dual_storage,nd,adj)
reverse_extract(output_dual_storage,reverse_dual_storage,nd,adj,[],DualNumbers.Dual(2.0))
reverse_extract(output_dual_storage,reverse_dual_storage,nd,adj,[],ForwardDiff.Dual(2.0, 0.0))
for k in 1:length(nd)
@test isapprox(epsilon(forward_dual_storage[k]), forward_storage_ϵ[k][1])
if !(isnan(epsilon(partials_dual_storage[k])) && ignore_nan)
@test isapprox(epsilon(partials_dual_storage[k]), partials_storage_ϵ[k][1])
@test isapprox(_epsilon(forward_dual_storage[k]), forward_storage_ϵ[k][1])
if !(isnan(_epsilon(partials_dual_storage[k])) && ignore_nan)
@test isapprox(_epsilon(partials_dual_storage[k]), partials_storage_ϵ[k][1])
else
@test !isnan(forward_storage_ϵ[k][1])
end
if !(isnan(epsilon(reverse_dual_storage[k])) && ignore_nan)
@test isapprox(epsilon(reverse_dual_storage[k]), reverse_storage_ϵ[k][1]/2)
if !(isnan(_epsilon(reverse_dual_storage[k])) && ignore_nan)
@test isapprox(_epsilon(reverse_dual_storage[k]), reverse_storage_ϵ[k][1]/2)
else
@test !isnan(reverse_storage_ϵ[k][1])
end
end
for k in 1:length(x)
if !(isnan(epsilon(output_dual_storage[k])) && ignore_nan)
@test isapprox(epsilon(output_dual_storage[k]), output_ϵ[k][1])
if !(isnan(_epsilon(output_dual_storage[k])) && ignore_nan)
@test isapprox(_epsilon(output_dual_storage[k]), output_ϵ[k][1])
else
@test !isnan(output_ϵ[k][1])
end
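
The derivatives.jl hunk above replaces `DualNumbers.Dual` with `ForwardDiff.Dual` (available as `JuMP.ForwardDiff`, per the `const ForwardDiff = JuMP.ForwardDiff` line) and adds a `_epsilon` helper that reads the single partial back out of a dual number. A minimal sketch of that access pattern, assuming a ForwardDiff version in the range the Project.toml above allows (~0.10):

using ForwardDiff

# Seed a value with a unit partial, push it through a function, and read the
# derivative back from `.partials[1]`, the same field access `_epsilon` uses above.
f(x) = x^3 + 2x
xd = ForwardDiff.Dual(1.5, 1.0)   # value 1.5, one partial seeded to 1.0
yd = f(xd)                        # carries both f(1.5) and f'(1.5)
yd.value                          # f(1.5)  == 6.375
yd.partials[1]                    # f'(1.5) == 3 * 1.5^2 + 2 == 8.75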
13 changes: 8 additions & 5 deletions test/derivatives_coloring.jl
@@ -1,10 +1,13 @@
using Test

import JuMP._Derivatives.Coloring: acyclic_coloring, recovery_preprocess,
reverse_topological_sort_by_dfs,
gen_adjlist, hessian_color_preprocess,
prepare_seed_matrix!, recover_from_matmat!,
seed_matrix
import JuMP._Derivatives.Coloring:
acyclic_coloring, recovery_preprocess,
reverse_topological_sort_by_dfs,
gen_adjlist,
hessian_color_preprocess,
prepare_seed_matrix!,
recover_from_matmat!,
seed_matrix

struct Graph
num_vertices::Int
12 changes: 10 additions & 2 deletions test/expr.jl
@@ -1,5 +1,13 @@
import MutableArithmetics
const MA = MutableArithmetics
using JuMP
using Test

const MA = JuMP._MA

include(joinpath(@__DIR__, "utilities.jl"))

@static if !(:JuMPExtension in names(Main))
include(joinpath(@__DIR__, "JuMPExtension.jl"))
end

# For "expression^3 and unary*"
struct PowVariable <: JuMP.AbstractVariableRef
131 changes: 49 additions & 82 deletions test/file_formats.jl
@@ -6,87 +6,54 @@
using JuMP
using Test

function test_mof_file()
model = Model()
@variable(model, x)
@constraint(model, my_c, 3 * x >= 1)
@objective(model, Min, 2 * x^2 + x + 1)
write_to_file(model, "my_model.mof.json")
model_2 = read_from_file("my_model.mof.json")
@test sprint(print, model) == sprint(print, model_2)
rm("my_model.mof.json")
end

function test_mof_io()
model = Model()
@variable(model, x)
@constraint(model, my_c, 3 * x >= 1)
@objective(model, Min, 2 * x^2 + x + 1)
io = IOBuffer()
@test_throws(
ErrorException("Unable to infer the file format from an IO stream."),
write(io, model; format = MOI.FileFormats.FORMAT_AUTOMATIC)
)
write(io, model; format = MOI.FileFormats.FORMAT_MOF)
seekstart(io)
@test_throws(
ErrorException("Unable to infer the file format from an IO stream."),
read(io, Model; format = MOI.FileFormats.FORMAT_AUTOMATIC)
)
seekstart(io)
model_2 = read(io, Model; format = MOI.FileFormats.FORMAT_MOF)
@test sprint(print, model) == sprint(print, model_2)
end

function test_mof_nlp()
model = Model()
@variable(model, x)
@variable(model, y)
@NLobjective(model, Min, (1 - x)^2 + 100 * (y - x^2)^2)
@NLconstraint(model, x^2 + y^2 <= 100.0)
@constraint(model, x + y == 10)
io = IOBuffer()
write(io, model; format = MOI.FileFormats.FORMAT_MOF)
seekstart(io)
@test read(io, String) ==
read(joinpath(@__DIR__, "data", "nlp_model.mof.json"), String)
end

@testset "File formats" begin
@testset "MOF" begin
model = Model()
@variable(model, x)
@constraint(model, my_c, 3 * x >= 1)
@objective(model, Min, 2 * x^2 + x + 1)
write_to_file(model, "my_model.mof.json")
model_2 = read_from_file("my_model.mof.json")
@test sprint(print, model) == sprint(print, model_2)
rm("my_model.mof.json")
end
@testset "MPS" begin
model = Model()
@variable(model, x >= 0)
@constraint(model, my_c, 3 * x >= 1)
@objective(model, Min, 2 * x)
write_to_file(model, "my_model.mps")
model_2 = read_from_file("my_model.mps")
@test sprint(print, model) == sprint(print, model_2)
rm("my_model.mps")
end
@testset "LP" begin
model = Model()
@variable(model, x >= 0)
@constraint(model, my_c, 3 * x >= 1)
@objective(model, Min, 2 * x)
write_to_file(model, "my_model.lp")
@test read("my_model.lp", String) ==
"minimize\nobj: 2 x\nsubject to\nmy_c: 3 x >= 1\nBounds\nx >= 0\nEnd\n"
@test_throws(
ErrorException("read! is not implemented for LP files."),
read_from_file("my_model.lp")
)
rm("my_model.lp")
end
@testset "CBF" begin
model = Model()
@variable(model, X[1:2, 1:2], PSD)
@constraint(model, my_c, sum(X) >= 1)
@objective(model, Min, sum(X))
write_to_file(model, "my_model.cbf")
@test read("my_model.cbf", String) ==
"VER\n3\n\nOBJSENSE\nMIN\n\nVAR\n3 1\nF 3\n\nOBJACOORD\n3\n0 1.0\n1 2.0\n2 1.0\n\nCON\n1 1\nL+ 1\n\nACOORD\n3\n0 0 1.0\n0 1 2.0\n0 2 1.0\n\nBCOORD\n1\n0 -1.0\n\nPSDCON\n1\n2\n\nHCOORD\n3\n0 0 0 0 1.0\n0 1 1 0 1.0\n0 2 1 1 1.0\n\n"
model_2 = read_from_file("my_model.cbf")
# Note: we replace ' in ' => ' ∈ ' because the unicode doesn't print on
# Windows systems for some reason.
@test replace(sprint(print, model_2), " in " => " ∈ ") ==
"Min noname + 2 noname + noname\nSubject to\n [noname + 2 noname + noname - 1] ∈ MathOptInterface.Nonnegatives(1)\n [noname, noname, noname] ∈ MathOptInterface.PositiveSemidefiniteConeTriangle(2)\n"
rm("my_model.cbf")
end
@testset "Base read/write via io" begin
model = Model()
@variable(model, x)
@constraint(model, my_c, 3 * x >= 1)
@objective(model, Min, 2 * x^2 + x + 1)
io = IOBuffer()
@test_throws(
ErrorException("Unable to infer the file format from an IO stream."),
write(io, model; format = MOI.FileFormats.FORMAT_AUTOMATIC)
)
write(io, model; format = MOI.FileFormats.FORMAT_MOF)
seekstart(io)
@test_throws(
ErrorException("Unable to infer the file format from an IO stream."),
read(io, Model; format = MOI.FileFormats.FORMAT_AUTOMATIC)
)
seekstart(io)
model_2 = read(io, Model; format = MOI.FileFormats.FORMAT_MOF)
@test sprint(print, model) == sprint(print, model_2)
end
@testset "NLP MOF" begin
model = Model()
@variable(model, x)
@variable(model, y)
@NLobjective(model, Min, (1 - x)^2 + 100 * (y - x^2)^2)
@NLconstraint(model, x^2 + y^2 <= 100.0)
@constraint(model, x + y == 10)
io = IOBuffer()
write(io, model; format = MOI.FileFormats.FORMAT_MOF)
seekstart(io)
@test read(io, String) ==
read(joinpath(@__DIR__, "data", "nlp_model.mof.json"), String)
end
test_mof_file()
test_mof_io()
test_mof_nlp()
end
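
The refactor above turns the MOF cases into plain functions (`test_mof_file`, `test_mof_io`, `test_mof_nlp`) that the remaining "File formats" testset calls at the end. A hypothetical usage sketch of one practical upside, assuming the file is included from the JuMP repository root: a single case can be re-run on its own while debugging, without repeating the rest of the suite.

# Not part of this diff: including the file defines the functions and runs the
# full "File formats" testset once; afterwards an individual case can be repeated.
include(joinpath("test", "file_formats.jl"))
test_mof_nlp()   # re-run only the NLP MOF comparison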
