# bound-constrained.jl
#
# Bound-constrained test problems and a test harness for JSO-compatible solvers.
export bound_constrained_nlp
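# Note: this file is expected to be `include`d in a module that already loads
# ADNLPModels, LinearAlgebra, Logging, NLPModels, SparseArrays, and Test, and
# that provides `kkt_checker` (used below to obtain the primal and dual KKT
# residuals at a candidate solution).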
function bound_constrained_nlp_set(; kwargs...)
  n = 30
  # Auxiliary operators: a scaled diagonal matrix and a tridiagonal
  # second-difference matrix (not used by the problems below).
  D = Diagonal([0.1 + 0.9 * (i - 1) / (n - 1) for i = 1:n])
  A = spdiagm(0 => 2 * ones(n), -1 => -ones(n - 1), 1 => -ones(n - 1))
  return [
    # Convex quadratic; the unconstrained minimizer (1, 1) is interior to the bounds.
    ADNLPModel(
      x -> (x[1] - 1)^2 + 4 * (x[2] - 1)^2,
      zeros(2),
      [0.5; 0.25],
      [1.2; 1.5],
      name = "Simple quadratic";
      kwargs...,
    ),
    # Rosenbrock; the minimizer (1, 1) is interior, so no bound is active.
    ADNLPModel(
      x -> (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2,
      [-1.2; 1.0],
      [0.5; 0.25],
      [1.2; 1.5],
      name = "Rosenbrock inactive bounds";
      kwargs...,
    ),
    # Rosenbrock; the upper bound x[1] ≤ 1 is active at the minimizer (1, 1).
    ADNLPModel(
      x -> (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2,
      [-1.2; 1.0],
      [0.5; 0.25],
      [1.0; 1.5],
      name = "Rosenbrock active bounds";
      kwargs...,
    ),
    # x[1] is fixed at 1 by its bounds.
    ADNLPModel(
      x -> (x[1] - 2)^2 + (x[2] - 1)^2 - 1,
      zeros(2),
      [1.0; 0.0],
      [1.0; 2.0],
      name = "One fixed variable";
      kwargs...,
    ),
    # Every variable is fixed at 1; the objective is zero on the feasible set.
    ADNLPModel(x -> sum(x .^ 2) - n, zeros(n), ones(n), ones(n), name = "All variables fixed"; kwargs...),
    # Extended Rosenbrock in n variables; the solution ones(n) lies on the upper bound.
    ADNLPModel(
      x -> sum(100 * (x[i + 1] - x[i]^2)^2 + (x[i] - 1)^2 for i = 1:(n - 1)),
      collect(1:n) ./ (n + 1),
      zeros(n),
      ones(n),
      name = "Extended Rosenbrock";
      kwargs...,
    ),
  ]
end
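
# A minimal sketch of inspecting the problem set (names and dimensions only;
# `nvar` and `ifix` are standard fields of `nlp.meta` in NLPModels):
#
#     for nlp in bound_constrained_nlp_set()
#       println(nlp.meta.name, ": n = ", nlp.meta.nvar, ", fixed = ", length(nlp.meta.ifix))
#     end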
"""
bound_constrained_nlp(solver; problem_set = bound_constrained_nlp_set(), atol = 1e-6, rtol = 1e-6)
Test the `solver` on bound-constrained problems.
If `rtol` is non-zero, the relative error uses the gradient at the initial guess.
"""
function bound_constrained_nlp(
  solver;
  problem_set = bound_constrained_nlp_set(),
  atol = 1e-6,
  rtol = 1e-6,
)
  @testset "Problem $(nlp.meta.name)" for nlp in problem_set
    # Run the solver with logging silenced.
    stats = with_logger(NullLogger()) do
      solver(nlp)
    end
    # Absolute/relative stopping tolerance, scaled by ‖∇f(x₀)‖ when rtol ≠ 0.
    ng0 = rtol != 0 ? norm(grad(nlp, nlp.meta.x0)) : 0
    ϵ = atol + rtol * ng0
    # The returned solution must satisfy the KKT conditions to within ϵ,
    # and the solver must report a first-order stationary point.
    primal, dual = kkt_checker(nlp, stats.solution)
    @test all(abs.(dual) .< ϵ)
    @test all(abs.(primal) .< ϵ)
    @test stats.dual_feas < ϵ
    @test stats.status == :first_order
  end
end
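
# Usage sketch: any JSO-compatible solver that accepts an `AbstractNLPModel`
# with bounds and returns a `GenericExecutionStats` can be passed in. For
# example, with `tron` from JSOSolvers (assuming that package is installed):
#
#     using JSOSolvers
#     bound_constrained_nlp(tron)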