-
Notifications
You must be signed in to change notification settings - Fork 214
/
array.jl
98 lines (85 loc) · 4.1 KB
/
array.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
using StableRNGs
@testset "normalized array" begin
    # Minimize x[1] over the unit sphere (manifold-constrained CG).
    # The optimum pins the first entry to -1 and zeroes the rest.
    rng = StableRNG(1323)
    # In-place gradient of x -> x[1]: e₁, independent of the point.
    function sphere_grad!(storage, _)
        fill!(storage, 0)
        storage[1] = 1
        return storage
    end
    result = optimize(x -> x[1], sphere_grad!, randn(rng, 2, 2),
                      ConjugateGradient(manifold = Sphere()))
    @test result.minimizer ≈ [-1 0; 0 0]
    @test result.minimum ≈ -1
end
@testset "input types" begin
    # Quadratic with unique minimizer (10, 0, 0, 5). Used to check that
    # `optimize` accepts vector, matrix, and rank-3 array initial points
    # and hands back a minimizer of the same container type.
    f(X) = (10 - X[1])^2 + (0 - X[2])^2 + (0 - X[3])^2 + (5 - X[4])^2
    # Analytic in-place gradient of f (linear indexing, so it works for
    # any 4-element array shape).
    function g!(storage, x)
        storage[1] = -20 + 2 * x[1]
        storage[2] = 2 * x[2]
        storage[3] = 2 * x[3]
        storage[4] = -10 + 2 * x[4]
        return
    end
    @testset "vector" begin
        # Fixed: NelderMead appeared twice in this tuple, running the
        # same solver test redundantly.
        for m in (AcceleratedGradientDescent, ConjugateGradient, BFGS, LBFGS,
                  NelderMead, GradientDescent, MomentumGradientDescent,
                  ParticleSwarm, SimulatedAnnealing, NGMRES, OACCEL)
            debug_printing && printstyled("Solver: " * string(m); color = :green)
            res = optimize(f, g!, [1., 0., 1., 0.], m())
            @test typeof(Optim.minimizer(res)) <: Vector
            # Stochastic / derivative-free methods are not expected to hit
            # the optimum tightly, so skip the accuracy check for them.
            if !(m in (NelderMead, SimulatedAnnealing, ParticleSwarm))
                @test norm(Optim.minimizer(res) - [10.0, 0.0, 0.0, 5.0]) < 10e-8
            end
        end
    end
    @testset "matrix" begin
        # Fixed: ConjugateGradient appeared twice in this tuple.
        for m in (AcceleratedGradientDescent, ConjugateGradient, BFGS, LBFGS,
                  GradientDescent, MomentumGradientDescent, ParticleSwarm,
                  SimulatedAnnealing, NGMRES, OACCEL)
            res = optimize(f, g!, Matrix{Float64}(I, 2, 2), m())
            @test typeof(Optim.minimizer(res)) <: Matrix
            if !(m in (SimulatedAnnealing, ParticleSwarm))
                @test norm(Optim.minimizer(res) - [10.0 0.0; 0.0 5.0]) < 10e-8
            end
        end
    end
    @testset "tensor" begin
        # Rank-3 initial point with a trailing singleton dimension; the
        # minimizer must preserve that shape. Subtracting the 2×2 target
        # works because array +/- promotes away trailing singleton dims.
        eye3 = zeros(2, 2, 1)
        eye3[:, :, 1] = Matrix{Float64}(I, 2, 2)
        # Fixed: ConjugateGradient appeared twice in this tuple.
        for m in (AcceleratedGradientDescent, ConjugateGradient, BFGS, LBFGS,
                  GradientDescent, MomentumGradientDescent, ParticleSwarm,
                  SimulatedAnnealing, NGMRES, OACCEL)
            res = optimize(f, g!, eye3, m())
            _minimizer = Optim.minimizer(res)
            @test typeof(_minimizer) <: Array{Float64, 3}
            @test size(_minimizer) == (2, 2, 1)
            if !(m in (SimulatedAnnealing, ParticleSwarm))
                @test norm(_minimizer - [10.0 0.0; 0.0 5.0]) < 10e-8
            end
        end
    end
end
using RecursiveArrayTools
@testset "arraypartition input" begin
    # Smoke tests: `optimize` must accept an ArrayPartition (from
    # RecursiveArrayTools) as the initial point for every solver family.
    # Only successful execution is checked — results are not asserted.
    rng = StableRNG(133)
    # Separable objective with unique minimizer (10, 7, 108).
    function polynomial(x)
        return (10.0 - x[1])^2 + (7.0 - x[2])^4 + (108.0 - x[3])^4
    end
    # In-place gradient of `polynomial`. Mutators return nothing
    # explicitly rather than leaking the last assignment.
    function polynomial_gradient!(storage, x)
        storage[1] = -2.0 * (10.0 - x[1])
        storage[2] = -4.0 * (7.0 - x[2])^3
        storage[3] = -4.0 * (108.0 - x[3])^3
        return nothing
    end
    # In-place 3×3 Hessian of `polynomial` (diagonal; off-diagonals zero).
    function polynomial_hessian!(storage, x)
        storage[1, 1] = 2.0
        storage[1, 2] = 0.0
        storage[1, 3] = 0.0
        storage[2, 1] = 0.0
        storage[2, 2] = 12.0 * (7.0 - x[2])^2
        storage[2, 3] = 0.0
        storage[3, 1] = 0.0
        storage[3, 2] = 0.0
        storage[3, 3] = 12.0 * (108.0 - x[3])^2
        return nothing
    end
    # Two partitions (1 + 2 elements) matching the 3-variable problem.
    ap = ArrayPartition(rand(rng, 1), rand(rng, 2))
    # Same solver sequence as before, deduplicated into a loop.
    for solver in (NelderMead(), ParticleSwarm(), SimulatedAnnealing(),
                   GradientDescent(), AcceleratedGradientDescent(),
                   MomentumGradientDescent(), ConjugateGradient(),
                   BFGS(), LBFGS(), Newton())
        optimize(polynomial, polynomial_gradient!, polynomial_hessian!, ap, solver)
    end
    # NewtonTrustRegion stays disabled, as in the original — presumably it
    # does not yet handle ArrayPartition input; TODO confirm and re-enable.
    # optimize(polynomial, polynomial_gradient!, polynomial_hessian!, ap, NewtonTrustRegion())
end