# newton.jl
let
    # Quartic in x[1] with a degenerate minimum at 5.0.
    function f_1(x::Vector)
        (x[1] - 5.0)^4
    end
    function g!_1(x::Vector, storage::Vector)
        storage[1] = 4.0 * (x[1] - 5.0)^3
    end
    function h!_1(x::Vector, storage::Matrix)
        storage[1, 1] = 12.0 * (x[1] - 5.0)^2
    end
    d = TwiceDifferentiableFunction(f_1, g!_1, h!_1)

    # Newton() requires a Hessian: with only f and g! supplied, autodiff must
    # be enabled, otherwise an error is thrown.
    @test_throws ErrorException Optim.optimize(DifferentiableFunction(f_1, g!_1), [0.0], Newton())
    Optim.optimize(DifferentiableFunction(f_1, g!_1), [0.0], Newton(), Optim.Options(autodiff = true))

    results = Optim.optimize(d, [0.0], Newton())
    # No trace was stored, so x_trace must throw.
    @test_throws ErrorException Optim.x_trace(results)
    @assert Optim.g_converged(results)
    @assert norm(Optim.minimizer(results) - [5.0]) < 0.01
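    # Illustrative sketch, not Optim's implementation (which safeguards the
    # step): a bare Newton iteration x <- x - g / h on f_1. The minimum of
    # (x - 5)^4 is degenerate (zero Hessian at the solution), so each step only
    # shrinks the error by a factor of 2/3 and convergence is linear, not quadratic.
    let x = 0.0
        for i in 1:50
            g = 4.0 * (x - 5.0)^3
            h = 12.0 * (x - 5.0)^2
            h == 0.0 && break
            x -= g / h
        end
        @assert abs(x - 5.0) < 0.01
    end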
    # Strictly convex quadratic with Hessian diag(1.0, eta).
    eta = 0.9
    function f_2(x::Vector)
        (1.0 / 2.0) * (x[1]^2 + eta * x[2]^2)
    end
    function g!_2(x::Vector, storage::Vector)
        storage[1] = x[1]
        storage[2] = eta * x[2]
    end
    function h!_2(x::Vector, storage::Matrix)
        storage[1, 1] = 1.0
        storage[1, 2] = 0.0
        storage[2, 1] = 0.0
        storage[2, 2] = eta
    end
    d = TwiceDifferentiableFunction(f_2, g!_2, h!_2)
    results = Optim.optimize(d, [127.0, 921.0], Newton())
    @test_throws ErrorException Optim.x_trace(results)
    @assert Optim.g_converged(results)
    @assert norm(Optim.minimizer(results) - [0.0, 0.0]) < 0.01
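    # Why this converges so fast: for a quadratic f(x) = x' * H * x / 2 the
    # Newton step -(H \ g) lands exactly on the minimizer in a single jump. A
    # minimal check of that identity (pure linear algebra, not Optim's code path):
    let x = [127.0, 921.0]
        H = [1.0 0.0; 0.0 eta]
        g = H * x
        @assert norm(x - H \ g) < 1e-10
    end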
    # Run Newton() on every twice differentiable problem in Optim.UnconstrainedProblems.examples.
    for (name, prob) in Optim.UnconstrainedProblems.examples
        if prob.istwicedifferentiable
            ddf = TwiceDifferentiableFunction(prob.f, prob.g!, prob.h!)
            res = Optim.optimize(ddf, prob.initial_x, Newton())
            @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
        end
    end
    # From [0.0, 0.0], Newton should hit Himmelblau's stored solution to near
    # machine precision, reflecting its local quadratic convergence.
    let
        prob = Optim.UnconstrainedProblems.examples["Himmelblau"]
        ddf = TwiceDifferentiableFunction(prob.f, prob.g!, prob.h!)
        res = Optim.optimize(ddf, [0.0, 0.0], Newton())
        @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-10
    end
    # Same problems again via autodiff: with f and g! bundled, with f alone,
    # and with f and g! passed separately; the missing derivatives come from autodiff.
    for (name, prob) in Optim.UnconstrainedProblems.examples
        if prob.istwicedifferentiable
            ddf = DifferentiableFunction(prob.f, prob.g!)
            res = Optim.optimize(ddf, prob.initial_x, Newton(), Optim.Options(autodiff = true))
            @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
            res = Optim.optimize(ddf.f, prob.initial_x, Newton(), Optim.Options(autodiff = true))
            @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
            res = Optim.optimize(ddf.f, ddf.g!, prob.initial_x, Newton(), Optim.Options(autodiff = true))
            @assert norm(Optim.minimizer(res) - prob.solutions) < 1e-2
        end
    end
end
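# Illustrative sketch, separate from the tests above: with autodiff = true the
# derivatives Newton() needs can be generated from f alone. Assuming the
# ForwardDiff package is available (an assumption, not asserted by this file),
# the hand-coded gradient and Hessian of the quartic are reproduced directly:
import ForwardDiff
let f = x -> (x[1] - 5.0)^4, x0 = [0.0]
    @assert norm(ForwardDiff.gradient(f, x0) - [-500.0]) < 1e-8          # 4 * (0 - 5)^3
    @assert norm(ForwardDiff.hessian(f, x0) - fill(300.0, 1, 1)) < 1e-8  # 12 * (0 - 5)^2
end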