-
Notifications
You must be signed in to change notification settings - Fork 214
/
extrapolate.jl
48 lines (44 loc) · 1.96 KB
/
extrapolate.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import LineSearches
@testset "Extrapolation" begin
    # Compare default line searches against InitialQuadratic + BackTracking
    # ("extrapolation") on two problems: the 2-D Rosenbrock function and a
    # preconditioned discrete p-Laplacian. Only call counts are printed (when
    # debug_printing is set); the testset asserts nothing beyond clean runs.
    methods = [LBFGS(),
               ConjugateGradient(),
               LBFGS(alphaguess = LineSearches.InitialQuadratic(),
                     linesearch = LineSearches.BackTracking(order=2))]
    msgs = ["LBFGS Default Options: ",
            "CG Default Options: ",
            "LBFGS + Backtracking + Extrapolation: "]
    if debug_printing
        println("--------------------")
        println("Rosenbrock Example: ")
        println("--------------------")
    end
    # Classic Rosenbrock banana function; minimum at (1, 1).
    rosenbrock(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2
    for (method, msg) in zip(methods, msgs)
        results = Optim.optimize(rosenbrock, zeros(2), method)
        # Use the public accessor functions rather than raw field access,
        # consistent with the p-Laplacian loop below.
        debug_printing && println(msg, "g_calls = ", Optim.g_calls(results),
                                  ", f_calls = ", Optim.f_calls(results))
    end
    if debug_printing
        println("--------------------------------------")
        println("p-Laplacian Example (preconditioned): ")
        println("--------------------------------------")
    end
    # Discrete p-Laplacian energy on a 1-D grid and its gradient.
    plap(U; n = length(U)) = (n - 1) * sum((0.1 .+ diff(U).^2).^2) - sum(U) / (n - 1)
    # NOTE: ones(U) was removed in Julia 1.0; use ones(length(U)).
    plap1(U; n = length(U), dU = diff(U), dW = 4 .* (0.1 .+ dU.^2) .* dU) =
        (n - 1) .* ([0.0; dW] .- [dW; 0.0]) .- ones(length(U)) / (n - 1)
    # Tridiagonal scaled discrete-Laplacian preconditioner (SPD).
    # The old tuple-based spdiagm API was removed; use diagonal => vector pairs.
    precond(x::Vector) = precond(length(x))
    precond(n::Number) = spdiagm(-1 => -ones(n - 1), 0 => 2 * ones(n), 1 => -ones(n - 1)) * (n + 1)
    # Objective and in-place gradient with Dirichlet boundary values pinned to 0.
    f(X) = plap([0; X; 0])
    g!(g, X) = copyto!(g, (plap1([0; X; 0]))[2:end-1])  # copy! was renamed copyto! in Julia 1.0
    N = 100
    initial_x = zeros(N)
    P = precond(initial_x)
    methods = [LBFGS(P = P),
               ConjugateGradient(P = P),
               LBFGS(alphaguess = LineSearches.InitialQuadratic(),
                     linesearch = LineSearches.BackTracking(order=2), P = P)]
    for (method, msg) in zip(methods, msgs)
        results = Optim.optimize(f, g!, copy(initial_x), method)
        debug_printing && println(msg, "g_calls = ", Optim.g_calls(results),
                                  ", f_calls = ", Optim.f_calls(results))
    end
end