Merged
290 changes: 117 additions & 173 deletions src/solve.jl
@@ -1,3 +1,32 @@
abstract type AbstractOptimizationSolution end #experimental; comments welcome
mutable struct OptimizationSolution{O, Tx, Tf, Tls, Tsb} <: AbstractOptimizationSolution
method::O
initial_x::Tx
minimizer::Tx
[Review comment, Member] we should have a getproperty for this to be .u to match the others. (A hedged sketch of such an overload follows this hunk.)

minimum::Tf
iterations::Int
iteration_converged::Bool
ls_success::Tls
time_run::Float64
stopped_by::Tsb
end

function Base.show(io::IO, r::AbstractOptimizationSolution)
take = Iterators.take
failure_string = "failure"
if isa(r.ls_success, Bool) && !r.ls_success
failure_string *= " (line search failed)"
end

@printf io " * Status: %s\n\n" r.iteration_converged ? "success" : failure_string
@printf io " * Candidate solution\n"
@printf io " Final objective value: %e\n" r.minimum
@printf io "\n"
@printf io " * Found with\n"
@printf io " Algorithm: %s\n" r.method
return
end

struct NullData end
const DEFAULT_DATA = Iterators.cycle((NullData(),))
Base.iterate(::NullData, i=1) = nothing
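
Following the review comment above, a minimal sketch of the suggested getproperty overload. This is an illustration only and is not part of the diff; it simply forwards .u to the minimizer field defined in the struct above.

function Base.getproperty(sol::AbstractOptimizationSolution, sym::Symbol)
    # Expose the minimizer as `.u` to match the accessor used by other solution types.
    sym === :u && return getfield(sol, :minimizer)
    return getfield(sol, sym)
end

With an overload like this, sol.u and sol.minimizer would return the same array for any concrete subtype that carries a minimizer field.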
@@ -124,34 +153,15 @@ function __solve(prob::OptimizationProblem, opt, data = DEFAULT_DATA;

_time = time()

Optim.MultivariateOptimizationResults(opt,
prob.u0,# initial_x,
θ, #pick_best_x(f_incr_pick, state),
save_best ? first(min_err) : first(x), # pick_best_f(f_incr_pick, state, d),
maxiters, #iteration,
maxiters >= maxiters, #iteration == options.iterations,
true, # x_converged,
0.0,#T(options.x_tol),
0.0,#T(options.x_tol),
NaN,# x_abschange(state),
NaN,# x_abschange(state),
true,# f_converged,
0.0,#T(options.f_tol),
0.0,#T(options.f_tol),
NaN,#f_abschange(d, state),
NaN,#f_abschange(d, state),
true,#g_converged,
0.0,#T(options.g_tol),
NaN,#g_residual(d),
false, #f_increased,
nothing,
maxiters,
maxiters,
0,
true,
NaN,
_time-t0,
NamedTuple())
OptimizationSolution(opt,
prob.u0,# initial_x,
θ, #pick_best_x(f_incr_pick, state),
save_best ? first(min_err) : first(x), # pick_best_f(f_incr_pick, state, d),
maxiters, #iteration,
maxiters >= maxiters, #iteration == options.iterations,
true,
_time-t0,
NamedTuple())
end
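
For orientation, a short sketch of how a caller might inspect the value returned by __solve above. The variables prob and opt are placeholders, and only fields defined in the OptimizationSolution struct are used.

sol = __solve(prob, opt)       # hypothetical problem/optimizer pair
sol.minimizer                  # best parameters found (θ in the code above)
sol.minimum                    # best objective value
sol.iteration_converged        # whether the run is reported as converged
show(stdout, sol)              # prints the status summary via the Base.show method added in this PR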


@@ -384,34 +394,16 @@ function __init__()
bboptre = !(isnothing(maxiters)) ? BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], MaxSteps = maxiters, CallbackFunction = _cb, CallbackInterval = 0.0, kwargs...) : BlackBoxOptim.bboptimize(_loss;Method = opt.method, SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], CallbackFunction = _cb, CallbackInterval = 0.0, kwargs...)


Optim.MultivariateOptimizationResults(opt.method,
[NaN],# initial_x,
BlackBoxOptim.best_candidate(bboptre), #pick_best_x(f_incr_pick, state),
BlackBoxOptim.best_fitness(bboptre), # pick_best_f(f_incr_pick, state, d),
bboptre.iterations, #iteration,
!(isnothing(maxiters)) ? bboptre.iterations >= maxiters : true, #iteration == options.iterations,
false, # x_converged,
0.0,#T(options.x_tol),
0.0,#T(options.x_tol),
NaN,# x_abschange(state),
NaN,# x_abschange(state),
false,# f_converged,
0.0,#T(options.f_tol),
0.0,#T(options.f_tol),
NaN,#f_abschange(d, state),
NaN,#f_abschange(d, state),
false,#g_converged,
0.0,#T(options.g_tol),
NaN,#g_residual(d),
false, #f_increased,
nothing,
bboptre.f_calls,
0,
0,
true,
NaN,
bboptre.elapsed_time,
NamedTuple())
OptimizationSolution(opt.method,
[NaN],# initial_x,
BlackBoxOptim.best_candidate(bboptre), #pick_best_x(f_incr_pick, state),
BlackBoxOptim.best_fitness(bboptre), # pick_best_f(f_incr_pick, state, d),
bboptre.iterations, #iteration,
!(isnothing(maxiters)) ? bboptre.iterations >= maxiters : true, #iteration == options.iterations,
true,
bboptre.elapsed_time,
NamedTuple())

end
end

@@ -461,38 +453,19 @@ function __init__()
end
end

t0= time()
t0 = time()
(minf,minx,ret) = NLopt.optimize(opt, prob.u0)
_time = time()

Optim.MultivariateOptimizationResults(opt,
prob.u0,# initial_x,
minx, #pick_best_x(f_incr_pick, state),
minf, # pick_best_f(f_incr_pick, state, d),
Int(opt.numevals), #iteration,
!(isnothing(maxiters)) ? opt.numevals >= maxiters : true, #iteration == options.iterations,
false, # x_converged,
0.0,#T(options.x_tol),
0.0,#T(options.x_tol),
NaN,# x_abschange(state),
NaN,# x_abschange(state),
false,# f_converged,
0.0,#T(options.f_tol),
0.0,#T(options.f_tol),
NaN,#f_abschange(d, state),
NaN,#f_abschange(d, state),
false,#g_converged,
0.0,#T(options.g_tol),
NaN,#g_residual(d),
false, #f_increased,
nothing,
Int(opt.numevals),
0,
0,
ret,
NaN,
_time-t0,
NamedTuple())
OptimizationSolution(opt.algorithm,
prob.u0,# initial_x,
minx, #pick_best_x(f_incr_pick, state),
minf, # pick_best_f(f_incr_pick, state, d),
Int(opt.numevals), #iteration,
!(isnothing(maxiters)) ? opt.numevals >= maxiters : true, #iteration == options.iterations,
ret,
_time-t0,
NamedTuple())
end
end

@@ -526,34 +499,15 @@ function __init__()

t1 = time()

Optim.MultivariateOptimizationResults(opt,
[NaN],# initial_x,
p.location, #pick_best_x(f_incr_pick, state),
p.value, # pick_best_f(f_incr_pick, state, d),
0, #iteration,
false, #iteration == options.iterations,
false, # x_converged,
0.0,#T(options.x_tol),
0.0,#T(options.x_tol),
NaN,# x_abschange(state),
NaN,# x_abschange(state),
false,# f_converged,
0.0,#T(options.f_tol),
0.0,#T(options.f_tol),
NaN,#f_abschange(d, state),
NaN,#f_abschange(d, state),
false,#g_converged,
0.0,#T(options.g_tol),
NaN,#g_residual(d),
false, #f_increased,
nothing,
0,
0,
0,
true,
NaN,
t1 - t0,
NamedTuple())
OptimizationSolution(opt,
[NaN],# initial_x,
p.location, #pick_best_x(f_incr_pick, state),
p.value, # pick_best_f(f_incr_pick, state, d),
local_maxiters,
local_maxiters>=opt.maxeval, #not sure if that's correct
true,
t1 - t0,
NamedTuple())
end
end

@@ -588,34 +542,15 @@ function __init__()
box = minimum(root)
t1 = time()

Optim.MultivariateOptimizationResults(opt,
[NaN],# initial_x,
QuadDIRECT.position(box, x0), #pick_best_x(f_incr_pick, state),
QuadDIRECT.value(box), # pick_best_f(f_incr_pick, state, d),
0, #iteration,
false, #iteration == options.iterations,
false, # x_converged,
0.0,#T(options.x_tol),
0.0,#T(options.x_tol),
NaN,# x_abschange(state),
NaN,# x_abschange(state),
false,# f_converged,
0.0,#T(options.f_tol),
0.0,#T(options.f_tol),
NaN,#f_abschange(d, state),
NaN,#f_abschange(d, state),
false,#g_converged,
0.0,#T(options.g_tol),
NaN,#g_residual(d),
false, #f_increased,
nothing,
0,
0,
0,
true,
NaN,
t1 - t0,
NamedTuple())
OptimizationSolution(opt,
[NaN],# initial_x,
QuadDIRECT.position(box, x0), #pick_best_x(f_incr_pick, state),
QuadDIRECT.value(box), # pick_best_f(f_incr_pick, state, d),
!(isnothing(maxiters)) ? maxiters : 0,
box.qnconverged, #not sure if that's correct
true,
t1 - t0,
NamedTuple())
end
end

@@ -657,16 +592,29 @@ function __init__()
return first(x)
end

Evolutionary.optimize(_loss, prob.u0, opt, !isnothing(maxiters) ? Evolutionary.Options(;iterations = maxiters, callback = _cb, kwargs...)
t0 = time()

result = Evolutionary.optimize(_loss, prob.u0, opt, !isnothing(maxiters) ? Evolutionary.Options(;iterations = maxiters, callback = _cb, kwargs...)
: Evolutionary.Options(;callback = _cb, kwargs...))
t1 = time()

OptimizationSolution(summary(result),
prob.u0, #initial_x
Evolutionary.minimizer(result), #pick_best_x
minimum(result), #pick_best_f
Evolutionary.iterations(result), #iteration
Evolutionary.converged(result), #convergence status
true,
t1 - t0,
NamedTuple())
end
end
@require CMAEvolutionStrategy="8d3b24bd-414e-49e0-94fb-163cc3a3e411" begin

struct CMAEvolutionStrategyOpt end

function __solve(prob::OptimizationProblem, opt::CMAEvolutionStrategyOpt, data = DEFAULT_DATA;
cb = (args...) -> (false),
cb = (args...) -> (false), maxiters = nothing,
progress = false, kwargs...)
local x, cur, state

@@ -690,36 +638,32 @@ function __init__()
return first(x)
end

result = CMAEvolutionStrategy.minimize(_loss, prob.u0, 0.1; lower = prob.lb, upper = prob.ub, kwargs...)

Optim.MultivariateOptimizationResults(opt,
prob.u0,# initial_x,
result.logger.xbest[end], #pick_best_x(f_incr_pick, state),
result.logger.fbest[end], # pick_best_f(f_incr_pick, state, d),
0, #iteration,
false, #iteration == options.iterations,
false, # x_converged,
0.0,#T(options.x_tol),
0.0,#T(options.x_tol),
NaN,# x_abschange(state),
NaN,# x_abschange(state),
false,# f_converged,
0.0,#T(options.f_tol),
0.0,#T(options.f_tol),
NaN,#f_abschange(d, state),
NaN,#f_abschange(d, state),
false,#g_converged,
0.0,#T(options.g_tol),
NaN,#g_residual(d),
false, #f_increased,
nothing,
0,
0,
0,
true,
NaN,
result.logger.times[end] - result.logger.times[1],
NamedTuple())

if !(isnothing(maxiters)) && maxiters <= 0.0
error("The number of maxiters has to be a non-negative and non-zero number.")
elseif !(isnothing(maxiters))
maxiters = convert(Int, maxiters)
end

result = CMAEvolutionStrategy.minimize(_loss, prob.u0, 0.1; lower = prob.lb, upper = prob.ub, maxiter = maxiters, kwargs...)
CMAEvolutionStrategy.print_header(result)
CMAEvolutionStrategy.print_result(result)
println("\n")
criterion = true

if (result.stop.reason === :maxtime) #this is an arbitrary choice of convergence (based on the stop.reason values)
criterion = false
end

OptimizationSolution(opt,
prob.u0,# initial_x,
result.logger.xbest[end], #pick_best_x(f_incr_pick, state),
result.logger.fbest[end], # pick_best_f(f_incr_pick, state, d),
length(result.logger.fbest),
criterion,
true,
result.logger.times[end] - result.logger.times[1],
NamedTuple())
end
end
end