Commit

Merge 25c57be into fd50e23
btracey committed Dec 21, 2018
2 parents fd50e23 + 25c57be commit 0055e87
Showing 14 changed files with 110 additions and 106 deletions.
6 changes: 5 additions & 1 deletion optimize/bfgs.go
@@ -20,6 +20,10 @@ type BFGS struct {
// Accepted steps should satisfy the strong Wolfe conditions.
// If Linesearcher == nil, an appropriate default is chosen.
Linesearcher Linesearcher
// GradStopThreshold sets the threshold for stopping if the gradient norm
// gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and
// if it is NaN the setting is not used.
GradStopThreshold float64

ls *LinesearchMethod

@@ -49,7 +53,7 @@ func (b *BFGS) Init(dim, tasks int) int {
}

func (b *BFGS) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
b.status, b.err = localOptimizer{}.run(b, operation, result, tasks)
b.status, b.err = localOptimizer{}.run(b, b.GradStopThreshold, operation, result, tasks)
close(operation)
return
}
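The same GradStopThreshold field and the matching extra argument to localOptimizer{}.run recur in the CG, GradientDescent, LBFGS, and Newton diffs below. A minimal user-side sketch of the new knob, assuming the post-merge API; the Rosenbrock problem here is illustrative and not part of this commit:

```go
package main

import (
	"fmt"
	"log"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	// Rosenbrock function and gradient, used as a stand-in problem.
	p := optimize.Problem{
		Func: func(x []float64) float64 {
			a, b := 1-x[0], x[1]-x[0]*x[0]
			return a*a + 100*b*b
		},
		Grad: func(grad, x []float64) {
			grad[0] = -2*(1-x[0]) - 400*x[0]*(x[1]-x[0]*x[0])
			grad[1] = 200 * (x[1] - x[0]*x[0])
		},
	}
	// A looser stop than the 1e-12 default; math.NaN() would disable the check.
	method := &optimize.BFGS{GradStopThreshold: 1e-8}
	result, err := optimize.Minimize(p, []float64{-1.2, 1}, nil, method)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result.Status, result.X)
}
```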
6 changes: 5 additions & 1 deletion optimize/cg.go
@@ -87,6 +87,10 @@ type CG struct {
// If AngleRestartThreshold is 0, it will be set to -0.9.
// CG will panic if AngleRestartThreshold is not in the interval [-1, 0].
AngleRestartThreshold float64
// GradStopThreshold sets the threshold for stopping if the gradient norm
// gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and
// if it is NaN the setting is not used.
GradStopThreshold float64

ls *LinesearchMethod

@@ -112,7 +116,7 @@ func (cg *CG) Init(dim, tasks int) int {
}

func (cg *CG) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
cg.status, cg.err = localOptimizer{}.run(cg, operation, result, tasks)
cg.status, cg.err = localOptimizer{}.run(cg, cg.GradStopThreshold, operation, result, tasks)
close(operation)
return
}
35 changes: 21 additions & 14 deletions optimize/cmaes_test.go
@@ -16,6 +16,19 @@ import (
"gonum.org/v1/gonum/optimize/functions"
)

type functionThresholdConverger struct {
Threshold float64
}

func (functionThresholdConverger) Init(dim int) {}

func (f functionThresholdConverger) Converged(loc *Location) Status {
if loc.F < f.Threshold {
return FunctionThreshold
}
return NotTerminated
}
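This small Converger replaces the Settings.FunctionThreshold field that this commit removes (see the settings hunks below): stopping on an objective value is now expressed through the Converger interface (Init plus Converged). A sketch of wiring it in with the helper defined above, inside this test file's package:

```go
// Stop once the objective drops below 0.01.
settings := &Settings{
	Converger: functionThresholdConverger{Threshold: 0.01},
}
```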

type cmaTestCase struct {
dim int
problem Problem
@@ -41,8 +54,7 @@ func cmaTestCases() []cmaTestCase {
StopLogDet: math.NaN(),
},
settings: &Settings{
FunctionThreshold: 0.01,
Converger: NeverTerminate{},
Converger: functionThresholdConverger{0.01},
},
good: func(result *Result, err error, concurrent int) error {
if result.Status != FunctionThreshold {
@@ -63,8 +75,7 @@ },
},
method: &CmaEsChol{},
settings: &Settings{
FunctionThreshold: math.Inf(-1),
Converger: NeverTerminate{},
Converger: NeverTerminate{},
},
good: func(result *Result, err error, concurrent int) error {
if result.Status != MethodConverge {
@@ -88,9 +99,8 @@ func cmaTestCases() []cmaTestCase {
ForgetBest: true, // Otherwise may get an update at the end.
},
settings: &Settings{
FunctionThreshold: math.Inf(-1),
MajorIterations: 10,
Converger: NeverTerminate{},
MajorIterations: 10,
Converger: NeverTerminate{},
},
good: func(result *Result, err error, concurrent int) error {
if result.Status != IterationLimit {
@@ -118,9 +128,8 @@
Population: 100,
},
settings: &Settings{
FunctionThreshold: math.Inf(-1),
FuncEvaluations: 250, // Somewhere in the middle of an iteration.
Converger: NeverTerminate{},
FuncEvaluations: 250, // Somewhere in the middle of an iteration.
Converger: NeverTerminate{},
},
good: func(result *Result, err error, concurrent int) error {
if result.Status != FunctionEvaluationLimit {
@@ -150,8 +159,7 @@ func cmaTestCases() []cmaTestCase {
Population: 100, // Increase the population size to reduce noise.
},
settings: &Settings{
FunctionThreshold: math.Inf(-1),
Converger: NeverTerminate{},
Converger: NeverTerminate{},
},
good: func(result *Result, err error, concurrent int) error {
if result.Status != MethodConverge {
@@ -176,8 +184,7 @@ func cmaTestCases() []cmaTestCase {
ForgetBest: true, // So that if it accidentally finds a better place we still converge to the minimum.
},
settings: &Settings{
FunctionThreshold: math.Inf(-1),
Converger: NeverTerminate{},
Converger: NeverTerminate{},
},
good: func(result *Result, err error, concurrent int) error {
if result.Status != MethodConverge {
20 changes: 0 additions & 20 deletions optimize/global.go

This file was deleted.

6 changes: 5 additions & 1 deletion optimize/gradientdescent.go
@@ -15,6 +15,10 @@ type GradientDescent struct {
// StepSizer determines the initial step size along each direction.
// If StepSizer is nil, a reasonable default will be chosen.
StepSizer StepSizer
// GradStopThreshold sets the threshold for stopping if the gradient norm
// gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and
// if it is NaN the setting is not used.
GradStopThreshold float64

ls *LinesearchMethod

@@ -33,7 +37,7 @@ func (g *GradientDescent) Init(dim, tasks int) int {
}

func (g *GradientDescent) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
g.status, g.err = localOptimizer{}.run(g, operation, result, tasks)
g.status, g.err = localOptimizer{}.run(g, g.GradStopThreshold, operation, result, tasks)
close(operation)
return
}
2 changes: 1 addition & 1 deletion optimize/guessandcheck_test.go
@@ -29,7 +29,7 @@ func TestGuessAndCheck(t *testing.T) {
initX := make([]float64, dim)
Minimize(problem, initX, nil, &GuessAndCheck{Rander: d})

settings := DefaultSettingsGlobal()
settings := &Settings{}
settings.Concurrent = 5
settings.MajorIterations = 15
Minimize(problem, initX, settings, &GuessAndCheck{Rander: d})
6 changes: 5 additions & 1 deletion optimize/lbfgs.go
@@ -26,6 +26,10 @@ type LBFGS struct {
// Store is the size of the limited-memory storage.
// If Store is 0, it will be defaulted to 15.
Store int
// GradStopThreshold sets the threshold for stopping if the gradient norm
// gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and
// if it is NaN the setting is not used.
GradStopThreshold float64

status Status
err error
@@ -55,7 +59,7 @@ func (l *LBFGS) Init(dim, tasks int) int {
}

func (l *LBFGS) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
l.status, l.err = localOptimizer{}.run(l, operation, result, tasks)
l.status, l.err = localOptimizer{}.run(l, l.GradStopThreshold, operation, result, tasks)
close(operation)
return
}
32 changes: 28 additions & 4 deletions optimize/local.go
@@ -6,6 +6,8 @@ package optimize

import (
"math"

"gonum.org/v1/gonum/floats"
)

// localOptimizer is a helper type for running an optimization using a LocalMethod.
@@ -15,15 +17,15 @@ type localOptimizer struct{}
// must close the operation channel at the conclusion of the optimization. This
// provides a happens before relationship between the return of status and the
// closure of operation, and thus a call to method.Status (if necessary).
func (l localOptimizer) run(method localMethod, operation chan<- Task, result <-chan Task, tasks []Task) (Status, error) {
func (l localOptimizer) run(method localMethod, gradThresh float64, operation chan<- Task, result <-chan Task, tasks []Task) (Status, error) {
// Local methods start with a fully-specified initial location.
task := tasks[0]
task = l.initialLocation(operation, result, task, method)
if task.Op == PostIteration {
l.finish(operation, result)
return NotTerminated, nil
}
status, err := l.checkStartingLocation(task)
status, err := l.checkStartingLocation(task, gradThresh)
if err != nil {
l.finishMethodDone(operation, result, task)
return status, err
@@ -51,6 +53,14 @@ Loop:
switch r.Op {
case PostIteration:
break Loop
case MajorIteration:
// The last operation was a MajorIteration. Check if the gradient
// is below the threshold.
if status := l.checkGradientConvergence(r.Gradient, gradThresh); status != NotTerminated {
l.finishMethodDone(operation, result, task)
return GradientThreshold, nil
}
fallthrough
default:
op, err := method.iterateLocal(r.Location)
if err != nil {
@@ -91,7 +101,7 @@ func (l localOptimizer) initialLocation(operation chan<- Task, result <-chan Tas
return <-result
}

func (localOptimizer) checkStartingLocation(task Task) (Status, error) {
func (l localOptimizer) checkStartingLocation(task Task, gradThresh float64) (Status, error) {
if math.IsInf(task.F, 1) || math.IsNaN(task.F) {
return Failure, ErrFunc(task.F)
}
@@ -100,7 +110,21 @@ func (localOptimizer) checkStartingLocation(task Task) (Status, error) {
return Failure, ErrGrad{Grad: v, Index: i}
}
}
return NotTerminated, nil
status := l.checkGradientConvergence(task.Gradient, gradThresh)
return status, nil
}

func (localOptimizer) checkGradientConvergence(gradient []float64, gradThresh float64) Status {
if gradient == nil || math.IsNaN(gradThresh) {
return NotTerminated
}
if gradThresh == 0 {
gradThresh = defaultGradientAbsTol
}
if norm := floats.Norm(gradient, math.Inf(1)); norm < gradThresh {
return GradientThreshold
}
return NotTerminated
}

// finish completes the channel operations to finish an optimization.
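A standalone sketch of the semantics checkGradientConvergence implements above: a nil gradient or a NaN threshold disables the check, a zero threshold falls back to the package default of 1e-12 (defaultGradientAbsTol), and the comparison uses the infinity norm of the gradient. The gradConverged helper name is illustrative only:

```go
package main

import (
	"fmt"
	"math"

	"gonum.org/v1/gonum/floats"
)

// gradConverged mirrors the logic of localOptimizer.checkGradientConvergence,
// reporting a bool instead of a Status.
func gradConverged(gradient []float64, thresh float64) bool {
	if gradient == nil || math.IsNaN(thresh) {
		return false // NaN (or no gradient) disables the check entirely.
	}
	if thresh == 0 {
		thresh = 1e-12 // The package's defaultGradientAbsTol.
	}
	return floats.Norm(gradient, math.Inf(1)) < thresh
}

func main() {
	g := []float64{1e-13, -2e-14}
	fmt.Println(gradConverged(g, 0))          // true: below the 1e-12 default
	fmt.Println(gradConverged(g, math.NaN())) // false: check disabled
	fmt.Println(gradConverged(g, 1e-14))      // false: stricter threshold
}
```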
7 changes: 1 addition & 6 deletions optimize/local_example_test.go
@@ -19,12 +19,7 @@ func ExampleMinimize() {
}

x := []float64{1.3, 0.7, 0.8, 1.9, 1.2}
settings := optimize.DefaultSettingsLocal()
settings.Recorder = nil
settings.GradientThreshold = 1e-12
settings.Converger = optimize.NeverTerminate{}

result, err := optimize.Minimize(p, x, settings, &optimize.BFGS{})
result, err := optimize.Minimize(p, x, nil, nil)
if err != nil {
log.Fatal(err)
}
33 changes: 15 additions & 18 deletions optimize/minimize.go
@@ -101,15 +101,11 @@ type Method interface {
// Some Methods do not require an initial location, but initX must still be
// specified for the dimension of the optimization problem.
//
// The third argument contains the settings for the minimization. The
// DefaultSettingsLocal and DefaultSettingsGlobal functions can be called for
// different default settings depending on the optimization method. If
// settings is nil, DefaultSettingsLocal is used. All settings will be honored
// for all Methods, even if that setting is counter-productive to the method.
// However, the information used to check the Settings, and the times at which
// they are checked, are controlled by the Method. For example, if the Method
// never evaluates the gradient of the function then GradientThreshold will not
// be checked. Minimize cannot guarantee strict adherence to the bounds
// The third argument contains the settings for the minimization. If settings
// is nil, the zero value will be used, see the documentation of the Settings
// type for more information, and see the warning below. All settings will be
// honored for all Methods, even if that setting is counter-productive to the
// method. Minimize cannot guarantee strict adherence to the evaluation bounds
// specified when performing concurrent evaluations and updates.
//
// The final argument is the optimization method to use. If method == nil, then
@@ -131,7 +127,7 @@ func Minimize(p Problem, initX []float64, settings *Settings, method Method) (*R
method = getDefaultMethod(&p)
}
if settings == nil {
settings = DefaultSettingsLocal()
settings = &Settings{}
}
stats := &Stats{}
dim := len(initX)
@@ -147,10 +143,7 @@ func Minimize(p Problem, initX []float64, settings *Settings, method Method) (*R

converger := settings.Converger
if converger == nil {
converger = &FunctionConverge{
Absolute: 1e-10,
Iterations: 100,
}
converger = defaultFunctionConverge()
}
converger.Init(dim)

@@ -370,6 +363,13 @@ func minimize(prob *Problem, method Method, settings *Settings, converger Conver
return finalStatus, finalError
}

func defaultFunctionConverge() *FunctionConverge {
return &FunctionConverge{
Absolute: 1e-10,
Iterations: 100,
}
}

// newLocation allocates a new location structure of the appropriate size. It
// allocates memory based on the dimension and the values in Needs.
func newLocation(dim int, method Needser) *Location {
@@ -513,15 +513,12 @@ func checkLocationConvergence(loc *Location, settings *Settings, converger Conve
if math.IsInf(loc.F, -1) {
return FunctionNegativeInfinity
}
if loc.Gradient != nil {
if loc.Gradient != nil && settings.GradientThreshold > 0 {
norm := floats.Norm(loc.Gradient, math.Inf(1))
if norm < settings.GradientThreshold {
return GradientThreshold
}
}
if loc.F < settings.FunctionThreshold {
return FunctionThreshold
}
return converger.Converged(loc)
}

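Taken together, the minimize.go changes mean: nil settings is now the zero-value Settings, the default converger is the FunctionConverge returned by defaultFunctionConverge, and the settings-level gradient check is opt-in, running only when GradientThreshold > 0. An end-to-end sketch under the post-merge API; the quadratic problem is illustrative:

```go
package main

import (
	"fmt"
	"log"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	// A simple quadratic with its minimum at the origin.
	p := optimize.Problem{
		Func: func(x []float64) float64 { return x[0]*x[0] + x[1]*x[1] },
		Grad: func(grad, x []float64) {
			grad[0] = 2 * x[0]
			grad[1] = 2 * x[1]
		},
	}
	settings := &optimize.Settings{
		// Opt in to the settings-level check; it now fires only when positive.
		GradientThreshold: 1e-6,
		// Spelled out: the converger Minimize installs when Converger is nil.
		Converger: &optimize.FunctionConverge{Absolute: 1e-10, Iterations: 100},
	}
	// A nil method lets Minimize choose a default (BFGS, since Grad is set).
	result, err := optimize.Minimize(p, []float64{3, -4}, settings, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result.Status, result.X)
}
```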
3 changes: 2 additions & 1 deletion optimize/neldermead.go
@@ -5,6 +5,7 @@
package optimize

import (
"math"
"sort"

"gonum.org/v1/gonum/floats"
@@ -96,7 +97,7 @@ func (n *NelderMead) Init(dim, tasks int) int {
}

func (n *NelderMead) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
n.status, n.err = localOptimizer{}.run(n, operation, result, tasks)
n.status, n.err = localOptimizer{}.run(n, math.NaN(), operation, result, tasks)
close(operation)
return
}
6 changes: 5 additions & 1 deletion optimize/newton.go
@@ -45,6 +45,10 @@ type Newton struct {
// information in H.
// Increase must be greater than 1. If Increase is 0, it is defaulted to 5.
Increase float64
// GradStopThreshold sets the threshold for stopping if the gradient norm
// gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and
// if it is NaN the setting is not used.
GradStopThreshold float64

status Status
err error
Expand All @@ -67,7 +71,7 @@ func (n *Newton) Init(dim, tasks int) int {
}

func (n *Newton) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
n.status, n.err = localOptimizer{}.run(n, operation, result, tasks)
n.status, n.err = localOptimizer{}.run(n, n.GradStopThreshold, operation, result, tasks)
close(operation)
return
}