Skip to content

Commit

Permalink
optimize: Change Settings to allow InitialLocation
Browse files Browse the repository at this point in the history
This modifies Settings to allow specifying an initial location and properties of the function (value, gradient, etc.). This allows working with local optimizers that are seeded with initial conditions. Two fields must be specified (both Location and Operation). The difficulty is that the default value of a float64 function value is 0, so we must either require the user to declare explicitly which values are set (somehow), or require the user to move unset fields away from their default values. The former seems much safer.
  • Loading branch information
btracey committed May 16, 2018
1 parent 3f7b30d commit a6d030e
Show file tree
Hide file tree
Showing 5 changed files with 112 additions and 52 deletions.
19 changes: 13 additions & 6 deletions optimize/global.go
Expand Up @@ -58,6 +58,12 @@ type GlobalMethod interface {
// The last parameter to RunGlobal is a slice of tasks with length equal to
// the return from InitGlobal. GlobalTask has an ID field which may be
// set and modified by GlobalMethod, and must not be modified by the caller.
// The first element of tasks contains information about the initial location
// if any is specified in Settings. The Location field will contain the value
// specified by InitLocation, defaulted to a value of zero if none was specified.
// The Operation field specifies which other values of Location are known.
// If Operation == NoOperation, none of the values should be used, otherwise
// the Evaluation operations will be composed to specify the valid fields.
//
// GlobalMethod may have its own specific convergence criteria, which can
// be communicated using a MethodDone operation. This will trigger a
Expand Down Expand Up @@ -120,16 +126,15 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul
return nil, err
}

// TODO(btracey): These init calls don't do anything with their arguments
// because optLoc is meaningless at this point. Should change the function
// signatures.
optLoc := newLocation(dim, method)
optLoc.F = math.Inf(1)

if settings.FunctionConverge != nil {
settings.FunctionConverge.Init()
}

initLoc := getInitLocation(dim, settings.InitLocation, settings.InitOperation, method)

stats.Runtime = time.Since(startTime)

// Send initial location to Recorder
Expand All @@ -142,7 +147,7 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul

// Run optimization
var status Status
status, err = minimizeGlobal(&p, method, settings, stats, optLoc, startTime)
status, err = minimizeGlobal(&p, method, settings, stats, settings.InitOperation, initLoc, optLoc, startTime)

// Cleanup and collect results
if settings.Recorder != nil && err == nil {
Expand All @@ -158,7 +163,7 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul

// minimizeGlobal performs a Global optimization. minimizeGlobal updates the
// settings and optLoc, and returns the final Status and error.
func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stats *Stats, optLoc *Location, startTime time.Time) (Status, error) {
func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stats *Stats, initOp Operation, initLoc, optLoc *Location, startTime time.Time) (Status, error) {
dim := len(optLoc.X)
nTasks := settings.Concurrent
if nTasks == 0 {
Expand All @@ -176,7 +181,9 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
results := make(chan GlobalTask, nTasks)
go func() {
tasks := make([]GlobalTask, nTasks)
for i := range tasks {
tasks[0].Location = initLoc
tasks[0].Op = initOp
for i := 1; i < len(tasks); i++ {
tasks[i].Location = newLocation(dim, method)
}
method.RunGlobal(operations, results, tasks)
Expand Down
56 changes: 26 additions & 30 deletions optimize/local.go
Expand Up @@ -4,7 +4,11 @@

package optimize

import "math"
import (
"math"

"gonum.org/v1/gonum/floats"
)

// Local finds a local minimum of a minimization problem using a sequential
// algorithm. A maximization problem can be transformed into a minimization
Expand Down Expand Up @@ -62,6 +66,11 @@ func Local(p Problem, initX []float64, settings *Settings, method Method) (*Resu
if settings == nil {
settings = DefaultSettings()
}
// Check that the initial location matches the one in settings.
if settings.InitLocation != nil && settings.InitLocation.X != nil &&
!floats.Equal(settings.InitLocation.X, initX) {
panic("local: initX does not match settings x location")
}
lg := &localGlobal{
Method: method,
InitX: initX,
Expand Down Expand Up @@ -117,6 +126,7 @@ func (l *localGlobal) RunGlobal(operations chan<- GlobalTask, results <-chan Glo
l.cleanup(operations, results)
return
}

// Check the starting condition.
if math.IsInf(task.F, 1) || math.IsNaN(task.F) {
l.status = Failure
Expand Down Expand Up @@ -193,38 +203,24 @@ func (l *localGlobal) cleanup(operation chan<- GlobalTask, result <-chan GlobalT

func (l *localGlobal) getStartingLocation(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask) Operation {
copy(task.X, l.InitX)
if l.Settings.UseInitialData {
task.F = l.Settings.InitialValue
if task.Gradient != nil {
g := l.Settings.InitialGradient
if g == nil {
panic("optimize: initial gradient is nil")
}
if len(g) != l.dim {
panic("optimize: initial gradient size mismatch")
}
copy(task.Gradient, g)
}
if task.Hessian != nil {
h := l.Settings.InitialHessian
if h == nil {
panic("optimize: initial Hessian is nil")
}
if h.Symmetric() != l.dim {
panic("optimize: initial Hessian size mismatch")
}
task.Hessian.CopySym(h)
}
return NoOperation
// Construct the operation by what is missing.
needs := l.Method.Needs()
initOp := l.Settings.InitOperation
op := NoOperation
if initOp&FuncEvaluation == 0 {
op |= FuncEvaluation
}
eval := FuncEvaluation
if task.Gradient != nil {
eval |= GradEvaluation
if needs.Gradient && initOp&GradEvaluation == 0 {
op |= GradEvaluation
}
if task.Hessian != nil {
eval |= HessEvaluation
if needs.Hessian && initOp&HessEvaluation == 0 {
op |= HessEvaluation
}
task.Op = eval

if op == NoOperation {
return NoOperation
}
task.Op = op
operation <- task
task = <-result
return task.Op
Expand Down
54 changes: 51 additions & 3 deletions optimize/minimize.go
Expand Up @@ -26,14 +26,12 @@ func min(a, b int) int {
}

// newLocation allocates a new location structure of the appropriate size. It
// allocates memory based on the dimension and the values in Needs. The initial
// function value is set to math.Inf(1).
// allocates memory based on the dimension and the values in Needs.
func newLocation(dim int, method Needser) *Location {
// TODO(btracey): combine this with Local.
loc := &Location{
X: make([]float64, dim),
}
loc.F = math.Inf(1)
if method.Needs().Gradient {
loc.Gradient = make([]float64, dim)
}
Expand All @@ -60,6 +58,56 @@ func copyLocation(dst, src *Location) {
}
}

// getInitLocation checks the validity of initLocation and initOperation and
// returns the initial values as a *Location. If initLocation is nil, or
// initLocation.X is nil, a freshly allocated zero location is returned.
// It panics if initOperation declares data that initLocation does not
// provide, or if any provided data does not match the problem dimension.
func getInitLocation(dim int, initLocation *Location, initOperation Operation, method Needser) *Location {
	needs := method.Needs()
	loc := newLocation(dim, method)
	if initLocation == nil {
		return loc
	}
	// Check consistency of initOperation and initLocation even if
	// initLocation.X == nil to catch common bugs.
	if initOperation != NoOperation {
		if initLocation.X == nil {
			panic("optimize: initOperation set but no initial X specified")
		}
		if initOperation&GradEvaluation != 0 {
			if initLocation.Gradient == nil {
				panic("optimize: operation specifies gradient, but no gradient specified")
			}
			if len(initLocation.Gradient) != dim {
				panic("optimize: initial gradient does not match problem dimension")
			}
		}
		if initOperation&HessEvaluation != 0 {
			if initLocation.Hessian == nil {
				// Message previously (wrongly) referred to the gradient.
				panic("optimize: operation specifies Hessian, but no Hessian specified")
			}
			if initLocation.Hessian.Symmetric() != dim {
				panic("optimize: initial Hessian does not match problem dimension")
			}
		}
	}
	if initLocation.X == nil {
		return loc
	}
	if len(initLocation.X) != dim {
		panic("optimize: specified initial location does not match problem dimension")
	}
	copy(loc.X, initLocation.X)
	// Copy only the data that initOperation declares valid and that the
	// method actually needs (newLocation allocates only needed fields).
	if initOperation&FuncEvaluation != 0 {
		loc.F = initLocation.F
	}
	if needs.Gradient && initOperation&GradEvaluation != 0 {
		copy(loc.Gradient, initLocation.Gradient)
	}
	if needs.Hessian && initOperation&HessEvaluation != 0 {
		loc.Hessian.CopySym(initLocation.Hessian)
	}
	return loc
}

func checkOptimization(p Problem, dim int, method Needser, recorder Recorder) error {
if p.Func == nil {
panic(badProblem)
Expand Down
19 changes: 12 additions & 7 deletions optimize/types.go
Expand Up @@ -164,15 +164,20 @@ func (p Problem) satisfies(method Needser) error {
// settings, convergence information, and Recorder information. In general, users
// should use DefaultSettings rather than constructing a Settings literal.
//
// If UseInitData is true, InitialValue, InitialGradient and InitialHessian
// specify function information at the initial location.
//
// If Recorder is nil, no information will be recorded.
type Settings struct {
UseInitialData bool // Use supplied information about the conditions at the initial x.
InitialValue float64 // Function value at the initial x.
InitialGradient []float64 // Gradient at the initial x.
InitialHessian *mat.SymDense // Hessian at the initial x.
// InitLocation specifies an initial location for the Method, and optionally
// additional information about the function at the initial location. If
// InitLocation is nil, or InitLocation.X is nil, then a default location of
// 0 is used. Properties at the initial location (function value, gradient, etc.)
// may also be specified in InitLocation. The InitOperation field must also
// be set to specify which fields have been set, for example to FuncEvaluation
// if InitLocation.F is correct, FuncEvaluation | GradEvaluation if
// both InitLocation.F and InitLocation.Gradient are valid, etc.
InitLocation *Location
// InitOperation specifies the valid fields of the InitLocation field.
// See the InitLocation documentation for more information.
InitOperation Operation

// FunctionThreshold is the threshold for acceptably small values of the
// objective function. FunctionThreshold status is returned if
Expand Down
16 changes: 10 additions & 6 deletions optimize/unconstrained_test.go
Expand Up @@ -1230,15 +1230,19 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {

// We are going to restart the solution using known initial data, so
// evaluate them.
settings.UseInitialData = true
settings.InitialValue = test.p.Func(test.x)
settings.InitLocation = &Location{}
settings.InitLocation.X = test.x
settings.InitLocation.F = test.p.Func(test.x)
settings.InitOperation = FuncEvaluation
if method.Needs().Gradient {
settings.InitialGradient = resize(settings.InitialGradient, len(test.x))
test.p.Grad(settings.InitialGradient, test.x)
settings.InitLocation.Gradient = resize(settings.InitLocation.Gradient, len(test.x))
test.p.Grad(settings.InitLocation.Gradient, test.x)
settings.InitOperation |= GradEvaluation
}
if method.Needs().Hessian {
settings.InitialHessian = mat.NewSymDense(len(test.x), nil)
test.p.Hess(settings.InitialHessian, test.x)
settings.InitLocation.Hessian = mat.NewSymDense(len(test.x), nil)
test.p.Hess(settings.InitLocation.Hessian, test.x)
settings.InitOperation |= HessEvaluation
}

// Rerun the test again to make sure that it gets the same answer with
Expand Down

0 comments on commit a6d030e

Please sign in to comment.