Improve CMA-ES algorithm and other tests
btracey committed Oct 23, 2017
1 parent e81623b commit 7c8015b
Showing 2 changed files with 86 additions and 22 deletions.
23 changes: 12 additions & 11 deletions optimize/cmaes.go
@@ -41,9 +41,9 @@ import (
//
// The CMA-ES-Chol algorithm differs from the standard CMA-ES algorithm in that
// it directly updates the Cholesky decomposition of the normal distribution.
-// This changes the runtime is O(dim^2*pop) rather than O(dim^3). The evolution
-// of the multi-variate normal will be similar to the baseline CMA-ES algorithm,
-// but the covariance update equation is not identical.
+// This changes the runtime from O(dimension^3) to O(dimension^2*population).
+// The evolution of the multi-variate normal will be similar to the baseline
+// CMA-ES algorithm, but the covariance update equation is not identical.
//
// For more information about the CMA-ES algorithm, see
// https://en.wikipedia.org/wiki/CMA-ES
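
For orientation, here is a minimal usage sketch of CmaEsChol driven through Global, mirroring the API the tests in this commit exercise (Problem, Settings, Global, and the method's Population and Src fields). The objective, dimension, seed, and printing are illustrative, not part of this commit.

// A minimal usage sketch, assuming the gonum.org/v1/gonum/optimize API
// as exercised by the tests in this commit; the objective, dimension,
// and seed are illustrative only.
package main

import (
	"fmt"
	"math"
	"math/rand"

	"gonum.org/v1/gonum/optimize"
	"gonum.org/v1/gonum/optimize/functions"
)

func main() {
	problem := optimize.Problem{
		Func: functions.Rastrigin{}.Func,
	}
	method := &optimize.CmaEsChol{
		Population: 100,                         // A larger population reduces sampling noise.
		Src:        rand.New(rand.NewSource(1)), // Fixed seed for reproducibility.
	}
	settings := &optimize.Settings{
		FunctionThreshold: math.Inf(-1), // Rely on the method's own convergence test.
	}
	result, err := optimize.Global(problem, 2, settings, method)
	if err != nil {
		fmt.Println("optimization error:", err)
		return
	}
	fmt.Println("status:", result.Status, "minimum found at:", result.X)
}
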
@@ -71,8 +71,8 @@ type CmaEsChol struct {
// (log) "volume" of the normal distribution, and when it is too small
// the samples are almost the same. If the log determinant of the covariance
// matrix becomes less than StopLogDet, the optimization run is concluded.
-// If StopDeterminant is 0, a default value of dim*log(1e-16) is used.
-// If StopDeterminant is NaN, the stopping criteria is not used, though
+// If StopLogDet is 0, a default value of dim*log(1e-16) is used.
+// If StopLogDet is NaN, the stopping criterion is not used, though
// this can cause numeric instabilities in the algorithm.
StopLogDet float64
// ForgetBest, when true, does not track the best overall function value found,
@@ -103,10 +103,11 @@ type CmaEsChol struct {
chol mat.Cholesky

// Parallel fields.
-taskIdxs []int // Stores which simulation the task ran.
-evals []int // remaining evaluations in this iteration.
-mux sync.Mutex // protect access to evals.
-wg sync.WaitGroup // wait for simulations to finish before iterating.
+taskIdxs []int // Stores which simulation the task ran.
+evals []int // remaining evaluations in this iteration.
+
+mux sync.Mutex // protect access to evals.
+wg sync.WaitGroup // wait for simulations to finish before iterating.

// Overall best.
bestX []float64
@@ -124,7 +125,7 @@ func (cma *CmaEsChol) Needs() struct{ Gradient, Hessian bool } {

func (cma *CmaEsChol) Done() {}

-// Status returns the status of the method. CMA
+// Status returns the status of the method.
func (cma *CmaEsChol) Status() (Status, error) {
sd := cma.StopLogDet
switch {
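
The remainder of Status is collapsed in this view. Based on the StopLogDet documentation above (zero selects a default of dim*log(1e-16), NaN disables the check), the resolution presumably looks something like the sketch below; the helper name and package clause are hypothetical, not the package's verbatim code.

// Hypothetical sketch of the StopLogDet resolution implied by the
// field's documentation.
package sketch

import "math"

// resolveStopLogDet (a hypothetical helper) returns the effective
// log-determinant threshold and whether the criterion is active.
func resolveStopLogDet(stopLogDet float64, dim int) (threshold float64, active bool) {
	switch {
	case math.IsNaN(stopLogDet):
		// NaN explicitly disables the stopping criterion.
		return 0, false
	case stopLogDet == 0:
		// Zero selects the documented default of dim*log(1e-16).
		return float64(dim) * math.Log(1e-16), true
	default:
		return stopLogDet, true
	}
}
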
@@ -229,7 +230,7 @@ func (cma *CmaEsChol) InitGlobal(dim, tasks int) int {
cma.bestF = math.Inf(1)

t := min(tasks, cma.pop)
-cma.taskIdxs = make([]int, cma.pop)
+cma.taskIdxs = make([]int, t)
for i := 0; i < t; i++ {
cma.taskIdxs[i] = -1
}
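
The fix above sizes taskIdxs by t = min(tasks, cma.pop), the number of tasks that will actually run, rather than by the full population. Go of this era has no built-in integer min, so the call presumably relies on a small package-level helper along these lines (a sketch, not necessarily the package's exact code):

// A minimal integer min of the kind min(tasks, cma.pop) relies on above.
func min(a, b int) int {
	if a < b {
		return a
	}
	return b
}
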
85 changes: 74 additions & 11 deletions optimize/cmaes_test.go
@@ -7,19 +7,29 @@ package optimize
import (
"errors"
"math"
"math/rand"
"testing"

"gonum.org/v1/gonum/mat"

"gonum.org/v1/gonum/floats"
"gonum.org/v1/gonum/optimize/functions"
)

-func TestCmaEsChol(t *testing.T) {
-for i, test := range []struct {
-dim int
-problem Problem
-method *CmaEsChol
-settings *Settings
-good func(*Result, error) error
-}{
+type cmaTestCase struct {
+dim int
+problem Problem
+method *CmaEsChol
+settings *Settings
+good func(*Result, error) error
+}
+
+func cmaTestCases() []cmaTestCase {
+localMinMean := []float64{2.2, -2.2}
+s := mat.NewSymDense(2, []float64{0.01, 0, 0, 0.01})
+var localMinChol mat.Cholesky
+localMinChol.Factorize(s)
+return []cmaTestCase{
{
// Test that it can find a small value.
dim: 10,
@@ -115,15 +125,68 @@ func TestCmaEsChol(t *testing.T) {
return nil
},
},
-} {
+{
+// Test that the global minimum is found with the right initialization.
+dim: 2,
+problem: Problem{
+Func: functions.Rastrigin{}.Func,
+},
+method: &CmaEsChol{
+Population: 100, // Increase the population size to reduce noise.
+},
+settings: &Settings{
+FunctionThreshold: math.Inf(-1),
+},
+good: func(result *Result, err error) error {
+if result.Status != MethodConverge {
+return errors.New("result not method converge")
+}
+if !floats.EqualApprox(result.X, []float64{0, 0}, 1e-6) {
+return errors.New("global minimum not found")
+}
+return nil
+},
+},
+{
+// Test that a local minimum is found (with a different initialization).
+dim: 2,
+problem: Problem{
+Func: functions.Rastrigin{}.Func,
+},
+method: &CmaEsChol{
+Population: 100, // Increase the population size to reduce noise.
+InitMean: localMinMean,
+InitCholesky: &localMinChol,
+},
+settings: &Settings{
+FunctionThreshold: math.Inf(-1),
+},
+good: func(result *Result, err error) error {
+if result.Status != MethodConverge {
+return errors.New("result not method converge")
+}
+if !floats.EqualApprox(result.X, []float64{2, -2}, 1e-2) {
+return errors.New("local minimum not found")
+}
+return nil
+},
+},
+}
+}
+
+func TestCmaEsChol(t *testing.T) {
+for i, test := range cmaTestCases() {
+src := rand.New(rand.NewSource(1))
+method := test.method
+method.Src = src
// Run and check that the expected termination occurs.
-result, err := Global(test.problem, test.dim, test.settings, test.method)
+result, err := Global(test.problem, test.dim, test.settings, method)
if testErr := test.good(result, err); testErr != nil {
t.Errorf("cas %d: %v", i, testErr)
}

// Run a second time to make sure there are no residual effects
-result, err = Global(test.problem, test.dim, test.settings, test.method)
+result, err = Global(test.problem, test.dim, test.settings, method)
if testErr := test.good(result, err); testErr != nil {
t.Errorf("cas %d second: %v", i, testErr)
}
