Skip to content

Commit

Permalink
add TunedOpt, add tests
Browse files Browse the repository at this point in the history
  • Loading branch information
Aizen committed Apr 22, 2024
1 parent 7c8c0a7 commit 786455e
Show file tree
Hide file tree
Showing 3 changed files with 72 additions and 14 deletions.
29 changes: 27 additions & 2 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ type Config struct {
Momentum float64
SigmaTol float64
Verbose bool
Seed uint64
}

func Defaults() Config {
Expand All @@ -65,8 +66,9 @@ func Defaults() Config {
cfg.LR_mu = 0.6
cfg.LR_sigma = 0.15
cfg.Momentum = 0.93
cfg.SigmaTol = 1e-12
cfg.SigmaTol = 1e-14
cfg.Verbose = false
cfg.Seed = 798371291237
return cfg
}

Expand All @@ -77,6 +79,7 @@ type Result struct {

const const_Ez0 = 0.7978845608028661 // mean(abs(randn()))
func Opt(fn func([]float64) float64, mu []float64, sigma []float64, cfg Config) (Result, error) {
rng := rand.New(rand.NewSource(cfg.Seed))
pop_n := cfg.PopSize
n := len(mu)
if len(sigma) != n {
Expand Down Expand Up @@ -105,7 +108,7 @@ func Opt(fn func([]float64) float64, mu []float64, sigma []float64, cfg Config)
trial := make([]float64, n)
for {
for i := range n {
z[i] = rand.NormFloat64()
z[i] = rng.NormFloat64()
trial[i] = z[i]*sd[i] + av[i]
}
cost := fn(trial)
Expand Down Expand Up @@ -134,6 +137,7 @@ func Opt(fn func([]float64) float64, mu []float64, sigma []float64, cfg Config)
g[j] = 0
g_log_sigma[j] = 0
}

for i, p := range pop {
if W[i] <= 0 {
break
Expand Down Expand Up @@ -163,6 +167,27 @@ func DefaultOpt(fn func([]float64) float64, mu []float64, sigma []float64) (Resu
cfg.Generations = int(math.Ceil(math.Sqrt(float64(len(mu)*2+1)) * 300))
return Opt(fn, mu, sigma, cfg)
}

// TunedOpt is a convenience wrapper around Opt that first tunes the
// optimizer's hyper-parameters (LR_mu, LR_sigma, Momentum) for the given
// cost function, then runs a final optimization with the tuned
// configuration and a large generation budget. It is considerably more
// expensive than DefaultOpt but may produce better results.
//
//   - fn: objective to minimize; takes a candidate vector, returns its cost.
//   - mu: initial mean vector (starting point of the search).
//   - sigma: initial standard-deviation vector (initial search radius).
func TunedOpt(fn func([]float64) float64, mu []float64, sigma []float64) (Result, error) {
	// Full generation budget, scaled with problem dimensionality
	// (same shape as DefaultOpt's budget, 5x larger).
	max_gen := int(math.Ceil(math.Sqrt(float64(len(mu)*2+1)) * 1500))

	// Meta-optimization: search over raw hyper-parameter values; Probability
	// squashes each raw value into (0, 1) so any real input is a valid rate.
	tuned, err := DefaultOpt(func(f []float64) float64 {
		cfg := Defaults()
		cfg.Generations = max_gen / 50 // cheap inner runs while tuning
		cfg.LR_mu = Probability(f[0])
		cfg.LR_sigma = Probability(f[1])
		cfg.Momentum = Probability(f[2])
		// Best-effort inner run: a failed run simply scores as whatever
		// fn makes of the zero-value result.
		res, _ := Opt(fn, mu, sigma, cfg)
		return fn(res.Mu)
	}, []float64{3, -3, -3}, []float64{0.5, 0.5, 0.5})
	if err != nil {
		// Was silently discarded before; tuned.Mu would be indexed below
		// on a meaningless Result, so propagate instead.
		return Result{}, err
	}

	// Final run: tuned hyper-parameters, full generation budget.
	cfg := Defaults()
	cfg.LR_mu = Probability(tuned.Mu[0])
	cfg.LR_sigma = Probability(tuned.Mu[1])
	cfg.Momentum = Probability(tuned.Mu[2])
	cfg.Generations = max_gen
	return Opt(fn, mu, sigma, cfg)
}

func makeWeights(pop_size int) []float64 {
W := make([]float64, pop_size)
for i := range pop_size {
Expand Down
52 changes: 41 additions & 11 deletions main_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -42,20 +42,50 @@ func TestUni(t *testing.T) {
}
}
}
// TestBounded checks that DefaultOpt drives a Bounded variable to the lower
// edge of its [-2, 5] range when the objective is the bounded value itself.
func TestBounded(t *testing.T) {
	objective := func(x []float64) float64 {
		return Bounded(x[0], -2, 5)
	}
	res, _ := DefaultOpt(objective, []float64{0.0}, []float64{1.0})
	const want = -2.0
	got := objective(res.Mu)
	if diff := math.Abs(got - want); diff > 1e-5 {
		t.Errorf("got %.5f, wanted %.5f, err %.2g", got, want, diff)
	}
}

func TestBi(t *testing.T) {
muw := []float64{4, -3}
sol, err_opt := DefaultOpt(func(f []float64) float64 {
return abs2(f[0]-muw[0]) + 100.0*abs2(f[0]+f[1]-muw[0]-muw[1])
}, []float64{0.0, 0.0}, []float64{1.0, 1.0})
if err_opt != nil {
t.Error(err_opt)
{
muw := []float64{4, -3}
sol, err_opt := DefaultOpt(func(f []float64) float64 {
return abs2(f[0]-muw[0]) + 100.0*abs2(f[0]+f[1]-muw[0]-muw[1])
}, []float64{0.0, 0.0}, []float64{1.0, 1.0})
if err_opt != nil {
t.Error(err_opt)
}
mu := sol.Mu
sig := sol.Sigma
err := math.Sqrt(abs2((mu[0]-muw[0])/muw[0]) + abs2((mu[1]-muw[1])/muw[1]))
if err > 1e-6 {
t.Error("DefaultOpt, got: ", mu, sig, " wanted:", muw, " error:", err)
}
}
mu := sol.Mu
sig := sol.Sigma
err := math.Sqrt(abs2((mu[0]-muw[0])/muw[0]) + abs2((mu[1]-muw[1])/muw[1]))
if err > 1e-6 {
t.Error("got: ", mu, sig, " wanted:", muw, " error:", err)
{
muw := []float64{4, -3}
sol, err_opt := TunedOpt(func(f []float64) float64 {
return abs2(f[0]-muw[0]) + 500.0*abs2(f[0]+f[1]-muw[0]-muw[1])
}, []float64{0.0, 0.0}, []float64{1.0, 1.0})
if err_opt != nil {
t.Error(err_opt)
}
mu := sol.Mu
sig := sol.Sigma
err := math.Sqrt(abs2((mu[0]-muw[0])/muw[0]) + abs2((mu[1]-muw[1])/muw[1]))
if err > 1e-8 {
t.Error("TunedOpt, got: ", mu, sig, " wanted:", muw, " error:", err)
}
}
}

Expand Down
5 changes: 4 additions & 1 deletion readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,10 @@ The `GoES` package implements an Evolutionary Algorithm (EA) for optimization pr
* `fn`: A user-defined function representing the objective function to be optimized. This function should accept a slice of `float64` values as input and return a single `float64` value representing the cost or fitness of the solution.
* `mu`: An initial mean vector of `float64` values, defining the starting point of the search in the solution space.
* `sigma`: An initial standard deviation vector of `float64` values, determining the initial search radius around the mean vector.

* **`TunedOpt` function:** This convenience function calls `Opt` with a configuration that is tuned to the user's cost function. Running this tuned optimizer is more expensive than using `Opt` directly, but it may lead to better results.
* `fn`: A user-defined function representing the objective function to be optimized. This function should accept a slice of `float64` values as input and return a single `float64` value representing the cost or fitness of the solution.
* `mu`: An initial mean vector of `float64` values, defining the starting point of the search in the solution space.
* `sigma`: An initial standard deviation vector of `float64` values, determining the initial search radius around the mean vector.
**Configuration**

The `Config` struct allows fine-tuning the optimization process:
Expand Down

0 comments on commit 786455e

Please sign in to comment.