Provide possibility to restrict local search to k best individuals #160

Merged
25 changes: 21 additions & 4 deletions cgp/ea/mu_plus_lambda.py
@@ -1,7 +1,7 @@
 import concurrent.futures
 import numpy as np

-from typing import Callable, List, Tuple
+from typing import Callable, List, Tuple, Union

 from ..individual import IndividualBase
 from ..population import Population
@@ -25,6 +25,7 @@ def __init__(
         *,
         n_processes: int = 1,
         local_search: Callable[[IndividualBase], None] = lambda combined: None,
+        k_local_search: Union[int, None] = None,
     ):
         """Init function

@@ -43,7 +44,9 @@ def __init__(
             Called before each fitness evaluation with a joint list of
             offsprings and parents to optimize numeric leaf values of
             the graph. Defaults to identity function.
-
+        k_local_search : int
+            Number of individuals in the whole population (parents +
+            offsprings) to apply local search to.
         """
         self.n_offsprings = n_offsprings

@@ -57,6 +60,7 @@ def __init__(
         self.tournament_size = tournament_size
         self.n_processes = n_processes
         self.local_search = local_search
+        self.k_local_search = k_local_search

     def initialize_fitness_parents(
         self, pop: Population, objective: Callable[[IndividualBase], IndividualBase]
@@ -108,10 +112,23 @@ def step(
         # population instead of the other way around
         combined = offsprings + pop.parents

-        for ind in combined:
-            self.local_search(ind)
+        # we follow a two-step process for selection of new parents:
+        # we first determine the fitness for all individuals, then, if
+        # applicable, we apply local search to the k_local_search
+        # fittest individuals; after this we need to recompute the
+        # fitness for all individuals for which parameters changed
+        # during local search; finally we sort again by fitness, now
+        # taking into account the effect of local search for
+        # subsequent selection
+        combined = self._compute_fitness(combined, objective)
+        combined = self._sort(combined)
+
+        n_total = self.n_offsprings + pop.n_parents
+        k_local_search = n_total if self.k_local_search is None else self.k_local_search
+        for idx in range(k_local_search):
+            self.local_search(combined[idx])

         combined = self._compute_fitness(combined, objective)
         combined = self._sort(combined)

         pop.parents = self._create_new_parent_population(pop.n_parents, combined)
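For reference, a minimal usage sketch of the new parameter (the no-op local-search callable is a hypothetical stand-in, not part of this diff): leaving `k_local_search` at its default `None` applies local search to all `n_offsprings + n_parents` individuals as before, while an integer restricts it to that many of the fittest individuals after an initial fitness-based sort.

```python
import cgp


def noop_local_search(ind):
    # hypothetical stand-in for e.g. cgp.local_search.gradient_based
    pass


# apply local search only to the 2 fittest individuals of the
# combined (parents + offsprings) population in each step
ea = cgp.ea.MuPlusLambda(
    n_offsprings=4,
    n_breeding=4,
    tournament_size=1,
    local_search=noop_local_search,
    k_local_search=2,
)
```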
8 changes: 7 additions & 1 deletion examples/example_differential_evo_regression.py
@@ -70,7 +70,13 @@ def evolution():
         "primitives": (cgp.Add, cgp.Sub, cgp.Mul, cgp.Parameter),
     }

-    ea_params = {"n_offsprings": 4, "n_breeding": 4, "tournament_size": 1, "n_processes": 1}
+    ea_params = {
+        "n_offsprings": 4,
+        "n_breeding": 4,
+        "tournament_size": 1,
+        "n_processes": 1,
+        "k_local_search": 2,
+    }

     evolve_params = {"max_generations": 2000, "min_fitness": 0.0}
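For context, a sketch of how these parameter dicts are typically wired together in the hal-cgp examples (the surrounding example code is not part of this diff; `population_params`, `genome_params`, `objective`, and `local_search` are assumed to be defined as in the full file):

```python
pop = cgp.Population(**population_params, genome_params=genome_params)
ea = cgp.ea.MuPlusLambda(**ea_params, local_search=local_search)
cgp.evolve(pop, objective, ea, **evolve_params)
```

With `"k_local_search": 2`, only the two fittest of the combined parents and offsprings are optimized by the comparatively expensive gradient-based local search in each generation.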
5 changes: 5 additions & 0 deletions test/conftest.py
@@ -48,3 +48,8 @@ def population_simple_fitness(population_params, genome_params)
         parent.fitness = float(i)

     return pop
+
+
+@fixture
+def local_search_params():
+    return {"lr": 1e-3, "gradient_steps": 9}
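This fixture bundles the keyword arguments that the new test below forwards to `cgp.local_search.gradient_based` via `functools.partial`: a learning rate and the number of gradient steps applied to an individual's numeric parameters.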
47 changes: 47 additions & 0 deletions test/test_ea_mu_plus_lambda.py
@@ -57,3 +57,50 @@ def objective(ind):

     for idx, ind in enumerate(offsprings):
         assert ind.idx == len(pop.parents) + idx
+
+
+def test_local_search_is_only_applied_to_best_k_individuals(
+    population_params, local_search_params
+):
+
+    torch = pytest.importorskip("torch")
+
+    def inner_objective(f):
+        return torch.nn.MSELoss()(torch.Tensor([[1.1]]), f(torch.zeros(1, 1)))
+
+    def objective(ind):
+        if ind.fitness is not None:
+            return ind
+
+        f = ind.to_torch()
+        ind.fitness = -inner_objective(f).item()
+        return ind
+
+    population_params["mutation_rate"] = 0.3
+
+    genome_params = {
+        "n_inputs": 1,
+        "n_outputs": 1,
+        "n_columns": 1,
+        "n_rows": 1,
+        "levels_back": None,
+        "primitives": (cgp.Parameter,),
+    }
+
+    k_local_search = 2
+
+    pop = cgp.Population(**population_params, genome_params=genome_params)
+
+    local_search = functools.partial(
+        cgp.local_search.gradient_based, objective=inner_objective, **local_search_params,
+    )
+
+    ea = cgp.ea.MuPlusLambda(5, 5, 1, local_search=local_search, k_local_search=k_local_search)
+    ea.initialize_fitness_parents(pop, objective)
+    ea.step(pop, objective)
+
+    for idx in range(k_local_search):
+        assert pop[idx].genome._parameter_names_to_values["<p1>"] != pytest.approx(1.0)
+
+    for idx in range(k_local_search, population_params["n_parents"]):
+        assert pop[idx].genome._parameter_names_to_values["<p1>"] == pytest.approx(1.0)
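The assertions exploit that the `<p1>` parameter of a `cgp.Parameter` node starts out at 1.0: only individuals that went through gradient-based local search have `<p1>` pulled away from 1.0 (toward the regression target 1.1 in `inner_objective`), so after one `step` exactly the `k_local_search` fittest individuals should deviate from 1.0.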