Merge pull request #262 from jakobj/enh/local-search-es
Implement local search with natural evolution strategies
HenrikMettler committed Feb 9, 2021
2 parents b471d4f + 9ff15f1 commit 2cb906f
Showing 8 changed files with 774 additions and 7 deletions.
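
The headline change: MuPlusLambda now accepts an optional local_search callable that receives a single individual and tunes its numeric parameters in place; each generation it is applied to (copies of) the k_local_search best individuals. A minimal sketch of that contract, built only from the signatures visible in the diffs below — the import paths and the n_offsprings argument are assumptions:

from cgp.ea import MuPlusLambda  # import path is an assumption
from cgp.individual import IndividualBase


def my_local_search(ind: IndividualBase) -> None:
    # matches the Callable[[IndividualBase], None] contract: read the numeric
    # parameters, update them, write them back; the EA re-evaluates fitness
    # on a copy of the individual afterwards
    params, names = ind.parameters_to_numpy_array(only_active_nodes=True)
    ind.update_parameters_from_numpy_array(params + 0.1, names)  # toy update rule


ea = MuPlusLambda(
    n_offsprings=4,  # assumed argument, not shown in this diff
    local_search=my_local_search,
    k_local_search=2,  # apply local search to the two best individuals only
)
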
50 changes: 43 additions & 7 deletions cgp/ea/mu_plus_lambda.py
@@ -24,7 +24,7 @@ def __init__(
         *,
         tournament_size: Union[None, int] = None,
         n_processes: int = 1,
-        local_search: Callable[[IndividualBase], None] = lambda combined: None,
+        local_search: Union[None, Callable[[IndividualBase], None]] = None,
         k_local_search: Union[int, None] = None,
         reorder_genome: bool = False,
         hurdle_percentile: List = [0.0],
@@ -152,18 +152,54 @@ def step(
         combined = self._compute_fitness(combined, objective)
         combined = self._sort(combined)
 
-        n_total = self.n_offsprings + pop.n_parents
-        k_local_search = n_total if self.k_local_search is None else self.k_local_search
-        for idx in range(k_local_search):
-            self.local_search(combined[idx])
-
-        combined = self._compute_fitness(combined, objective)
-        combined = self._sort(combined)
+        if self.local_search is not None:
+            assert isinstance(pop.champion.fitness, float)
+            prev_avg_fitness: float = np.mean([ind.fitness for ind in combined])
+
+            combined_copy = [ind.copy() for ind in combined]
+
+            k_local_search = (
+                len(combined_copy) if self.k_local_search is None else self.k_local_search
+            )
+            for idx in range(k_local_search):
+                self.local_search(combined_copy[idx])
+
+            combined_copy = self._compute_fitness(combined_copy, objective)
+
+            new_combined = self._create_new_combined_population_after_local_search(
+                combined, combined_copy
+            )
+
+            combined = self._sort(new_combined)
+
+            avg_fitness: float = np.mean([ind.fitness for ind in combined])
+            if prev_avg_fitness > avg_fitness:
+                raise RuntimeError(
+                    "The average fitness decreased after executing the local search."
+                    " This indicates that something went wrong during the"
+                    " optimization. Aborting."
+                )
 
         pop.parents = self._create_new_parent_population(pop.n_parents, combined)
 
         return pop
 
+    @staticmethod
+    def _create_new_combined_population_after_local_search(
+        combined: List["IndividualBase"], combined_copy: List["IndividualBase"]
+    ) -> List["IndividualBase"]:
+        new_combined: List["IndividualBase"] = []
+        for ind in combined:
+            for ind_copy in combined_copy:
+                if ind.idx == ind_copy.idx:
+                    assert ind.fitness is not None
+                    assert ind_copy.fitness is not None
+                    if ind.fitness < ind_copy.fitness:
+                        new_combined.append(ind_copy)
+                    else:
+                        new_combined.append(ind)
+        return new_combined
+
     def _create_new_offspring_generation(self, pop: Population) -> List[IndividualBase]:
         # use tournament selection to randomly select individuals from
         # parent population
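
The reworked step() runs local search on copies only and then merges: for each idx, _create_new_combined_population_after_local_search keeps whichever of the original and its locally searched copy has the higher fitness, so local search can never make an individual worse — which is why a drop in average fitness is escalated to a RuntimeError. A toy illustration of the merge rule, using a hypothetical Ind stand-in rather than the repository's classes:

from dataclasses import dataclass


@dataclass
class Ind:  # hypothetical stand-in for IndividualBase
    idx: int
    fitness: float


combined = [Ind(0, 1.0), Ind(1, 2.0)]
combined_copy = [Ind(0, 1.5), Ind(1, 1.8)]  # fitness after local search

# same rule as above: the copy wins only if strictly fitter, otherwise the
# original is kept
new_combined = [
    max(ind, ind_copy, key=lambda i: i.fitness)
    for ind in combined
    for ind_copy in combined_copy
    if ind.idx == ind_copy.idx
]
assert [i.fitness for i in new_combined] == [1.5, 2.0]
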
32 changes: 32 additions & 0 deletions cgp/genome.py
@@ -748,3 +748,35 @@ def _initialize_parameter_values(
             )
             modified_parameter_value = True
         return modified_parameter_value
+
+    def parameters_to_numpy_array(self, only_active_nodes: bool = False) -> "Tuple[np.ndarray[float], List[str]]":
+        if only_active_nodes:
+            graph = CartesianGraph(self)
+            active_regions: List[int] = graph.determine_active_regions()
+            params_names: List[str] = []
+            params: List[float] = []
+            for p in self._parameter_names_to_values:
+                region_idx: int = self._region_idx_from_parameter_name(p)
+                if region_idx in active_regions:
+                    params_names.append(p)
+                    params.append(self._parameter_names_to_values[p])
+            return np.fromiter(params, dtype=float), params_names
+        else:
+            return (
+                np.fromiter(self._parameter_names_to_values.values(), dtype=float),
+                list(self._parameter_names_to_values.keys()),
+            )
+
+    def _region_idx_from_parameter_name(self, parameter_name: str) -> int:
+        return int(re.findall("<[A-Za-z]+([0-9]+)>", parameter_name)[0])
+
+    def update_parameters_from_numpy_array(
+        self, params: "np.ndarray[float]", params_names: List[str]
+    ) -> bool:
+        any_parameter_updated: bool = False
+        for v, p in zip(params, params_names):
+            assert p in self._parameter_names_to_values
+            if not np.isclose(self._parameter_names_to_values[p], v, rtol=0.0):
+                self._parameter_names_to_values[p] = v
+                any_parameter_updated = True
+        return any_parameter_updated
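
These two methods give gradient-free optimizers a flat numeric view of a genome's parameters: extract, modify, write back. A round-trip sketch — halve_active_parameters is a hypothetical helper, and parameter names are assumed to follow the "<p1>"-style convention implied by _region_idx_from_parameter_name:

from cgp.genome import Genome


def halve_active_parameters(genome: Genome) -> bool:
    # extract only the parameters of active regions, scale the flat array,
    # and write it back; True means at least one value actually changed
    params, names = genome.parameters_to_numpy_array(only_active_nodes=True)
    return genome.update_parameters_from_numpy_array(params * 0.5, names)
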
73 changes: 73 additions & 0 deletions cgp/individual.py
@@ -71,6 +71,9 @@ def objective_idx(self, v: int) -> None:
     def clone(self):
         raise NotImplementedError()
 
+    def copy(self):
+        raise NotImplementedError()
+
     def _copy_user_defined_attributes(self, other):
         """Copy all attributes that are not defined in __init__ of the (sub
         and super) class from self to other.
@@ -110,6 +113,12 @@ def to_sympy(self, simplify):
     def update_parameters_from_torch_class(self, torch_cls):
         raise NotImplementedError()
 
+    def parameters_to_numpy_array(self, only_active_nodes: bool = False) -> "Tuple[np.ndarray[float], List[str]]":
+        raise NotImplementedError()
+
+    def update_parameters_from_numpy_array(self, params, params_names):
+        raise NotImplementedError()
+
     @staticmethod
     def _mutate_genome(genome: Genome, mutation_rate: float, rng: np.random.RandomState) -> bool:
         return genome.mutate(mutation_rate, rng)
@@ -142,6 +151,16 @@ def _to_sympy(genome: Genome, simplify) -> "sympy_expr.Expr":
     def _update_parameters_from_torch_class(genome: Genome, torch_cls: "torch.nn.Module") -> bool:
         return genome.update_parameters_from_torch_class(torch_cls)
 
+    @staticmethod
+    def _parameters_to_numpy_array(genome: Genome, only_active_nodes: bool) -> "Tuple[np.ndarray[float], List[str]]":
+        return genome.parameters_to_numpy_array(only_active_nodes)
+
+    @staticmethod
+    def _update_parameters_from_numpy_array(
+        genome: Genome, params: "np.ndarray[float]", params_names: List[str]
+    ) -> bool:
+        return genome.update_parameters_from_numpy_array(params, params_names)
+
     def __lt__(self, other):
         for i in range(len(self._fitness)):
             if self._fitness[i] is None and other._fitness[i] is None:
@@ -178,6 +197,14 @@ def clone(self) -> "IndividualSingleGenome":
         self._copy_user_defined_attributes(ind)
         return ind
 
+    def copy(self) -> "IndividualSingleGenome":
+        ind = IndividualSingleGenome(self.genome.clone())
+        ind._fitness = list(self._fitness)
+        ind.idx = self.idx
+        ind.parent_idx = self.parent_idx
+        self._copy_user_defined_attributes(ind)
+        return ind
+
     def mutate(self, mutation_rate: float, rng: np.random.RandomState) -> None:
         only_silent_mutations = self._mutate_genome(self.genome, mutation_rate, rng)
         if not only_silent_mutations:
@@ -206,6 +233,18 @@ def update_parameters_from_torch_class(self, torch_cls: "torch.nn.Module") -> None:
         if any_parameter_updated:
             self.reset_fitness()
 
+    def parameters_to_numpy_array(self, only_active_nodes: bool = False) -> "Tuple[np.ndarray[float], List[str]]":
+        return self._parameters_to_numpy_array(self.genome, only_active_nodes)
+
+    def update_parameters_from_numpy_array(
+        self, params: "np.ndarray[float]", params_names: List[str]
+    ) -> None:
+        any_parameter_updated: bool = self._update_parameters_from_numpy_array(
+            self.genome, params, params_names
+        )
+        if any_parameter_updated:
+            self.reset_fitness()
 
 
 class IndividualMultiGenome(IndividualBase):
     """An individual with multiple genomes each representing a particular computational graph.
@@ -227,6 +266,14 @@ def clone(self) -> "IndividualMultiGenome":
         self._copy_user_defined_attributes(ind)
         return ind
 
+    def copy(self) -> "IndividualMultiGenome":
+        ind = IndividualMultiGenome([g.clone() for g in self.genome])
+        ind._fitness = list(self._fitness)
+        ind.idx = self.idx
+        ind.parent_idx = self.parent_idx
+        self._copy_user_defined_attributes(ind)
+        return ind
+
     def mutate(self, mutation_rate: float, rng: np.random.RandomState) -> None:
         for g in self.genome:
             only_silent_mutations = self._mutate_genome(g, mutation_rate, rng)
@@ -262,3 +309,29 @@ def update_parameters_from_torch_class(self, torch_cls: List["torch.nn.Module"]) -> None:
             )
         if any_parameter_updated:
             self.reset_fitness()
+
+    def parameters_to_numpy_array(self, only_active_nodes: bool = False) -> "Tuple[np.ndarray[float], List[str]]":
+        params: List["np.ndarray[float]"] = []
+        params_names: List[str] = []
+        for g in self.genome:
+            p, pn = self._parameters_to_numpy_array(g, only_active_nodes)
+            params.append(p)
+            params_names += pn
+        return np.hstack(params), params_names
+
+    def update_parameters_from_numpy_array(
+        self, params: "np.ndarray[float]", params_names: List[str]
+    ) -> None:
+        any_parameter_updated: bool = False
+        offset: int = 0
+        for g in self.genome:
+            n_parameters: int = len(g._parameter_names_to_values)
+            any_parameter_updated_inner: bool = self._update_parameters_from_numpy_array(
+                g,
+                params[offset : offset + n_parameters],
+                params_names[offset : offset + n_parameters],
+            )
+            any_parameter_updated = any_parameter_updated or any_parameter_updated_inner
+            offset += n_parameters
+        if any_parameter_updated:
+            self.reset_fitness()
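
Unlike clone() (whose body is not shown in this diff), copy() carries the fitness list and the idx/parent_idx bookkeeping over to the new object — exactly what MuPlusLambda.step() needs to pair each locally searched copy with its original. A small sketch; checked_copy is a hypothetical helper:

from cgp.individual import IndividualSingleGenome


def checked_copy(ind: IndividualSingleGenome) -> IndividualSingleGenome:
    ind_copy = ind.copy()
    # lineage indices and fitness values are preserved ...
    assert ind_copy.idx == ind.idx
    assert ind_copy.parent_idx == ind.parent_idx
    assert ind_copy._fitness == ind._fitness
    # ... but the fitness list and genome are independent objects
    assert ind_copy._fitness is not ind._fitness
    assert ind_copy.genome is not ind.genome
    return ind_copy
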
1 change: 1 addition & 0 deletions cgp/local_search/__init__.py
@@ -1 +1,2 @@
+from .evolution_strategies import EvolutionStrategies
 from .gradient_based import gradient_based
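
With the export in place, the natural-evolution-strategies local search can be handed to the EA. A hypothetical wiring sketch — the EvolutionStrategies constructor arguments shown here (objective, seed) are assumptions, not confirmed by this diff:

import cgp
from cgp.local_search import EvolutionStrategies


def inner_objective(ind):
    ...  # evaluate the individual's expression and assign its fitness


local_search = EvolutionStrategies(objective=inner_objective, seed=1234)  # assumed signature
ea = cgp.ea.MuPlusLambda(local_search=local_search, k_local_search=2)
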