Merge pull request #115 from jakobj/maint/rename-module
Rename module "python-gp" -> "hal-cgp"
jakobj committed May 25, 2020
2 parents 5e6b057 + c488a4e commit 8bb73db
Showing 32 changed files with 232 additions and 229 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -25,6 +25,6 @@ script:
- pytest --cov
- black --check .
- flake8 --config=.flake8 .
- mypy gp
- mypy cgp
after_success:
- coveralls
14 changes: 7 additions & 7 deletions README.md
@@ -1,10 +1,10 @@
python-gp
=========
HAL-CGP
=======
[![Python3.6](https://img.shields.io/badge/python-3.6-red.svg)](https://www.python.org/downloads/release/python-369/)
[![Python3.7](https://img.shields.io/badge/python-3.7-red.svg)](https://www.python.org/)
[![Python3.8](https://img.shields.io/badge/python-3.8-red.svg)](https://www.python.org/)
[![GPL license](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-3.0.html)
[![Build Status](https://api.travis-ci.org/Happy-Algorithms-League/python-gp.svg?branch=master)](https://travis-ci.org/Happy-Algorithms-League/python-gp)
[![Build Status](https://api.travis-ci.org/Happy-Algorithms-League/hal-cgp.svg?branch=master)](https://travis-ci.org/Happy-Algorithms-League/hal-cgp)
[![Checked with mypy](http://www.mypy-lang.org/static/mypy_badge.svg)](http://mypy-lang.org/)

Cartesian Genetic Programming (CGP) in Python.
@@ -46,7 +46,7 @@ genome_params = {
"n_columns": 10,
"n_rows": 2,
"levels_back": 5,
"primitives": [gp.Add, gp.Sub, gp.Mul, gp.Div, gp.ConstantFloat],
"primitives": [cgp.Add, cgp.Sub, cgp.Mul, cgp.Div, cgp.ConstantFloat],
}

ea_params = {"n_offsprings": 10, "n_breeding": 10, "tournament_size": 2, "n_processes": 2}
@@ -55,8 +55,8 @@ evolve_params = {"max_generations": 1000, "min_fitness": 0.0}
```
3. Initialize a population and an evolutionary algorithm instance:
```python
pop = gp.Population(**population_params, genome_params=genome_params)
ea = gp.ea.MuPlusLambda(**ea_params)
pop = cgp.Population(**population_params, genome_params=genome_params)
ea = cgp.ea.MuPlusLambda(**ea_params)
```
4. Define a callback function to record information about the progress of the evolution:
```python
@@ -67,7 +67,7 @@ def recording_callback(pop):
```
5. Use the `evolve` function that ties everything together and executes the evolution:
```python
gp.evolve(pop, obj, ea, **evolve_params, print_progress=True, callback=recording_callback)
cgp.evolve(pop, obj, ea, **evolve_params, print_progress=True, callback=recording_callback)
```


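The quickstart above passes an objective `obj` to `cgp.evolve` whose definition sits in a collapsed part of this diff. As a minimal sketch of the contract spelled out in the docstrings further down (take an `Individual`, return it with its fitness set), assuming `individual.to_func()` compiles the genome into a callable as in the hal-cgp examples; the target function and sample points below are illustrative, not taken from this commit:

```python
import numpy as np


def obj(individual):
    """Hypothetical objective: negative squared error against f(x) = x**2 + 1.0."""
    if individual.fitness is not None:
        return individual  # fitness already known, nothing to do

    f = individual.to_func()  # assumption: yields a callable computational graph

    x = np.linspace(-2.0, 2.0, 20)
    y = np.array([f([x_i])[0] for x_i in x])  # assumed convention: list in, list out
    individual.fitness = -float(np.sum((y - (x ** 2 + 1.0)) ** 2))
    return individual
```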
File renamed without changes.
File renamed without changes.
File renamed without changes.
10 changes: 5 additions & 5 deletions gp/ea/mu_plus_lambda.py → cgp/ea/mu_plus_lambda.py
@@ -65,11 +65,11 @@ def initialize_fitness_parents(
Parameters
----------
pop : gp.Population
pop : Population
Population instance.
objective : Callable[[gp.Individual], gp.Individual]
An objective function used for the evolution. Needs to take an
invidual (gp.Individual) as input parameter and return
individual (Individual) as input parameter and return
a modified individual (with updated fitness).
"""
# TODO can we avoid this function? how should a population be
@@ -81,16 +81,16 @@ def step(self, pop: Population, objective: Callable[[Individual], Individual]) -
Parameters
----------
pop : gp.Population
pop : Population
Population instance.
objective : Callable[[gp.Individual], gp.Individual]
An objective function used for the evolution. Needs to take an
invidual (gp.Individual) as input parameter and return
individual (Individual) as input parameter and return
a modified individual (with updated fitness).
Returns
----------
pop : gp.Population
Population
Modified population with new parents.
"""
offsprings = self._create_new_offspring_generation(pop)
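The two methods touched here, `initialize_fitness_parents` and `step`, are what the `evolve` helper in `cgp/hl_api.py` below drives in a loop. As a sketch of a hand-rolled equivalent, reusing the parameter dictionaries and objective from the README above and assuming `step` returns the modified population as its docstring states:

```python
import cgp

pop = cgp.Population(**population_params, genome_params=genome_params)
ea = cgp.ea.MuPlusLambda(**ea_params)

ea.initialize_fitness_parents(pop, obj)  # evaluate the initial parents once
for _ in range(evolve_params["max_generations"]):
    pop = ea.step(pop, obj)  # breed, mutate, evaluate offspring, select new parents
    if pop.champion.fitness >= evolve_params["min_fitness"]:
        break
```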
2 changes: 1 addition & 1 deletion gp/genome.py → cgp/genome.py
@@ -423,7 +423,7 @@ def clone(self) -> "Genome":
Returns
-------
gp.Genome
Genome
"""

new = Genome(
4 changes: 2 additions & 2 deletions gp/hl_api.py → cgp/hl_api.py
@@ -21,11 +21,11 @@ def evolve(
Parameters
----------
pop : gp.Population
pop : Population
A population class that will be evolved.
objective : Callable
An objective function used for the evolution. Needs to take an
invidual (gp.Individual) as input parameter and return
individual (Individual) as input parameter and return
a modified individual (with updated fitness).
ea : EA algorithm instance
The evolution algorithm. Needs to be a class instance with an
10 changes: 5 additions & 5 deletions gp/individual.py → cgp/individual.py
@@ -23,7 +23,7 @@ def __init__(self, fitness, genome):
fitness : float
Fitness of the individual.
genome: Genome instance
Genome of the invididual.
Genome of the individual.
"""
self.fitness = fitness
self.genome = genome
@@ -40,7 +40,7 @@ def clone(self):
Returns
-------
gp.Individual
Individual
"""
new_individual = Individual(self.fitness, self.genome.clone())

@@ -55,14 +55,14 @@ def crossover(self, other_parent, rng):
Parameters
----------
other_parent : gp.Individual
other_parent : Individual
Other individual to perform crossover with.
rng : numpy.RandomState
Random number generator instance to use for crossover.
Returns
-------
gp.Individual
Individual
"""
raise NotImplementedError("crossover currently not supported")

@@ -181,7 +181,7 @@ def update_parameters_from_torch_class(self, torch_cls):
class IndividualMultiGenome(Individual):
"""An individual with multiple genomes.
Derived from gp.Individual.
Derived from Individual.
"""

def clone(self):
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion gp/node.py → cgp/node.py
@@ -11,7 +11,7 @@ def register(cls: Type["Node"]) -> None:
Parameters
----------
cls : Type[gp.Node]
cls : Type[Node]
Primitive to be registered.
Returns
File renamed without changes.
8 changes: 4 additions & 4 deletions gp/population.py → cgp/population.py
@@ -95,14 +95,14 @@ def crossover(self, breeding_pool: List[Individual], n_offsprings: int) -> List[
Parameters
----------
breeding_pool : List[gp.Individual]
breeding_pool : List[Individual]
List of individuals from which the offspring are created.
n_offsprings : int
Number of offspring to be created.
Returns
----------
List[gp.Individual]
List[Individual]
List of offspring individuals.
"""
# in principle crossover would rely on a procedure like the
Expand Down Expand Up @@ -130,12 +130,12 @@ def mutate(self, offsprings: List[Individual]) -> List[Individual]:
Parameters
----------
offsprings : List[gp.Individual]
offsprings : List[Individual]
List of offspring individuals to be mutated.
Returns
----------
List[gp.Individual]
List[Individual]
List of mutated offspring individuals.
"""

File renamed without changes.
File renamed without changes.
12 changes: 6 additions & 6 deletions examples/example_caching.py
@@ -1,7 +1,7 @@
import numpy as np
import time

import gp
import cgp

""" Example demonstrating the use of the caching decorator.
@@ -16,7 +16,7 @@ def f_target(x):
return x ** 2 + x + 1.0


@gp.utils.disk_cache("example_caching_cache.pkl")
@cgp.utils.disk_cache("example_caching_cache.pkl")
def inner_objective(expr):
"""The caching decorator uses the function parameters to identify
identical function calls. Here, as many different genotypes
@@ -62,15 +62,15 @@ def evolution():
"n_columns": 10,
"n_rows": 2,
"levels_back": 2,
"primitives": [gp.Add, gp.Sub, gp.Mul, gp.ConstantFloat],
"primitives": [cgp.Add, cgp.Sub, cgp.Mul, cgp.ConstantFloat],
},
"evolve_params": {"max_generations": 100, "min_fitness": -1e-12},
}

pop = gp.Population(**params["population_params"], genome_params=params["genome_params"])
ea = gp.ea.MuPlusLambda(**params["ea_params"])
pop = cgp.Population(**params["population_params"], genome_params=params["genome_params"])
ea = cgp.ea.MuPlusLambda(**params["ea_params"])

gp.evolve(pop, objective, ea, **params["evolve_params"], print_progress=True)
cgp.evolve(pop, objective, ea, **params["evolve_params"], print_progress=True)

return pop.champion

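The `disk_cache` decorator used above memoizes results on disk, keyed by the function arguments (as its docstring in this example explains). A standalone sketch of that behaviour with a made-up cache file and a toy function standing in for `inner_objective`:

```python
import time

import cgp


@cgp.utils.disk_cache("demo_cache.pkl")  # hypothetical cache file
def slow_square(x):
    time.sleep(1.0)  # stand-in for an expensive inner objective
    return x ** 2


t0 = time.time()
slow_square(3.0)  # first call: executes the function body (~1 s)
print(f"first call took {time.time() - t0:.2f} s")

t0 = time.time()
slow_square(3.0)  # identical arguments: result is read back from the pickle file
print(f"second call took {time.time() - t0:.2f} s")
```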
12 changes: 6 additions & 6 deletions examples/example_differential_evo_regression.py
@@ -4,7 +4,7 @@
import scipy.constants
import torch

import gp
import cgp


"""Example demonstrating the use of Cartesian Genetic Programming for
@@ -67,7 +67,7 @@ def evolution():
"n_columns": 20,
"n_rows": 1,
"levels_back": None,
"primitives": [gp.Add, gp.Sub, gp.Mul, gp.Parameter],
"primitives": [cgp.Add, cgp.Sub, cgp.Mul, cgp.Parameter],
}

ea_params = {"n_offsprings": 4, "n_breeding": 4, "tournament_size": 1, "n_processes": 1}
@@ -78,19 +78,19 @@ def evolution():
# average out for clipped values
local_search_params = {"lr": 1e-3, "gradient_steps": 9}

pop = gp.Population(**population_params, genome_params=genome_params)
pop = cgp.Population(**population_params, genome_params=genome_params)

# define the function for local search; parameters such as the
# learning rate and number of gradient steps are fixed via the use
# of `partial`; the local_search function should only receive a
# population of individuals as input
local_search = functools.partial(
gp.local_search.gradient_based,
cgp.local_search.gradient_based,
objective=functools.partial(inner_objective, seed=population_params["seed"]),
**local_search_params,
)

ea = gp.ea.MuPlusLambda(**ea_params, local_search=local_search)
ea = cgp.ea.MuPlusLambda(**ea_params, local_search=local_search)

history = {}
history["champion"] = []
@@ -102,7 +102,7 @@ def recording_callback(pop):

obj = functools.partial(objective, seed=population_params["seed"])

gp.evolve(
cgp.evolve(
pop, obj, ea, **evolve_params, print_progress=True, callback=recording_callback,
)

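The `functools.partial` construction above is how this example reconciles the full signature of `gradient_based` with `MuPlusLambda`'s expectation of a local-search callable that only receives individuals. A sketch of that mechanism with a stand-in function; the parameter names are assumptions, not taken from `cgp.local_search.gradient_based` itself:

```python
import functools


def gradient_based_stand_in(individuals, objective, lr, gradient_steps):
    """Same call pattern as assumed for cgp.local_search.gradient_based."""
    for ind in individuals:
        pass  # here: take `gradient_steps` gradient steps with learning rate `lr`


local_search = functools.partial(
    gradient_based_stand_in, objective=lambda ind: ind, lr=1e-3, gradient_steps=9
)

# MuPlusLambda can now call it with a single positional argument:
local_search([])  # e.g. a list of offspring individuals (assumption)
```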
14 changes: 7 additions & 7 deletions examples/example_evo_regression.py
@@ -4,7 +4,7 @@
import scipy.constants
import warnings

import gp
import cgp


"""Example demonstrating the use of Cartesian Genetic Programming for
@@ -24,14 +24,14 @@ def objective(individual, target_function, seed):
Parameters
----------
individual : gp.Individual
individual : Individual
Individual of the Cartesian Genetic Programming Framework.
target_function : Callable
Target function.
Returns
-------
gp.Individual
Individual
Modified individual with updated fitness value.
"""
if individual.fitness is not None:
@@ -87,18 +87,18 @@ def evolution(f_target):
"n_columns": 10,
"n_rows": 2,
"levels_back": 5,
"primitives": [gp.Add, gp.Sub, gp.Mul, gp.Div, gp.ConstantFloat],
"primitives": [cgp.Add, cgp.Sub, cgp.Mul, cgp.Div, cgp.ConstantFloat],
}

ea_params = {"n_offsprings": 10, "n_breeding": 10, "tournament_size": 2, "n_processes": 2}

evolve_params = {"max_generations": 1000, "min_fitness": 0.0}

# create population that will be evolved
pop = gp.Population(**population_params, genome_params=genome_params)
pop = cgp.Population(**population_params, genome_params=genome_params)

# create instance of evolutionary algorithm
ea = gp.ea.MuPlusLambda(**ea_params)
ea = cgp.ea.MuPlusLambda(**ea_params)

# define callback for recording of fitness over generations
history = {}
@@ -112,7 +112,7 @@ def recording_callback(pop):
obj = functools.partial(objective, target_function=f_target, seed=population_params["seed"])

# Perform the evolution
gp.evolve(
cgp.evolve(
pop, obj, ea, **evolve_params, print_progress=True, callback=recording_callback,
)
return history, pop.champion
11 changes: 6 additions & 5 deletions setup.py
@@ -23,18 +23,18 @@ def read_extra_requirements():


setup(
name="python-gp",
name="hal-cgp",
version="0.1",
author="Jakob Jordan, Maximilian Schmidt",
author_email="jakobjordan@posteo.de",
description=("Cartesian Genetic Programming in Python."),
description=("Cartesian Genetic Programming in pure Python."),
license="GPLv3",
keywords="genetic programming",
url="https://github.com/jakobj/python-gp",
url="https://github.com/Happy-Algorithms-League/hal-cgp",
python_requires=">=3.6, <4",
install_requires=read_requirements(),
extras_require=read_extra_requirements(),
packages=["gp", "gp.ea", "gp.local_search"],
packages=["cgp", "cgp.ea", "cgp.local_search"],
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
classifiers=[
@@ -45,6 +45,7 @@ def read_extra_requirements():
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Utilities",
"Topic :: Scientific/Engineering",
"Typing :: Typed",
],
)
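Note the naming split the updated `setup.py` encodes: the distribution is called `hal-cgp`, while the importable packages are `cgp`, `cgp.ea` and `cgp.local_search`. Assuming the distribution is published under that name (not something this diff establishes), installation and import look like:

```python
# pip install hal-cgp    <- distribution name, from setup(name="hal-cgp", ...)
import cgp               # <- import name, from packages=["cgp", ...]

print(cgp.__name__)  # "cgp"
```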
7 changes: 4 additions & 3 deletions test/conftest.py
@@ -1,6 +1,7 @@
import gp
from pytest import fixture

import cgp


@fixture
def rng_seed():
@@ -15,7 +16,7 @@ def genome_params():
"n_columns": 3,
"n_rows": 3,
"levels_back": 2,
"primitives": (gp.Add, gp.Sub, gp.ConstantFloat),
"primitives": (cgp.Add, cgp.Sub, cgp.ConstantFloat),
}


@@ -36,7 +37,7 @@ def mutation_rate():

@fixture
def population_simple_fitness(population_params, genome_params):
pop = gp.Population(**population_params, genome_params=genome_params)
pop = cgp.Population(**population_params, genome_params=genome_params)

for i, parent in enumerate(pop.parents):
parent.fitness = i
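The fixtures in `conftest.py` are consumed by the test modules elsewhere in this diff; as a hypothetical example of how `population_simple_fitness` would be used in a test:

```python
def test_parents_carry_simple_fitness(population_simple_fitness):
    pop = population_simple_fitness
    # the fixture assigns fitness 0, 1, 2, ... to the parents in order
    for i, parent in enumerate(pop.parents):
        assert parent.fitness == i
```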
