Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
51 commits
Select commit Hold shift + click to select a range
7187a41
move gfo-opt-algos into gfo directory and add skeleton files for gfo …
SimonBlanke Jun 9, 2025
a9cecdf
rename opt. algos.
SimonBlanke Jun 14, 2025
62e40aa
add test to gfo adapter
SimonBlanke Jun 14, 2025
5ae1a35
add simulated annealing
SimonBlanke Jun 14, 2025
a0cf60a
remove unused file
SimonBlanke Jun 14, 2025
4c9048d
add bayesian opt.
SimonBlanke Jun 14, 2025
66e984c
search for test-files in src/hyperactive dir
SimonBlanke Jun 14, 2025
172f516
add differential evolution
SimonBlanke Jun 14, 2025
85857dc
reformat
SimonBlanke Jun 14, 2025
4f8bf49
add direct algo.
SimonBlanke Jun 14, 2025
2d68234
add downhill simplex algo.
SimonBlanke Jun 14, 2025
0ac7c8e
add evolution strategy optimizer
SimonBlanke Jun 14, 2025
2da33f9
add forest optimizer
SimonBlanke Jun 14, 2025
28f86aa
add genetic algorithm optimizer
SimonBlanke Jun 14, 2025
7d758f0
add grid search opt.
SimonBlanke Jun 14, 2025
fdd4dbf
add lipschitz optimizer
SimonBlanke Jun 14, 2025
ff23a3e
add parallel tempering optimizer
SimonBlanke Jun 14, 2025
d41fb3b
add particle swarm optimization
SimonBlanke Jun 14, 2025
584cb27
add pattern search
SimonBlanke Jun 14, 2025
7326172
add powell's method
SimonBlanke Jun 14, 2025
5a8cac7
add random search
SimonBlanke Jun 14, 2025
44aea0e
fix sim. ann. test para
SimonBlanke Jun 14, 2025
f833eef
add random rest. hill. climb. opt.
SimonBlanke Jun 14, 2025
49210f2
add spiral opt.
SimonBlanke Jun 14, 2025
c7bb77f
add tpe optimizer
SimonBlanke Jun 14, 2025
fb74eb6
check_estimator
fkiraly Jun 14, 2025
edc33a9
check_estimator
fkiraly Jun 14, 2025
a480766
Merge branch 'check_estimator' into feature/add-gfo-algos
fkiraly Jun 14, 2025
0b135be
Merge remote-tracking branch 'upstream/master' into feature/add-gfo-a…
fkiraly Jun 14, 2025
0fdc7f0
add _tags
SimonBlanke Jun 19, 2025
f6a0bbd
add docstring examples
SimonBlanke Jun 19, 2025
1eba3bb
add _tags to optimizers
SimonBlanke Jun 19, 2025
7d74989
fix mutable default error
SimonBlanke Jun 21, 2025
b6bec07
fix another mutable default error
SimonBlanke Jun 21, 2025
d8b5168
check if key is in dict
SimonBlanke Jun 21, 2025
540e4b6
fix syntax error in docstring example
SimonBlanke Jun 21, 2025
e87da4a
fix imports in docstring examples
SimonBlanke Jun 21, 2025
9410d4d
fix examples (numpy.arrays to lists in search-space dict-values)
SimonBlanke Jun 21, 2025
4bab535
move from base
fkiraly Jun 21, 2025
0e9d8ab
quick fix for search-space conversion
SimonBlanke Jun 22, 2025
1a4f76d
create separate module for gfo-adapter
SimonBlanke Jun 22, 2025
e551eb3
score function takes just one argument
SimonBlanke Jun 22, 2025
d44b94a
Revert "score function takes just one argument"
fkiraly Jun 22, 2025
f58a5f0
Revert "create separate module for gfo-adapter"
fkiraly Jun 22, 2025
d45d284
Revert "quick fix for search-space conversion"
fkiraly Jun 22, 2025
9c0cd8e
fix crossover rate
fkiraly Jun 22, 2025
bf0dc75
fix ForestOptimizer param
fkiraly Jun 22, 2025
0b6c4a0
tsp
fkiraly Jun 22, 2025
04b47ff
move generator
fkiraly Jun 22, 2025
53806bd
Update _lipschitz_optimization.py
fkiraly Jun 22, 2025
4f8a15e
Update _gfo.py
fkiraly Jun 22, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ test-search_space:
done

test-pytest:
python -m pytest --durations=10 -x -p no:warnings tests/; \
python -m pytest --durations=10 -x -p no:warnings tests/ src/hyperactive/; \

test-timings:
cd tests/_local_test_timings; \
Expand Down
Empty file added scripts/__init__.py
Empty file.
63 changes: 63 additions & 0 deletions scripts/_generator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
import os
from pathlib import Path

# List of algorithm names and corresponding class names:
# (module/folder name, class name as exported by gradient_free_optimizers
#  — assumed identical to the adapter class name; TODO confirm per algo)
algo_info = [
    ("downhill_simplex", "DownhillSimplexOptimizer"),
    ("simulated_annealing", "SimulatedAnnealingOptimizer"),
    ("direct_algorithm", "DirectAlgorithm"),
    ("lipschitz_optimization", "LipschitzOptimizer"),
    ("pattern_search", "PatternSearch"),
    ("random_restart_hill_climbing", "RandomRestartHillClimbingOptimizer"),
    ("random_search", "RandomSearchOptimizer"),
    ("powells_method", "PowellsMethod"),
    ("differential_evolution", "DifferentialEvolutionOptimizer"),
    ("evolution_strategy", "EvolutionStrategyOptimizer"),
    ("genetic_algorithm", "GeneticAlgorithmOptimizer"),
    ("parallel_tempering", "ParallelTemperingOptimizer"),
    ("particle_swarm_optimization", "ParticleSwarmOptimizer"),
    ("spiral_optimization", "SpiralOptimization"),
    ("bayesian_optimization", "BayesianOptimizer"),
    ("forest_optimizer", "ForestOptimizer"),
    ("tree_structured_parzen_estimators", "TreeStructuredParzenEstimators"),
]

# Output root for the generated per-algorithm package folders.
BASE_DIR = Path("generated_opt_algos")


# Template for the Python class file of one generated adapter module.
def create_class_file_content(class_name: str) -> str:
    """Return the full source code for one GFO adapter module.

    The generated module defines a single class named ``class_name`` that
    subclasses ``_BaseGFOadapter`` and resolves the backing
    ``gradient_free_optimizers`` class lazily inside ``_get_gfo_class``,
    so importing the generated module does not import GFO itself.

    Parameters
    ----------
    class_name : str
        Name of the adapter class to generate. Assumed to match the class
        name exported by ``gradient_free_optimizers``.

    Returns
    -------
    str
        Complete, properly indented Python source for the module.
    """
    return f'''from hyperactive.opt._adapters._gfo import _BaseGFOadapter


class {class_name}(_BaseGFOadapter):

    def _get_gfo_class(self):
        """Get the GFO class to use.

        Returns
        -------
        class
            The GFO class to use. One of the concrete GFO classes
        """
        from gradient_free_optimizers import {class_name}

        return {class_name}
'''


# Main generation loop: create one package folder per algorithm, each with
# an empty __init__.py and a module containing the adapter class.
for name, class_name in algo_info:
    algo_folder = BASE_DIR / name
    algo_folder.mkdir(parents=True, exist_ok=True)

    init_file = algo_folder / "__init__.py"
    class_file = algo_folder / f"_{name}.py"

    # Create __init__.py (empty) so the folder is an importable package.
    init_file.touch(exist_ok=True)

    # Write (or overwrite) the optimizer class file from the template.
    class_file.write_text(create_class_file_content(class_name))

print(f"Generated {len(algo_info)} folders in {BASE_DIR.resolve()}")
1 change: 1 addition & 0 deletions src/hyperactive/base/_optimizer.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Base class for optimizer."""

# copyright: hyperactive developers, MIT License (see LICENSE file)

from skbase.base import BaseObject
Expand Down
50 changes: 45 additions & 5 deletions src/hyperactive/opt/__init__.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,54 @@
"""Individual optimization algorithms."""

# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.opt.gridsearch import GridSearchSk
from hyperactive.opt.hillclimbing import HillClimbing
from hyperactive.opt.hillclimbing_repulsing import HillClimbingRepulsing
from hyperactive.opt.hillclimbing_stochastic import HillClimbingStochastic
from .gfo import (
HillClimbing,
StochasticHillClimbing,
RepulsingHillClimbing,
SimulatedAnnealing,
DownhillSimplexOptimizer,
RandomSearch,
GridSearch,
RandomRestartHillClimbing,
PowellsMethod,
PatternSearch,
LipschitzOptimizer,
DirectAlgorithm,
ParallelTempering,
ParticleSwarmOptimizer,
SpiralOptimization,
GeneticAlgorithm,
EvolutionStrategy,
DifferentialEvolution,
BayesianOptimizer,
TreeStructuredParzenEstimators,
ForestOptimizer,
)


__all__ = [
"GridSearchSk",
"HillClimbing",
"HillClimbingRepulsing",
"HillClimbingStochastic",
"RepulsingHillClimbing",
"StochasticHillClimbing",
"SimulatedAnnealing",
"DownhillSimplexOptimizer",
"RandomSearch",
"GridSearch",
"RandomRestartHillClimbing",
"PowellsMethod",
"PatternSearch",
"LipschitzOptimizer",
"DirectAlgorithm",
"ParallelTempering",
"ParticleSwarmOptimizer",
"SpiralOptimization",
"GeneticAlgorithm",
"EvolutionStrategy",
"DifferentialEvolution",
"BayesianOptimizer",
"TreeStructuredParzenEstimators",
"ForestOptimizer",
]
77 changes: 69 additions & 8 deletions src/hyperactive/opt/_adapters/_gfo.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Adapter for gfo package."""

# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.base import BaseOptimizer
Expand Down Expand Up @@ -40,9 +41,7 @@ def _get_gfo_class(self):
class
The GFO class to use. One of the concrete GFO classes
"""
raise NotImplementedError(
"This method should be implemented in a subclass."
)
raise NotImplementedError("This method should be implemented in a subclass.")

def get_search_config(self):
"""Get the search configuration.
Expand All @@ -55,8 +54,63 @@ def get_search_config(self):
search_config = super().get_search_config()
search_config["initialize"] = self._initialize
del search_config["verbose"]

search_config = self._handle_gfo_defaults(search_config)

search_config["search_space"] = self._to_dict_np(search_config["search_space"])

return search_config

def _handle_gfo_defaults(self, search_config):
"""Handle default values for GFO search configuration.

Temporary measure until GFO handles defaults gracefully.

Parameters
----------
search_config : dict with str keys
The search configuration dictionary to handle defaults for.

Returns
-------
search_config : dict with str keys
The search configuration dictionary with defaults handled.
"""
if "sampling" in search_config and search_config["sampling"] is None:
search_config["sampling"] = {"random": 1000000}

if "tree_para" in search_config and search_config["tree_para"] is None:
search_config["tree_para"] = {"n_estimators": 100}

return search_config

def _to_dict_np(self, search_space):
"""Coerce the search space to a format suitable for gfo optimizers.

gfo expects dicts of numpy arrays, not lists.
This method coerces lists or tuples in the search space to numpy arrays.

Parameters
----------
search_space : dict with str keys and iterable values
The search space to coerce.

Returns
-------
dict with str keys and 1D numpy arrays as values
The coerced search space.
"""
import numpy as np

def coerce_to_numpy(arr):
"""Coerce a list or tuple to a numpy array."""
if not isinstance(arr, np.ndarray):
return np.array(arr)
return arr

coerced_search_space = {k: coerce_to_numpy(v) for k, v in search_space.items()}
return coerced_search_space

def _run(self, experiment, **search_config):
"""Run the optimization search process.
Parameters
Expand All @@ -75,15 +129,15 @@ def _run(self, experiment, **search_config):
max_time = search_config.pop("max_time", None)

gfo_cls = self._get_gfo_class()
hcopt = gfo_cls(**search_config)
gfopt = gfo_cls(**search_config)

with StdoutMute(active=not self.verbose):
hcopt.search(
gfopt.search(
objective_function=experiment.score,
n_iter=n_iter,
max_time=max_time,
)
best_params = hcopt.best_para
best_params = gfopt.best_para
return best_params

@classmethod
Expand Down Expand Up @@ -143,5 +197,12 @@ def get_test_params(cls, parameter_set="default"):
},
"n_iter": 100,
}

return [params_sklearn, params_ackley]
params_ackley_list = {
"experiment": ackley_exp,
"search_space": {
"x0": list(np.linspace(-5, 5, 10)),
"x1": list(np.linspace(-5, 5, 10)),
},
"n_iter": 100,
}
return [params_sklearn, params_ackley, params_ackley_list]
50 changes: 50 additions & 0 deletions src/hyperactive/opt/gfo/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
"""Individual optimization algorithms."""

# copyright: hyperactive developers, MIT License (see LICENSE file)

from ._hillclimbing import HillClimbing
from ._stochastic_hillclimbing import StochasticHillClimbing
from ._repulsing_hillclimbing import RepulsingHillClimbing
from ._simulated_annealing import SimulatedAnnealing
from ._downhill_simplex import DownhillSimplexOptimizer
from ._random_search import RandomSearch
from ._grid_search import GridSearch
from ._random_restart_hill_climbing import RandomRestartHillClimbing
from ._powells_method import PowellsMethod
from ._pattern_search import PatternSearch
from ._lipschitz_optimization import LipschitzOptimizer
from ._direct_algorithm import DirectAlgorithm
from ._parallel_tempering import ParallelTempering
from ._particle_swarm_optimization import ParticleSwarmOptimizer
from ._spiral_optimization import SpiralOptimization
from ._genetic_algorithm import GeneticAlgorithm
from ._evolution_strategy import EvolutionStrategy
from ._differential_evolution import DifferentialEvolution
from ._bayesian_optimization import BayesianOptimizer
from ._tree_structured_parzen_estimators import TreeStructuredParzenEstimators
from ._forest_optimizer import ForestOptimizer


__all__ = [
"HillClimbing",
"RepulsingHillClimbing",
"StochasticHillClimbing",
"SimulatedAnnealing",
"DownhillSimplexOptimizer",
"RandomSearch",
"GridSearch",
"RandomRestartHillClimbing",
"PowellsMethod",
"PatternSearch",
"LipschitzOptimizer",
"DirectAlgorithm",
"ParallelTempering",
"ParticleSwarmOptimizer",
"SpiralOptimization",
"GeneticAlgorithm",
"EvolutionStrategy",
"DifferentialEvolution",
"BayesianOptimizer",
"TreeStructuredParzenEstimators",
"ForestOptimizer",
]
Loading