From 7187a41452b79d20d58c4910b5bcea0f4e500894 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Mon, 9 Jun 2025 10:32:33 +0200 Subject: [PATCH 01/49] move gfo-opt-algos into gfo directory and add skeleton files for gfo opt. --- src/hyperactive/opt/__init__.py | 45 ++++++++++++- src/hyperactive/opt/generator.py | 63 +++++++++++++++++++ src/hyperactive/opt/gfo/__init__.py | 50 +++++++++++++++ .../opt/gfo/_base_gfo_optimizer.py | 6 ++ .../opt/gfo/_bayesian_optimization.py | 16 +++++ .../opt/gfo/_differential_evolution.py | 16 +++++ src/hyperactive/opt/gfo/_direct_algorithm.py | 16 +++++ src/hyperactive/opt/gfo/_downhill_simplex.py | 16 +++++ .../opt/gfo/_evolution_strategy.py | 16 +++++ src/hyperactive/opt/gfo/_forest_optimizer.py | 16 +++++ src/hyperactive/opt/gfo/_genetic_algorithm.py | 16 +++++ src/hyperactive/opt/gfo/_grid_search.py | 16 +++++ .../{hillclimbing => gfo}/_hillclimbing.py | 0 .../_hillclimbing_repulsing.py | 0 .../_hillclimbing_stochastic.py | 0 .../opt/gfo/_lipschitz_optimization.py | 16 +++++ .../opt/gfo/_parallel_tempering.py | 16 +++++ .../opt/gfo/_particle_swarm_optimization.py | 16 +++++ src/hyperactive/opt/gfo/_pattern_search.py | 16 +++++ src/hyperactive/opt/gfo/_powells_method.py | 16 +++++ .../opt/gfo/_random_restart_hill_climbing.py | 16 +++++ src/hyperactive/opt/gfo/_random_search.py | 16 +++++ .../opt/gfo/_simulated_annealing.py | 16 +++++ .../opt/gfo/_spiral_optimization.py | 16 +++++ .../gfo/_tree_structured_parzen_estimators.py | 16 +++++ src/hyperactive/opt/hillclimbing/__init__.py | 6 -- .../opt/hillclimbing_repulsing/__init__.py | 8 --- .../opt/hillclimbing_stochastic/__init__.py | 8 --- 28 files changed, 449 insertions(+), 25 deletions(-) create mode 100644 src/hyperactive/opt/generator.py create mode 100644 src/hyperactive/opt/gfo/__init__.py create mode 100644 src/hyperactive/opt/gfo/_base_gfo_optimizer.py create mode 100644 src/hyperactive/opt/gfo/_bayesian_optimization.py create mode 100644 src/hyperactive/opt/gfo/_differential_evolution.py create mode 100644 src/hyperactive/opt/gfo/_direct_algorithm.py create mode 100644 src/hyperactive/opt/gfo/_downhill_simplex.py create mode 100644 src/hyperactive/opt/gfo/_evolution_strategy.py create mode 100644 src/hyperactive/opt/gfo/_forest_optimizer.py create mode 100644 src/hyperactive/opt/gfo/_genetic_algorithm.py create mode 100644 src/hyperactive/opt/gfo/_grid_search.py rename src/hyperactive/opt/{hillclimbing => gfo}/_hillclimbing.py (100%) rename src/hyperactive/opt/{hillclimbing_repulsing => gfo}/_hillclimbing_repulsing.py (100%) rename src/hyperactive/opt/{hillclimbing_stochastic => gfo}/_hillclimbing_stochastic.py (100%) create mode 100644 src/hyperactive/opt/gfo/_lipschitz_optimization.py create mode 100644 src/hyperactive/opt/gfo/_parallel_tempering.py create mode 100644 src/hyperactive/opt/gfo/_particle_swarm_optimization.py create mode 100644 src/hyperactive/opt/gfo/_pattern_search.py create mode 100644 src/hyperactive/opt/gfo/_powells_method.py create mode 100644 src/hyperactive/opt/gfo/_random_restart_hill_climbing.py create mode 100644 src/hyperactive/opt/gfo/_random_search.py create mode 100644 src/hyperactive/opt/gfo/_simulated_annealing.py create mode 100644 src/hyperactive/opt/gfo/_spiral_optimization.py create mode 100644 src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py delete mode 100644 src/hyperactive/opt/hillclimbing/__init__.py delete mode 100644 src/hyperactive/opt/hillclimbing_repulsing/__init__.py delete mode 100644 
src/hyperactive/opt/hillclimbing_stochastic/__init__.py diff --git a/src/hyperactive/opt/__init__.py b/src/hyperactive/opt/__init__.py index 8014b642..9f00eb2c 100644 --- a/src/hyperactive/opt/__init__.py +++ b/src/hyperactive/opt/__init__.py @@ -1,14 +1,53 @@ """Individual optimization algorithms.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) from hyperactive.opt.gridsearch import GridSearchSk -from hyperactive.opt.hillclimbing import HillClimbing -from hyperactive.opt.hillclimbing_repulsing import HillClimbingRepulsing -from hyperactive.opt.hillclimbing_stochastic import HillClimbingStochastic +from .gfo import ( + HillClimbing, + HillClimbingStochastic, + HillClimbingRepulsing, + SimulatedAnnealing, + DownhillSimplexOptimizer, + RandomSearch, + GridSearch, + RandomRestartHillClimbing, + PowellsMethod, + PatternSearch, + LipschitzOptimizer, + DirectAlgorithm, + ParallelTempering, + ParticleSwarmOptimizer, + SpiralOptimization, + GeneticAlgorithm, + EvolutionStrategy, + DifferentialEvolution, + BayesianOptimizer, + TreeStructuredParzenEstimators, + ForestOptimizer, +) __all__ = [ "GridSearchSk", "HillClimbing", "HillClimbingRepulsing", "HillClimbingStochastic", + "SimulatedAnnealing", + "DownhillSimplexOptimizer", + "RandomSearch", + "GridSearch", + "RandomRestartHillClimbing", + "PowellsMethod", + "PatternSearch", + "LipschitzOptimizer", + "DirectAlgorithm", + "ParallelTempering", + "ParticleSwarmOptimizer", + "SpiralOptimization", + "GeneticAlgorithm", + "EvolutionStrategy", + "DifferentialEvolution", + "BayesianOptimizer", + "TreeStructuredParzenEstimators", + "ForestOptimizer", ] diff --git a/src/hyperactive/opt/generator.py b/src/hyperactive/opt/generator.py new file mode 100644 index 00000000..dc8c329d --- /dev/null +++ b/src/hyperactive/opt/generator.py @@ -0,0 +1,63 @@ +import os +from pathlib import Path + +# List of algorithm names and corresponding class names +algo_info = [ + ("downhill_simplex", "DownhillSimplexOptimizer"), + ("simulated_annealing", "SimulatedAnnealingOptimizer"), + ("direct_algorithm", "DirectAlgorithm"), + ("lipschitz_optimization", "LipschitzOptimizer"), + ("pattern_search", "PatternSearch"), + ("random_restart_hill_climbing", "RandomRestartHillClimbingOptimizer"), + ("random_search", "RandomSearchOptimizer"), + ("powells_method", "PowellsMethod"), + ("differential_evolution", "DifferentialEvolutionOptimizer"), + ("evolution_strategy", "EvolutionStrategyOptimizer"), + ("genetic_algorithm", "GeneticAlgorithmOptimizer"), + ("parallel_tempering", "ParallelTemperingOptimizer"), + ("particle_swarm_optimization", "ParticleSwarmOptimizer"), + ("spiral_optimization", "SpiralOptimization"), + ("bayesian_optimization", "BayesianOptimizer"), + ("forest_optimizer", "ForestOptimizer"), + ("tree_structured_parzen_estimators", "TreeStructuredParzenEstimators"), +] + +BASE_DIR = Path("generated_opt_algos") + + +# Template for the Python class file +def create_class_file_content(class_name: str) -> str: + return f'''from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class {class_name}(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. 
One of the concrete GFO classes + """ + from gradient_free_optimizers import {class_name} + + return {class_name} +''' + + +# Main generation loop +for name, class_name in algo_info: + algo_folder = BASE_DIR / name + algo_folder.mkdir(parents=True, exist_ok=True) + + init_file = algo_folder / "__init__.py" + class_file = algo_folder / f"_{name}.py" + + # Create __init__.py (empty) + init_file.touch(exist_ok=True) + + # Write the optimizer class file + class_file.write_text(create_class_file_content(class_name)) + +print(f"Generated {len(algo_info)} folders in {BASE_DIR.resolve()}") diff --git a/src/hyperactive/opt/gfo/__init__.py b/src/hyperactive/opt/gfo/__init__.py new file mode 100644 index 00000000..55beb213 --- /dev/null +++ b/src/hyperactive/opt/gfo/__init__.py @@ -0,0 +1,50 @@ +"""Individual optimization algorithms.""" + +# copyright: hyperactive developers, MIT License (see LICENSE file) + +from ._hillclimbing import HillClimbing +from ._hillclimbing_stochastic import HillClimbingStochastic +from ._hillclimbing_repulsing import HillClimbingRepulsing +from ._simulated_annealing import SimulatedAnnealing +from ._downhill_simplex import DownhillSimplexOptimizer +from ._random_search import RandomSearch +from ._grid_search import GridSearch +from ._random_restart_hill_climbing import RandomRestartHillClimbing +from ._powells_method import PowellsMethod +from ._pattern_search import PatternSearch +from ._lipschitz_optimization import LipschitzOptimizer +from ._direct_algorithm import DirectAlgorithm +from ._parallel_tempering import ParallelTempering +from ._particle_swarm_optimization import ParticleSwarmOptimizer +from ._spiral_optimization import SpiralOptimization +from ._genetic_algorithm import GeneticAlgorithm +from ._evolution_strategy import EvolutionStrategy +from ._differential_evolution import DifferentialEvolution +from ._bayesian_optimization import BayesianOptimizer +from ._tree_structured_parzen_estimators import TreeStructuredParzenEstimators +from ._forest_optimizer import ForestOptimizer + + +__all__ = [ + "HillClimbing", + "HillClimbingRepulsing", + "HillClimbingStochastic", + "SimulatedAnnealing", + "DownhillSimplexOptimizer", + "RandomSearch", + "GridSearch", + "RandomRestartHillClimbing", + "PowellsMethod", + "PatternSearch", + "LipschitzOptimizer", + "DirectAlgorithm", + "ParallelTempering", + "ParticleSwarmOptimizer", + "SpiralOptimization", + "GeneticAlgorithm", + "EvolutionStrategy", + "DifferentialEvolution", + "BayesianOptimizer", + "TreeStructuredParzenEstimators", + "ForestOptimizer", +] diff --git a/src/hyperactive/opt/gfo/_base_gfo_optimizer.py b/src/hyperactive/opt/gfo/_base_gfo_optimizer.py new file mode 100644 index 00000000..81ebf7e7 --- /dev/null +++ b/src/hyperactive/opt/gfo/_base_gfo_optimizer.py @@ -0,0 +1,6 @@ +from hyperactive.base import BaseOptimizer + + +class BaseGfoOptimizer(BaseOptimizer): + def __init__(self): + pass diff --git a/src/hyperactive/opt/gfo/_bayesian_optimization.py b/src/hyperactive/opt/gfo/_bayesian_optimization.py new file mode 100644 index 00000000..efabb189 --- /dev/null +++ b/src/hyperactive/opt/gfo/_bayesian_optimization.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class BayesianOptimizer(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. 
One of the concrete GFO classes + """ + from gradient_free_optimizers import BayesianOptimizer + + return BayesianOptimizer diff --git a/src/hyperactive/opt/gfo/_differential_evolution.py b/src/hyperactive/opt/gfo/_differential_evolution.py new file mode 100644 index 00000000..99abcce9 --- /dev/null +++ b/src/hyperactive/opt/gfo/_differential_evolution.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class DifferentialEvolution(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import DifferentialEvolutionOptimizer + + return DifferentialEvolutionOptimizer diff --git a/src/hyperactive/opt/gfo/_direct_algorithm.py b/src/hyperactive/opt/gfo/_direct_algorithm.py new file mode 100644 index 00000000..df459833 --- /dev/null +++ b/src/hyperactive/opt/gfo/_direct_algorithm.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class DirectAlgorithm(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import DirectAlgorithm + + return DirectAlgorithm diff --git a/src/hyperactive/opt/gfo/_downhill_simplex.py b/src/hyperactive/opt/gfo/_downhill_simplex.py new file mode 100644 index 00000000..2b8b0c7b --- /dev/null +++ b/src/hyperactive/opt/gfo/_downhill_simplex.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class DownhillSimplexOptimizer(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import DownhillSimplexOptimizer + + return DownhillSimplexOptimizer diff --git a/src/hyperactive/opt/gfo/_evolution_strategy.py b/src/hyperactive/opt/gfo/_evolution_strategy.py new file mode 100644 index 00000000..844325fa --- /dev/null +++ b/src/hyperactive/opt/gfo/_evolution_strategy.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class EvolutionStrategy(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import EvolutionStrategyOptimizer + + return EvolutionStrategyOptimizer diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py b/src/hyperactive/opt/gfo/_forest_optimizer.py new file mode 100644 index 00000000..327533d3 --- /dev/null +++ b/src/hyperactive/opt/gfo/_forest_optimizer.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class ForestOptimizer(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import ForestOptimizer + + return ForestOptimizer diff --git a/src/hyperactive/opt/gfo/_genetic_algorithm.py b/src/hyperactive/opt/gfo/_genetic_algorithm.py new file mode 100644 index 00000000..979def8c --- /dev/null +++ b/src/hyperactive/opt/gfo/_genetic_algorithm.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class GeneticAlgorithm(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. 
+ + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import GeneticAlgorithmOptimizer + + return GeneticAlgorithmOptimizer diff --git a/src/hyperactive/opt/gfo/_grid_search.py b/src/hyperactive/opt/gfo/_grid_search.py new file mode 100644 index 00000000..8e6d3e58 --- /dev/null +++ b/src/hyperactive/opt/gfo/_grid_search.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class GridSearch(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import GridSearchOptimizer + + return GridSearchOptimizer diff --git a/src/hyperactive/opt/hillclimbing/_hillclimbing.py b/src/hyperactive/opt/gfo/_hillclimbing.py similarity index 100% rename from src/hyperactive/opt/hillclimbing/_hillclimbing.py rename to src/hyperactive/opt/gfo/_hillclimbing.py diff --git a/src/hyperactive/opt/hillclimbing_repulsing/_hillclimbing_repulsing.py b/src/hyperactive/opt/gfo/_hillclimbing_repulsing.py similarity index 100% rename from src/hyperactive/opt/hillclimbing_repulsing/_hillclimbing_repulsing.py rename to src/hyperactive/opt/gfo/_hillclimbing_repulsing.py diff --git a/src/hyperactive/opt/hillclimbing_stochastic/_hillclimbing_stochastic.py b/src/hyperactive/opt/gfo/_hillclimbing_stochastic.py similarity index 100% rename from src/hyperactive/opt/hillclimbing_stochastic/_hillclimbing_stochastic.py rename to src/hyperactive/opt/gfo/_hillclimbing_stochastic.py diff --git a/src/hyperactive/opt/gfo/_lipschitz_optimization.py b/src/hyperactive/opt/gfo/_lipschitz_optimization.py new file mode 100644 index 00000000..ed400d9c --- /dev/null +++ b/src/hyperactive/opt/gfo/_lipschitz_optimization.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class LipschitzOptimizer(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import LipschitzOptimizer + + return LipschitzOptimizer diff --git a/src/hyperactive/opt/gfo/_parallel_tempering.py b/src/hyperactive/opt/gfo/_parallel_tempering.py new file mode 100644 index 00000000..f11f8782 --- /dev/null +++ b/src/hyperactive/opt/gfo/_parallel_tempering.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class ParallelTempering(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import ParallelTemperingOptimizer + + return ParallelTemperingOptimizer diff --git a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py new file mode 100644 index 00000000..122066d3 --- /dev/null +++ b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class ParticleSwarmOptimizer(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. 
One of the concrete GFO classes + """ + from gradient_free_optimizers import ParticleSwarmOptimizer + + return ParticleSwarmOptimizer diff --git a/src/hyperactive/opt/gfo/_pattern_search.py b/src/hyperactive/opt/gfo/_pattern_search.py new file mode 100644 index 00000000..168d3f28 --- /dev/null +++ b/src/hyperactive/opt/gfo/_pattern_search.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class PatternSearch(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import PatternSearch + + return PatternSearch diff --git a/src/hyperactive/opt/gfo/_powells_method.py b/src/hyperactive/opt/gfo/_powells_method.py new file mode 100644 index 00000000..c63d2353 --- /dev/null +++ b/src/hyperactive/opt/gfo/_powells_method.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class PowellsMethod(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import PowellsMethod + + return PowellsMethod diff --git a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py new file mode 100644 index 00000000..50f64920 --- /dev/null +++ b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class RandomRestartHillClimbing(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import RandomRestartHillClimbingOptimizer + + return RandomRestartHillClimbingOptimizer diff --git a/src/hyperactive/opt/gfo/_random_search.py b/src/hyperactive/opt/gfo/_random_search.py new file mode 100644 index 00000000..f6d50a4a --- /dev/null +++ b/src/hyperactive/opt/gfo/_random_search.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class RandomSearch(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import RandomSearchOptimizer + + return RandomSearchOptimizer diff --git a/src/hyperactive/opt/gfo/_simulated_annealing.py b/src/hyperactive/opt/gfo/_simulated_annealing.py new file mode 100644 index 00000000..f03b6b05 --- /dev/null +++ b/src/hyperactive/opt/gfo/_simulated_annealing.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class SimulatedAnnealing(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import SimulatedAnnealingOptimizer + + return SimulatedAnnealingOptimizer diff --git a/src/hyperactive/opt/gfo/_spiral_optimization.py b/src/hyperactive/opt/gfo/_spiral_optimization.py new file mode 100644 index 00000000..a5ec17d4 --- /dev/null +++ b/src/hyperactive/opt/gfo/_spiral_optimization.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class SpiralOptimization(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. 
+ + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import SpiralOptimization + + return SpiralOptimization diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py new file mode 100644 index 00000000..82b2edc5 --- /dev/null +++ b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py @@ -0,0 +1,16 @@ +from hyperactive.opt._adapters._gfo import _BaseGFOadapter + + +class TreeStructuredParzenEstimators(_BaseGFOadapter): + + def _get_gfo_class(self): + """Get the GFO class to use. + + Returns + ------- + class + The GFO class to use. One of the concrete GFO classes + """ + from gradient_free_optimizers import TreeStructuredParzenEstimators + + return TreeStructuredParzenEstimators diff --git a/src/hyperactive/opt/hillclimbing/__init__.py b/src/hyperactive/opt/hillclimbing/__init__.py deleted file mode 100644 index 4acd2fbf..00000000 --- a/src/hyperactive/opt/hillclimbing/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Hill climbing optimizer.""" -# copyright: hyperactive developers, MIT License (see LICENSE file) - -from hyperactive.opt.hillclimbing._hillclimbing import HillClimbing - -__all__ = ["HillClimbing"] diff --git a/src/hyperactive/opt/hillclimbing_repulsing/__init__.py b/src/hyperactive/opt/hillclimbing_repulsing/__init__.py deleted file mode 100644 index f6bd0b16..00000000 --- a/src/hyperactive/opt/hillclimbing_repulsing/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Hill climbing optimizer.""" -# copyright: hyperactive developers, MIT License (see LICENSE file) - -from hyperactive.opt.hillclimbing_repulsing._hillclimbing_repulsing import ( - HillClimbingRepulsing, -) - -__all__ = ["HillClimbingRepulsing"] diff --git a/src/hyperactive/opt/hillclimbing_stochastic/__init__.py b/src/hyperactive/opt/hillclimbing_stochastic/__init__.py deleted file mode 100644 index f7d1e78b..00000000 --- a/src/hyperactive/opt/hillclimbing_stochastic/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Hill climbing optimizer.""" -# copyright: hyperactive developers, MIT License (see LICENSE file) - -from hyperactive.opt.hillclimbing_stochastic._hillclimbing_stochastic import ( - HillClimbingStochastic, -) - -__all__ = ["HillClimbingStochastic"] From a9cecdf1ebfdfbe754919f8c2cc124121af2ac00 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 09:47:03 +0200 Subject: [PATCH 02/49] rename opt. algos. 
--- src/hyperactive/opt/__init__.py | 9 +++++---- src/hyperactive/opt/gfo/__init__.py | 8 ++++---- ...lclimbing_repulsing.py => _repulsing_hillclimbing.py} | 5 +++-- ...limbing_stochastic.py => _stochastic_hillclimbing.py} | 5 +++-- 4 files changed, 15 insertions(+), 12 deletions(-) rename src/hyperactive/opt/gfo/{_hillclimbing_repulsing.py => _repulsing_hillclimbing.py} (98%) rename src/hyperactive/opt/gfo/{_hillclimbing_stochastic.py => _stochastic_hillclimbing.py} (98%) diff --git a/src/hyperactive/opt/__init__.py b/src/hyperactive/opt/__init__.py index 9f00eb2c..45ba40a4 100644 --- a/src/hyperactive/opt/__init__.py +++ b/src/hyperactive/opt/__init__.py @@ -5,8 +5,8 @@ from hyperactive.opt.gridsearch import GridSearchSk from .gfo import ( HillClimbing, - HillClimbingStochastic, - HillClimbingRepulsing, + StochasticHillClimbing, + RepulsingHillClimbing, SimulatedAnnealing, DownhillSimplexOptimizer, RandomSearch, @@ -27,11 +27,12 @@ ForestOptimizer, ) + __all__ = [ "GridSearchSk", "HillClimbing", - "HillClimbingRepulsing", - "HillClimbingStochastic", + "RepulsingHillClimbing", + "StochasticHillClimbing", "SimulatedAnnealing", "DownhillSimplexOptimizer", "RandomSearch", diff --git a/src/hyperactive/opt/gfo/__init__.py b/src/hyperactive/opt/gfo/__init__.py index 55beb213..f1c430b7 100644 --- a/src/hyperactive/opt/gfo/__init__.py +++ b/src/hyperactive/opt/gfo/__init__.py @@ -3,8 +3,8 @@ # copyright: hyperactive developers, MIT License (see LICENSE file) from ._hillclimbing import HillClimbing -from ._hillclimbing_stochastic import HillClimbingStochastic -from ._hillclimbing_repulsing import HillClimbingRepulsing +from ._stochastic_hillclimbing import StochasticHillClimbing +from ._repulsing_hillclimbing import RepulsingHillClimbing from ._simulated_annealing import SimulatedAnnealing from ._downhill_simplex import DownhillSimplexOptimizer from ._random_search import RandomSearch @@ -27,8 +27,8 @@ __all__ = [ "HillClimbing", - "HillClimbingRepulsing", - "HillClimbingStochastic", + "RepulsingHillClimbing", + "StochasticHillClimbing", "SimulatedAnnealing", "DownhillSimplexOptimizer", "RandomSearch", diff --git a/src/hyperactive/opt/gfo/_hillclimbing_repulsing.py b/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py similarity index 98% rename from src/hyperactive/opt/gfo/_hillclimbing_repulsing.py rename to src/hyperactive/opt/gfo/_repulsing_hillclimbing.py index 7b50c62a..88a23e99 100644 --- a/src/hyperactive/opt/gfo/_hillclimbing_repulsing.py +++ b/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py @@ -1,10 +1,11 @@ """Hill climbing optimizer from gfo.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) from hyperactive.opt._adapters._gfo import _BaseGFOadapter -class HillClimbingRepulsing(_BaseGFOadapter): +class RepulsingHillClimbing(_BaseGFOadapter): """Repulsing hill climbing optimizer. Parameters @@ -62,7 +63,7 @@ class HillClimbingRepulsing(_BaseGFOadapter): 2. setting up the hill climbing optimizer: >>> from hyperactive.opt import HillClimbingRepulsing >>> import numpy as np - >>> + >>> >>> hc_config = { ... "search_space": { ... 
"C": np.array([0.01, 0.1, 1, 10]), diff --git a/src/hyperactive/opt/gfo/_hillclimbing_stochastic.py b/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py similarity index 98% rename from src/hyperactive/opt/gfo/_hillclimbing_stochastic.py rename to src/hyperactive/opt/gfo/_stochastic_hillclimbing.py index c44ad9bd..eb31c0fc 100644 --- a/src/hyperactive/opt/gfo/_hillclimbing_stochastic.py +++ b/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py @@ -1,10 +1,11 @@ """Hill climbing optimizer from gfo.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) from hyperactive.opt._adapters._gfo import _BaseGFOadapter -class HillClimbingStochastic(_BaseGFOadapter): +class StochasticHillClimbing(_BaseGFOadapter): """Stochastic hill climbing optimizer. Parameters @@ -62,7 +63,7 @@ class HillClimbingStochastic(_BaseGFOadapter): 2. setting up the hill climbing optimizer: >>> from hyperactive.opt import HillClimbingStochastic >>> import numpy as np - >>> + >>> >>> hc_config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), From 62e40aa80913ba8219e6b2020256ea07c8bec3c8 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 09:47:35 +0200 Subject: [PATCH 03/49] add test to gfo adapter --- src/hyperactive/opt/_adapters/_gfo.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/src/hyperactive/opt/_adapters/_gfo.py b/src/hyperactive/opt/_adapters/_gfo.py index aca57da3..228b43c7 100644 --- a/src/hyperactive/opt/_adapters/_gfo.py +++ b/src/hyperactive/opt/_adapters/_gfo.py @@ -1,4 +1,5 @@ """Adapter for gfo package.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) from hyperactive.base import BaseOptimizer @@ -40,9 +41,7 @@ def _get_gfo_class(self): class The GFO class to use. One of the concrete GFO classes """ - raise NotImplementedError( - "This method should be implemented in a subclass." - ) + raise NotImplementedError("This method should be implemented in a subclass.") def get_search_config(self): """Get the search configuration. @@ -143,5 +142,12 @@ def get_test_params(cls, parameter_set="default"): }, "n_iter": 100, } - - return [params_sklearn, params_ackley] + params_ackley_list = { + "experiment": ackley_exp, + "search_space": { + "x0": list(np.linspace(-5, 5, 10)), + "x1": list(np.linspace(-5, 5, 10)), + }, + "n_iter": 100, + } + return [params_sklearn, params_ackley, params_ackley_list] From 5ae1a3520c63adc88e219f8af16383a3a70eff7f Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:04:54 +0200 Subject: [PATCH 04/49] add simulated annealing --- .../opt/gfo/_simulated_annealing.py | 70 +++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/src/hyperactive/opt/gfo/_simulated_annealing.py b/src/hyperactive/opt/gfo/_simulated_annealing.py index f03b6b05..05f3fda0 100644 --- a/src/hyperactive/opt/gfo/_simulated_annealing.py +++ b/src/hyperactive/opt/gfo/_simulated_annealing.py @@ -2,6 +2,76 @@ class SimulatedAnnealing(_BaseGFOadapter): + """Simulated annealing optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. 
+ The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + epsilon : float + The step-size for the climbing. + distribution : str + The type of distribution to sample from. + n_neighbours : int + The number of neighbours to sample and evaluate before moving to the best + of those neighbours. + annealing_rate : float + The rate at which the temperature is annealed. + start_temp : float + The initial temperature. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + epsilon=0.01, + distribution="normal", + n_neighbours=10, + annealing_rate=0.97, + start_temp=1, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.epsilon = epsilon + self.distribution = distribution + self.n_neighbours = n_neighbours + self.annealing_rate = annealing_rate + self.start_temp = start_temp + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. From a0cf60a1db36fa969b116ab78352c616f38e70b8 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:05:07 +0200 Subject: [PATCH 05/49] remove unused file --- src/hyperactive/opt/gfo/_base_gfo_optimizer.py | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 src/hyperactive/opt/gfo/_base_gfo_optimizer.py diff --git a/src/hyperactive/opt/gfo/_base_gfo_optimizer.py b/src/hyperactive/opt/gfo/_base_gfo_optimizer.py deleted file mode 100644 index 81ebf7e7..00000000 --- a/src/hyperactive/opt/gfo/_base_gfo_optimizer.py +++ /dev/null @@ -1,6 +0,0 @@ -from hyperactive.base import BaseOptimizer - - -class BaseGfoOptimizer(BaseOptimizer): - def __init__(self): - pass From 4c9048da1874afa76d638abc3d1fe854f4f997ba Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:22:14 +0200 Subject: [PATCH 06/49] add bayesian opt. --- .../opt/gfo/_bayesian_optimization.py | 97 +++++++++++++++++++ 1 file changed, 97 insertions(+) diff --git a/src/hyperactive/opt/gfo/_bayesian_optimization.py b/src/hyperactive/opt/gfo/_bayesian_optimization.py index efabb189..edb38741 100644 --- a/src/hyperactive/opt/gfo/_bayesian_optimization.py +++ b/src/hyperactive/opt/gfo/_bayesian_optimization.py @@ -2,6 +2,78 @@ class BayesianOptimizer(_BaseGFOadapter): + """Bayesian optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. 
+ The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the search process. + warm_start_smbo + The warm start for SMBO. + max_sample_size : int + The maximum number of points to sample. + sampling : dict + The sampling method to use. + replacement : bool + Whether to sample with replacement. + gpr : dict + The Gaussian Process Regressor to use. + xi : float + The exploration-exploitation trade-off parameter. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + warm_start_smbo=None, + max_sample_size=10000000, + sampling={"random": 1000000}, + replacement=True, + xi=0.03, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + + self.warm_start_smbo = warm_start_smbo + self.max_sample_size = max_sample_size + self.sampling = sampling + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.replacement = replacement + self.xi = xi + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +86,28 @@ def _get_gfo_class(self): from gradient_free_optimizers import BayesianOptimizer return BayesianOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. 
+ """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "xi": 0.33, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 66e984c6f2cf03b91d92d834c26b2dfe602a8e42 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:23:20 +0200 Subject: [PATCH 07/49] search for test-files in src/hyperactive dir --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 42159410..6060d35e 100644 --- a/Makefile +++ b/Makefile @@ -36,7 +36,7 @@ test-search_space: done test-pytest: - python -m pytest --durations=10 -x -p no:warnings tests/; \ + python -m pytest --durations=10 -x -p no:warnings tests/ src/hyperactive/; \ test-timings: cd tests/_local_test_timings; \ From 172f5161e3c3465496d45bc1028101eda871aa59 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:27:11 +0200 Subject: [PATCH 08/49] add differential evolution --- .../opt/gfo/_differential_evolution.py | 88 +++++++++++++++++++ 1 file changed, 88 insertions(+) diff --git a/src/hyperactive/opt/gfo/_differential_evolution.py b/src/hyperactive/opt/gfo/_differential_evolution.py index 99abcce9..65509847 100644 --- a/src/hyperactive/opt/gfo/_differential_evolution.py +++ b/src/hyperactive/opt/gfo/_differential_evolution.py @@ -2,6 +2,67 @@ class DifferentialEvolution(_BaseGFOadapter): + """Differential evolution optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + population : int + The number of individuals in the population. + mutation_rate : float + The mutation rate. + crossover_rate : float + The crossover rate. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + population=10, + mutation_rate=0.9, + crossover_rate=0.9, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.population = population + self.mutation_rate = mutation_rate + self.crossover_rate = crossover_rate + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. 
@@ -14,3 +75,30 @@ def _get_gfo_class(self): from gradient_free_optimizers import DifferentialEvolutionOptimizer return DifferentialEvolutionOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "population": 8, + "mutation_rate": 0.8, + "crossover_rate": 2, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 85857dc9103a1fee9260e9dac13f64972f4ccff6 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:27:31 +0200 Subject: [PATCH 09/49] reformat --- src/hyperactive/tests/test_all_objects.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/hyperactive/tests/test_all_objects.py b/src/hyperactive/tests/test_all_objects.py index 84257db0..6653b388 100644 --- a/src/hyperactive/tests/test_all_objects.py +++ b/src/hyperactive/tests/test_all_objects.py @@ -165,9 +165,9 @@ def test_paramnames(self, object_class): for inst, obj_param in zip(inst_params, obj_params): obj_inst = object_class(**inst) paramnames = obj_inst.paramnames() - assert set(obj_param.keys()) <= set(paramnames), ( - f"Parameter names do not match: {paramnames} != {obj_param}" - ) + assert set(obj_param.keys()) <= set( + paramnames + ), f"Parameter names do not match: {paramnames} != {obj_param}" def test_score_function(self, object_class): """Test that substituting into score works as intended.""" @@ -211,9 +211,7 @@ def test_opt_run(self, object_instance): """Test that run returns the expected result.""" paramnames = object_instance.get_params().keys() if "experiment" not in paramnames: - raise ValueError( - "Optimizer must have an 'experiment' parameter." - ) + raise ValueError("Optimizer must have an 'experiment' parameter.") # check that experiment occurs last in __init__ signature if not object_instance.__init__.__code__.co_varnames[-1] == "experiment": raise ValueError( @@ -255,6 +253,7 @@ def test_gfo_integration(self, object_instance): Runs the optimizer on the sklearn tuning experiment. """ from hyperactive.opt._adapters._gfo import _BaseGFOadapter + if not isinstance(object_instance, _BaseGFOadapter): return None @@ -282,8 +281,8 @@ def test_gfo_integration(self, object_instance): _config = { "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), }, "n_iter": 100, "experiment": sklearn_exp, From 4f8bf4939a59e65f1984cf801e295c3b2a50c22c Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:30:34 +0200 Subject: [PATCH 10/49] add direct algo. --- src/hyperactive/opt/gfo/_direct_algorithm.py | 91 ++++++++++++++++++++ 1 file changed, 91 insertions(+) diff --git a/src/hyperactive/opt/gfo/_direct_algorithm.py b/src/hyperactive/opt/gfo/_direct_algorithm.py index df459833..485512f8 100644 --- a/src/hyperactive/opt/gfo/_direct_algorithm.py +++ b/src/hyperactive/opt/gfo/_direct_algorithm.py @@ -2,6 +2,71 @@ class DirectAlgorithm(_BaseGFOadapter): + """Direct optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. 
A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + warm_start_smbo + The warm start for SMBO. + max_sample_size : int + The maximum number of points to sample. + sampling : dict + The sampling method to use. + replacement : bool + Whether to sample with replacement. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + warm_start_smbo=None, + max_sample_size: int = 10000000, + sampling={"random": 1000000}, + replacement=True, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.warm_start_smbo = warm_start_smbo + self.max_sample_size = max_sample_size + self.sampling = sampling + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.replacement = replacement + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +79,29 @@ def _get_gfo_class(self): from gradient_free_optimizers import DirectAlgorithm return DirectAlgorithm + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "replacement": True, + "max_sample_size": 1000, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 2d682341be8031da4d64c9cbd382a89b2ea7e283 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:33:01 +0200 Subject: [PATCH 11/49] add downhill simplex algo. --- src/hyperactive/opt/gfo/_downhill_simplex.py | 93 ++++++++++++++++++++ 1 file changed, 93 insertions(+) diff --git a/src/hyperactive/opt/gfo/_downhill_simplex.py b/src/hyperactive/opt/gfo/_downhill_simplex.py index 2b8b0c7b..222c170d 100644 --- a/src/hyperactive/opt/gfo/_downhill_simplex.py +++ b/src/hyperactive/opt/gfo/_downhill_simplex.py @@ -2,6 +2,71 @@ class DownhillSimplexOptimizer(_BaseGFOadapter): + """Downhill simplex optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. 
A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + alpha : float + The reflection parameter of the simplex algorithm. + gamma : float + The expansion parameter of the simplex algorithm. + beta : float + The contraction parameter of the simplex algorithm. + sigma : float + The shrinking parameter of the simplex algorithm. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + alpha=1, + gamma=2, + beta=0.5, + sigma=0.5, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.alpha = alpha + self.gamma = gamma + self.beta = beta + self.sigma = sigma + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +79,31 @@ def _get_gfo_class(self): from gradient_free_optimizers import DownhillSimplexOptimizer return DownhillSimplexOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "alpha": 0.33, + "beta": 0.33, + "gamma": 0.33, + "sigma": 0.33, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 0ac7c8e0d1cc9b665167cc35d3f414cef774f471 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:35:46 +0200 Subject: [PATCH 12/49] add evolution strategy optimizer --- .../opt/gfo/_evolution_strategy.py | 100 ++++++++++++++++++ 1 file changed, 100 insertions(+) diff --git a/src/hyperactive/opt/gfo/_evolution_strategy.py b/src/hyperactive/opt/gfo/_evolution_strategy.py index 844325fa..d8e68b67 100644 --- a/src/hyperactive/opt/gfo/_evolution_strategy.py +++ b/src/hyperactive/opt/gfo/_evolution_strategy.py @@ -2,6 +2,77 @@ class EvolutionStrategy(_BaseGFOadapter): + """Evolution strategy optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. 
A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + population : int + The number of individuals in the population. + offspring : int + The number of offspring to generate in each generation. + replace_parents : bool + If True, the parents are replaced with the offspring in the next + generation. If False, the parents are kept in the next generation and the + offspring are added to the population. + mutation_rate : float + The mutation rate for the mutation operator. + crossover_rate : float + The crossover rate for the crossover operator. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + population=10, + offspring=20, + replace_parents=False, + mutation_rate=0.7, + crossover_rate=0.3, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.population = population + self.offspring = offspring + self.replace_parents = replace_parents + self.mutation_rate = mutation_rate + self.crossover_rate = crossover_rate + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +85,32 @@ def _get_gfo_class(self): from gradient_free_optimizers import EvolutionStrategyOptimizer return EvolutionStrategyOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "population": 15, + "offspring": 10, + "replace_parents": True, + "mutation_rate": 1, + "crossover_rate": 2, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 2da33f9dd3b6d89426daae5e94b699381f065667 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:39:54 +0200 Subject: [PATCH 13/49] add forest optimizer --- src/hyperactive/opt/gfo/_forest_optimizer.py | 104 +++++++++++++++++++ 1 file changed, 104 insertions(+) diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py b/src/hyperactive/opt/gfo/_forest_optimizer.py index 327533d3..08948df2 100644 --- a/src/hyperactive/opt/gfo/_forest_optimizer.py +++ b/src/hyperactive/opt/gfo/_forest_optimizer.py @@ -2,6 +2,83 @@ class ForestOptimizer(_BaseGFOadapter): + """Forest optimizer. 
+ + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + warm_start_smbo + The warm start for SMBO. + max_sample_size : int + The maximum number of points to sample. + sampling : dict + The sampling method to use. + replacement : bool + Whether to sample with replacement. + tree_regressor : str + The tree regressor model to use. + tree_para : dict + The model specific parameters for the tree regressor. + xi : float + The xi parameter for the tree regressor. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + warm_start_smbo=None, + max_sample_size=10000000, + sampling={"random": 1000000}, + replacement=True, + tree_regressor="extra_tree", + tree_para={"n_estimators": 100}, + xi=0.03, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.warm_start_smbo = warm_start_smbo + self.max_sample_size = max_sample_size + self.sampling = sampling + self.replacement = replacement + self.tree_regressor = tree_regressor + self.tree_para = tree_para + self.xi = xi + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +91,30 @@ def _get_gfo_class(self): from gradient_free_optimizers import ForestOptimizer return ForestOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. 
+ """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "replacement": False, + "tree_para": {"n_estimators": 50}, + "xi": 0.33, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 28f86aa0b918c92f50e249eb9128d6183be2dbf5 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:42:29 +0200 Subject: [PATCH 14/49] add genetic algorithm optimizer --- src/hyperactive/opt/gfo/_genetic_algorithm.py | 103 ++++++++++++++++++ 1 file changed, 103 insertions(+) diff --git a/src/hyperactive/opt/gfo/_genetic_algorithm.py b/src/hyperactive/opt/gfo/_genetic_algorithm.py index 979def8c..93a2bd21 100644 --- a/src/hyperactive/opt/gfo/_genetic_algorithm.py +++ b/src/hyperactive/opt/gfo/_genetic_algorithm.py @@ -2,6 +2,80 @@ class GeneticAlgorithm(_BaseGFOadapter): + """Genetic algorithm optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the search process. + population : int + The number of individuals in the population. + offspring : int + The number of offspring to generate in each generation. + crossover : str + The crossover operator to use. + n_parents : int + The number of parents to select for crossover. + mutation_rate : float + The mutation rate. + crossover_rate : float + The crossover rate. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + population=10, + offspring=10, + crossover="discrete-recombination", + n_parents=2, + mutation_rate=0.5, + crossover_rate=0.5, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.population = population + self.offspring = offspring + self.crossover = crossover + self.n_parents = n_parents + self.mutation_rate = mutation_rate + self.crossover_rate = crossover_rate + + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. 
@@ -14,3 +88,32 @@ def _get_gfo_class(self): from gradient_free_optimizers import GeneticAlgorithmOptimizer return GeneticAlgorithmOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "population": 15, + "offspring": 10, + "n_parents": 3, + "mutation_rate": 0.01, + "crossover_rate": 0.02, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 7d758f0ad62a1d780321acff6494bc0a9b704567 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:44:31 +0200 Subject: [PATCH 15/49] add grid search opt. --- src/hyperactive/opt/gfo/_grid_search.py | 83 +++++++++++++++++++++++++ 1 file changed, 83 insertions(+) diff --git a/src/hyperactive/opt/gfo/_grid_search.py b/src/hyperactive/opt/gfo/_grid_search.py index 8e6d3e58..094049f6 100644 --- a/src/hyperactive/opt/gfo/_grid_search.py +++ b/src/hyperactive/opt/gfo/_grid_search.py @@ -2,6 +2,63 @@ class GridSearch(_BaseGFOadapter): + """Grid search optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + step_size : int + The step-size for the grid search. + direction : "diagonal" or "orthogonal" + The direction of the grid search. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + step_size=1, + direction="diagonal", + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.step_size = step_size + self.direction = direction + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +71,29 @@ def _get_gfo_class(self): from gradient_free_optimizers import GridSearchOptimizer return GridSearchOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. 
+ """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "step_size": 3, + "direction": "orthogonal", + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From fdd4dbf1e0fde8659b9231b32f5a8f62eb5c0e28 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:46:42 +0200 Subject: [PATCH 16/49] add lipschitz optimizer --- .../opt/gfo/_lipschitz_optimization.py | 91 +++++++++++++++++++ 1 file changed, 91 insertions(+) diff --git a/src/hyperactive/opt/gfo/_lipschitz_optimization.py b/src/hyperactive/opt/gfo/_lipschitz_optimization.py index ed400d9c..4e02011f 100644 --- a/src/hyperactive/opt/gfo/_lipschitz_optimization.py +++ b/src/hyperactive/opt/gfo/_lipschitz_optimization.py @@ -2,6 +2,71 @@ class LipschitzOptimizer(_BaseGFOadapter): + """Lipschitz optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + warm_start_smbo + The warm start for SMBO. + max_sample_size : int + The maximum number of points to sample. + sampling : dict + The sampling method to use. + replacement : bool + Whether to sample with replacement. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + warm_start_smbo=None, + max_sample_size=10000000, + sampling={"random": 1000000}, + replacement=True, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.warm_start_smbo = warm_start_smbo + self.max_sample_size = max_sample_size + self.sampling = sampling + self.replacement = replacement + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +79,29 @@ def _get_gfo_class(self): from gradient_free_optimizers import LipschitzOptimizer return LipschitzOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. 
+ """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "max_sample_size": 1000, + "replacement": False, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From ff23a3e33502b2a09a110518678ab41a527755de Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:50:57 +0200 Subject: [PATCH 17/49] add parallel tempering optimizer --- .../opt/gfo/_parallel_tempering.py | 86 +++++++++++++++++++ 1 file changed, 86 insertions(+) diff --git a/src/hyperactive/opt/gfo/_parallel_tempering.py b/src/hyperactive/opt/gfo/_parallel_tempering.py index f11f8782..8c67baa2 100644 --- a/src/hyperactive/opt/gfo/_parallel_tempering.py +++ b/src/hyperactive/opt/gfo/_parallel_tempering.py @@ -2,6 +2,66 @@ class ParallelTempering(_BaseGFOadapter): + """Parallel tempering optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + epsilon : float + The step-size for the climbing. + distribution : str + The type of distribution to sample from. + n_neighbours : int + The number of neighbours to sample and evaluate before moving to the best + of those neighbours. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + population: int = 5, + n_iter_swap: int = 5, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.population = population + self.n_iter_swap = n_iter_swap + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +74,29 @@ def _get_gfo_class(self): from gradient_free_optimizers import ParallelTemperingOptimizer return ParallelTemperingOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. 
+ """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "population": 10, + "n_iter_swap": 3, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From d41fb3bf104fb299ebb90626205ae695b2b94cd8 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:53:25 +0200 Subject: [PATCH 18/49] add particle swarm optimization --- .../opt/gfo/_particle_swarm_optimization.py | 98 +++++++++++++++++++ 1 file changed, 98 insertions(+) diff --git a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py index 122066d3..9f9404c0 100644 --- a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py +++ b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py @@ -2,6 +2,75 @@ class ParticleSwarmOptimizer(_BaseGFOadapter): + """Particle swarm optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + population : int + The number of particles in the swarm. + inertia : float + The inertia of the swarm. + cognitive_weight : float + A factor of the movement towards the personal best position of the individual optimizers in the population. + social_weight : float + A factor of the movement towards the personal best position of the individual optimizers in the population. + temp_weight : float + The temperature weight of the swarm. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + population=10, + inertia=0.5, + cognitive_weight=0.5, + social_weight=0.5, + temp_weight=0.2, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.population = population + self.inertia = inertia + self.cognitive_weight = cognitive_weight + self.social_weight = social_weight + self.temp_weight = temp_weight + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. 
@@ -14,3 +83,32 @@ def _get_gfo_class(self): from gradient_free_optimizers import ParticleSwarmOptimizer return ParticleSwarmOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "population": 15, + "inertia": 0.9, + "cognitive_weight": 0.9, + "social_weight": 0.9, + "temp_weight": 0.9, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 584cb27fca799f047f4549c03918867a110bde5c Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:55:31 +0200 Subject: [PATCH 19/49] add pattern search --- src/hyperactive/opt/gfo/_pattern_search.py | 88 ++++++++++++++++++++++ 1 file changed, 88 insertions(+) diff --git a/src/hyperactive/opt/gfo/_pattern_search.py b/src/hyperactive/opt/gfo/_pattern_search.py index 168d3f28..20da359c 100644 --- a/src/hyperactive/opt/gfo/_pattern_search.py +++ b/src/hyperactive/opt/gfo/_pattern_search.py @@ -2,6 +2,67 @@ class PatternSearch(_BaseGFOadapter): + """Pattern search optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + n_positions : int + Number of positions that the pattern consists of. + pattern_size : float + The initial size of the patterns in percentage of the size of the search space in the corresponding dimension. + reduction : float + The factor that reduces the size of the pattern if no better position is found. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + n_positions=4, + pattern_size=0.25, + reduction=0.9, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.n_positions = n_positions + self.pattern_size = pattern_size + self.reduction = reduction + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. 
@@ -14,3 +75,30 @@ def _get_gfo_class(self): from gradient_free_optimizers import PatternSearch return PatternSearch + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "n_positions": 3, + "pattern_size": 0.5, + "reduction": 0.999, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 73261725a602a270891f05f5edab85bd71a1bd05 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 10:57:38 +0200 Subject: [PATCH 20/49] add powell's method --- src/hyperactive/opt/gfo/_powells_method.py | 83 ++++++++++++++++++++++ 1 file changed, 83 insertions(+) diff --git a/src/hyperactive/opt/gfo/_powells_method.py b/src/hyperactive/opt/gfo/_powells_method.py index c63d2353..2dcc8068 100644 --- a/src/hyperactive/opt/gfo/_powells_method.py +++ b/src/hyperactive/opt/gfo/_powells_method.py @@ -2,6 +2,64 @@ class PowellsMethod(_BaseGFOadapter): + """Powell's method optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the search process. + epsilon : float + The step-size for the climbing. + distribution : str + The type of distribution to sample from. + n_neighbours : int + The number of neighbours to sample and evaluate before moving to the best + of those neighbours. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + iters_p_dim=10, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.iters_p_dim = iters_p_dim + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +72,28 @@ def _get_gfo_class(self): from gradient_free_optimizers import PowellsMethod return PowellsMethod + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. 
+ """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "iters_p_dim": 3, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 5a8cac7380ffa2dbbbaac4da431d710153e94419 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 11:01:00 +0200 Subject: [PATCH 21/49] add random search --- src/hyperactive/opt/gfo/_random_search.py | 70 +++++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/src/hyperactive/opt/gfo/_random_search.py b/src/hyperactive/opt/gfo/_random_search.py index f6d50a4a..d4926923 100644 --- a/src/hyperactive/opt/gfo/_random_search.py +++ b/src/hyperactive/opt/gfo/_random_search.py @@ -2,6 +2,52 @@ class RandomSearch(_BaseGFOadapter): + """Random search optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + Optional, can be passed later via ``set_params``. + initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4} + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable], default=[] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int, default=None + If None, create a new random state. If int, create a new random state + seeded with the value. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. + experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +60,27 @@ def _get_gfo_class(self): from gradient_free_optimizers import RandomSearchOptimizer return RandomSearchOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 44aea0ef836a7a15a8c8b6a7e53c2a00746be3d3 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 11:01:15 +0200 Subject: [PATCH 22/49] fix sim. ann. 
test para --- .../opt/gfo/_simulated_annealing.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/src/hyperactive/opt/gfo/_simulated_annealing.py b/src/hyperactive/opt/gfo/_simulated_annealing.py index 05f3fda0..e46133cd 100644 --- a/src/hyperactive/opt/gfo/_simulated_annealing.py +++ b/src/hyperactive/opt/gfo/_simulated_annealing.py @@ -84,3 +84,29 @@ def _get_gfo_class(self): from gradient_free_optimizers import SimulatedAnnealingOptimizer return SimulatedAnnealingOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "start_temp": 0.33, + "annealing_rate": 1.01, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From f833eeff5e1862df3e0b3b4298b97217b0f1a98d Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 11:01:31 +0200 Subject: [PATCH 23/49] add random rest. hill. climb. opt. --- .../opt/gfo/_random_restart_hill_climbing.py | 84 +++++++++++++++++++ 1 file changed, 84 insertions(+) diff --git a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py index 50f64920..17d1481f 100644 --- a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py +++ b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py @@ -2,6 +2,65 @@ class RandomRestartHillClimbing(_BaseGFOadapter): + """Random restart hill climbing optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + initialize : dict[str, int] + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float + The probability of a random iteration during the the search process. + epsilon : float + The step-size for the climbing. + distribution : str + The type of distribution to sample from. + n_neighbours : int + The number of neighbours to sample and evaluate before moving to the best + of those neighbours. + n_iter_restart : int + The number of iterations after which to restart at a random position. 
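+    n_iter : int, default=100
+        The number of iterations to run the optimizer.
+    verbose : bool, default=False
+        If True, print the progress of the optimization process.
+    experiment : BaseExperiment, optional
+        The experiment to optimize parameters for.
+        Optional, can be passed later via ``set_params``.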
+ """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + epsilon=0.01, + distribution="normal", + n_neighbours=10, + n_iter_restart=0.5, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.epsilon = epsilon + self.distribution = distribution + self.n_neighbours = n_neighbours + self.n_iter_restart = n_iter_restart + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +73,28 @@ def _get_gfo_class(self): from gradient_free_optimizers import RandomRestartHillClimbingOptimizer return RandomRestartHillClimbingOptimizer + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "n_iter_restart": 2, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From 49210f2b5646080ef42c5639271db54829a3b41d Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 11:03:21 +0200 Subject: [PATCH 24/49] add spiral opt. --- .../opt/gfo/_spiral_optimization.py | 85 +++++++++++++++++++ 1 file changed, 85 insertions(+) diff --git a/src/hyperactive/opt/gfo/_spiral_optimization.py b/src/hyperactive/opt/gfo/_spiral_optimization.py index a5ec17d4..a555c3e1 100644 --- a/src/hyperactive/opt/gfo/_spiral_optimization.py +++ b/src/hyperactive/opt/gfo/_spiral_optimization.py @@ -2,6 +2,65 @@ class SpiralOptimization(_BaseGFOadapter): + """Spiral optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + Optional, can be passed later via ``set_params``. + initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4} + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable], default=[] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int, default=None + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float, default=0.1 + The probability of a random iteration during the the search process. + population : int + The number of particles in the swarm. + decay_rate : float + This parameter is a factor, that influences the radius of the particles during their spiral movement. + Lower values accelerates the convergence of the particles to the best known position, while values above 1 eventually lead to a movement where the particles spiral away from each other. + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. 
+ experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + population: int = 10, + decay_rate: float = 0.99, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.population = population + self.decay_rate = decay_rate + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +73,29 @@ def _get_gfo_class(self): from gradient_free_optimizers import SpiralOptimization return SpiralOptimization + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "population": 20, + "decay_rate": 0.9999, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From c7bb77fef9b1272ccacb3b38b30531bbc1f0bc4b Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 14 Jun 2025 11:49:07 +0200 Subject: [PATCH 25/49] add tpe optimizer --- .../gfo/_tree_structured_parzen_estimators.py | 97 +++++++++++++++++++ 1 file changed, 97 insertions(+) diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py index 82b2edc5..2c47ad6d 100644 --- a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py +++ b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py @@ -2,6 +2,76 @@ class TreeStructuredParzenEstimators(_BaseGFOadapter): + """Tree structured parzen estimators optimizer. + + Parameters + ---------- + search_space : dict[str, list] + The search space to explore. A dictionary with parameter + names as keys and a numpy array as values. + Optional, can be passed later via ``set_params``. + initialize : dict[str, int], default={"grid": 4, "random": 2, "vertices": 4} + The method to generate initial positions. A dictionary with + the following key literals and the corresponding value type: + {"grid": int, "vertices": int, "random": int, "warm_start": list[dict]} + constraints : list[callable], default=[] + A list of constraints, where each constraint is a callable. + The callable returns `True` or `False` dependend on the input parameters. + random_state : None, int, default=None + If None, create a new random state. If int, create a new random state + seeded with the value. + rand_rest_p : float, default=0.1 + The probability of a random iteration during the the search process. + warm_start_smbo + The warm start for SMBO. + max_sample_size : int + The maximum number of points to sample. + sampling : dict + The sampling method to use. + replacement : bool + Whether to sample with replacement. + gamma_tpe : float + The parameter for the Tree Structured Parzen Estimators + n_iter : int, default=100 + The number of iterations to run the optimizer. + verbose : bool, default=False + If True, print the progress of the optimization process. 
+ experiment : BaseExperiment, optional + The experiment to optimize parameters for. + Optional, can be passed later via ``set_params``. + """ + + def __init__( + self, + search_space=None, + initialize=None, + constraints=None, + random_state=None, + rand_rest_p=0.1, + warm_start_smbo=None, + max_sample_size=10000000, + sampling={"random": 1000000}, + replacement=True, + gamma_tpe=0.2, + n_iter=100, + verbose=False, + experiment=None, + ): + self.random_state = random_state + self.rand_rest_p = rand_rest_p + self.warm_start_smbo = warm_start_smbo + self.max_sample_size = max_sample_size + self.sampling = sampling + self.replacement = replacement + self.gamma_tpe = gamma_tpe + self.search_space = search_space + self.initialize = initialize + self.constraints = constraints + self.n_iter = n_iter + self.experiment = experiment + self.verbose = verbose + + super().__init__() def _get_gfo_class(self): """Get the GFO class to use. @@ -14,3 +84,30 @@ def _get_gfo_class(self): from gradient_free_optimizers import TreeStructuredParzenEstimators return TreeStructuredParzenEstimators + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Get the test parameters for the optimizer. + + Returns + ------- + dict with str keys + The test parameters dictionary. + """ + import numpy as np + + params = super().get_test_params() + experiment = params[0]["experiment"] + more_params = { + "experiment": experiment, + "max_sample_size": 100, + "replacement": False, + "gamma_tpe": 0.01, + "search_space": { + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + }, + "n_iter": 100, + } + params.append(more_params) + return params From fb74eb6f12535acef8aebf76b021d58754537256 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sat, 14 Jun 2025 14:06:07 +0200 Subject: [PATCH 26/49] check_estimator --- src/hyperactive/utils/__init__.py | 7 ++ src/hyperactive/utils/estimator_checks.py | 139 ++++++++++++++++++++++ 2 files changed, 146 insertions(+) create mode 100644 src/hyperactive/utils/__init__.py create mode 100644 src/hyperactive/utils/estimator_checks.py diff --git a/src/hyperactive/utils/__init__.py b/src/hyperactive/utils/__init__.py new file mode 100644 index 00000000..c9c88720 --- /dev/null +++ b/src/hyperactive/utils/__init__.py @@ -0,0 +1,7 @@ +"""Utility functionality.""" + +from hyperactive.utils.estimator_checks import check_estimator + +__all__ = [ + "check_estimator", +] diff --git a/src/hyperactive/utils/estimator_checks.py b/src/hyperactive/utils/estimator_checks.py new file mode 100644 index 00000000..1bc9f793 --- /dev/null +++ b/src/hyperactive/utils/estimator_checks.py @@ -0,0 +1,139 @@ +"""Estimator checker for extension.""" + +__author__ = ["fkiraly"] +__all__ = ["check_estimator"] + +from skbase.utils.dependencies import _check_soft_dependencies + + +def check_estimator( + estimator, + raise_exceptions=False, + tests_to_run=None, + fixtures_to_run=None, + verbose=True, + tests_to_exclude=None, + fixtures_to_exclude=None, +): + """Run all tests on one single estimator. 
+
+    Tests that are run on the estimator:
+
+    * all tests in test_all_estimators
+    * all interface compatibility tests from the module of the estimator's scitype
+
+    Parameters
+    ----------
+    estimator : estimator class or estimator instance
+    raise_exceptions : bool, optional, default=False
+        whether to return exceptions/failures in the results dict, or raise them
+
+        * if False: returns exceptions in returned `results` dict
+        * if True: raises exceptions as they occur
+
+    tests_to_run : str or list of str, optional. Default = run all tests.
+        Names (test/function name string) of tests to run.
+        sub-sets tests that are run to the tests given here.
+    fixtures_to_run : str or list of str, optional. Default = run all tests.
+        pytest test-fixture combination codes, which test-fixture combinations to run.
+        sub-sets tests and fixtures to run to the list given here.
+        If both tests_to_run and fixtures_to_run are provided, runs the *union*,
+        i.e., all test-fixture combinations for tests in tests_to_run,
+        plus all test-fixture combinations in fixtures_to_run.
+    verbose : bool, optional, default=True
+        whether to print out an informative summary of tests run.
+    tests_to_exclude : str or list of str, names of tests to exclude. default = None
+        removes tests that should not be run, after subsetting via tests_to_run.
+    fixtures_to_exclude : str or list of str, fixtures to exclude. default = None
+        removes test-fixture combinations that should not be run.
+        This is done after subsetting via fixtures_to_run.
+
+    Returns
+    -------
+    results : dict of results of the tests in self
+        keys are test/fixture strings, identical as in pytest, e.g., test[fixture]
+        entries are the string "PASSED" if the test passed,
+        or the exception raised if the test did not pass
+        returned only if all tests pass, or raise_exceptions=False
+
+    Raises
+    ------
+    if raise_exceptions=True,
+    raises any exception produced by the tests directly
+
+    Examples
+    --------
+    >>> from hyperactive.opt import HillClimbing
+    >>> from hyperactive.utils import check_estimator
+
+    Running all tests for the HillClimbing class,
+    this uses all instances from get_test_params and compatible scenarios
+
+    >>> results = check_estimator(HillClimbing)
+    All tests PASSED!
+
+    Running all tests for a specific HillClimbing instance,
+    this uses the instance that is passed and compatible scenarios
+
+    >>> specific_hill_climbing = HillClimbing.create_test_instance()
+    >>> results = check_estimator(specific_hill_climbing)
+    All tests PASSED!
+
+    Running a specific test (all fixtures) for HillClimbing
+
+    >>> results = check_estimator(HillClimbing, tests_to_run="test_clone")
+    All tests PASSED!
+
+    {'test_clone[HillClimbing-0]': 'PASSED',
+    'test_clone[HillClimbing-1]': 'PASSED'}
+
+    Running one specific test-fixture-combination for HillClimbing
+
+    >>> check_estimator(
+    ...     HillClimbing, fixtures_to_run="test_clone[HillClimbing-1]"
+    ... )
+    All tests PASSED!
+    {'test_clone[HillClimbing-1]': 'PASSED'}
+    """
+    msg = (
+        "check_estimator is a testing utility for developers, and "
+        "requires pytest to be present "
+        "in the python environment, but pytest was not found. "
+        "pytest is a developer dependency and not included in the base "
+        "hyperactive installation. Please run: `pip install pytest` to "
+        "install the pytest package. 
" + "To install sktime with all developer dependencies, run:" + " `pip install hyperactive[dev]`" + ) + _check_soft_dependencies("pytest", msg=msg) + + from hyperactive.tests.test_class_register import get_test_classes_for_obj + + test_clss_for_est = get_test_classes_for_obj(estimator) + + results = {} + + for test_cls in test_clss_for_est: + test_cls_results = test_cls().run_tests( + obj=estimator, + raise_exceptions=raise_exceptions, + tests_to_run=tests_to_run, + fixtures_to_run=fixtures_to_run, + tests_to_exclude=tests_to_exclude, + fixtures_to_exclude=fixtures_to_exclude, + ) + results.update(test_cls_results) + + failed_tests = [key for key in results.keys() if results[key] != "PASSED"] + if len(failed_tests) > 0: + msg = failed_tests + msg = ["FAILED: " + x for x in msg] + msg = "\n".join(msg) + else: + msg = "All tests PASSED!" + + if verbose: + # printing is an intended feature, for console usage and interactive debugging + print(msg) # noqa T001 + + return results From edc33a9493f8decae95add1ddca7e51e55231f5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sat, 14 Jun 2025 14:37:54 +0200 Subject: [PATCH 27/49] check_estimator --- src/hyperactive/tests/test_all_objects.py | 5 +- src/hyperactive/tests/test_class_register.py | 94 ++++++++++++++++++++ 2 files changed, 97 insertions(+), 2 deletions(-) create mode 100644 src/hyperactive/tests/test_class_register.py diff --git a/src/hyperactive/tests/test_all_objects.py b/src/hyperactive/tests/test_all_objects.py index 84257db0..986c0086 100644 --- a/src/hyperactive/tests/test_all_objects.py +++ b/src/hyperactive/tests/test_all_objects.py @@ -4,6 +4,7 @@ import shutil from skbase.testing import BaseFixtureGenerator as _BaseFixtureGenerator +from skbase.testing import QuickTester as _QuickTester from skbase.testing import TestAllObjects as _TestAllObjects from hyperactive._registry import all_objects @@ -154,7 +155,7 @@ class ExperimentFixtureGenerator(BaseFixtureGenerator): object_type_filter = "experiment" -class TestAllExperiments(ExperimentFixtureGenerator): +class TestAllExperiments(ExperimentFixtureGenerator, _QuickTester): """Module level tests for all experiment classes.""" def test_paramnames(self, object_class): @@ -204,7 +205,7 @@ class OptimizerFixtureGenerator(BaseFixtureGenerator): object_type_filter = "optimizer" -class TestAllOptimizers(OptimizerFixtureGenerator): +class TestAllOptimizers(OptimizerFixtureGenerator, _QuickTester): """Module level tests for all optimizer classes.""" def test_opt_run(self, object_instance): diff --git a/src/hyperactive/tests/test_class_register.py b/src/hyperactive/tests/test_class_register.py new file mode 100644 index 00000000..dfbd1f1f --- /dev/null +++ b/src/hyperactive/tests/test_class_register.py @@ -0,0 +1,94 @@ +# copyright: skpro developers, BSD-3-Clause License (see LICENSE file) +"""Registry and dispatcher for test classes. + +Module does not contain tests, only test utilities. +""" + +__author__ = ["fkiraly"] + +from inspect import isclass + + +def get_test_class_registry(): + """Return test class registry. + + Wrapped in a function to avoid circular imports. 
+ + Returns + ------- + testclass_dict : dict + test class registry + keys are scitypes, values are test classes TestAll[Scitype] + """ + from hyperactive.tests.test_all_objects import ( + TestAllExperiments, + TestAllObjects, + TestAllOptimizers, + ) + + testclass_dict = dict() + # every object in sktime inherits from BaseObject + # "object" tests are run for all objects + testclass_dict["object"] = TestAllObjects + # more specific base classes + # these inherit either from BaseEstimator or BaseObject, + # so also imply estimator and object tests, or only object tests + testclass_dict["experiment"] = TestAllExperiments + testclass_dict["optimizer"] = TestAllOptimizers + + return testclass_dict + + +def get_test_classes_for_obj(obj): + """Get all test classes relevant for an object or estimator. + + Parameters + ---------- + obj : object or estimator, descendant of sktime BaseObject or BaseEstimator + object or estimator for which to get test classes + + Returns + ------- + test_classes : list of test classes + list of test classes relevant for obj + these are references to the actual classes, not strings + if obj was not a descendant of BaseObject or BaseEstimator, returns empty list + """ + from skbase.base import BaseObject + + def is_object(obj): + """Return whether obj is an estimator class or estimator object.""" + if isclass(obj): + return issubclass(obj, BaseObject) + else: + return isinstance(obj, BaseObject) + + # warning: BaseEstimator does not inherit from BaseObject, + # therefore we need to check both + if not is_object(obj): + return [] + + testclass_dict = get_test_class_registry() + + # we always need to run "object" tests + test_clss = [testclass_dict["object"]] + + try: + if isclass(obj): + obj_scitypes = obj.get_class_tag("object_type") + elif hasattr(obj, "get_tag"): + obj_scitypes = obj.get_tag("object_type") + else: + obj_scitypes = [] + except Exception: + obj_scitypes = [] + + if isinstance(obj_scitypes, str): + # if obj_scitypes is a string, convert to list + obj_scitypes = [obj_scitypes] + + for obj_scitype in obj_scitypes: + if obj_scitype in testclass_dict: + test_clss += [testclass_dict[obj_scitype]] + + return test_clss From 0fdc7f00d1439b026a3c329c1999a99934c18afa Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Thu, 19 Jun 2025 20:45:58 +0200 Subject: [PATCH 28/49] add _tags --- .../opt/gfo/_tree_structured_parzen_estimators.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py index 2c47ad6d..7d1053f8 100644 --- a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py +++ b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py @@ -26,7 +26,7 @@ class TreeStructuredParzenEstimators(_BaseGFOadapter): The warm start for SMBO. max_sample_size : int The maximum number of points to sample. - sampling : dict + sampling : dict0 The sampling method to use. replacement : bool Whether to sample with replacement. @@ -41,6 +41,13 @@ class TreeStructuredParzenEstimators(_BaseGFOadapter): Optional, can be passed later via ``set_params``. 
""" + _tags = { + "info:name": "Tree Structured Parzen Estimators", + "info:local_vs_global": "mixed", # "local", "mixed", "global" + "info:explore_vs_exploit": "mixed", # "explore", "exploit", "mixed" + "info:compute": "high", # "low", "middle", "high" + } + def __init__( self, search_space=None, From f6a0bbdf2b6301028a2420c0776d4a43862b30dc Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Thu, 19 Jun 2025 21:02:52 +0200 Subject: [PATCH 29/49] add docstring examples --- .../opt/gfo/_bayesian_optimization.py | 38 ++++++++++++++++++- .../opt/gfo/_differential_evolution.py | 38 ++++++++++++++++++- src/hyperactive/opt/gfo/_direct_algorithm.py | 38 ++++++++++++++++++- src/hyperactive/opt/gfo/_downhill_simplex.py | 38 ++++++++++++++++++- .../opt/gfo/_evolution_strategy.py | 38 ++++++++++++++++++- src/hyperactive/opt/gfo/_forest_optimizer.py | 38 ++++++++++++++++++- src/hyperactive/opt/gfo/_genetic_algorithm.py | 38 ++++++++++++++++++- src/hyperactive/opt/gfo/_grid_search.py | 38 ++++++++++++++++++- .../opt/gfo/_lipschitz_optimization.py | 38 ++++++++++++++++++- .../opt/gfo/_parallel_tempering.py | 38 ++++++++++++++++++- .../opt/gfo/_particle_swarm_optimization.py | 38 ++++++++++++++++++- src/hyperactive/opt/gfo/_pattern_search.py | 38 ++++++++++++++++++- src/hyperactive/opt/gfo/_powells_method.py | 38 ++++++++++++++++++- .../opt/gfo/_random_restart_hill_climbing.py | 38 ++++++++++++++++++- src/hyperactive/opt/gfo/_random_search.py | 38 ++++++++++++++++++- .../opt/gfo/_simulated_annealing.py | 38 ++++++++++++++++++- .../opt/gfo/_spiral_optimization.py | 38 ++++++++++++++++++- .../gfo/_tree_structured_parzen_estimators.py | 38 ++++++++++++++++++- 18 files changed, 666 insertions(+), 18 deletions(-) diff --git a/src/hyperactive/opt/gfo/_bayesian_optimization.py b/src/hyperactive/opt/gfo/_bayesian_optimization.py index edb38741..8836df8e 100644 --- a/src/hyperactive/opt/gfo/_bayesian_optimization.py +++ b/src/hyperactive/opt/gfo/_bayesian_optimization.py @@ -40,7 +40,43 @@ class BayesianOptimizer(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of BayesianOptimizer with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the bayesianOptimizer optimizer: + >>> from hyperactive.opt import BayesianOptimizer + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = BayesianOptimizer(experiment=sklearn_exp, **config) + + 3. 
running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_differential_evolution.py b/src/hyperactive/opt/gfo/_differential_evolution.py index 65509847..b2a6b71d 100644 --- a/src/hyperactive/opt/gfo/_differential_evolution.py +++ b/src/hyperactive/opt/gfo/_differential_evolution.py @@ -34,7 +34,43 @@ class DifferentialEvolution(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of DifferentialEvolution with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the differentialEvolution optimizer: + >>> from hyperactive.opt import DifferentialEvolution + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = DifferentialEvolution(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_direct_algorithm.py b/src/hyperactive/opt/gfo/_direct_algorithm.py index 485512f8..144e72e1 100644 --- a/src/hyperactive/opt/gfo/_direct_algorithm.py +++ b/src/hyperactive/opt/gfo/_direct_algorithm.py @@ -36,7 +36,43 @@ class DirectAlgorithm(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of DirectAlgorithm with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the directAlgorithm optimizer: + >>> from hyperactive.opt import DirectAlgorithm + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = DirectAlgorithm(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_downhill_simplex.py b/src/hyperactive/opt/gfo/_downhill_simplex.py index 222c170d..935b34fc 100644 --- a/src/hyperactive/opt/gfo/_downhill_simplex.py +++ b/src/hyperactive/opt/gfo/_downhill_simplex.py @@ -36,7 +36,43 @@ class DownhillSimplexOptimizer(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. 
- """ + + Examples + -------- + Basic usage of DownhillSimplexOptimizer with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the downhillSimplexOptimizer optimizer: + >>> from hyperactive.opt import DownhillSimplexOptimizer + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = DownhillSimplexOptimizer(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_evolution_strategy.py b/src/hyperactive/opt/gfo/_evolution_strategy.py index d8e68b67..a46fb045 100644 --- a/src/hyperactive/opt/gfo/_evolution_strategy.py +++ b/src/hyperactive/opt/gfo/_evolution_strategy.py @@ -40,7 +40,43 @@ class EvolutionStrategy(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of EvolutionStrategy with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the evolutionStrategy optimizer: + >>> from hyperactive.opt import EvolutionStrategy + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = EvolutionStrategy(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py b/src/hyperactive/opt/gfo/_forest_optimizer.py index 08948df2..623f8880 100644 --- a/src/hyperactive/opt/gfo/_forest_optimizer.py +++ b/src/hyperactive/opt/gfo/_forest_optimizer.py @@ -42,7 +42,43 @@ class ForestOptimizer(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of ForestOptimizer with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the forestOptimizer optimizer: + >>> from hyperactive.opt import ForestOptimizer + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... 
"C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = ForestOptimizer(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_genetic_algorithm.py b/src/hyperactive/opt/gfo/_genetic_algorithm.py index 93a2bd21..631c3b24 100644 --- a/src/hyperactive/opt/gfo/_genetic_algorithm.py +++ b/src/hyperactive/opt/gfo/_genetic_algorithm.py @@ -40,7 +40,43 @@ class GeneticAlgorithm(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of GeneticAlgorithm with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the geneticAlgorithm optimizer: + >>> from hyperactive.opt import GeneticAlgorithm + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = GeneticAlgorithm(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_grid_search.py b/src/hyperactive/opt/gfo/_grid_search.py index 094049f6..733c298e 100644 --- a/src/hyperactive/opt/gfo/_grid_search.py +++ b/src/hyperactive/opt/gfo/_grid_search.py @@ -32,7 +32,43 @@ class GridSearch(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of GridSearch with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the gridSearch optimizer: + >>> from hyperactive.opt import GridSearch + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = GridSearch(experiment=sklearn_exp, **config) + + 3. 
running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_lipschitz_optimization.py b/src/hyperactive/opt/gfo/_lipschitz_optimization.py index 4e02011f..36bad83f 100644 --- a/src/hyperactive/opt/gfo/_lipschitz_optimization.py +++ b/src/hyperactive/opt/gfo/_lipschitz_optimization.py @@ -36,7 +36,43 @@ class LipschitzOptimizer(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of LipschitzOptimizer with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the lipschitzOptimizer optimizer: + >>> from hyperactive.opt import LipschitzOptimizer + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = LipschitzOptimizer(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_parallel_tempering.py b/src/hyperactive/opt/gfo/_parallel_tempering.py index 8c67baa2..7beaa0c0 100644 --- a/src/hyperactive/opt/gfo/_parallel_tempering.py +++ b/src/hyperactive/opt/gfo/_parallel_tempering.py @@ -35,7 +35,43 @@ class ParallelTempering(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of ParallelTempering with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the parallelTempering optimizer: + >>> from hyperactive.opt import ParallelTempering + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = ParallelTempering(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py index 9f9404c0..399842f6 100644 --- a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py +++ b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py @@ -38,7 +38,43 @@ class ParticleSwarmOptimizer(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. 
Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of ParticleSwarmOptimizer with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the particleSwarmOptimizer optimizer: + >>> from hyperactive.opt import ParticleSwarmOptimizer + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = ParticleSwarmOptimizer(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_pattern_search.py b/src/hyperactive/opt/gfo/_pattern_search.py index 20da359c..105864fc 100644 --- a/src/hyperactive/opt/gfo/_pattern_search.py +++ b/src/hyperactive/opt/gfo/_pattern_search.py @@ -34,7 +34,43 @@ class PatternSearch(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of PatternSearch with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the patternSearch optimizer: + >>> from hyperactive.opt import PatternSearch + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = PatternSearch(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_powells_method.py b/src/hyperactive/opt/gfo/_powells_method.py index 2dcc8068..5ff2a0bd 100644 --- a/src/hyperactive/opt/gfo/_powells_method.py +++ b/src/hyperactive/opt/gfo/_powells_method.py @@ -35,7 +35,43 @@ class PowellsMethod(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of PowellsMethod with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the powellsMethod optimizer: + >>> from hyperactive.opt import PowellsMethod + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... 
"gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = PowellsMethod(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py index 17d1481f..be9f685f 100644 --- a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py +++ b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py @@ -30,7 +30,43 @@ class RandomRestartHillClimbing(_BaseGFOadapter): of those neighbours. n_iter_restart : int The number of iterations after which to restart at a random position. - """ + + Examples + -------- + Basic usage of RandomRestartHillClimbing with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the randomRestartHillClimbing optimizer: + >>> from hyperactive.opt import RandomRestartHillClimbing + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = RandomRestartHillClimbing(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_random_search.py b/src/hyperactive/opt/gfo/_random_search.py index d4926923..9696c6a8 100644 --- a/src/hyperactive/opt/gfo/_random_search.py +++ b/src/hyperactive/opt/gfo/_random_search.py @@ -27,7 +27,43 @@ class RandomSearch(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of RandomSearch with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the randomSearch optimizer: + >>> from hyperactive.opt import RandomSearch + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = RandomSearch(experiment=sklearn_exp, **config) + + 3. 
running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_simulated_annealing.py b/src/hyperactive/opt/gfo/_simulated_annealing.py index e46133cd..319ed606 100644 --- a/src/hyperactive/opt/gfo/_simulated_annealing.py +++ b/src/hyperactive/opt/gfo/_simulated_annealing.py @@ -39,7 +39,43 @@ class SimulatedAnnealing(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of SimulatedAnnealing with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the simulatedAnnealing optimizer: + >>> from hyperactive.opt import SimulatedAnnealing + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = SimulatedAnnealing(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_spiral_optimization.py b/src/hyperactive/opt/gfo/_spiral_optimization.py index a555c3e1..527cc35a 100644 --- a/src/hyperactive/opt/gfo/_spiral_optimization.py +++ b/src/hyperactive/opt/gfo/_spiral_optimization.py @@ -34,7 +34,43 @@ class SpiralOptimization(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of SpiralOptimization with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the spiralOptimization optimizer: + >>> from hyperactive.opt import SpiralOptimization + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = SpiralOptimization(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ def __init__( self, diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py index 7d1053f8..40d1bf4d 100644 --- a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py +++ b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py @@ -39,7 +39,43 @@ class TreeStructuredParzenEstimators(_BaseGFOadapter): experiment : BaseExperiment, optional The experiment to optimize parameters for. 
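
The three-step pattern in these examples (wrap the experiment, build a config dict, call run) is the same for every GFO adapter; only the estimator and the search-space keys change. A minimal sketch of the same flow with a different estimator, where RandomForestClassifier and its parameter grid are illustrative assumptions rather than part of this patch:

    from hyperactive.experiment.integrations import SklearnCvExperiment
    from hyperactive.opt import RandomSearch
    from sklearn.datasets import load_iris
    from sklearn.ensemble import RandomForestClassifier

    X, y = load_iris(return_X_y=True)

    # same experiment wrapper, different estimator (illustrative assumption)
    forest_exp = SklearnCvExperiment(estimator=RandomForestClassifier(), X=X, y=y)

    # the search-space keys must match the estimator's hyperparameter names
    config = {
        "search_space": {
            "n_estimators": [10, 50, 100],
            "max_depth": [2, 5, 10],
        },
        "n_iter": 20,
    }
    optimizer = RandomSearch(experiment=forest_exp, **config)
    best_params = optimizer.run()
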
Optional, can be passed later via ``set_params``. - """ + + Examples + -------- + Basic usage of TreeStructuredParzenEstimators with a scikit-learn experiment: + + 1. defining the experiment to optimize: + >>> from hyperactive.experiment.integrations import SklearnCvExperiment + >>> from sklearn.datasets import load_iris + >>> from sklearn.svm import SVC + >>> + >>> X, y = load_iris(return_X_y=True) + >>> + >>> sklearn_exp = SklearnCvExperiment( + ... estimator=SVC(), + ... X=X, + ... y=y, + ... ) + + 2. setting up the treeStructuredParzenEstimators optimizer: + >>> from hyperactive.opt import TreeStructuredParzenEstimators + >>> import numpy as np + >>> + >>> config = { + ... "search_space": { + ... "C": np.array([0.01, 0.1, 1, 10]), + ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... }, + ... "n_iter": 100, + ... } + >>> optimizer = TreeStructuredParzenEstimators(experiment=sklearn_exp, **config) + + 3. running the optimization: + >>> best_params = optimizer.run() + + Best parameters can also be accessed via: + >>> best_params = optimizer.best_params_ + """ _tags = { "info:name": "Tree Structured Parzen Estimators", From 1eba3bbd622718541e0d8a10411a498c921bb6ce Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Thu, 19 Jun 2025 21:07:00 +0200 Subject: [PATCH 30/49] add _tags to optimizers --- src/hyperactive/opt/gfo/_bayesian_optimization.py | 11 +++++++++-- src/hyperactive/opt/gfo/_differential_evolution.py | 11 +++++++++-- src/hyperactive/opt/gfo/_direct_algorithm.py | 11 +++++++++-- src/hyperactive/opt/gfo/_downhill_simplex.py | 11 +++++++++-- src/hyperactive/opt/gfo/_evolution_strategy.py | 11 +++++++++-- src/hyperactive/opt/gfo/_forest_optimizer.py | 11 +++++++++-- src/hyperactive/opt/gfo/_genetic_algorithm.py | 11 +++++++++-- src/hyperactive/opt/gfo/_grid_search.py | 11 +++++++++-- src/hyperactive/opt/gfo/_hillclimbing.py | 3 ++- src/hyperactive/opt/gfo/_lipschitz_optimization.py | 11 +++++++++-- src/hyperactive/opt/gfo/_parallel_tempering.py | 11 +++++++++-- .../opt/gfo/_particle_swarm_optimization.py | 11 +++++++++-- src/hyperactive/opt/gfo/_pattern_search.py | 11 +++++++++-- src/hyperactive/opt/gfo/_powells_method.py | 11 +++++++++-- .../opt/gfo/_random_restart_hill_climbing.py | 11 +++++++++-- src/hyperactive/opt/gfo/_random_search.py | 11 +++++++++-- src/hyperactive/opt/gfo/_simulated_annealing.py | 11 +++++++++-- src/hyperactive/opt/gfo/_spiral_optimization.py | 11 +++++++++-- .../opt/gfo/_tree_structured_parzen_estimators.py | 2 +- 19 files changed, 156 insertions(+), 36 deletions(-) diff --git a/src/hyperactive/opt/gfo/_bayesian_optimization.py b/src/hyperactive/opt/gfo/_bayesian_optimization.py index 8836df8e..527f7cdc 100644 --- a/src/hyperactive/opt/gfo/_bayesian_optimization.py +++ b/src/hyperactive/opt/gfo/_bayesian_optimization.py @@ -76,7 +76,14 @@ class BayesianOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Bayesian Optimization", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "exploit", + "info:compute": "high", + } + def __init__( self, @@ -146,4 +153,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_differential_evolution.py b/src/hyperactive/opt/gfo/_differential_evolution.py index b2a6b71d..8ddafa50 100644 --- a/src/hyperactive/opt/gfo/_differential_evolution.py +++ 
b/src/hyperactive/opt/gfo/_differential_evolution.py @@ -70,7 +70,14 @@ class DifferentialEvolution(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Differential Evolution", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "middle", + } + def __init__( self, @@ -137,4 +144,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_direct_algorithm.py b/src/hyperactive/opt/gfo/_direct_algorithm.py index 144e72e1..de9c4003 100644 --- a/src/hyperactive/opt/gfo/_direct_algorithm.py +++ b/src/hyperactive/opt/gfo/_direct_algorithm.py @@ -72,7 +72,14 @@ class DirectAlgorithm(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "DIRECT Algorithm", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "mixed", + "info:compute": "high", + } + def __init__( self, @@ -140,4 +147,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_downhill_simplex.py b/src/hyperactive/opt/gfo/_downhill_simplex.py index 935b34fc..275e372a 100644 --- a/src/hyperactive/opt/gfo/_downhill_simplex.py +++ b/src/hyperactive/opt/gfo/_downhill_simplex.py @@ -72,7 +72,14 @@ class DownhillSimplexOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Downhill Simplex", + "info:local_vs_global": "local", + "info:explore_vs_exploit": "exploit", + "info:compute": "low", + } + def __init__( self, @@ -142,4 +149,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_evolution_strategy.py b/src/hyperactive/opt/gfo/_evolution_strategy.py index a46fb045..6434f437 100644 --- a/src/hyperactive/opt/gfo/_evolution_strategy.py +++ b/src/hyperactive/opt/gfo/_evolution_strategy.py @@ -76,7 +76,14 @@ class EvolutionStrategy(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Evolution Strategy", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "middle", + } + def __init__( self, @@ -149,4 +156,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py b/src/hyperactive/opt/gfo/_forest_optimizer.py index 623f8880..6e5f03cc 100644 --- a/src/hyperactive/opt/gfo/_forest_optimizer.py +++ b/src/hyperactive/opt/gfo/_forest_optimizer.py @@ -78,7 +78,14 @@ class ForestOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Forest Optimizer", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "middle", + } + def __init__( self, @@ -153,4 +160,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at 
end of file diff --git a/src/hyperactive/opt/gfo/_genetic_algorithm.py b/src/hyperactive/opt/gfo/_genetic_algorithm.py index 631c3b24..4f93ac5e 100644 --- a/src/hyperactive/opt/gfo/_genetic_algorithm.py +++ b/src/hyperactive/opt/gfo/_genetic_algorithm.py @@ -76,7 +76,14 @@ class GeneticAlgorithm(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Genetic Algorithm", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "high", + } + def __init__( self, @@ -152,4 +159,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_grid_search.py b/src/hyperactive/opt/gfo/_grid_search.py index 733c298e..e8c6a310 100644 --- a/src/hyperactive/opt/gfo/_grid_search.py +++ b/src/hyperactive/opt/gfo/_grid_search.py @@ -68,7 +68,14 @@ class GridSearch(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Grid Search", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "high", + } + def __init__( self, @@ -132,4 +139,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_hillclimbing.py b/src/hyperactive/opt/gfo/_hillclimbing.py index a6eabc81..2db8a931 100644 --- a/src/hyperactive/opt/gfo/_hillclimbing.py +++ b/src/hyperactive/opt/gfo/_hillclimbing.py @@ -1,4 +1,5 @@ """Hill climbing optimizer from gfo.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) from hyperactive.opt._adapters._gfo import _BaseGFOadapter @@ -60,7 +61,7 @@ class HillClimbing(_BaseGFOadapter): 2. setting up the hill climbing optimizer: >>> from hyperactive.opt import HillClimbing >>> import numpy as np - >>> + >>> >>> hillclimbing_config = { ... "search_space": { ... 
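
The _tags dictionaries added throughout this patch are plain class attributes; assuming the scikit-base tag interface (get_class_tag) is inherited unchanged by these optimizers, the metadata could be queried without instantiating anything, as in this illustrative sketch (not part of the patch):

    from hyperactive.opt import PowellsMethod, RandomSearch

    # class-level lookup of a single tag (assumes skbase's get_class_tag)
    PowellsMethod.get_class_tag("info:local_vs_global")  # "local" per this patch
    RandomSearch.get_class_tag("info:compute")           # "low" per this patch

    # or filter a set of optimizers by a tag value
    cheap = [
        opt for opt in (PowellsMethod, RandomSearch)
        if opt.get_class_tag("info:compute") == "low"
    ]
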
"C": np.array([0.01, 0.1, 1, 10]), diff --git a/src/hyperactive/opt/gfo/_lipschitz_optimization.py b/src/hyperactive/opt/gfo/_lipschitz_optimization.py index 36bad83f..a4388005 100644 --- a/src/hyperactive/opt/gfo/_lipschitz_optimization.py +++ b/src/hyperactive/opt/gfo/_lipschitz_optimization.py @@ -72,7 +72,14 @@ class LipschitzOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Lipschitz Optimization", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "mixed", + "info:compute": "high", + } + def __init__( self, @@ -140,4 +147,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_parallel_tempering.py b/src/hyperactive/opt/gfo/_parallel_tempering.py index 7beaa0c0..72d976cb 100644 --- a/src/hyperactive/opt/gfo/_parallel_tempering.py +++ b/src/hyperactive/opt/gfo/_parallel_tempering.py @@ -71,7 +71,14 @@ class ParallelTempering(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Parallel Tempering", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "high", + } + def __init__( self, @@ -135,4 +142,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py index 399842f6..fbf90377 100644 --- a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py +++ b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py @@ -74,7 +74,14 @@ class ParticleSwarmOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Particle Swarm Optimization", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "middle", + } + def __init__( self, @@ -147,4 +154,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_pattern_search.py b/src/hyperactive/opt/gfo/_pattern_search.py index 105864fc..1c027ae8 100644 --- a/src/hyperactive/opt/gfo/_pattern_search.py +++ b/src/hyperactive/opt/gfo/_pattern_search.py @@ -70,7 +70,14 @@ class PatternSearch(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Pattern Search", + "info:local_vs_global": "local", + "info:explore_vs_exploit": "explore", + "info:compute": "middle", + } + def __init__( self, @@ -137,4 +144,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_powells_method.py b/src/hyperactive/opt/gfo/_powells_method.py index 5ff2a0bd..83a8a572 100644 --- a/src/hyperactive/opt/gfo/_powells_method.py +++ b/src/hyperactive/opt/gfo/_powells_method.py @@ -71,7 +71,14 @@ class PowellsMethod(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Powell’s Method", + 
"info:local_vs_global": "local", + "info:explore_vs_exploit": "exploit", + "info:compute": "low", + } + def __init__( self, @@ -132,4 +139,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py index be9f685f..535b0f0e 100644 --- a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py +++ b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py @@ -66,7 +66,14 @@ class RandomRestartHillClimbing(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Random Restart Hill Climbing", + "info:local_vs_global": "local", + "info:explore_vs_exploit": "mixed", + "info:compute": "middle", + } + def __init__( self, @@ -133,4 +140,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_random_search.py b/src/hyperactive/opt/gfo/_random_search.py index 9696c6a8..7b78bda5 100644 --- a/src/hyperactive/opt/gfo/_random_search.py +++ b/src/hyperactive/opt/gfo/_random_search.py @@ -63,7 +63,14 @@ class RandomSearch(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Random Search", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "low", + } + def __init__( self, @@ -119,4 +126,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_simulated_annealing.py b/src/hyperactive/opt/gfo/_simulated_annealing.py index 319ed606..d6fb5742 100644 --- a/src/hyperactive/opt/gfo/_simulated_annealing.py +++ b/src/hyperactive/opt/gfo/_simulated_annealing.py @@ -75,7 +75,14 @@ class SimulatedAnnealing(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Simulated Annealing", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "middle", + } + def __init__( self, @@ -145,4 +152,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_spiral_optimization.py b/src/hyperactive/opt/gfo/_spiral_optimization.py index 527cc35a..b506da0b 100644 --- a/src/hyperactive/opt/gfo/_spiral_optimization.py +++ b/src/hyperactive/opt/gfo/_spiral_optimization.py @@ -70,7 +70,14 @@ class SpiralOptimization(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ + _tags = { + "info:name": "Spiral Optimization", + "info:local_vs_global": "mixed", + "info:explore_vs_exploit": "explore", + "info:compute": "middle", + } + def __init__( self, @@ -134,4 +141,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params + return params \ No newline at end of file diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py index 40d1bf4d..8e7001e9 100644 --- 
a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py +++ b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py @@ -75,7 +75,7 @@ class TreeStructuredParzenEstimators(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ - """ + """ _tags = { "info:name": "Tree Structured Parzen Estimators", From 7d74989fd3f78fd471188f80326b1cbf1e4b83a6 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 21 Jun 2025 16:39:09 +0200 Subject: [PATCH 31/49] fix mutable default error --- src/hyperactive/base/_optimizer.py | 5 +++++ src/hyperactive/opt/gfo/_bayesian_optimization.py | 6 +++--- src/hyperactive/opt/gfo/_direct_algorithm.py | 6 +++--- src/hyperactive/opt/gfo/_forest_optimizer.py | 6 +++--- src/hyperactive/opt/gfo/_lipschitz_optimization.py | 6 +++--- .../opt/gfo/_tree_structured_parzen_estimators.py | 2 +- 6 files changed, 18 insertions(+), 13 deletions(-) diff --git a/src/hyperactive/base/_optimizer.py b/src/hyperactive/base/_optimizer.py index f1dc0fa9..b7356787 100644 --- a/src/hyperactive/base/_optimizer.py +++ b/src/hyperactive/base/_optimizer.py @@ -1,4 +1,5 @@ """Base class for optimizer.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) from skbase.base import BaseObject @@ -38,6 +39,10 @@ def get_search_config(self): """ search_config = self.get_params(deep=False) search_config.pop("experiment", None) + + if search_config["sampling"] is None: + search_config["sampling"] = {"random": 1000000} + return search_config def get_experiment(self): diff --git a/src/hyperactive/opt/gfo/_bayesian_optimization.py b/src/hyperactive/opt/gfo/_bayesian_optimization.py index 527f7cdc..ee4c5faa 100644 --- a/src/hyperactive/opt/gfo/_bayesian_optimization.py +++ b/src/hyperactive/opt/gfo/_bayesian_optimization.py @@ -77,6 +77,7 @@ class BayesianOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Bayesian Optimization", "info:local_vs_global": "global", @@ -84,7 +85,6 @@ class BayesianOptimizer(_BaseGFOadapter): "info:compute": "high", } - def __init__( self, search_space=None, @@ -94,7 +94,7 @@ def __init__( rand_rest_p=0.1, warm_start_smbo=None, max_sample_size=10000000, - sampling={"random": 1000000}, + sampling=None, replacement=True, xi=0.03, n_iter=100, @@ -153,4 +153,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_direct_algorithm.py b/src/hyperactive/opt/gfo/_direct_algorithm.py index de9c4003..f1192389 100644 --- a/src/hyperactive/opt/gfo/_direct_algorithm.py +++ b/src/hyperactive/opt/gfo/_direct_algorithm.py @@ -73,6 +73,7 @@ class DirectAlgorithm(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "DIRECT Algorithm", "info:local_vs_global": "global", @@ -80,7 +81,6 @@ class DirectAlgorithm(_BaseGFOadapter): "info:compute": "high", } - def __init__( self, search_space=None, @@ -90,7 +90,7 @@ def __init__( rand_rest_p=0.1, warm_start_smbo=None, max_sample_size: int = 10000000, - sampling={"random": 1000000}, + sampling=None, replacement=True, n_iter=100, verbose=False, @@ -147,4 +147,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py 
b/src/hyperactive/opt/gfo/_forest_optimizer.py index 6e5f03cc..acea37f6 100644 --- a/src/hyperactive/opt/gfo/_forest_optimizer.py +++ b/src/hyperactive/opt/gfo/_forest_optimizer.py @@ -79,6 +79,7 @@ class ForestOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Forest Optimizer", "info:local_vs_global": "global", @@ -86,7 +87,6 @@ class ForestOptimizer(_BaseGFOadapter): "info:compute": "middle", } - def __init__( self, search_space=None, @@ -96,7 +96,7 @@ def __init__( rand_rest_p=0.1, warm_start_smbo=None, max_sample_size=10000000, - sampling={"random": 1000000}, + sampling=None, replacement=True, tree_regressor="extra_tree", tree_para={"n_estimators": 100}, @@ -160,4 +160,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_lipschitz_optimization.py b/src/hyperactive/opt/gfo/_lipschitz_optimization.py index a4388005..d5c3cadb 100644 --- a/src/hyperactive/opt/gfo/_lipschitz_optimization.py +++ b/src/hyperactive/opt/gfo/_lipschitz_optimization.py @@ -73,6 +73,7 @@ class LipschitzOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Lipschitz Optimization", "info:local_vs_global": "global", @@ -80,7 +81,6 @@ class LipschitzOptimizer(_BaseGFOadapter): "info:compute": "high", } - def __init__( self, search_space=None, @@ -90,7 +90,7 @@ def __init__( rand_rest_p=0.1, warm_start_smbo=None, max_sample_size=10000000, - sampling={"random": 1000000}, + sampling=None, replacement=True, n_iter=100, verbose=False, @@ -147,4 +147,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py index 8e7001e9..0fed9dc9 100644 --- a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py +++ b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py @@ -93,7 +93,7 @@ def __init__( rand_rest_p=0.1, warm_start_smbo=None, max_sample_size=10000000, - sampling={"random": 1000000}, + sampling=None, replacement=True, gamma_tpe=0.2, n_iter=100, From b6bec074be49c081de73d930ddfc95e2b3826533 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 21 Jun 2025 16:41:11 +0200 Subject: [PATCH 32/49] fix another mutable default error --- src/hyperactive/base/_optimizer.py | 3 +++ src/hyperactive/opt/gfo/_forest_optimizer.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/hyperactive/base/_optimizer.py b/src/hyperactive/base/_optimizer.py index b7356787..16fa7e7b 100644 --- a/src/hyperactive/base/_optimizer.py +++ b/src/hyperactive/base/_optimizer.py @@ -43,6 +43,9 @@ def get_search_config(self): if search_config["sampling"] is None: search_config["sampling"] = {"random": 1000000} + if search_config["tree_para"] is None: + search_config["tree_para"] = {"n_estimators": 100} + return search_config def get_experiment(self): diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py b/src/hyperactive/opt/gfo/_forest_optimizer.py index acea37f6..6c63fa69 100644 --- a/src/hyperactive/opt/gfo/_forest_optimizer.py +++ b/src/hyperactive/opt/gfo/_forest_optimizer.py @@ -99,7 +99,7 @@ def __init__( sampling=None, replacement=True, 
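
A minimal, self-contained sketch of the bug class the mutable-default fixes in these patches address; the functions below are illustrative, not taken from the code base. A dict default such as sampling={"random": 1000000} is built once at definition time and shared by every call, which is why the parameters are switched to a None sentinel and re-expanded inside get_search_config (guarded by a key check in the follow-up "check if key is in dict" patch, since not every optimizer has these parameters):

    # illustrative only: why a mutable default argument is risky
    def configure(sampling={"random": 1000000}):  # one dict, created once, shared
        return sampling

    a = configure()
    b = configure()
    a["random"] = 1
    assert b["random"] == 1  # b sees a's mutation: both names point at the same dict

    # the None-sentinel idiom used by the patch avoids the shared state
    def configure_safe(sampling=None):
        if sampling is None:  # re-expand the default on every call
            sampling = {"random": 1000000}
        return sampling

    assert configure_safe() is not configure_safe()  # a fresh dict each time
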
tree_regressor="extra_tree", - tree_para={"n_estimators": 100}, + tree_para=None, xi=0.03, n_iter=100, verbose=False, From d8b5168db09a04251437d0211319598f1be3ce3d Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 21 Jun 2025 16:45:56 +0200 Subject: [PATCH 33/49] check if key is in dict --- src/hyperactive/base/_optimizer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/hyperactive/base/_optimizer.py b/src/hyperactive/base/_optimizer.py index 16fa7e7b..aa93aea6 100644 --- a/src/hyperactive/base/_optimizer.py +++ b/src/hyperactive/base/_optimizer.py @@ -40,10 +40,10 @@ def get_search_config(self): search_config = self.get_params(deep=False) search_config.pop("experiment", None) - if search_config["sampling"] is None: + if "sampling" in search_config and search_config["sampling"] is None: search_config["sampling"] = {"random": 1000000} - if search_config["tree_para"] is None: + if "tree_para" in search_config and search_config["tree_para"] is None: search_config["tree_para"] = {"n_estimators": 100} return search_config From 540e4b6be13170a29e40e00f4b1979d00aa5d32a Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 21 Jun 2025 16:46:12 +0200 Subject: [PATCH 34/49] fix syntax error in docstring example --- src/hyperactive/opt/gfo/_bayesian_optimization.py | 2 +- src/hyperactive/opt/gfo/_differential_evolution.py | 6 +++--- src/hyperactive/opt/gfo/_direct_algorithm.py | 2 +- src/hyperactive/opt/gfo/_downhill_simplex.py | 6 +++--- src/hyperactive/opt/gfo/_evolution_strategy.py | 6 +++--- src/hyperactive/opt/gfo/_forest_optimizer.py | 2 +- src/hyperactive/opt/gfo/_genetic_algorithm.py | 6 +++--- src/hyperactive/opt/gfo/_grid_search.py | 6 +++--- src/hyperactive/opt/gfo/_lipschitz_optimization.py | 2 +- src/hyperactive/opt/gfo/_parallel_tempering.py | 6 +++--- src/hyperactive/opt/gfo/_particle_swarm_optimization.py | 6 +++--- src/hyperactive/opt/gfo/_pattern_search.py | 6 +++--- src/hyperactive/opt/gfo/_powells_method.py | 6 +++--- src/hyperactive/opt/gfo/_random_restart_hill_climbing.py | 6 +++--- src/hyperactive/opt/gfo/_random_search.py | 6 +++--- src/hyperactive/opt/gfo/_simulated_annealing.py | 6 +++--- src/hyperactive/opt/gfo/_spiral_optimization.py | 6 +++--- .../opt/gfo/_tree_structured_parzen_estimators.py | 2 +- 18 files changed, 44 insertions(+), 44 deletions(-) diff --git a/src/hyperactive/opt/gfo/_bayesian_optimization.py b/src/hyperactive/opt/gfo/_bayesian_optimization.py index ee4c5faa..9a87aadb 100644 --- a/src/hyperactive/opt/gfo/_bayesian_optimization.py +++ b/src/hyperactive/opt/gfo/_bayesian_optimization.py @@ -65,7 +65,7 @@ class BayesianOptimizer(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } diff --git a/src/hyperactive/opt/gfo/_differential_evolution.py b/src/hyperactive/opt/gfo/_differential_evolution.py index 8ddafa50..55f9e6dc 100644 --- a/src/hyperactive/opt/gfo/_differential_evolution.py +++ b/src/hyperactive/opt/gfo/_differential_evolution.py @@ -59,7 +59,7 @@ class DifferentialEvolution(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... 
} @@ -71,6 +71,7 @@ class DifferentialEvolution(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Differential Evolution", "info:local_vs_global": "global", @@ -78,7 +79,6 @@ class DifferentialEvolution(_BaseGFOadapter): "info:compute": "middle", } - def __init__( self, search_space=None, @@ -144,4 +144,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_direct_algorithm.py b/src/hyperactive/opt/gfo/_direct_algorithm.py index f1192389..33453aef 100644 --- a/src/hyperactive/opt/gfo/_direct_algorithm.py +++ b/src/hyperactive/opt/gfo/_direct_algorithm.py @@ -61,7 +61,7 @@ class DirectAlgorithm(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } diff --git a/src/hyperactive/opt/gfo/_downhill_simplex.py b/src/hyperactive/opt/gfo/_downhill_simplex.py index 275e372a..43dfcbe3 100644 --- a/src/hyperactive/opt/gfo/_downhill_simplex.py +++ b/src/hyperactive/opt/gfo/_downhill_simplex.py @@ -61,7 +61,7 @@ class DownhillSimplexOptimizer(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } @@ -73,6 +73,7 @@ class DownhillSimplexOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Downhill Simplex", "info:local_vs_global": "local", @@ -80,7 +81,6 @@ class DownhillSimplexOptimizer(_BaseGFOadapter): "info:compute": "low", } - def __init__( self, search_space=None, @@ -149,4 +149,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_evolution_strategy.py b/src/hyperactive/opt/gfo/_evolution_strategy.py index 6434f437..5f466c39 100644 --- a/src/hyperactive/opt/gfo/_evolution_strategy.py +++ b/src/hyperactive/opt/gfo/_evolution_strategy.py @@ -65,7 +65,7 @@ class EvolutionStrategy(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } @@ -77,6 +77,7 @@ class EvolutionStrategy(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Evolution Strategy", "info:local_vs_global": "global", @@ -84,7 +85,6 @@ class EvolutionStrategy(_BaseGFOadapter): "info:compute": "middle", } - def __init__( self, search_space=None, @@ -156,4 +156,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py b/src/hyperactive/opt/gfo/_forest_optimizer.py index 6c63fa69..de3ce666 100644 --- a/src/hyperactive/opt/gfo/_forest_optimizer.py +++ b/src/hyperactive/opt/gfo/_forest_optimizer.py @@ -67,7 +67,7 @@ class ForestOptimizer(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... 
"gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } diff --git a/src/hyperactive/opt/gfo/_genetic_algorithm.py b/src/hyperactive/opt/gfo/_genetic_algorithm.py index 4f93ac5e..78809a14 100644 --- a/src/hyperactive/opt/gfo/_genetic_algorithm.py +++ b/src/hyperactive/opt/gfo/_genetic_algorithm.py @@ -65,7 +65,7 @@ class GeneticAlgorithm(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } @@ -77,6 +77,7 @@ class GeneticAlgorithm(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Genetic Algorithm", "info:local_vs_global": "global", @@ -84,7 +85,6 @@ class GeneticAlgorithm(_BaseGFOadapter): "info:compute": "high", } - def __init__( self, search_space=None, @@ -159,4 +159,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_grid_search.py b/src/hyperactive/opt/gfo/_grid_search.py index e8c6a310..5ee2bd5e 100644 --- a/src/hyperactive/opt/gfo/_grid_search.py +++ b/src/hyperactive/opt/gfo/_grid_search.py @@ -57,7 +57,7 @@ class GridSearch(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } @@ -69,6 +69,7 @@ class GridSearch(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Grid Search", "info:local_vs_global": "global", @@ -76,7 +77,6 @@ class GridSearch(_BaseGFOadapter): "info:compute": "high", } - def __init__( self, search_space=None, @@ -139,4 +139,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_lipschitz_optimization.py b/src/hyperactive/opt/gfo/_lipschitz_optimization.py index d5c3cadb..c5e2d351 100644 --- a/src/hyperactive/opt/gfo/_lipschitz_optimization.py +++ b/src/hyperactive/opt/gfo/_lipschitz_optimization.py @@ -61,7 +61,7 @@ class LipschitzOptimizer(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } diff --git a/src/hyperactive/opt/gfo/_parallel_tempering.py b/src/hyperactive/opt/gfo/_parallel_tempering.py index 72d976cb..192ecd75 100644 --- a/src/hyperactive/opt/gfo/_parallel_tempering.py +++ b/src/hyperactive/opt/gfo/_parallel_tempering.py @@ -60,7 +60,7 @@ class ParallelTempering(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... 
} @@ -72,6 +72,7 @@ class ParallelTempering(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Parallel Tempering", "info:local_vs_global": "global", @@ -79,7 +80,6 @@ class ParallelTempering(_BaseGFOadapter): "info:compute": "high", } - def __init__( self, search_space=None, @@ -142,4 +142,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py index fbf90377..1dd8d290 100644 --- a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py +++ b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py @@ -63,7 +63,7 @@ class ParticleSwarmOptimizer(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } @@ -75,6 +75,7 @@ class ParticleSwarmOptimizer(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Particle Swarm Optimization", "info:local_vs_global": "global", @@ -82,7 +83,6 @@ class ParticleSwarmOptimizer(_BaseGFOadapter): "info:compute": "middle", } - def __init__( self, search_space=None, @@ -154,4 +154,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_pattern_search.py b/src/hyperactive/opt/gfo/_pattern_search.py index 1c027ae8..a77f0075 100644 --- a/src/hyperactive/opt/gfo/_pattern_search.py +++ b/src/hyperactive/opt/gfo/_pattern_search.py @@ -59,7 +59,7 @@ class PatternSearch(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } @@ -71,6 +71,7 @@ class PatternSearch(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Pattern Search", "info:local_vs_global": "local", @@ -78,7 +79,6 @@ class PatternSearch(_BaseGFOadapter): "info:compute": "middle", } - def __init__( self, search_space=None, @@ -144,4 +144,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_powells_method.py b/src/hyperactive/opt/gfo/_powells_method.py index 83a8a572..e0e7c826 100644 --- a/src/hyperactive/opt/gfo/_powells_method.py +++ b/src/hyperactive/opt/gfo/_powells_method.py @@ -60,7 +60,7 @@ class PowellsMethod(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... 
} @@ -72,6 +72,7 @@ class PowellsMethod(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Powell’s Method", "info:local_vs_global": "local", @@ -79,7 +80,6 @@ class PowellsMethod(_BaseGFOadapter): "info:compute": "low", } - def __init__( self, search_space=None, @@ -139,4 +139,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py index 535b0f0e..df054a2e 100644 --- a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py +++ b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py @@ -55,7 +55,7 @@ class RandomRestartHillClimbing(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } @@ -67,6 +67,7 @@ class RandomRestartHillClimbing(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Random Restart Hill Climbing", "info:local_vs_global": "local", @@ -74,7 +75,6 @@ class RandomRestartHillClimbing(_BaseGFOadapter): "info:compute": "middle", } - def __init__( self, search_space=None, @@ -140,4 +140,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_random_search.py b/src/hyperactive/opt/gfo/_random_search.py index 7b78bda5..9142195a 100644 --- a/src/hyperactive/opt/gfo/_random_search.py +++ b/src/hyperactive/opt/gfo/_random_search.py @@ -52,7 +52,7 @@ class RandomSearch(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } @@ -64,6 +64,7 @@ class RandomSearch(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Random Search", "info:local_vs_global": "global", @@ -71,7 +72,6 @@ class RandomSearch(_BaseGFOadapter): "info:compute": "low", } - def __init__( self, search_space=None, @@ -126,4 +126,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_simulated_annealing.py b/src/hyperactive/opt/gfo/_simulated_annealing.py index d6fb5742..2ae5fad5 100644 --- a/src/hyperactive/opt/gfo/_simulated_annealing.py +++ b/src/hyperactive/opt/gfo/_simulated_annealing.py @@ -64,7 +64,7 @@ class SimulatedAnnealing(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... 
} @@ -76,6 +76,7 @@ class SimulatedAnnealing(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Simulated Annealing", "info:local_vs_global": "global", @@ -83,7 +84,6 @@ class SimulatedAnnealing(_BaseGFOadapter): "info:compute": "middle", } - def __init__( self, search_space=None, @@ -152,4 +152,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_spiral_optimization.py b/src/hyperactive/opt/gfo/_spiral_optimization.py index b506da0b..4a430d1c 100644 --- a/src/hyperactive/opt/gfo/_spiral_optimization.py +++ b/src/hyperactive/opt/gfo/_spiral_optimization.py @@ -59,7 +59,7 @@ class SpiralOptimization(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } @@ -71,6 +71,7 @@ class SpiralOptimization(_BaseGFOadapter): Best parameters can also be accessed via: >>> best_params = optimizer.best_params_ """ + _tags = { "info:name": "Spiral Optimization", "info:local_vs_global": "mixed", @@ -78,7 +79,6 @@ class SpiralOptimization(_BaseGFOadapter): "info:compute": "middle", } - def __init__( self, search_space=None, @@ -141,4 +141,4 @@ def get_test_params(cls, parameter_set="default"): "n_iter": 100, } params.append(more_params) - return params \ No newline at end of file + return params diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py index 0fed9dc9..124b6f9c 100644 --- a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py +++ b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py @@ -64,7 +64,7 @@ class TreeStructuredParzenEstimators(_BaseGFOadapter): >>> config = { ... "search_space": { ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": : np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), ... }, ... "n_iter": 100, ... } From e87da4aeb85649eb114b3bc9b072658f89b29eca Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 21 Jun 2025 16:47:56 +0200 Subject: [PATCH 35/49] fix imports in docstring examples --- src/hyperactive/opt/gfo/_repulsing_hillclimbing.py | 4 ++-- src/hyperactive/opt/gfo/_stochastic_hillclimbing.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py b/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py index 88a23e99..1c943df0 100644 --- a/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py +++ b/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py @@ -61,7 +61,7 @@ class RepulsingHillClimbing(_BaseGFOadapter): ... ) 2. setting up the hill climbing optimizer: - >>> from hyperactive.opt import HillClimbingRepulsing + >>> from hyperactive.opt import RepulsingHillClimbing >>> import numpy as np >>> >>> hc_config = { @@ -71,7 +71,7 @@ class RepulsingHillClimbing(_BaseGFOadapter): ... }, ... "n_iter": 100, ... } - >>> hillclimbing = HillClimbingRepulsing(experiment=sklearn_exp, **hc_config) + >>> hillclimbing = RepulsingHillClimbing(experiment=sklearn_exp, **hc_config) 3. 
running the hill climbing search: >>> best_params = hillclimbing.run() diff --git a/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py b/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py index eb31c0fc..c1fbfc83 100644 --- a/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py +++ b/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py @@ -61,7 +61,7 @@ class StochasticHillClimbing(_BaseGFOadapter): ... ) 2. setting up the hill climbing optimizer: - >>> from hyperactive.opt import HillClimbingStochastic + >>> from hyperactive.opt import StochasticHillClimbing >>> import numpy as np >>> >>> hc_config = { @@ -71,7 +71,7 @@ class StochasticHillClimbing(_BaseGFOadapter): ... }, ... "n_iter": 100, ... } - >>> hillclimbing = HillClimbingStochastic(experiment=sklearn_exp, **hc_config) + >>> hillclimbing = StochasticHillClimbing(experiment=sklearn_exp, **hc_config) 3. running the hill climbing search: >>> best_params = hillclimbing.run() From 9410d4dc92668858633bd9f93c9baff481e599c4 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sat, 21 Jun 2025 17:00:30 +0200 Subject: [PATCH 36/49] fix examples (numpy.arrays to lists in search-space dict-values) --- src/hyperactive/opt/gfo/_bayesian_optimization.py | 8 ++++---- src/hyperactive/opt/gfo/_differential_evolution.py | 8 ++++---- src/hyperactive/opt/gfo/_direct_algorithm.py | 8 ++++---- src/hyperactive/opt/gfo/_downhill_simplex.py | 8 ++++---- src/hyperactive/opt/gfo/_evolution_strategy.py | 8 ++++---- src/hyperactive/opt/gfo/_forest_optimizer.py | 8 ++++---- src/hyperactive/opt/gfo/_genetic_algorithm.py | 8 ++++---- src/hyperactive/opt/gfo/_grid_search.py | 8 ++++---- src/hyperactive/opt/gfo/_hillclimbing.py | 8 ++++---- src/hyperactive/opt/gfo/_lipschitz_optimization.py | 8 ++++---- src/hyperactive/opt/gfo/_parallel_tempering.py | 8 ++++---- .../opt/gfo/_particle_swarm_optimization.py | 8 ++++---- src/hyperactive/opt/gfo/_pattern_search.py | 8 ++++---- src/hyperactive/opt/gfo/_powells_method.py | 8 ++++---- .../opt/gfo/_random_restart_hill_climbing.py | 8 ++++---- src/hyperactive/opt/gfo/_random_search.py | 8 ++++---- src/hyperactive/opt/gfo/_repulsing_hillclimbing.py | 12 ++++++------ src/hyperactive/opt/gfo/_simulated_annealing.py | 8 ++++---- src/hyperactive/opt/gfo/_spiral_optimization.py | 8 ++++---- src/hyperactive/opt/gfo/_stochastic_hillclimbing.py | 12 ++++++------ .../opt/gfo/_tree_structured_parzen_estimators.py | 8 ++++---- 21 files changed, 88 insertions(+), 88 deletions(-) diff --git a/src/hyperactive/opt/gfo/_bayesian_optimization.py b/src/hyperactive/opt/gfo/_bayesian_optimization.py index 9a87aadb..0d4f6b13 100644 --- a/src/hyperactive/opt/gfo/_bayesian_optimization.py +++ b/src/hyperactive/opt/gfo/_bayesian_optimization.py @@ -64,8 +64,8 @@ class BayesianOptimizer(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... 
} @@ -147,8 +147,8 @@ def get_test_params(cls, parameter_set="default"): "experiment": experiment, "xi": 0.33, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_differential_evolution.py b/src/hyperactive/opt/gfo/_differential_evolution.py index 55f9e6dc..0a3d8d9b 100644 --- a/src/hyperactive/opt/gfo/_differential_evolution.py +++ b/src/hyperactive/opt/gfo/_differential_evolution.py @@ -58,8 +58,8 @@ class DifferentialEvolution(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -138,8 +138,8 @@ def get_test_params(cls, parameter_set="default"): "mutation_rate": 0.8, "crossover_rate": 2, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_direct_algorithm.py b/src/hyperactive/opt/gfo/_direct_algorithm.py index 33453aef..9abf95de 100644 --- a/src/hyperactive/opt/gfo/_direct_algorithm.py +++ b/src/hyperactive/opt/gfo/_direct_algorithm.py @@ -60,8 +60,8 @@ class DirectAlgorithm(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -141,8 +141,8 @@ def get_test_params(cls, parameter_set="default"): "replacement": True, "max_sample_size": 1000, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_downhill_simplex.py b/src/hyperactive/opt/gfo/_downhill_simplex.py index 43dfcbe3..145a38ef 100644 --- a/src/hyperactive/opt/gfo/_downhill_simplex.py +++ b/src/hyperactive/opt/gfo/_downhill_simplex.py @@ -60,8 +60,8 @@ class DownhillSimplexOptimizer(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -143,8 +143,8 @@ def get_test_params(cls, parameter_set="default"): "gamma": 0.33, "sigma": 0.33, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_evolution_strategy.py b/src/hyperactive/opt/gfo/_evolution_strategy.py index 5f466c39..2df50c8c 100644 --- a/src/hyperactive/opt/gfo/_evolution_strategy.py +++ b/src/hyperactive/opt/gfo/_evolution_strategy.py @@ -64,8 +64,8 @@ class EvolutionStrategy(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... 
} @@ -150,8 +150,8 @@ def get_test_params(cls, parameter_set="default"): "mutation_rate": 1, "crossover_rate": 2, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py b/src/hyperactive/opt/gfo/_forest_optimizer.py index de3ce666..c6928cbd 100644 --- a/src/hyperactive/opt/gfo/_forest_optimizer.py +++ b/src/hyperactive/opt/gfo/_forest_optimizer.py @@ -66,8 +66,8 @@ class ForestOptimizer(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -154,8 +154,8 @@ def get_test_params(cls, parameter_set="default"): "tree_para": {"n_estimators": 50}, "xi": 0.33, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_genetic_algorithm.py b/src/hyperactive/opt/gfo/_genetic_algorithm.py index 78809a14..aaf281dd 100644 --- a/src/hyperactive/opt/gfo/_genetic_algorithm.py +++ b/src/hyperactive/opt/gfo/_genetic_algorithm.py @@ -64,8 +64,8 @@ class GeneticAlgorithm(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -153,8 +153,8 @@ def get_test_params(cls, parameter_set="default"): "mutation_rate": 0.01, "crossover_rate": 0.02, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_grid_search.py b/src/hyperactive/opt/gfo/_grid_search.py index 5ee2bd5e..191c869c 100644 --- a/src/hyperactive/opt/gfo/_grid_search.py +++ b/src/hyperactive/opt/gfo/_grid_search.py @@ -56,8 +56,8 @@ class GridSearch(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -133,8 +133,8 @@ def get_test_params(cls, parameter_set="default"): "step_size": 3, "direction": "orthogonal", "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_hillclimbing.py b/src/hyperactive/opt/gfo/_hillclimbing.py index 2db8a931..9537793e 100644 --- a/src/hyperactive/opt/gfo/_hillclimbing.py +++ b/src/hyperactive/opt/gfo/_hillclimbing.py @@ -62,14 +62,14 @@ class HillClimbing(_BaseGFOadapter): >>> from hyperactive.opt import HillClimbing >>> import numpy as np >>> - >>> hillclimbing_config = { + >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... 
} - >>> hillclimbing = HillClimbing(experiment=sklearn_exp, **hillclimbing_config) + >>> hillclimbing = HillClimbing(experiment=sklearn_exp, **config) 3. running the hill climbing search: >>> best_params = hillclimbing.run() diff --git a/src/hyperactive/opt/gfo/_lipschitz_optimization.py b/src/hyperactive/opt/gfo/_lipschitz_optimization.py index c5e2d351..83f162ce 100644 --- a/src/hyperactive/opt/gfo/_lipschitz_optimization.py +++ b/src/hyperactive/opt/gfo/_lipschitz_optimization.py @@ -60,8 +60,8 @@ class LipschitzOptimizer(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -141,8 +141,8 @@ def get_test_params(cls, parameter_set="default"): "max_sample_size": 1000, "replacement": False, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_parallel_tempering.py b/src/hyperactive/opt/gfo/_parallel_tempering.py index 192ecd75..9b8208ae 100644 --- a/src/hyperactive/opt/gfo/_parallel_tempering.py +++ b/src/hyperactive/opt/gfo/_parallel_tempering.py @@ -59,8 +59,8 @@ class ParallelTempering(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -136,8 +136,8 @@ def get_test_params(cls, parameter_set="default"): "population": 10, "n_iter_swap": 3, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py index 1dd8d290..a24ac2f0 100644 --- a/src/hyperactive/opt/gfo/_particle_swarm_optimization.py +++ b/src/hyperactive/opt/gfo/_particle_swarm_optimization.py @@ -62,8 +62,8 @@ class ParticleSwarmOptimizer(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -148,8 +148,8 @@ def get_test_params(cls, parameter_set="default"): "social_weight": 0.9, "temp_weight": 0.9, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_pattern_search.py b/src/hyperactive/opt/gfo/_pattern_search.py index a77f0075..ac8d6d27 100644 --- a/src/hyperactive/opt/gfo/_pattern_search.py +++ b/src/hyperactive/opt/gfo/_pattern_search.py @@ -58,8 +58,8 @@ class PatternSearch(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... 
} @@ -138,8 +138,8 @@ def get_test_params(cls, parameter_set="default"): "pattern_size": 0.5, "reduction": 0.999, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_powells_method.py b/src/hyperactive/opt/gfo/_powells_method.py index e0e7c826..00db4a54 100644 --- a/src/hyperactive/opt/gfo/_powells_method.py +++ b/src/hyperactive/opt/gfo/_powells_method.py @@ -59,8 +59,8 @@ class PowellsMethod(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -133,8 +133,8 @@ def get_test_params(cls, parameter_set="default"): "experiment": experiment, "iters_p_dim": 3, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py index df054a2e..8de5bafe 100644 --- a/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py +++ b/src/hyperactive/opt/gfo/_random_restart_hill_climbing.py @@ -54,8 +54,8 @@ class RandomRestartHillClimbing(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -134,8 +134,8 @@ def get_test_params(cls, parameter_set="default"): "experiment": experiment, "n_iter_restart": 2, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_random_search.py b/src/hyperactive/opt/gfo/_random_search.py index 9142195a..06a25887 100644 --- a/src/hyperactive/opt/gfo/_random_search.py +++ b/src/hyperactive/opt/gfo/_random_search.py @@ -51,8 +51,8 @@ class RandomSearch(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -120,8 +120,8 @@ def get_test_params(cls, parameter_set="default"): more_params = { "experiment": experiment, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py b/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py index 1c943df0..e76ba5d7 100644 --- a/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py +++ b/src/hyperactive/opt/gfo/_repulsing_hillclimbing.py @@ -64,14 +64,14 @@ class RepulsingHillClimbing(_BaseGFOadapter): >>> from hyperactive.opt import RepulsingHillClimbing >>> import numpy as np >>> - >>> hc_config = { + >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... 
} - >>> hillclimbing = RepulsingHillClimbing(experiment=sklearn_exp, **hc_config) + >>> hillclimbing = RepulsingHillClimbing(experiment=sklearn_exp, **config) 3. running the hill climbing search: >>> best_params = hillclimbing.run() @@ -146,8 +146,8 @@ def get_test_params(cls, parameter_set="default"): "experiment": experiment, "repulsion_factor": 7, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_simulated_annealing.py b/src/hyperactive/opt/gfo/_simulated_annealing.py index 2ae5fad5..6a169238 100644 --- a/src/hyperactive/opt/gfo/_simulated_annealing.py +++ b/src/hyperactive/opt/gfo/_simulated_annealing.py @@ -63,8 +63,8 @@ class SimulatedAnnealing(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -146,8 +146,8 @@ def get_test_params(cls, parameter_set="default"): "start_temp": 0.33, "annealing_rate": 1.01, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_spiral_optimization.py b/src/hyperactive/opt/gfo/_spiral_optimization.py index 4a430d1c..e7fc457c 100644 --- a/src/hyperactive/opt/gfo/_spiral_optimization.py +++ b/src/hyperactive/opt/gfo/_spiral_optimization.py @@ -58,8 +58,8 @@ class SpiralOptimization(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -135,8 +135,8 @@ def get_test_params(cls, parameter_set="default"): "population": 20, "decay_rate": 0.9999, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py b/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py index c1fbfc83..7b8f7e40 100644 --- a/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py +++ b/src/hyperactive/opt/gfo/_stochastic_hillclimbing.py @@ -64,14 +64,14 @@ class StochasticHillClimbing(_BaseGFOadapter): >>> from hyperactive.opt import StochasticHillClimbing >>> import numpy as np >>> - >>> hc_config = { + >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } - >>> hillclimbing = StochasticHillClimbing(experiment=sklearn_exp, **hc_config) + >>> hillclimbing = StochasticHillClimbing(experiment=sklearn_exp, **config) 3. 
running the hill climbing search: >>> best_params = hillclimbing.run() @@ -146,8 +146,8 @@ def get_test_params(cls, parameter_set="default"): "experiment": experiment, "p_accept": 0.33, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py index 124b6f9c..7bb87b72 100644 --- a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py +++ b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py @@ -63,8 +63,8 @@ class TreeStructuredParzenEstimators(_BaseGFOadapter): >>> >>> config = { ... "search_space": { - ... "C": np.array([0.01, 0.1, 1, 10]), - ... "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + ... "C": [0.01, 0.1, 1, 10], + ... "gamma": [0.0001, 0.01, 0.1, 1, 10], ... }, ... "n_iter": 100, ... } @@ -147,8 +147,8 @@ def get_test_params(cls, parameter_set="default"): "replacement": False, "gamma_tpe": 0.01, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } From 4bab535b016739929ece263bf2096eb613e17eaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sat, 21 Jun 2025 18:40:06 +0200 Subject: [PATCH 37/49] move from base --- src/hyperactive/base/_optimizer.py | 7 ---- src/hyperactive/opt/_adapters/_gfo.py | 55 +++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 7 deletions(-) diff --git a/src/hyperactive/base/_optimizer.py b/src/hyperactive/base/_optimizer.py index aa93aea6..8a3cca54 100644 --- a/src/hyperactive/base/_optimizer.py +++ b/src/hyperactive/base/_optimizer.py @@ -39,13 +39,6 @@ def get_search_config(self): """ search_config = self.get_params(deep=False) search_config.pop("experiment", None) - - if "sampling" in search_config and search_config["sampling"] is None: - search_config["sampling"] = {"random": 1000000} - - if "tree_para" in search_config and search_config["tree_para"] is None: - search_config["tree_para"] = {"n_estimators": 100} - return search_config def get_experiment(self): diff --git a/src/hyperactive/opt/_adapters/_gfo.py b/src/hyperactive/opt/_adapters/_gfo.py index 228b43c7..08fd97ea 100644 --- a/src/hyperactive/opt/_adapters/_gfo.py +++ b/src/hyperactive/opt/_adapters/_gfo.py @@ -54,8 +54,63 @@ def get_search_config(self): search_config = super().get_search_config() search_config["initialize"] = self._initialize del search_config["verbose"] + + search_config = self._handle_gfo_defaults(search_config) + + search_config["search_space"] = self._to_dict_np(search_config["search_space"]) + + return search_config + + def _handle_gfo_defaults(self, search_config): + """Handle default values for GFO search configuration. + + Temporary measure until GFO handles defaults gracefully. + + Parameters + ---------- + search_config : dict with str keys + The search configuration dictionary to handle defaults for. + + Returns + ------- + search_config : dict with str keys + The search configuration dictionary with defaults handled. 
+ """ + if "sampling" in search_config and search_config["sampling"] is None: + search_config["sampling"] = {"random": 1000000} + + if "tree_para" in search_config and search_config["tree_para"] is None: + search_config["tree_para"] = {"n_estimators": 100} + return search_config + def _to_dict_np(self, search_space): + """Coerce the search space to a format suitable for gfo optimizers. + + gfo expects dicts of numpy arrays, not lists. + This method coerces lists or tuples in the search space to numpy arrays. + + Parameters + ---------- + search_space : dict with str keys and iterable values + The search space to coerce. + + Returns + ------- + dict with str keys and 1D numpy arrays as values + The coerced search space. + """ + import numpy as np + + def coerce_to_numpy(arr): + """Coerce a list or tuple to a numpy array.""" + if not isinstance(arr, np.ndarray): + return np.array(arr) + return arr + + coerced_search_space = {k: coerce_to_numpy(v) for k, v in search_space.items()} + return coerced_search_space + def _run(self, experiment, **search_config): """Run the optimization search process. Parameters From 0e9d8ab36bbe3261ead0b5531b6f9d8e9d31f2d1 Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 22 Jun 2025 17:10:03 +0200 Subject: [PATCH 38/49] quick fix for search-space conversion --- src/hyperactive/opt/_adapters/_gfo.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/hyperactive/opt/_adapters/_gfo.py b/src/hyperactive/opt/_adapters/_gfo.py index 08fd97ea..d1c193a6 100644 --- a/src/hyperactive/opt/_adapters/_gfo.py +++ b/src/hyperactive/opt/_adapters/_gfo.py @@ -2,6 +2,7 @@ # copyright: hyperactive developers, MIT License (see LICENSE file) +import numpy as np from hyperactive.base import BaseOptimizer from skbase.utils.stdout_mute import StdoutMute @@ -128,6 +129,13 @@ def _run(self, experiment, **search_config): n_iter = search_config.pop("n_iter", 100) max_time = search_config.pop("max_time", None) + # convert hyper search-space into gfo search-space + search_space_hyper = search_config["search_space"] + search_space_gfo = {} + for key in search_space_hyper.keys(): + search_space_gfo[key] = np.array(range(len(search_space_hyper[key]))) + search_config["search_space"] = search_space_gfo + gfo_cls = self._get_gfo_class() hcopt = gfo_cls(**search_config) @@ -180,8 +188,8 @@ def get_test_params(cls, parameter_set="default"): params_sklearn = { "experiment": sklearn_exp, "search_space": { - "C": np.array([0.01, 0.1, 1, 10]), - "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), + "C": [0.01, 0.1, 1, 10], + "gamma": [0.0001, 0.01, 0.1, 1, 10], }, "n_iter": 100, } From 1a4f76d1535f442c91bdf622b100ce58c86b3bcd Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 22 Jun 2025 17:55:07 +0200 Subject: [PATCH 39/49] create separate module for gfo-adapter --- .../opt/_adapters/_gfo/__init__.py | 5 +++ .../opt/_adapters/{ => _gfo}/_gfo.py | 38 ++++++++++++------- .../opt/_adapters/_gfo/_objective_function.py | 37 ++++++++++++++++++ .../opt/_adapters/_gfo/dictionary.py | 17 +++++++++ 4 files changed, 84 insertions(+), 13 deletions(-) create mode 100644 src/hyperactive/opt/_adapters/_gfo/__init__.py rename src/hyperactive/opt/_adapters/{ => _gfo}/_gfo.py (90%) create mode 100644 src/hyperactive/opt/_adapters/_gfo/_objective_function.py create mode 100644 src/hyperactive/opt/_adapters/_gfo/dictionary.py diff --git a/src/hyperactive/opt/_adapters/_gfo/__init__.py b/src/hyperactive/opt/_adapters/_gfo/__init__.py new file mode 100644 index 00000000..ed64ab25 --- 
/dev/null +++ b/src/hyperactive/opt/_adapters/_gfo/__init__.py @@ -0,0 +1,5 @@ +"""Adapters for individual packages.""" + +# copyright: hyperactive developers, MIT License (see LICENSE file) + +from ._gfo import _BaseGFOadapter diff --git a/src/hyperactive/opt/_adapters/_gfo.py b/src/hyperactive/opt/_adapters/_gfo/_gfo.py similarity index 90% rename from src/hyperactive/opt/_adapters/_gfo.py rename to src/hyperactive/opt/_adapters/_gfo/_gfo.py index d1c193a6..d42704b5 100644 --- a/src/hyperactive/opt/_adapters/_gfo.py +++ b/src/hyperactive/opt/_adapters/_gfo/_gfo.py @@ -6,6 +6,8 @@ from hyperactive.base import BaseOptimizer from skbase.utils.stdout_mute import StdoutMute +from ._objective_function import ObjectiveFunction + __all__ = ["_BaseGFOadapter"] @@ -58,7 +60,10 @@ def get_search_config(self): search_config = self._handle_gfo_defaults(search_config) - search_config["search_space"] = self._to_dict_np(search_config["search_space"]) + self.search_space_hyper = search_config["search_space"] + search_config["search_space"] = self._conv_search_space( + search_config["search_space"] + ) return search_config @@ -85,6 +90,18 @@ def _handle_gfo_defaults(self, search_config): return search_config + @staticmethod + def _conv_search_space(search_space): + # convert hyper search-space into gfo search-space + search_space_gfo = {} + for key in search_space.keys(): + search_space_gfo[key] = np.array(range(len(search_space[key]))) + return search_space_gfo + + @staticmethod + def _conv_objective_function(objective_function, search_space): + return ObjectiveFunction(objective_function).convert(search_space) + def _to_dict_np(self, search_space): """Coerce the search space to a format suitable for gfo optimizers. @@ -108,7 +125,7 @@ def coerce_to_numpy(arr): if not isinstance(arr, np.ndarray): return np.array(arr) return arr - + coerced_search_space = {k: coerce_to_numpy(v) for k, v in search_space.items()} return coerced_search_space @@ -129,23 +146,18 @@ def _run(self, experiment, **search_config): n_iter = search_config.pop("n_iter", 100) max_time = search_config.pop("max_time", None) - # convert hyper search-space into gfo search-space - search_space_hyper = search_config["search_space"] - search_space_gfo = {} - for key in search_space_hyper.keys(): - search_space_gfo[key] = np.array(range(len(search_space_hyper[key]))) - search_config["search_space"] = search_space_gfo - gfo_cls = self._get_gfo_class() - hcopt = gfo_cls(**search_config) + opt = gfo_cls(**search_config) + + score = self._conv_objective_function(experiment, self.search_space_hyper) with StdoutMute(active=not self.verbose): - hcopt.search( - objective_function=experiment.score, + opt.search( + objective_function=score, n_iter=n_iter, max_time=max_time, ) - best_params = hcopt.best_para + best_params = opt.best_para return best_params @classmethod diff --git a/src/hyperactive/opt/_adapters/_gfo/_objective_function.py b/src/hyperactive/opt/_adapters/_gfo/_objective_function.py new file mode 100644 index 00000000..565f9127 --- /dev/null +++ b/src/hyperactive/opt/_adapters/_gfo/_objective_function.py @@ -0,0 +1,37 @@ +# Author: Simon Blanke +# Email: simon.blanke@yahoo.com +# License: MIT License + + +from .dictionary import DictClass + + +def gfo2hyper(search_space, para): + values_dict = {} + for _, key in enumerate(search_space.keys()): + pos_ = int(para[key]) + values_dict[key] = search_space[key][pos_] + + return values_dict + + +class ObjectiveFunction(DictClass): + def __init__(self, objective_function): + super().__init__() + + 
self.objective_function = objective_function + + def run_callbacks(self, type_): + if self.callbacks and type_ in self.callbacks: + [callback(self) for callback in self.callbacks[type_]] + + def convert(self, search_space): + # wrapper for GFOs + def _model(para): + para = gfo2hyper(search_space, para) + self.para_dict = para + + return self.objective_function(self) + + _model.__name__ = self.objective_function.__name__ + return _model diff --git a/src/hyperactive/opt/_adapters/_gfo/dictionary.py b/src/hyperactive/opt/_adapters/_gfo/dictionary.py new file mode 100644 index 00000000..ca30e652 --- /dev/null +++ b/src/hyperactive/opt/_adapters/_gfo/dictionary.py @@ -0,0 +1,17 @@ +# Author: Simon Blanke +# Email: simon.blanke@yahoo.com +# License: MIT License + + +class DictClass: + def __init__(self): + self.para_dict = {} + + def __getitem__(self, key): + return self.para_dict[key] + + def keys(self): + return self.para_dict.keys() + + def values(self): + return self.para_dict.values() From e551eb36edfdee40c7569edaf32d130c1c48d70d Mon Sep 17 00:00:00 2001 From: Simon Blanke Date: Sun, 22 Jun 2025 17:55:51 +0200 Subject: [PATCH 40/49] score function takes just one argument --- src/hyperactive/base/_experiment.py | 3 ++- src/hyperactive/experiment/integrations/sklearn_cv.py | 4 ++-- src/hyperactive/experiment/toy/_ackley.py | 3 +-- src/hyperactive/experiment/toy/_parabola.py | 3 +-- src/hyperactive/experiment/toy/_sphere.py | 9 ++------- 5 files changed, 8 insertions(+), 14 deletions(-) diff --git a/src/hyperactive/base/_experiment.py b/src/hyperactive/base/_experiment.py index 22036e63..415989fd 100644 --- a/src/hyperactive/base/_experiment.py +++ b/src/hyperactive/base/_experiment.py @@ -1,4 +1,5 @@ """Base class for experiment.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) import numpy as np @@ -19,7 +20,7 @@ class BaseExperiment(BaseObject): def __init__(self): super().__init__() - def __call__(self, **kwargs): + def __call__(self, kwargs): """Score parameters, with kwargs call.""" score, _ = self.score(kwargs) return score diff --git a/src/hyperactive/experiment/integrations/sklearn_cv.py b/src/hyperactive/experiment/integrations/sklearn_cv.py index bf60db00..f4dc49d5 100644 --- a/src/hyperactive/experiment/integrations/sklearn_cv.py +++ b/src/hyperactive/experiment/integrations/sklearn_cv.py @@ -1,4 +1,5 @@ """Experiment adapter for sklearn cross-validation experiments.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) from sklearn import clone @@ -8,6 +9,7 @@ from hyperactive.base import BaseExperiment + class SklearnCvExperiment(BaseExperiment): """Experiment adapter for sklearn cross-validation experiments. 
@@ -71,8 +73,6 @@ class SklearnCvExperiment(BaseExperiment): >>> params = {"C": 1.0, "kernel": "linear"} >>> score, add_info = sklearn_exp.score(params) - Quick call without metadata return or dictionary: - >>> score = sklearn_exp(C=1.0, kernel="linear") """ def __init__(self, estimator, X, y, scoring=None, cv=None): diff --git a/src/hyperactive/experiment/toy/_ackley.py b/src/hyperactive/experiment/toy/_ackley.py index 40d14e15..1f21c428 100644 --- a/src/hyperactive/experiment/toy/_ackley.py +++ b/src/hyperactive/experiment/toy/_ackley.py @@ -1,4 +1,5 @@ """Ackley function, common benchmark for optimization algorithms.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) import numpy as np @@ -41,8 +42,6 @@ class Ackley(BaseExperiment): >>> params = {"x0": 1, "x1": 2} >>> score, add_info = ackley.score(params) - Quick call without metadata return or dictionary: - >>> score = ackley(x0=1, x1=2) """ # noqa: E501 _tags = { diff --git a/src/hyperactive/experiment/toy/_parabola.py b/src/hyperactive/experiment/toy/_parabola.py index 40893524..055e8dea 100644 --- a/src/hyperactive/experiment/toy/_parabola.py +++ b/src/hyperactive/experiment/toy/_parabola.py @@ -1,4 +1,5 @@ """2D parabola function, common benchmark for optimization algorithms.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) from hyperactive.base import BaseExperiment @@ -35,8 +36,6 @@ class Parabola(BaseExperiment): >>> params = {"x": 1, "y": 2} >>> score, add_info = parabola.score(params) - Quick call without metadata return or dictionary: - >>> score = parabola(x=1, y=2) """ _tags = { diff --git a/src/hyperactive/experiment/toy/_sphere.py b/src/hyperactive/experiment/toy/_sphere.py index afb1493a..abdb80d8 100644 --- a/src/hyperactive/experiment/toy/_sphere.py +++ b/src/hyperactive/experiment/toy/_sphere.py @@ -1,4 +1,5 @@ """Sphere function, a common benchmark for optimization algorithms.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) import numpy as np @@ -38,12 +39,6 @@ class Sphere(BaseExperiment): >>> params = {"x0": 1, "x1": 2, "x2": 3} >>> score, add_info = sphere.score(params) - Quick call without metadata return or dictionary: - >>> score = sphere(x0=1, x1=2, x2=3) - - Different number of dimensions changes the parameter names: - >>> sphere4D = Sphere(const=0, n_dim=4) - >>> score4D = sphere4D(x0=1, x1=2, x2=3, x3=4) """ _tags = { @@ -63,7 +58,7 @@ def _paramnames(self): def _score(self, params): params_vec = np.array([params[f"x{i}"] for i in range(self.n_dim)]) - return np.sum(params_vec ** 2) + self.const, {} + return np.sum(params_vec**2) + self.const, {} @classmethod def get_test_params(cls, parameter_set="default"): From d44b94a9f7c0c0a26ffe43b648f4a66696ced996 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sun, 22 Jun 2025 19:06:01 +0200 Subject: [PATCH 41/49] Revert "score function takes just one argument" This reverts commit e551eb36edfdee40c7569edaf32d130c1c48d70d. 
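For context, this revert keeps both call styles on experiment objects: dict-based scoring via ``score(params)`` (returning the score plus metadata) and the kwargs quick call via ``__call__``. A minimal illustrative sketch, not part of this patch, assuming ``Sphere`` is exported from ``hyperactive.experiment.toy``:

>>> from hyperactive.experiment.toy import Sphere
>>> sphere = Sphere(const=0, n_dim=2)
>>> params = {"x0": 1, "x1": 2}
>>> score, add_info = sphere.score(params)  # dict-based call, returns metadata
>>> score = sphere(x0=1, x1=2)              # kwargs quick call restored by this revert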
--- src/hyperactive/base/_experiment.py | 3 +-- src/hyperactive/experiment/integrations/sklearn_cv.py | 4 ++-- src/hyperactive/experiment/toy/_ackley.py | 3 ++- src/hyperactive/experiment/toy/_parabola.py | 3 ++- src/hyperactive/experiment/toy/_sphere.py | 9 +++++++-- 5 files changed, 14 insertions(+), 8 deletions(-) diff --git a/src/hyperactive/base/_experiment.py b/src/hyperactive/base/_experiment.py index 415989fd..22036e63 100644 --- a/src/hyperactive/base/_experiment.py +++ b/src/hyperactive/base/_experiment.py @@ -1,5 +1,4 @@ """Base class for experiment.""" - # copyright: hyperactive developers, MIT License (see LICENSE file) import numpy as np @@ -20,7 +19,7 @@ class BaseExperiment(BaseObject): def __init__(self): super().__init__() - def __call__(self, kwargs): + def __call__(self, **kwargs): """Score parameters, with kwargs call.""" score, _ = self.score(kwargs) return score diff --git a/src/hyperactive/experiment/integrations/sklearn_cv.py b/src/hyperactive/experiment/integrations/sklearn_cv.py index f4dc49d5..bf60db00 100644 --- a/src/hyperactive/experiment/integrations/sklearn_cv.py +++ b/src/hyperactive/experiment/integrations/sklearn_cv.py @@ -1,5 +1,4 @@ """Experiment adapter for sklearn cross-validation experiments.""" - # copyright: hyperactive developers, MIT License (see LICENSE file) from sklearn import clone @@ -9,7 +8,6 @@ from hyperactive.base import BaseExperiment - class SklearnCvExperiment(BaseExperiment): """Experiment adapter for sklearn cross-validation experiments. @@ -73,6 +71,8 @@ class SklearnCvExperiment(BaseExperiment): >>> params = {"C": 1.0, "kernel": "linear"} >>> score, add_info = sklearn_exp.score(params) + Quick call without metadata return or dictionary: + >>> score = sklearn_exp(C=1.0, kernel="linear") """ def __init__(self, estimator, X, y, scoring=None, cv=None): diff --git a/src/hyperactive/experiment/toy/_ackley.py b/src/hyperactive/experiment/toy/_ackley.py index 1f21c428..40d14e15 100644 --- a/src/hyperactive/experiment/toy/_ackley.py +++ b/src/hyperactive/experiment/toy/_ackley.py @@ -1,5 +1,4 @@ """Ackley function, common benchmark for optimization algorithms.""" - # copyright: hyperactive developers, MIT License (see LICENSE file) import numpy as np @@ -42,6 +41,8 @@ class Ackley(BaseExperiment): >>> params = {"x0": 1, "x1": 2} >>> score, add_info = ackley.score(params) + Quick call without metadata return or dictionary: + >>> score = ackley(x0=1, x1=2) """ # noqa: E501 _tags = { diff --git a/src/hyperactive/experiment/toy/_parabola.py b/src/hyperactive/experiment/toy/_parabola.py index 055e8dea..40893524 100644 --- a/src/hyperactive/experiment/toy/_parabola.py +++ b/src/hyperactive/experiment/toy/_parabola.py @@ -1,5 +1,4 @@ """2D parabola function, common benchmark for optimization algorithms.""" - # copyright: hyperactive developers, MIT License (see LICENSE file) from hyperactive.base import BaseExperiment @@ -36,6 +35,8 @@ class Parabola(BaseExperiment): >>> params = {"x": 1, "y": 2} >>> score, add_info = parabola.score(params) + Quick call without metadata return or dictionary: + >>> score = parabola(x=1, y=2) """ _tags = { diff --git a/src/hyperactive/experiment/toy/_sphere.py b/src/hyperactive/experiment/toy/_sphere.py index abdb80d8..afb1493a 100644 --- a/src/hyperactive/experiment/toy/_sphere.py +++ b/src/hyperactive/experiment/toy/_sphere.py @@ -1,5 +1,4 @@ """Sphere function, a common benchmark for optimization algorithms.""" - # copyright: hyperactive developers, MIT License (see LICENSE file) import numpy as np @@ -39,6 
+38,12 @@ class Sphere(BaseExperiment): >>> params = {"x0": 1, "x1": 2, "x2": 3} >>> score, add_info = sphere.score(params) + Quick call without metadata return or dictionary: + >>> score = sphere(x0=1, x1=2, x2=3) + + Different number of dimensions changes the parameter names: + >>> sphere4D = Sphere(const=0, n_dim=4) + >>> score4D = sphere4D(x0=1, x1=2, x2=3, x3=4) """ _tags = { @@ -58,7 +63,7 @@ def _paramnames(self): def _score(self, params): params_vec = np.array([params[f"x{i}"] for i in range(self.n_dim)]) - return np.sum(params_vec**2) + self.const, {} + return np.sum(params_vec ** 2) + self.const, {} @classmethod def get_test_params(cls, parameter_set="default"): From f58a5f008e8c424d933a2d664172987218d7864d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sun, 22 Jun 2025 19:38:46 +0200 Subject: [PATCH 42/49] Revert "create separate module for gfo-adapter" This reverts commit 1a4f76d1535f442c91bdf622b100ce58c86b3bcd. --- .../opt/_adapters/{_gfo => }/_gfo.py | 38 +++++++------------ .../opt/_adapters/_gfo/__init__.py | 5 --- .../opt/_adapters/_gfo/_objective_function.py | 37 ------------------ .../opt/_adapters/_gfo/dictionary.py | 17 --------- 4 files changed, 13 insertions(+), 84 deletions(-) rename src/hyperactive/opt/_adapters/{_gfo => }/_gfo.py (90%) delete mode 100644 src/hyperactive/opt/_adapters/_gfo/__init__.py delete mode 100644 src/hyperactive/opt/_adapters/_gfo/_objective_function.py delete mode 100644 src/hyperactive/opt/_adapters/_gfo/dictionary.py diff --git a/src/hyperactive/opt/_adapters/_gfo/_gfo.py b/src/hyperactive/opt/_adapters/_gfo.py similarity index 90% rename from src/hyperactive/opt/_adapters/_gfo/_gfo.py rename to src/hyperactive/opt/_adapters/_gfo.py index d42704b5..d1c193a6 100644 --- a/src/hyperactive/opt/_adapters/_gfo/_gfo.py +++ b/src/hyperactive/opt/_adapters/_gfo.py @@ -6,8 +6,6 @@ from hyperactive.base import BaseOptimizer from skbase.utils.stdout_mute import StdoutMute -from ._objective_function import ObjectiveFunction - __all__ = ["_BaseGFOadapter"] @@ -60,10 +58,7 @@ def get_search_config(self): search_config = self._handle_gfo_defaults(search_config) - self.search_space_hyper = search_config["search_space"] - search_config["search_space"] = self._conv_search_space( - search_config["search_space"] - ) + search_config["search_space"] = self._to_dict_np(search_config["search_space"]) return search_config @@ -90,18 +85,6 @@ def _handle_gfo_defaults(self, search_config): return search_config - @staticmethod - def _conv_search_space(search_space): - # convert hyper search-space into gfo search-space - search_space_gfo = {} - for key in search_space.keys(): - search_space_gfo[key] = np.array(range(len(search_space[key]))) - return search_space_gfo - - @staticmethod - def _conv_objective_function(objective_function, search_space): - return ObjectiveFunction(objective_function).convert(search_space) - def _to_dict_np(self, search_space): """Coerce the search space to a format suitable for gfo optimizers. 
@@ -125,7 +108,7 @@ def coerce_to_numpy(arr): if not isinstance(arr, np.ndarray): return np.array(arr) return arr - + coerced_search_space = {k: coerce_to_numpy(v) for k, v in search_space.items()} return coerced_search_space @@ -146,18 +129,23 @@ def _run(self, experiment, **search_config): n_iter = search_config.pop("n_iter", 100) max_time = search_config.pop("max_time", None) - gfo_cls = self._get_gfo_class() - opt = gfo_cls(**search_config) + # convert hyper search-space into gfo search-space + search_space_hyper = search_config["search_space"] + search_space_gfo = {} + for key in search_space_hyper.keys(): + search_space_gfo[key] = np.array(range(len(search_space_hyper[key]))) + search_config["search_space"] = search_space_gfo - score = self._conv_objective_function(experiment, self.search_space_hyper) + gfo_cls = self._get_gfo_class() + hcopt = gfo_cls(**search_config) with StdoutMute(active=not self.verbose): - opt.search( - objective_function=score, + hcopt.search( + objective_function=experiment.score, n_iter=n_iter, max_time=max_time, ) - best_params = opt.best_para + best_params = hcopt.best_para return best_params @classmethod diff --git a/src/hyperactive/opt/_adapters/_gfo/__init__.py b/src/hyperactive/opt/_adapters/_gfo/__init__.py deleted file mode 100644 index ed64ab25..00000000 --- a/src/hyperactive/opt/_adapters/_gfo/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Adapters for individual packages.""" - -# copyright: hyperactive developers, MIT License (see LICENSE file) - -from ._gfo import _BaseGFOadapter diff --git a/src/hyperactive/opt/_adapters/_gfo/_objective_function.py b/src/hyperactive/opt/_adapters/_gfo/_objective_function.py deleted file mode 100644 index 565f9127..00000000 --- a/src/hyperactive/opt/_adapters/_gfo/_objective_function.py +++ /dev/null @@ -1,37 +0,0 @@ -# Author: Simon Blanke -# Email: simon.blanke@yahoo.com -# License: MIT License - - -from .dictionary import DictClass - - -def gfo2hyper(search_space, para): - values_dict = {} - for _, key in enumerate(search_space.keys()): - pos_ = int(para[key]) - values_dict[key] = search_space[key][pos_] - - return values_dict - - -class ObjectiveFunction(DictClass): - def __init__(self, objective_function): - super().__init__() - - self.objective_function = objective_function - - def run_callbacks(self, type_): - if self.callbacks and type_ in self.callbacks: - [callback(self) for callback in self.callbacks[type_]] - - def convert(self, search_space): - # wrapper for GFOs - def _model(para): - para = gfo2hyper(search_space, para) - self.para_dict = para - - return self.objective_function(self) - - _model.__name__ = self.objective_function.__name__ - return _model diff --git a/src/hyperactive/opt/_adapters/_gfo/dictionary.py b/src/hyperactive/opt/_adapters/_gfo/dictionary.py deleted file mode 100644 index ca30e652..00000000 --- a/src/hyperactive/opt/_adapters/_gfo/dictionary.py +++ /dev/null @@ -1,17 +0,0 @@ -# Author: Simon Blanke -# Email: simon.blanke@yahoo.com -# License: MIT License - - -class DictClass: - def __init__(self): - self.para_dict = {} - - def __getitem__(self, key): - return self.para_dict[key] - - def keys(self): - return self.para_dict.keys() - - def values(self): - return self.para_dict.values() From d45d284907973958dd5575c086277ab00a1be5d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sun, 22 Jun 2025 19:40:11 +0200 Subject: [PATCH 43/49] Revert "quick fix for search-space conversion" This reverts commit 0e9d8ab36bbe3261ead0b5531b6f9d8e9d31f2d1. 
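For reference, the conversion removed by this revert replaced each search-space dimension with positional indices before handing it to GFO. An illustrative sketch only, mirroring the reverted hunk:

>>> import numpy as np
>>> search_space = {"C": [0.01, 0.1, 1, 10], "gamma": [0.0001, 0.01, 0.1, 1, 10]}
>>> {k: np.array(range(len(v))) for k, v in search_space.items()}["C"]
array([0, 1, 2, 3])

The companion ``gfo2hyper`` helper, removed in the previous revert, mapped such index positions back to the original values before scoring.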
--- src/hyperactive/opt/_adapters/_gfo.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/src/hyperactive/opt/_adapters/_gfo.py b/src/hyperactive/opt/_adapters/_gfo.py index d1c193a6..08fd97ea 100644 --- a/src/hyperactive/opt/_adapters/_gfo.py +++ b/src/hyperactive/opt/_adapters/_gfo.py @@ -2,7 +2,6 @@ # copyright: hyperactive developers, MIT License (see LICENSE file) -import numpy as np from hyperactive.base import BaseOptimizer from skbase.utils.stdout_mute import StdoutMute @@ -129,13 +128,6 @@ def _run(self, experiment, **search_config): n_iter = search_config.pop("n_iter", 100) max_time = search_config.pop("max_time", None) - # convert hyper search-space into gfo search-space - search_space_hyper = search_config["search_space"] - search_space_gfo = {} - for key in search_space_hyper.keys(): - search_space_gfo[key] = np.array(range(len(search_space_hyper[key]))) - search_config["search_space"] = search_space_gfo - gfo_cls = self._get_gfo_class() hcopt = gfo_cls(**search_config) @@ -188,8 +180,8 @@ def get_test_params(cls, parameter_set="default"): params_sklearn = { "experiment": sklearn_exp, "search_space": { - "C": [0.01, 0.1, 1, 10], - "gamma": [0.0001, 0.01, 0.1, 1, 10], + "C": np.array([0.01, 0.1, 1, 10]), + "gamma": np.array([0.0001, 0.01, 0.1, 1, 10]), }, "n_iter": 100, } From 9c0cd8eb1a6257196d46ee1e0d200d4357782e6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sun, 22 Jun 2025 19:43:41 +0200 Subject: [PATCH 44/49] fix crossover rate --- src/hyperactive/opt/gfo/_differential_evolution.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hyperactive/opt/gfo/_differential_evolution.py b/src/hyperactive/opt/gfo/_differential_evolution.py index 0a3d8d9b..287b4fc6 100644 --- a/src/hyperactive/opt/gfo/_differential_evolution.py +++ b/src/hyperactive/opt/gfo/_differential_evolution.py @@ -136,7 +136,7 @@ def get_test_params(cls, parameter_set="default"): "experiment": experiment, "population": 8, "mutation_rate": 0.8, - "crossover_rate": 2, + "crossover_rate": 0.7, "search_space": { "C": [0.01, 0.1, 1, 10], "gamma": [0.0001, 0.01, 0.1, 1, 10], From bf0dc7585c7383819577e3afe330f5ce7a0181e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sun, 22 Jun 2025 19:52:04 +0200 Subject: [PATCH 45/49] fix ForestOptimizer param --- src/hyperactive/opt/gfo/_forest_optimizer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hyperactive/opt/gfo/_forest_optimizer.py b/src/hyperactive/opt/gfo/_forest_optimizer.py index c6928cbd..5e971643 100644 --- a/src/hyperactive/opt/gfo/_forest_optimizer.py +++ b/src/hyperactive/opt/gfo/_forest_optimizer.py @@ -150,7 +150,7 @@ def get_test_params(cls, parameter_set="default"): experiment = params[0]["experiment"] more_params = { "experiment": experiment, - "replacement": False, + "replacement": True, "tree_para": {"n_estimators": 50}, "xi": 0.33, "search_space": { From 0b6c4a0b923372202b96cc8efe844567bab5e08c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sun, 22 Jun 2025 20:00:53 +0200 Subject: [PATCH 46/49] tsp --- src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py index 7bb87b72..765307b2 100644 --- a/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py +++ 
b/src/hyperactive/opt/gfo/_tree_structured_parzen_estimators.py @@ -144,7 +144,7 @@ def get_test_params(cls, parameter_set="default"): more_params = { "experiment": experiment, "max_sample_size": 100, - "replacement": False, + "replacement": True, "gamma_tpe": 0.01, "search_space": { "C": [0.01, 0.1, 1, 10], From 04b47ffce61fd5494e54f56dab6f8e811b1595ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sun, 22 Jun 2025 20:03:06 +0200 Subject: [PATCH 47/49] move generator --- scripts/__init__.py | 0 src/hyperactive/opt/generator.py => scripts/_generator.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 scripts/__init__.py rename src/hyperactive/opt/generator.py => scripts/_generator.py (100%) diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/hyperactive/opt/generator.py b/scripts/_generator.py similarity index 100% rename from src/hyperactive/opt/generator.py rename to scripts/_generator.py From 53806bda6bd660a0a6dbc1df6c86440fc0126aa3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sun, 22 Jun 2025 20:03:50 +0200 Subject: [PATCH 48/49] Update _lipschitz_optimization.py --- src/hyperactive/opt/gfo/_lipschitz_optimization.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hyperactive/opt/gfo/_lipschitz_optimization.py b/src/hyperactive/opt/gfo/_lipschitz_optimization.py index 83f162ce..d5ddfe56 100644 --- a/src/hyperactive/opt/gfo/_lipschitz_optimization.py +++ b/src/hyperactive/opt/gfo/_lipschitz_optimization.py @@ -139,7 +139,7 @@ def get_test_params(cls, parameter_set="default"): more_params = { "experiment": experiment, "max_sample_size": 1000, - "replacement": False, + "replacement": True, "search_space": { "C": [0.01, 0.1, 1, 10], "gamma": [0.0001, 0.01, 0.1, 1, 10], From 4f8a15e861fa23ea36296cd8ad97e39ad16da825 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Franz=20Kir=C3=A1ly?= Date: Sun, 22 Jun 2025 20:10:47 +0200 Subject: [PATCH 49/49] Update _gfo.py --- src/hyperactive/opt/_adapters/_gfo.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/hyperactive/opt/_adapters/_gfo.py b/src/hyperactive/opt/_adapters/_gfo.py index 08fd97ea..c2bae3d9 100644 --- a/src/hyperactive/opt/_adapters/_gfo.py +++ b/src/hyperactive/opt/_adapters/_gfo.py @@ -129,15 +129,15 @@ def _run(self, experiment, **search_config): max_time = search_config.pop("max_time", None) gfo_cls = self._get_gfo_class() - hcopt = gfo_cls(**search_config) + gfopt = gfo_cls(**search_config) with StdoutMute(active=not self.verbose): - hcopt.search( + gfopt.search( objective_function=experiment.score, n_iter=n_iter, max_time=max_time, ) - best_params = hcopt.best_para + best_params = gfopt.best_para return best_params @classmethod