Skip to content

Commit

Permalink
merge feature/add-opt-strat-early-stop into dev
Browse files Browse the repository at this point in the history
  • Loading branch information
SimonBlanke committed Aug 26, 2023
2 parents 51f0322 + 05db3f5 commit a392e71
Show file tree
Hide file tree
Showing 4 changed files with 170 additions and 5 deletions.
10 changes: 5 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -933,17 +933,17 @@ Each of the following optimizer classes can be initialized and passed to the "ad
<details>
<summary><b>v4.4.0</b> :heavy_check_mark: </summary>

- [ ] add Optimization-Strategies
- [ ] redesign progress-bar
- [x] add Optimization-Strategies
- [x] redesign progress-bar

</details>

<details>
<summary><b>v4.5.0</b> </summary>

- [ ] add early stopping feature to custom optimization strategies
- [ ] display additional outputs from objective-function in results in command-line
- [ ] add type hints to hyperactive-api
- [x] add early stopping feature to custom optimization strategies
- [x] display additional outputs from objective-function in results in command-line
- [x] add type hints to hyperactive-api

</details>

Expand Down
Empty file.
110 changes: 110 additions & 0 deletions tests/test_optimization_strategies/_parametrize.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
from hyperactive.optimizers import (
HillClimbingOptimizer,
StochasticHillClimbingOptimizer,
RepulsingHillClimbingOptimizer,
SimulatedAnnealingOptimizer,
DownhillSimplexOptimizer,
RandomSearchOptimizer,
GridSearchOptimizer,
RandomRestartHillClimbingOptimizer,
RandomAnnealingOptimizer,
PowellsMethod,
PatternSearch,
ParallelTemperingOptimizer,
ParticleSwarmOptimizer,
SpiralOptimization,
EvolutionStrategyOptimizer,
BayesianOptimizer,
LipschitzOptimizer,
DirectAlgorithm,
TreeStructuredParzenEstimators,
ForestOptimizer,
)


# Argument pair for pytest.mark.parametrize: parameter name plus every
# optimizer class exposed by hyperactive (local and sequence-model-based).
optimizers = (
    "Optimizer",
    [
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        RepulsingHillClimbingOptimizer,
        SimulatedAnnealingOptimizer,
        DownhillSimplexOptimizer,
        RandomSearchOptimizer,
        GridSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        PowellsMethod,
        PatternSearch,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        SpiralOptimization,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
        LipschitzOptimizer,
        DirectAlgorithm,
        TreeStructuredParzenEstimators,
        ForestOptimizer,
    ],
)


# Same full optimizer list as above, under a second parameter name so a
# test can be parametrized over two independent optimizer axes at once.
optimizers_strat = (
    "Optimizer_strat",
    [
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        RepulsingHillClimbingOptimizer,
        SimulatedAnnealingOptimizer,
        DownhillSimplexOptimizer,
        RandomSearchOptimizer,
        GridSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        PowellsMethod,
        PatternSearch,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        SpiralOptimization,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
        LipschitzOptimizer,
        DirectAlgorithm,
        TreeStructuredParzenEstimators,
        ForestOptimizer,
    ],
)


# Parametrize pair restricted to the non-SMBO optimizers (everything
# except the sequence-model-based ones listed in ``optimizers_smbo``).
optimizers_non_smbo = (
    "Optimizer_non_smbo",
    [
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        RepulsingHillClimbingOptimizer,
        SimulatedAnnealingOptimizer,
        DownhillSimplexOptimizer,
        RandomSearchOptimizer,
        GridSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        PowellsMethod,
        PatternSearch,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        SpiralOptimization,
        EvolutionStrategyOptimizer,
    ],
)


# Parametrize pair containing only the sequence-model-based (SMBO)
# optimizers, which tend to be slower per iteration.
optimizers_smbo = (
    "Optimizer_smbo",
    [
        BayesianOptimizer,
        LipschitzOptimizer,
        DirectAlgorithm,
        TreeStructuredParzenEstimators,
        ForestOptimizer,
    ],
)
55 changes: 55 additions & 0 deletions tests/test_optimization_strategies/test_early_stopping.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import pytest
import numpy as np


from hyperactive import Hyperactive
from hyperactive.optimizers.strategies import CustomOptimizationStrategy
from hyperactive.optimizers import RandomSearchOptimizer

from ._parametrize import optimizers


@pytest.mark.parametrize(*optimizers)
def test_strategy_early_stopping_0(Optimizer):
    """Early stopping inside a custom optimization strategy stops the first
    optimizer after exactly ``n_iter_no_change`` non-improving iterations.

    The warm start places the first evaluation at the global maximum
    (x1 == 0), so no later iteration can improve the score and early
    stopping must trigger as soon as its patience is exhausted.
    """

    def objective_function(para):
        # Concave parabola: best possible score is 0, reached at x1 == 0.
        return -para["x1"] * para["x1"]

    search_space = {
        "x1": list(np.arange(0, 100, 0.1)),
    }

    n_iter_no_change = 5
    early_stopping = {"n_iter_no_change": n_iter_no_change}

    opt_strat = CustomOptimizationStrategy()
    # Only the first optimizer gets the early-stopping condition.
    opt_strat.add_optimizer(
        Optimizer(), duration=0.5, early_stopping=early_stopping
    )
    opt_strat.add_optimizer(RandomSearchOptimizer(), duration=0.5)

    n_iter = 30

    hyper = Hyperactive()
    hyper.add_search(
        objective_function,
        search_space,
        optimizer=opt_strat,
        n_iter=n_iter,
        # Start at the optimum so every subsequent iteration is non-improving.
        initialize={"warm_start": [{"x1": 0}]},
    )
    hyper.run()

    optimizer1 = hyper.opt_pros[0].optimizer_setup_l[0]["optimizer"]
    optimizer2 = hyper.opt_pros[0].optimizer_setup_l[1]["optimizer"]

    n_performed_iter = len(optimizer1.search_data)

    print("\n n_performed_iter \n", n_performed_iter)
    print("\n n_iter_no_change \n", n_iter_no_change)

    # The initial (best) evaluation plus the patience window.
    assert n_performed_iter == n_iter_no_change + 1

0 comments on commit a392e71

Please sign in to comment.