From e9ea1dfbba3516cd95ead38b13248f80dd0ec4a4 Mon Sep 17 00:00:00 2001 From: aaronkl Date: Wed, 23 Aug 2023 14:21:34 +0200 Subject: [PATCH 01/15] add ehvi --- syne_tune/optimizer/baselines.py | 42 +++++++++++++++++++ .../schedulers/multiobjective/__init__.py | 4 ++ 2 files changed, 46 insertions(+) diff --git a/syne_tune/optimizer/baselines.py b/syne_tune/optimizer/baselines.py index f6a8516f9..99d6a96d6 100644 --- a/syne_tune/optimizer/baselines.py +++ b/syne_tune/optimizer/baselines.py @@ -25,6 +25,7 @@ MultiObjectiveRegularizedEvolution, NSGA2Searcher, LinearScalarizedScheduler, + ExpectedHyperVolumeImprovement ) from syne_tune.optimizer.schedulers.searchers.bayesopt.models.estimator import Estimator from syne_tune.optimizer.schedulers.searchers.regularized_evolution import ( @@ -948,6 +949,47 @@ def __init__( ) +class EHVI(FIFOScheduler): + """ + + See :class:`~syne_tune.optimizer.schedulers.searchers.RandomSearcher` + for ``kwargs["search_options"]`` parameters. + + :param config_space: Configuration space for evaluation function + :param metric: Name of metric to optimize + :param population_size: See + :class:`~syne_tune.optimizer.schedulers.searchers.RegularizedEvolution`. + Defaults to 100 + :param sample_size: See + :class:`~syne_tune.optimizer.schedulers.searchers.RegularizedEvolution`. + Defaults to 10 + :param random_seed: Random seed, optional + :param kwargs: Additional arguments to + :class:`~syne_tune.optimizer.schedulers.FIFOScheduler` + """ + + def __init__( + self, + config_space: Dict[str, Any], + metric: List[str], + mode: Union[List[str], str] = "min", + random_seed: Optional[int] = None, + **kwargs, + ): + searcher_kwargs = _create_searcher_kwargs( + config_space, metric, random_seed, kwargs + ) + searcher_kwargs["mode"] = mode + + super(EHVI, self).__init__( + config_space=config_space, + metric=metric, + mode=mode, + searcher=ExpectedHyperVolumeImprovement(**searcher_kwargs), + random_seed=random_seed, + **kwargs, + ) + class MOLinearScalarizationBayesOpt(LinearScalarizedScheduler): """ Uses :class:`~syne_tune.optimizer.schedulers.multiobjective.LinearScalarizedScheduler` diff --git a/syne_tune/optimizer/schedulers/multiobjective/__init__.py b/syne_tune/optimizer/schedulers/multiobjective/__init__.py index bfd870d88..a857908f0 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/__init__.py +++ b/syne_tune/optimizer/schedulers/multiobjective/__init__.py @@ -21,12 +21,16 @@ from syne_tune.optimizer.schedulers.multiobjective.linear_scalarizer import ( LinearScalarizedScheduler, ) +from syne_tune.optimizer.schedulers.multiobjective.expected_hyper_volume_improvement import ( + ExpectedHyperVolumeImprovement, +) __all__ = [ "MOASHA", "MultiObjectiveRegularizedEvolution", "NSGA2Searcher", "LinearScalarizedScheduler", + "ExpectedHyperVolumeImprovement" ] try: From 9e67d95ae29c723f62cb2c33cc52e7186916ff8b Mon Sep 17 00:00:00 2001 From: aaronkl Date: Wed, 23 Aug 2023 14:21:54 +0200 Subject: [PATCH 02/15] format --- syne_tune/optimizer/baselines.py | 3 ++- syne_tune/optimizer/schedulers/multiobjective/__init__.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/syne_tune/optimizer/baselines.py b/syne_tune/optimizer/baselines.py index 99d6a96d6..f0ffc2003 100644 --- a/syne_tune/optimizer/baselines.py +++ b/syne_tune/optimizer/baselines.py @@ -25,7 +25,7 @@ MultiObjectiveRegularizedEvolution, NSGA2Searcher, LinearScalarizedScheduler, - ExpectedHyperVolumeImprovement + ExpectedHyperVolumeImprovement, ) from 
syne_tune.optimizer.schedulers.searchers.bayesopt.models.estimator import Estimator from syne_tune.optimizer.schedulers.searchers.regularized_evolution import ( @@ -990,6 +990,7 @@ def __init__( **kwargs, ) + class MOLinearScalarizationBayesOpt(LinearScalarizedScheduler): """ Uses :class:`~syne_tune.optimizer.schedulers.multiobjective.LinearScalarizedScheduler` diff --git a/syne_tune/optimizer/schedulers/multiobjective/__init__.py b/syne_tune/optimizer/schedulers/multiobjective/__init__.py index a857908f0..705ae0a4a 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/__init__.py +++ b/syne_tune/optimizer/schedulers/multiobjective/__init__.py @@ -30,7 +30,7 @@ "MultiObjectiveRegularizedEvolution", "NSGA2Searcher", "LinearScalarizedScheduler", - "ExpectedHyperVolumeImprovement" + "ExpectedHyperVolumeImprovement", ] try: From decee789f41e10e48407b21af9da020555997820 Mon Sep 17 00:00:00 2001 From: aaronkl Date: Fri, 17 Nov 2023 12:38:01 +0100 Subject: [PATCH 03/15] add --- .../expected_hyper_volume_improvement.py | 366 ++++++++++++++++++ 1 file changed, 366 insertions(+) create mode 100644 syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py diff --git a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py new file mode 100644 index 000000000..8e9daade1 --- /dev/null +++ b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py @@ -0,0 +1,366 @@ +# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). +# You may not use this file except in compliance with the License. +# A copy of the License is located at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# or in the "license" file accompanying this file. This file is distributed +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either +# express or implied. See the License for the specific language governing +# permissions and limitations under the License. 
+from typing import Optional, List, Dict, Any, Union
+import logging
+
+import numpy as np
+import torch
+from syne_tune.try_import import try_import_botorch_message
+
+# try:
+from torch import Tensor, randn_like, random
+from botorch.models import SingleTaskGP
+from botorch.fit import fit_gpytorch_mll
+from botorch.models.transforms import Warp
+from botorch.utils import standardize
+from botorch.sampling.samplers import SobolQMCNormalSampler
+from botorch.utils.transforms import normalize
+from botorch.utils.multi_objective.box_decompositions import NondominatedPartitioning
+from botorch.acquisition.multi_objective.monte_carlo import qExpectedHypervolumeImprovement
+from botorch.optim import optimize_acqf
+from botorch.exceptions.errors import ModelFittingError
+from gpytorch.mlls import ExactMarginalLogLikelihood
+from linear_operator.utils.errors import NotPSDError
+# except ImportError:
+#     print(try_import_botorch_message())
+
+from syne_tune.optimizer.schedulers.searchers import (
+    StochasticAndFilterDuplicatesSearcher,
+)
+
+logger = logging.getLogger(__name__)
+
+
+NOISE_LEVEL = 1e-3
+MC_SAMPLES = 128
+
+class ExpectedHyperVolumeImprovement(StochasticAndFilterDuplicatesSearcher):
+    """
+
+
+    Additional arguments on top of parent class
+    :class:`~syne_tune.optimizer.schedulers.searchers.StochasticAndFilterDuplicatesSearcher`:
+
+    :param mode: "min" (default) or "max"
+    :param num_init_random: :meth:`get_config` returns randomly drawn
+        configurations until at least ``num_init_random`` observations have been
+        recorded in :meth:`update`. After that, the BOTorch algorithm is used.
+        Defaults to 3
+    :param no_fantasizing: If ``True``, fantasizing is not done and pending
+        evaluations are ignored. This may lead to loss of diversity in
+        decisions. Defaults to ``False``
+    :param max_num_observations: Maximum number of observations to use when
+        fitting the GP. If the number of observations gets larger than this
+        number, then data is subsampled. If ``None``, then all data is used to
+        fit the GP. Defaults to 200
+    :param input_warping: Whether to apply input warping when fitting the GP.
+        Defaults to ``True``
+    """
+
+    def __init__(
+        self,
+        config_space: Dict[str, Any],
+        metric: List[str],
+        mode: Union[List[str], str],
+        points_to_evaluate: Optional[List[dict]] = None,
+        allow_duplicates: bool = False,
+        restrict_configurations: Optional[List[Dict[str, Any]]] = None,
+        num_init_random: int = 3,
+        no_fantasizing: bool = False,
+        max_num_observations: Optional[int] = 200,
+        input_warping: bool = True,
+        **kwargs,
+    ):
+        if isinstance(mode, str):
+            mode = [mode] * len(metric)
+
+        super(ExpectedHyperVolumeImprovement, self).__init__(
+            config_space,
+            metric=metric,
+            points_to_evaluate=points_to_evaluate,
+            allow_duplicates=allow_duplicates,
+            restrict_configurations=restrict_configurations,
+            mode=mode,
+            **kwargs,
+        )
+        assert num_init_random >= 2
+
+        self.num_minimum_observations = num_init_random
+        self.fantasizing = not no_fantasizing
+        self.max_num_observations = max_num_observations
+        self.input_warping = input_warping
+        self.trial_configs = dict()
+        self.pending_trials = set()
+        self.trial_observations = dict()
+        # Observations are normalized to [0, 1] and negated for maximization
+        # before the partitioning, so the reference point must sit at the
+        # worst corner of the normalized objective space
+        self.ref_point = torch.zeros(len(metric))
+
+        # Set the random seed for botorch as well
+        if "random_seed" in kwargs:
+            random.manual_seed(kwargs["random_seed"])
+
+    def _update(self, trial_id: str, config: Dict[str, Any], result: Dict[str, Any]):
+        trial_id = int(trial_id)
+
+        observations = []
+        for mode, metric in zip(self._mode, self._metric):
+            value = result[metric]
+            if mode == "max":
+                value *= -1
+            observations.append(value)
+
+        self.trial_observations[trial_id] = observations
+
+        if trial_id in self.pending_trials:
+            self.pending_trials.remove(trial_id)
+
+    def clone_from_state(self, state: Dict[str, Any]):
+        raise NotImplementedError
+
+    def num_suggestions(self):
+        return len(self.trial_configs)
+
+    def _get_config(self, trial_id: str, **kwargs) -> Optional[dict]:
+        trial_id = int(trial_id)
+        config_suggested = self._next_initial_config()
+
+        if config_suggested is None:
+            if self.objectives().shape[0] < self.num_minimum_observations:
+                config_suggested = self._get_random_config()
+            else:
+                config_suggested = self._sample_next_candidate()
+
+        if config_suggested is not None:
+            self.trial_configs[trial_id] = config_suggested
+
+        return config_suggested
+
+    def register_pending(
+        self,
+        trial_id: str,
+        config: Optional[dict] = None,
+        milestone: Optional[int] = None,
+    ):
+        super().register_pending(trial_id, config, milestone)
+        self.pending_trials.add(int(trial_id))
+
+    def evaluation_failed(self, trial_id: str):
+        super().evaluation_failed(trial_id)
+        self.cleanup_pending(trial_id)
+
+    def cleanup_pending(self, trial_id: str):
+        trial_id = int(trial_id)
+        if trial_id in self.pending_trials:
+            self.pending_trials.remove(trial_id)
+
+    def dataset_size(self):
+        return len(self.trial_observations)
+
+    def configure_scheduler(self, scheduler):
+        from syne_tune.optimizer.schedulers.scheduler_searcher import (
+            TrialSchedulerWithSearcher,
+        )
+
+        assert isinstance(
+            scheduler, TrialSchedulerWithSearcher
+        ), "This searcher requires a TrialSchedulerWithSearcher scheduler"
+        super().configure_scheduler(scheduler)
+
+    def _get_gp_bounds(self):
+        return Tensor(self._hp_ranges.get_ndarray_bounds()).T
+
+    def _config_from_ndarray(self, candidate) -> dict:
+        return self._hp_ranges.from_ndarray(candidate)
+
+    def _sample_next_candidate(self) -> Optional[dict]:
+        """
+        :return: A next candidate to evaluate. If possible, it is obtained by
+            fitting a GP on past data and maximizing the EHVI acquisition function.
If this fails because
+            of numerical difficulties with non-PSD matrices, then the candidate
+            is sampled at random.
+        """
+        try:
+            X = np.array(self._config_to_feature_matrix(self._configs_with_results()))
+            Y = Tensor(self.objectives())
+            # ``_update`` stores every objective in minimization orientation;
+            # BoTorch only supports maximization, so always negate
+            Y *= -1
+
+            if (
+                self.max_num_observations is not None
+                and len(X) >= self.max_num_observations
+            ):
+                perm = self.random_state.permutation(len(X))[
+                    : self.max_num_observations
+                ]
+                X = X[perm]
+                Y = Y[perm]
+                subsample = True
+            else:
+                subsample = False
+
+            X_tensor = Tensor(X)
+            bounds = torch.stack([Y.min(0).values,
+                                  Y.max(0).values])
+
+            noise_std = NOISE_LEVEL
+            Y += noise_std * randn_like(Y)
+            Y_tensor = normalize(Y, bounds=bounds)
+            gp = self._make_gp(X_tensor=X_tensor, Y_tensor=Y_tensor)
+            mll = ExactMarginalLogLikelihood(gp.likelihood, gp)
+            fit_gpytorch_mll(mll, max_attempts=1)
+
+            if self.pending_trials and self.fantasizing and not subsample:
+                X_pending = self._config_to_feature_matrix(self._configs_pending())
+            else:
+                X_pending = None
+            sampler = SobolQMCNormalSampler(num_samples=MC_SAMPLES)
+            partitioning = NondominatedPartitioning(ref_point=self.ref_point,
+                                                    Y=Y_tensor)
+            acq_func = qExpectedHypervolumeImprovement(
+                model=gp,
+                ref_point=self.ref_point,  # use known reference point
+                partitioning=partitioning,
+                sampler=sampler,
+            )
+
+            config = None
+            if self._restrict_configurations is None:
+                # Continuous optimization of acquisition function only if
+                # ``restrict_configurations`` not used
+                candidate, acq_value = optimize_acqf(
+                    acq_func,
+                    bounds=self._get_gp_bounds(),
+                    q=1,
+                    num_restarts=3,
+                    raw_samples=100,
+                )
+                candidate = candidate.detach().numpy()[0]
+                config = self._config_from_ndarray(candidate)
+                if self.should_not_suggest(config):
+                    logger.warning(
+                        "Optimization of the acquisition function yielded a config that was already seen."
+                    )
+                    config = None
+            return self._sample_and_pick_acq_best(acq_func) if config is None else config
+        except NotPSDError as _:
+            logging.warning("Cholesky inversion failed, sampling randomly.")
+            return self._get_random_config()
+        except ModelFittingError as _:
+            logging.warning("Botorch was unable to fit the model, sampling randomly.")
+            return self._get_random_config()
+        # except:
+        #     # BoTorch can raise different errors, easier to not try to catch them individually
+        #     logging.warning("Botorch was unable to fit the model, sampling randomly.")
+        #     return self._get_random_config()
+
+    def _make_gp(self, X_tensor: Tensor, Y_tensor: Tensor) -> SingleTaskGP:
+        double_precision = False
+        if double_precision:
+            X_tensor = X_tensor.double()
+            Y_tensor = Y_tensor.double()
+
+        # noise_std = NOISE_LEVEL
+        # Y_tensor += noise_std * randn_like(Y_tensor)
+
+        if self.input_warping:
+            warp_tf = Warp(indices=list(range(X_tensor.shape[-1])))
+        else:
+            warp_tf = None
+        return SingleTaskGP(X_tensor, Y_tensor, input_transform=warp_tf)
+
+    def _config_to_feature_matrix(self, configs: List[dict]) -> Tensor:
+        bounds = Tensor(self._hp_ranges.get_ndarray_bounds()).T
+        X = Tensor(self._hp_ranges.to_ndarray_matrix(configs))
+        return normalize(X, bounds)
+
+    def objectives(self):
+        return np.array(list(self.trial_observations.values()))
+
+    def _sample_and_pick_acq_best(self, acq, num_samples: int = 100) -> Optional[dict]:
+        """
+        :param acq: Acquisition function used to score the sampled candidates
+        :param num_samples: Number of random candidates to sample. Defaults to 100
+        :return: Samples ``num_samples`` candidates and returns the one maximizing
+            the acquisition function ``acq`` that was not seen earlier; if all
+            samples were seen, return a random sample instead.
+        """
+        configs_candidates = [self._get_random_config() for _ in range(num_samples)]
+        configs_candidates = [x for x in configs_candidates if x is not None]
+        logger.debug(f"Sampling among {len(configs_candidates)} unseen configs")
+        if configs_candidates:
+            X_tensor = self._config_to_feature_matrix(configs_candidates)
+            ei = acq(X_tensor.unsqueeze(dim=-2))
+            return configs_candidates[ei.argmax()]
+        else:
+            return self._get_random_config()
+
+    def _configs_with_results(self) -> List[dict]:
+        return [
+            config
+            for trial, config in self.trial_configs.items()
+            if trial not in self.pending_trials
+        ]
+
+    def _configs_pending(self) -> List[dict]:
+        return [
+            config
+            for trial, config in self.trial_configs.items()
+            if trial in self.pending_trials
+        ]
+
+    def metric_names(self) -> List[str]:
+        return [self._metric]
+
+    def metric_mode(self) -> str:
+        return self._mode
+
+
+if __name__ == '__main__':
+
+    from syne_tune.config_space import uniform
+    from syne_tune.optimizer.schedulers.searchers.utils import make_hyperparameter_ranges
+
+    random_seed = 31415927
+    random_state = np.random.RandomState(random_seed)
+    hp_cols = ("x0", "x1", "x2")
+    config_space = {
+        node: uniform(0, 1)
+        for node in hp_cols
+    }
+    metric = ["error", 'size']
+    mode = ['min', 'min']
+    searcher = ExpectedHyperVolumeImprovement(
+        config_space=config_space,
+        metric=metric,
+        mode=mode,
+        points_to_evaluate=[],
+    )
+
+    hp_ranges = make_hyperparameter_ranges(config_space)
+    num_data = 20
+    trial_ids = list(range(num_data))
+    configs = hp_ranges.random_configs(random_state, num_data)
+    metric_values = random_state.randn(num_data, len(metric))
+    # Feed data to searcher
+    for trial_id, config, metric_val in zip(
+        trial_ids, configs, metric_values
+    ):
+        searcher.get_config(trial_id=trial_id)
+
+        result = {name: value for name, value in zip(metric, metric_val)}
+        searcher._update(
trial_id=trial_id, + config=config, + result=result, + ) + From 7d26bd173039abb70043051064b6d9991ef62654 Mon Sep 17 00:00:00 2001 From: aaronkl Date: Fri, 17 Nov 2023 12:38:12 +0100 Subject: [PATCH 04/15] format --- .../expected_hyper_volume_improvement.py | 38 ++++++++++--------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py index 8e9daade1..936939aab 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py +++ b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py @@ -26,11 +26,14 @@ from botorch.sampling.samplers import SobolQMCNormalSampler from botorch.utils.transforms import normalize from botorch.utils.multi_objective.box_decompositions import NondominatedPartitioning -from botorch.acquisition.multi_objective.monte_carlo import qExpectedHypervolumeImprovement +from botorch.acquisition.multi_objective.monte_carlo import ( + qExpectedHypervolumeImprovement, +) from botorch.optim import optimize_acqf from botorch.exceptions.errors import ModelFittingError from gpytorch.mlls import ExactMarginalLogLikelihood from linear_operator.utils.errors import NotPSDError + # except ImportError: # print(try_import_botorch_message()) @@ -44,6 +47,7 @@ NOISE_LEVEL = 1e-3 MC_SAMPLES = 128 + class ExpectedHyperVolumeImprovement(StochasticAndFilterDuplicatesSearcher): """ @@ -209,8 +213,7 @@ def _sample_next_candidate(self) -> Optional[dict]: subsample = False X_tensor = Tensor(X) - bounds = torch.stack([Y.min(0).values, - Y.max(0).values]) + bounds = torch.stack([Y.min(0).values, Y.max(0).values]) noise_std = NOISE_LEVEL Y += noise_std * randn_like(Y) @@ -224,8 +227,9 @@ def _sample_next_candidate(self) -> Optional[dict]: else: X_pending = None sampler = SobolQMCNormalSampler(num_samples=MC_SAMPLES) - partitioning = NondominatedPartitioning(ref_point=self.ref_point, - Y=Y_tensor) + partitioning = NondominatedPartitioning( + ref_point=self.ref_point, Y=Y_tensor + ) acq_func = qExpectedHypervolumeImprovement( model=gp, ref_point=self.ref_point, # use known reference point @@ -251,7 +255,9 @@ def _sample_next_candidate(self) -> Optional[dict]: "Optimization of the acquisition function yielded a config that was already seen." 
                     )
                     config = None
-            return self._sample_and_pick_acq_best(acq_func) if config is None else config
+            return (
+                self._sample_and_pick_acq_best(acq_func) if config is None else config
+            )
         except NotPSDError as _:
             logging.warning("Cholesky inversion failed, sampling randomly.")
             return self._get_random_config()
@@ -325,20 +331,19 @@ def metric_mode(self) -> str:
         return self._mode
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
 
     from syne_tune.config_space import uniform
-    from syne_tune.optimizer.schedulers.searchers.utils import make_hyperparameter_ranges
+    from syne_tune.optimizer.schedulers.searchers.utils import (
+        make_hyperparameter_ranges,
+    )
 
     random_seed = 31415927
     random_state = np.random.RandomState(random_seed)
     hp_cols = ("x0", "x1", "x2")
-    config_space = {
-        node: uniform(0, 1)
-        for node in hp_cols
-    }
-    metric = ["error", 'size']
-    mode = ['min', 'min']
+    config_space = {node: uniform(0, 1) for node in hp_cols}
+    metric = ["error", "size"]
+    mode = ["min", "min"]
     searcher = ExpectedHyperVolumeImprovement(
         config_space=config_space,
         metric=metric,
@@ -352,9 +357,7 @@ def metric_mode(self) -> str:
     configs = hp_ranges.random_configs(random_state, num_data)
     metric_values = random_state.randn(num_data, len(metric))
     # Feed data to searcher
-    for trial_id, config, metric_val in zip(
-        trial_ids, configs, metric_values
-    ):
+    for trial_id, config, metric_val in zip(trial_ids, configs, metric_values):
         searcher.get_config(trial_id=trial_id)
 
         result = {name: value for name, value in zip(metric, metric_val)}
@@ -363,4 +366,3 @@ def metric_mode(self) -> str:
             config=config,
             result=result,
         )
-

From 008adc0f967355fff182fc8ef5fccd39b2005752 Mon Sep 17 00:00:00 2001
From: kleiaaro
Date: Wed, 20 Dec 2023 11:34:53 +0100
Subject: [PATCH 05/15] import

---
 .../multiobjective/expected_hyper_volume_improvement.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py
index 936939aab..a1e72580a 100644
--- a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py
+++ b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py
@@ -23,7 +23,7 @@
 from botorch.fit import fit_gpytorch_mll
 from botorch.models.transforms import Warp
 from botorch.utils import standardize
-from botorch.sampling.samplers import SobolQMCNormalSampler
+from botorch.sampling import SobolQMCNormalSampler
 from botorch.utils.transforms import normalize
 from botorch.utils.multi_objective.box_decompositions import NondominatedPartitioning
 from botorch.acquisition.multi_objective.monte_carlo import (

From a3faa03d40c31bc6a1c8faa03ed9d6c5ed3167e8 Mon Sep 17 00:00:00 2001
From: kleiaaro
Date: Thu, 15 Feb 2024 12:17:46 +0100
Subject: [PATCH 06/15] add ehvi; unit test

---
 syne_tune/optimizer/baselines.py              | 44 +++++++++
 syne_tune/optimizer/schedulers/fifo.py        |  2 +-
 .../expected_hyper_volume_improvement.py      | 96 +++++--------------
 tst/schedulers/test_schedulers_api.py         |  6 ++
 tst/schedulers/test_searchers.py              |  2 +
 5 files changed, 78 insertions(+), 72 deletions(-)

diff --git a/syne_tune/optimizer/baselines.py b/syne_tune/optimizer/baselines.py
index f0ffc2003..729e04f93 100644
--- a/syne_tune/optimizer/baselines.py
+++ b/syne_tune/optimizer/baselines.py
@@ -1247,6 +1247,49 @@ def __init__(
         )
 
 
+class EHVI(FIFOScheduler):
+    """
+    Implements the Expected Hypervolume Improvement method.
+ + See :class:`~syne_tune.optimizer.schedulers.searchers.RandomSearcher` + for ``kwargs["search_options"]`` parameters. + + :param config_space: Configuration space for evaluation function + :param metric: Name of metric to optimize + :param population_size: See + :class:`~syne_tune.optimizer.schedulers.searchers.RegularizedEvolution`. + Defaults to 100 + :param sample_size: See + :class:`~syne_tune.optimizer.schedulers.searchers.RegularizedEvolution`. + Defaults to 10 + :param random_seed: Random seed, optional + :param kwargs: Additional arguments to + :class:`~syne_tune.optimizer.schedulers.FIFOScheduler` + """ + + def __init__( + self, + config_space: Dict[str, Any], + metric: List[str], + mode: Union[List[str], str] = "min", + random_seed: Optional[int] = None, + **kwargs, + ): + searcher_kwargs = _create_searcher_kwargs( + config_space, metric, random_seed, kwargs + ) + searcher_kwargs["mode"] = mode + print(mode) + + super(EHVI, self).__init__( + config_space=config_space, + metric=metric, + mode=mode, + searcher=ExpectedHyperVolumeImprovement(**searcher_kwargs), + random_seed=random_seed, + **kwargs, + ) + class ASHACQR(HyperbandScheduler): """ Multi-fidelity Conformal Quantile Regression approach proposed in: @@ -1352,4 +1395,5 @@ def __init__( "ASHACTS": ASHACTS, "CQR": CQR, "ASHACQR": ASHACQR, + "EHVI": EHVI, } diff --git a/syne_tune/optimizer/schedulers/fifo.py b/syne_tune/optimizer/schedulers/fifo.py index bd25f9fd6..2fd8954b8 100644 --- a/syne_tune/optimizer/schedulers/fifo.py +++ b/syne_tune/optimizer/schedulers/fifo.py @@ -242,7 +242,7 @@ def _check_metric_mode( else: len_mode = 1 if len_mode == 1: - mode = [mode * num_objectives] + mode = [mode] * num_objectives allowed_values = {"min", "max"} assert all( x in allowed_values for x in mode diff --git a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py index a1e72580a..cb94b2312 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py +++ b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py @@ -1,41 +1,29 @@ -# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"). -# You may not use this file except in compliance with the License. -# A copy of the License is located at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# or in the "license" file accompanying this file. This file is distributed -# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language governing -# permissions and limitations under the License. 
from typing import Optional, List, Dict, Any, Union import logging import numpy as np import torch + from syne_tune.try_import import try_import_botorch_message -# try: -from torch import Tensor, randn_like, random -from botorch.models import SingleTaskGP -from botorch.fit import fit_gpytorch_mll -from botorch.models.transforms import Warp -from botorch.utils import standardize -from botorch.sampling import SobolQMCNormalSampler -from botorch.utils.transforms import normalize -from botorch.utils.multi_objective.box_decompositions import NondominatedPartitioning -from botorch.acquisition.multi_objective.monte_carlo import ( - qExpectedHypervolumeImprovement, -) -from botorch.optim import optimize_acqf -from botorch.exceptions.errors import ModelFittingError -from gpytorch.mlls import ExactMarginalLogLikelihood -from linear_operator.utils.errors import NotPSDError +try: + from torch import Tensor, randn_like, random + from botorch.models import SingleTaskGP + from botorch.fit import fit_gpytorch_mll + from botorch.models.transforms import Warp + from botorch.sampling import SobolQMCNormalSampler + from botorch.utils.transforms import normalize + from botorch.utils.multi_objective.box_decompositions import NondominatedPartitioning + from botorch.acquisition.multi_objective.monte_carlo import ( + qExpectedHypervolumeImprovement, + ) + from botorch.optim import optimize_acqf + from botorch.exceptions.errors import ModelFittingError + from gpytorch.mlls import ExactMarginalLogLikelihood + from linear_operator.utils.errors import NotPSDError -# except ImportError: -# print(try_import_botorch_message()) +except ImportError: + print(try_import_botorch_message()) from syne_tune.optimizer.schedulers.searchers import ( StochasticAndFilterDuplicatesSearcher, @@ -50,7 +38,12 @@ class ExpectedHyperVolumeImprovement(StochasticAndFilterDuplicatesSearcher): """ + Implementation of expected hypervolume improvement [1] based on the BOTorch implementation. + [1] S. Daulton, M. Balandat, and E. Bakshy. + Differentiable Expected Hypervolume Improvement for Parallel Multi-Objective + Bayesian Optimization. + Advances in Neural Information Processing Systems 33, 2020. 
Additional arguments on top of parent class :class:`~syne_tune.optimizer.schedulers.searchers.StochasticAndFilterDuplicatesSearcher`: @@ -226,7 +219,8 @@ def _sample_next_candidate(self) -> Optional[dict]: X_pending = self._config_to_feature_matrix(self._configs_pending()) else: X_pending = None - sampler = SobolQMCNormalSampler(num_samples=MC_SAMPLES) + sampler = SobolQMCNormalSampler(torch.Size([MC_SAMPLES])) + partitioning = NondominatedPartitioning( ref_point=self.ref_point, Y=Y_tensor ) @@ -275,9 +269,6 @@ def _make_gp(self, X_tensor: Tensor, Y_tensor: Tensor) -> SingleTaskGP: X_tensor = X_tensor.double() Y_tensor = Y_tensor.double() - # noise_std = NOISE_LEVEL - # Y_tensor += noise_std * randn_like(Y_tensor) - if self.input_warping: warp_tf = Warp(indices=list(range(X_tensor.shape[-1]))) else: @@ -329,40 +320,3 @@ def metric_names(self) -> List[str]: def metric_mode(self) -> str: return self._mode - - -if __name__ == "__main__": - - from syne_tune.config_space import uniform - from syne_tune.optimizer.schedulers.searchers.utils import ( - make_hyperparameter_ranges, - ) - - random_seed = 31415927 - random_state = np.random.RandomState(random_seed) - hp_cols = ("x0", "x1", "x2") - config_space = {node: uniform(0, 1) for node in hp_cols} - metric = ["error", "size"] - mode = ["min", "min"] - searcher = ExpectedHyperVolumeImprovement( - config_space=config_space, - metric=metric, - mode=mode, - points_to_evaluate=[], - ) - - hp_ranges = make_hyperparameter_ranges(config_space) - num_data = 20 - trial_ids = list(range(num_data)) - configs = hp_ranges.random_configs(random_state, num_data) - metric_values = random_state.randn(num_data, len(metric)) - # Feed data to searcher - for trial_id, config, metric_val in zip(trial_ids, configs, metric_values): - searcher.get_config(trial_id=trial_id) - - result = {name: value for name, value in zip(metric, metric_val)} - searcher._update( - trial_id=trial_id, - config=config, - result=result, - ) diff --git a/tst/schedulers/test_schedulers_api.py b/tst/schedulers/test_schedulers_api.py index 1d2239b09..6a774e288 100644 --- a/tst/schedulers/test_schedulers_api.py +++ b/tst/schedulers/test_schedulers_api.py @@ -34,6 +34,7 @@ DyHPO, PASHA, REA, + EHVI, SyncHyperband, SyncBOHB, SyncMOBSTER, @@ -316,6 +317,11 @@ def make_transfer_learning_evaluations(num_evals: int = 10): metrics=[metric1, metric2], mode=mode, ), + EHVI( + config_space=config_space, + metric=[metric1, metric2], + mode=mode, + ), MedianStoppingRule( scheduler=FIFOScheduler( config_space, searcher="random", metric=metric1, mode=mode diff --git a/tst/schedulers/test_searchers.py b/tst/schedulers/test_searchers.py index 1e1810905..70ec3b5a2 100644 --- a/tst/schedulers/test_searchers.py +++ b/tst/schedulers/test_searchers.py @@ -28,6 +28,7 @@ SyncBOHB, BORE, KDE, + EHVI, ) from syne_tune.config_space import ( choice, @@ -53,6 +54,7 @@ (SyncBOHB, True), (BORE, False), (KDE, False), + (EHVI, False), ] From 7a83551c6f5d8753e27616101f60241d256f05ac Mon Sep 17 00:00:00 2001 From: kleiaaro Date: Thu, 15 Feb 2024 12:51:11 +0100 Subject: [PATCH 07/15] formatting --- syne_tune/optimizer/baselines.py | 1 + .../multiobjective/expected_hyper_volume_improvement.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/syne_tune/optimizer/baselines.py b/syne_tune/optimizer/baselines.py index 729e04f93..8ea4e03b3 100644 --- a/syne_tune/optimizer/baselines.py +++ b/syne_tune/optimizer/baselines.py @@ -1290,6 +1290,7 @@ def __init__( **kwargs, ) + class ASHACQR(HyperbandScheduler): """ 
Multi-fidelity Conformal Quantile Regression approach proposed in: diff --git a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py index cb94b2312..1e6d6ba57 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py +++ b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py @@ -13,7 +13,9 @@ from botorch.models.transforms import Warp from botorch.sampling import SobolQMCNormalSampler from botorch.utils.transforms import normalize - from botorch.utils.multi_objective.box_decompositions import NondominatedPartitioning + from botorch.utils.multi_objective.box_decompositions import ( + NondominatedPartitioning, + ) from botorch.acquisition.multi_objective.monte_carlo import ( qExpectedHypervolumeImprovement, ) @@ -23,7 +25,7 @@ from linear_operator.utils.errors import NotPSDError except ImportError: - print(try_import_botorch_message()) + print(try_import_botorch_message()) from syne_tune.optimizer.schedulers.searchers import ( StochasticAndFilterDuplicatesSearcher, From 77ae1e3cef513a22d23fd169457c0c143537b3b0 Mon Sep 17 00:00:00 2001 From: kleiaaro Date: Thu, 15 Feb 2024 13:05:11 +0100 Subject: [PATCH 08/15] import --- .../expected_hyper_volume_improvement.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py index 1e6d6ba57..e5dc40a72 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py +++ b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py @@ -1,12 +1,24 @@ +# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). +# You may not use this file except in compliance with the License. +# A copy of the License is located at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# or in the "license" file accompanying this file. This file is distributed +# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either +# express or implied. See the License for the specific language governing +# permissions and limitations under the License. 
from typing import Optional, List, Dict, Any, Union import logging import numpy as np -import torch from syne_tune.try_import import try_import_botorch_message try: + import torch from torch import Tensor, randn_like, random from botorch.models import SingleTaskGP from botorch.fit import fit_gpytorch_mll From c5668d45013c14d632704b991ebeb98f73bb2322 Mon Sep 17 00:00:00 2001 From: kleiaaro Date: Thu, 15 Feb 2024 13:19:45 +0100 Subject: [PATCH 09/15] import safe --- syne_tune/optimizer/baselines.py | 137 ++++++++++++------------------- 1 file changed, 51 insertions(+), 86 deletions(-) diff --git a/syne_tune/optimizer/baselines.py b/syne_tune/optimizer/baselines.py index 8ea4e03b3..f6fd3090d 100644 --- a/syne_tune/optimizer/baselines.py +++ b/syne_tune/optimizer/baselines.py @@ -25,7 +25,6 @@ MultiObjectiveRegularizedEvolution, NSGA2Searcher, LinearScalarizedScheduler, - ExpectedHyperVolumeImprovement, ) from syne_tune.optimizer.schedulers.searchers.bayesopt.models.estimator import Estimator from syne_tune.optimizer.schedulers.searchers.regularized_evolution import ( @@ -949,48 +948,6 @@ def __init__( ) -class EHVI(FIFOScheduler): - """ - - See :class:`~syne_tune.optimizer.schedulers.searchers.RandomSearcher` - for ``kwargs["search_options"]`` parameters. - - :param config_space: Configuration space for evaluation function - :param metric: Name of metric to optimize - :param population_size: See - :class:`~syne_tune.optimizer.schedulers.searchers.RegularizedEvolution`. - Defaults to 100 - :param sample_size: See - :class:`~syne_tune.optimizer.schedulers.searchers.RegularizedEvolution`. - Defaults to 10 - :param random_seed: Random seed, optional - :param kwargs: Additional arguments to - :class:`~syne_tune.optimizer.schedulers.FIFOScheduler` - """ - - def __init__( - self, - config_space: Dict[str, Any], - metric: List[str], - mode: Union[List[str], str] = "min", - random_seed: Optional[int] = None, - **kwargs, - ): - searcher_kwargs = _create_searcher_kwargs( - config_space, metric, random_seed, kwargs - ) - searcher_kwargs["mode"] = mode - - super(EHVI, self).__init__( - config_space=config_space, - metric=metric, - mode=mode, - searcher=ExpectedHyperVolumeImprovement(**searcher_kwargs), - random_seed=random_seed, - **kwargs, - ) - - class MOLinearScalarizationBayesOpt(LinearScalarizedScheduler): """ Uses :class:`~syne_tune.optimizer.schedulers.multiobjective.LinearScalarizedScheduler` @@ -1247,49 +1204,6 @@ def __init__( ) -class EHVI(FIFOScheduler): - """ - Implements the Expected Hypervolume Improvement method. - - See :class:`~syne_tune.optimizer.schedulers.searchers.RandomSearcher` - for ``kwargs["search_options"]`` parameters. - - :param config_space: Configuration space for evaluation function - :param metric: Name of metric to optimize - :param population_size: See - :class:`~syne_tune.optimizer.schedulers.searchers.RegularizedEvolution`. - Defaults to 100 - :param sample_size: See - :class:`~syne_tune.optimizer.schedulers.searchers.RegularizedEvolution`. 
-        Defaults to 10
-    :param random_seed: Random seed, optional
-    :param kwargs: Additional arguments to
-        :class:`~syne_tune.optimizer.schedulers.FIFOScheduler`
-    """
-
-    def __init__(
-        self,
-        config_space: Dict[str, Any],
-        metric: List[str],
-        mode: Union[List[str], str] = "min",
-        random_seed: Optional[int] = None,
-        **kwargs,
-    ):
-        searcher_kwargs = _create_searcher_kwargs(
-            config_space, metric, random_seed, kwargs
-        )
-        searcher_kwargs["mode"] = mode
-        print(mode)
-
-        super(EHVI, self).__init__(
-            config_space=config_space,
-            metric=metric,
-            mode=mode,
-            searcher=ExpectedHyperVolumeImprovement(**searcher_kwargs),
-            random_seed=random_seed,
-            **kwargs,
-        )
-
 
 
 class ASHACQR(HyperbandScheduler):
     """
     Multi-fidelity Conformal Quantile Regression approach proposed in:
@@ -1374,6 +1288,57 @@ def __init__(
         )
     )
 
+try:
+    from syne_tune.optimizer.schedulers.multiobjective import ExpectedHyperVolumeImprovement
+    class EHVI(FIFOScheduler):
+        """
+        Implements the Expected Hypervolume Improvement method.
+
+        See :class:`~syne_tune.optimizer.schedulers.multiobjective.ExpectedHyperVolumeImprovement`
+        for ``kwargs["search_options"]`` parameters.
+
+        :param config_space: Configuration space for evaluation function
+        :param metric: Names of the metrics to optimize
+        :param mode: Whether to minimize ("min") or maximize ("max") each
+            metric; a single string is broadcast to all metrics. Defaults
+            to "min"
+        :param random_seed: Random seed, optional
+        :param kwargs: Additional arguments to
+            :class:`~syne_tune.optimizer.schedulers.FIFOScheduler`
+        """
+
+        def __init__(
+                self,
+                config_space: Dict[str, Any],
+                metric: List[str],
+                mode: Union[List[str], str] = "min",
+                random_seed: Optional[int] = None,
+                **kwargs,
+        ):
+            searcher_kwargs = _create_searcher_kwargs(
+                config_space, metric, random_seed, kwargs
+            )
+            searcher_kwargs["mode"] = mode
+
+            super(EHVI, self).__init__(
+                config_space=config_space,
+                metric=metric,
+                mode=mode,
+                searcher=ExpectedHyperVolumeImprovement(**searcher_kwargs),
+                random_seed=random_seed,
+                **kwargs,
+            )
+except ImportError:
+    logging.info(
+        _try_import_message(
+            message_text="EHVI is not imported (not contained in extra)",
+            tag="ehvi",
+        )
+    )
 
 # Dictionary that allows to also list baselines who don't need a wrapper class
 # such as :class:`PopulationBasedTraining`
 baselines_dict = {

From f5e707633a624a98428bbe50c136fca74f64e802 Mon Sep 17 00:00:00 2001
From: kleiaaro
Date: Thu, 15 Feb 2024 13:19:52 +0100
Subject: [PATCH 10/15] import safe

---
 syne_tune/optimizer/baselines.py | 19 +++++++++++--------
 1 file changed, 11 insertions(+), 8 deletions(-)

diff --git a/syne_tune/optimizer/baselines.py b/syne_tune/optimizer/baselines.py
index f6fd3090d..2fae79d63 100644
--- a/syne_tune/optimizer/baselines.py
+++ b/syne_tune/optimizer/baselines.py
@@ -1204,7 +1204,6 @@ def __init__(
     )
 
 
-
 class ASHACQR(HyperbandScheduler):
     """
     Multi-fidelity Conformal Quantile Regression approach proposed in:
@@ -1288,7 +1288,10 @@ def __init__(
     )
 
 try:
-    from syne_tune.optimizer.schedulers.multiobjective import ExpectedHyperVolumeImprovement
+    from syne_tune.optimizer.schedulers.multiobjective import (
+        ExpectedHyperVolumeImprovement,
+    )
+
     class EHVI(FIFOScheduler):
         """
         Implements the Expected Hypervolume Improvement method.
@@ -1311,12 +1313,12 @@ class EHVI(FIFOScheduler): """ def __init__( - self, - config_space: Dict[str, Any], - metric: List[str], - mode: Union[List[str], str] = "min", - random_seed: Optional[int] = None, - **kwargs, + self, + config_space: Dict[str, Any], + metric: List[str], + mode: Union[List[str], str] = "min", + random_seed: Optional[int] = None, + **kwargs, ): searcher_kwargs = _create_searcher_kwargs( config_space, metric, random_seed, kwargs @@ -1332,6 +1334,7 @@ def __init__( random_seed=random_seed, **kwargs, ) + except ImportError: logging.info( _try_import_message( From e130fe250ad986a9283b709e7e7bebb7a4cf2a36 Mon Sep 17 00:00:00 2001 From: kleiaaro Date: Thu, 15 Feb 2024 13:48:45 +0100 Subject: [PATCH 11/15] import --- .../optimizer/schedulers/multiobjective/__init__.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/syne_tune/optimizer/schedulers/multiobjective/__init__.py b/syne_tune/optimizer/schedulers/multiobjective/__init__.py index 705ae0a4a..3fddcef39 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/__init__.py +++ b/syne_tune/optimizer/schedulers/multiobjective/__init__.py @@ -10,7 +10,7 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either # express or implied. See the License for the specific language governing # permissions and limitations under the License. -from syne_tune.try_import import try_import_moo_message +from syne_tune.try_import import try_import_moo_message, try_import_botorch_message from syne_tune.optimizer.schedulers.multiobjective.moasha import MOASHA from syne_tune.optimizer.schedulers.multiobjective.multi_objective_regularized_evolution import ( MultiObjectiveRegularizedEvolution, @@ -21,18 +21,21 @@ from syne_tune.optimizer.schedulers.multiobjective.linear_scalarizer import ( LinearScalarizedScheduler, ) -from syne_tune.optimizer.schedulers.multiobjective.expected_hyper_volume_improvement import ( - ExpectedHyperVolumeImprovement, -) __all__ = [ "MOASHA", "MultiObjectiveRegularizedEvolution", "NSGA2Searcher", "LinearScalarizedScheduler", - "ExpectedHyperVolumeImprovement", ] +try: + from syne_tune.optimizer.schedulers.multiobjective.expected_hyper_volume_improvement import ( + ExpectedHyperVolumeImprovement, + ) +except ImportError: + print(try_import_botorch_message()) + try: from syne_tune.optimizer.schedulers.multiobjective.multi_surrogate_multi_objective_searcher import ( # noqa: F401 MultiObjectiveMultiSurrogateSearcher, From 26dfc1e0f963696469e13b837f596128624d3445 Mon Sep 17 00:00:00 2001 From: kleiaaro Date: Thu, 15 Feb 2024 14:09:33 +0100 Subject: [PATCH 12/15] update --- syne_tune/optimizer/baselines.py | 2 +- syne_tune/optimizer/schedulers/multiobjective/__init__.py | 7 ------- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/syne_tune/optimizer/baselines.py b/syne_tune/optimizer/baselines.py index 2fae79d63..15c68f49a 100644 --- a/syne_tune/optimizer/baselines.py +++ b/syne_tune/optimizer/baselines.py @@ -1288,7 +1288,7 @@ def __init__( ) try: - from syne_tune.optimizer.schedulers.multiobjective import ( + from syne_tune.optimizer.schedulers.multiobjective.expected_hyper_volume_improvement import ( ExpectedHyperVolumeImprovement, ) diff --git a/syne_tune/optimizer/schedulers/multiobjective/__init__.py b/syne_tune/optimizer/schedulers/multiobjective/__init__.py index 3fddcef39..f0c258e34 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/__init__.py +++ b/syne_tune/optimizer/schedulers/multiobjective/__init__.py @@ -29,13 +29,6 @@ 
"LinearScalarizedScheduler", ] -try: - from syne_tune.optimizer.schedulers.multiobjective.expected_hyper_volume_improvement import ( - ExpectedHyperVolumeImprovement, - ) -except ImportError: - print(try_import_botorch_message()) - try: from syne_tune.optimizer.schedulers.multiobjective.multi_surrogate_multi_objective_searcher import ( # noqa: F401 MultiObjectiveMultiSurrogateSearcher, From da585fb05d9df7d63f2fc0dbb4939f31e03f3afc Mon Sep 17 00:00:00 2001 From: kleiaaro Date: Thu, 15 Feb 2024 14:35:13 +0100 Subject: [PATCH 13/15] import --- .../multiobjective/expected_hyper_volume_improvement.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py index e5dc40a72..294c717e0 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py +++ b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py @@ -277,7 +277,7 @@ def _sample_next_candidate(self) -> Optional[dict]: # logging.warning("Botorch was unable to fit the model, sampling randomly.") # return self._get_random_config() - def _make_gp(self, X_tensor: Tensor, Y_tensor: Tensor) -> SingleTaskGP: + def _make_gp(self, X_tensor, Y_tensor): double_precision = False if double_precision: X_tensor = X_tensor.double() From 23a40f96288f04ea47eba8c3eafe259eb02642f4 Mon Sep 17 00:00:00 2001 From: kleiaaro Date: Thu, 15 Feb 2024 14:55:41 +0100 Subject: [PATCH 14/15] imports --- syne_tune/optimizer/baselines.py | 1 - syne_tune/optimizer/schedulers/multiobjective/__init__.py | 2 +- .../multiobjective/expected_hyper_volume_improvement.py | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/syne_tune/optimizer/baselines.py b/syne_tune/optimizer/baselines.py index 15c68f49a..f426a7d9c 100644 --- a/syne_tune/optimizer/baselines.py +++ b/syne_tune/optimizer/baselines.py @@ -1364,5 +1364,4 @@ def __init__( "ASHACTS": ASHACTS, "CQR": CQR, "ASHACQR": ASHACQR, - "EHVI": EHVI, } diff --git a/syne_tune/optimizer/schedulers/multiobjective/__init__.py b/syne_tune/optimizer/schedulers/multiobjective/__init__.py index f0c258e34..bfd870d88 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/__init__.py +++ b/syne_tune/optimizer/schedulers/multiobjective/__init__.py @@ -10,7 +10,7 @@ # on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either # express or implied. See the License for the specific language governing # permissions and limitations under the License. 
-from syne_tune.try_import import try_import_moo_message, try_import_botorch_message +from syne_tune.try_import import try_import_moo_message from syne_tune.optimizer.schedulers.multiobjective.moasha import MOASHA from syne_tune.optimizer.schedulers.multiobjective.multi_objective_regularized_evolution import ( MultiObjectiveRegularizedEvolution, diff --git a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py index 294c717e0..6347b6a32 100644 --- a/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py +++ b/syne_tune/optimizer/schedulers/multiobjective/expected_hyper_volume_improvement.py @@ -289,7 +289,7 @@ def _make_gp(self, X_tensor, Y_tensor): warp_tf = None return SingleTaskGP(X_tensor, Y_tensor, input_transform=warp_tf) - def _config_to_feature_matrix(self, configs: List[dict]) -> Tensor: + def _config_to_feature_matrix(self, configs: List[dict]): bounds = Tensor(self._hp_ranges.get_ndarray_bounds()).T X = Tensor(self._hp_ranges.to_ndarray_matrix(configs)) return normalize(X, bounds) From 73245f5f0cee556cfbdd7d0c169e8e318a75be2e Mon Sep 17 00:00:00 2001 From: kleiaaro Date: Thu, 15 Feb 2024 16:48:41 +0100 Subject: [PATCH 15/15] import unit tests --- tst/schedulers/test_schedulers_api.py | 12 +++++++----- tst/schedulers/test_searchers.py | 2 -- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tst/schedulers/test_schedulers_api.py b/tst/schedulers/test_schedulers_api.py index 6a774e288..b34625559 100644 --- a/tst/schedulers/test_schedulers_api.py +++ b/tst/schedulers/test_schedulers_api.py @@ -317,11 +317,6 @@ def make_transfer_learning_evaluations(num_evals: int = 10): metrics=[metric1, metric2], mode=mode, ), - EHVI( - config_space=config_space, - metric=[metric1, metric2], - mode=mode, - ), MedianStoppingRule( scheduler=FIFOScheduler( config_space, searcher="random", metric=metric1, mode=mode @@ -484,6 +479,13 @@ def make_transfer_learning_evaluations(num_evals: int = 10): mode=mode, ), ) + list_schedulers_to_test.append( + EHVI( + config_space=config_space, + metric=[metric1, metric2], + mode=mode, + ), + ) @pytest.mark.parametrize("scheduler", list_schedulers_to_test) diff --git a/tst/schedulers/test_searchers.py b/tst/schedulers/test_searchers.py index 70ec3b5a2..1e1810905 100644 --- a/tst/schedulers/test_searchers.py +++ b/tst/schedulers/test_searchers.py @@ -28,7 +28,6 @@ SyncBOHB, BORE, KDE, - EHVI, ) from syne_tune.config_space import ( choice, @@ -54,7 +53,6 @@ (SyncBOHB, True), (BORE, False), (KDE, False), - (EHVI, False), ]
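
Usage sketch (supplementary to the series, not one of the patches): the inline ``__main__`` smoke test added in PATCH 03 and removed again in PATCH 06 exercised the searcher directly; the snippet below instead drives the ``EHVI`` baseline end-to-end, which is how the final state of this series is meant to be used. The training script ``train_toy.py`` and its reported metrics ``error`` and ``size`` are illustrative assumptions only; ``EHVI``, ``Tuner``, ``LocalBackend``, ``StoppingCriterion``, and ``uniform`` are existing Syne Tune APIs, and the ``botorch`` extra must be installed (otherwise ``baselines.py`` skips the ``EHVI`` definition via its ``try``/``except ImportError`` guard).

    # Hedged sketch: assumes a script ``train_toy.py`` that reports the
    # metrics "error" and "size" (both minimized) via syne_tune.Reporter.
    from syne_tune import Tuner, StoppingCriterion
    from syne_tune.backend import LocalBackend
    from syne_tune.config_space import uniform
    from syne_tune.optimizer.baselines import EHVI  # requires the botorch extra

    config_space = {
        "x0": uniform(0.0, 1.0),
        "x1": uniform(0.0, 1.0),
        "x2": uniform(0.0, 1.0),
    }
    scheduler = EHVI(
        config_space=config_space,
        metric=["error", "size"],  # two objectives
        mode="min",  # a single mode is broadcast to all objectives (PATCH 06 fix in fifo.py)
        random_seed=31415927,
    )
    tuner = Tuner(
        trial_backend=LocalBackend(entry_point="train_toy.py"),
        scheduler=scheduler,
        stop_criterion=StoppingCriterion(max_wallclock_time=600),
        n_workers=4,
    )
    tuner.run()

The first ``num_init_random`` suggestions (3 by default) are drawn at random; afterwards the searcher fits an input-warped ``SingleTaskGP`` to the observed objectives and maximizes qEHVI, falling back to random sampling if model fitting fails.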