[tune] Add scikit-optimize to Tune (ray-project#3924)

Adi Zimmerman authored and richardliaw committed Feb 12, 2019
1 parent 8df7728 commit 9797028
Showing 6 changed files with 175 additions and 0 deletions.
24 changes: 24 additions & 0 deletions doc/source/tune-searchalg.rst
@@ -103,6 +103,30 @@ An example of this can be found in `sigopt_example.py <https://github.com/ray-pr
:show-inheritance:
:noindex:

Scikit-Optimize Search
----------------------

``SkOptSearch`` is a SearchAlgorithm backed by `Scikit-Optimize <https://scikit-optimize.github.io>`__ that performs sequential model-based hyperparameter optimization. Note that this class does not extend ``ray.tune.suggest.BasicVariantGenerator``, so you will not be able to use Tune's default variant generation/search space declaration when using SkOptSearch.

To use this search algorithm, you will need to install Scikit-Optimize:

.. code-block:: bash

    $ pip install scikit-optimize

This algorithm requires using the `Scikit-Optimize ask and tell interface <https://scikit-optimize.github.io/notebooks/ask-and-tell.html>`__, which in turn requires an `Optimizer <https://scikit-optimize.github.io/#skopt.Optimizer>`__ instance provided by Scikit-Optimize. You can use SkOptSearch as follows:

.. code-block:: python

    optimizer = Optimizer(dimension, ...)
    run_experiments(experiment_config, search_alg=SkOptSearch(optimizer, parameter_names, ...))

An example of this can be found in `skopt_example.py <https://github.com/ray-project/ray/blob/master/python/ray/tune/examples/skopt_example.py>`__.
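For orientation, here is a fuller sketch of the same flow, mirroring the example file; ``my_trainable`` and the dimension bounds are illustrative, not part of this commit:

.. code-block:: python

    from skopt import Optimizer
    from ray.tune import run_experiments
    from ray.tune.suggest import SkOptSearch

    # Two dimensions, matching the two parameter names passed below.
    optimizer = Optimizer([(0, 20), (-100, 100)])
    algo = SkOptSearch(
        optimizer, ["width", "height"],
        max_concurrent=4,
        reward_attr="neg_mean_loss")
    run_experiments({"my_exp": {"run": "my_trainable"}}, search_alg=algo)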

.. autoclass:: ray.tune.suggest.SkOptSearch
:show-inheritance:
:noindex:

Contributing a New Algorithm
----------------------------

1 change: 1 addition & 0 deletions docker/examples/Dockerfile
@@ -10,4 +10,5 @@ RUN pip install -U h5py  # Mutes FutureWarnings
RUN pip install --upgrade bayesian-optimization
RUN pip install --upgrade git+git://github.com/hyperopt/hyperopt.git
RUN pip install --upgrade sigopt
RUN pip install --upgrade scikit-optimize
RUN conda install pytorch-cpu torchvision-cpu -c pytorch
56 changes: 56 additions & 0 deletions python/ray/tune/examples/skopt_example.py
@@ -0,0 +1,56 @@
"""This test checks that Skopt is functional.
It also checks that it is usable with a separate scheduler.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import ray
from ray.tune import run_experiments, register_trainable
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest import SkOptSearch


def easy_objective(config, reporter):
import time
time.sleep(0.2)
for i in range(config["iterations"]):
reporter(
timesteps_total=i,
neg_mean_loss=-(config["height"] - 14)**2 +
abs(config["width"] - 3))
time.sleep(0.02)


if __name__ == '__main__':
import argparse
from skopt import Optimizer

parser = argparse.ArgumentParser()
parser.add_argument(
"--smoke-test", action="store_true", help="Finish quickly for testing")
args, _ = parser.parse_known_args()
ray.init(redirect_output=True)

register_trainable("exp", easy_objective)

config = {
"skopt_exp": {
"run": "exp",
"num_samples": 10 if args.smoke_test else 50,
"config": {
"iterations": 100,
},
"stop": {
"timesteps_total": 100
},
}
}
optimizer = Optimizer([(0, 20), (-100, 100)])
algo = SkOptSearch(
optimizer, ["width", "height"],
max_concurrent=4,
reward_attr="neg_mean_loss")
scheduler = AsyncHyperBandScheduler(reward_attr="neg_mean_loss")
run_experiments(config, search_alg=algo, scheduler=scheduler)
2 changes: 2 additions & 0 deletions python/ray/tune/suggest/__init__.py
@@ -3,6 +3,7 @@
from ray.tune.suggest.suggestion import SuggestionAlgorithm
from ray.tune.suggest.bayesopt import BayesOptSearch
from ray.tune.suggest.hyperopt import HyperOptSearch
from ray.tune.suggest.skopt import SkOptSearch
from ray.tune.suggest.sigopt import SigOptSearch
from ray.tune.suggest.variant_generator import grid_search, function, \
sample_from
@@ -12,6 +13,7 @@
"BasicVariantGenerator",
"BayesOptSearch",
"HyperOptSearch",
"SkOptSearch",
"SigOptSearch",
"SuggestionAlgorithm",
"grid_search",
88 changes: 88 additions & 0 deletions python/ray/tune/suggest/skopt.py
@@ -0,0 +1,88 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

try:
    import skopt
except Exception:
    skopt = None

from ray.tune.suggest.suggestion import SuggestionAlgorithm


class SkOptSearch(SuggestionAlgorithm):
    """A wrapper around skopt to provide trial suggestions.

    Requires skopt to be installed.

    Parameters:
        optimizer (skopt.optimizer.Optimizer): Optimizer provided
            from skopt.
        parameter_names (list): List of parameter names. Should match
            the dimension of the optimizer output.
        max_concurrent (int): Number of maximum concurrent trials. Defaults
            to 10.
        reward_attr (str): The training result objective value attribute.
            This refers to an increasing value.

    Example:
        >>> from skopt import Optimizer
        >>> optimizer = Optimizer([(0, 20), (-100, 100)])
        >>> config = {
        >>>     "my_exp": {
        >>>         "run": "exp",
        >>>         "num_samples": 10,
        >>>         "stop": {
        >>>             "training_iteration": 100
        >>>         },
        >>>     }
        >>> }
        >>> algo = SkOptSearch(optimizer,
        >>>     ["width", "height"], max_concurrent=4,
        >>>     reward_attr="neg_mean_loss")
    """

    def __init__(self,
                 optimizer,
                 parameter_names,
                 max_concurrent=10,
                 reward_attr="episode_reward_mean",
                 **kwargs):
        assert skopt is not None, """skopt must be installed!
            You can install Skopt with the command:
            `pip install scikit-optimize`."""
        assert type(max_concurrent) is int and max_concurrent > 0
        self._max_concurrent = max_concurrent
        self._parameters = parameter_names
        self._reward_attr = reward_attr
        self._skopt_opt = optimizer
        self._live_trial_mapping = {}
        super(SkOptSearch, self).__init__(**kwargs)

    def _suggest(self, trial_id):
        if self._num_live_trials() >= self._max_concurrent:
            return None
        # Ask skopt for the next point and remember it so the result can
        # be reported back via `tell` when the trial completes.
        suggested_config = self._skopt_opt.ask()
        self._live_trial_mapping[trial_id] = suggested_config
        return dict(zip(self._parameters, suggested_config))

    def on_trial_result(self, trial_id, result):
        pass

    def on_trial_complete(self,
                          trial_id,
                          result=None,
                          error=False,
                          early_terminated=False):
        """Passes the result to skopt unless early terminated or errored.

        The result is internally negated when interacting with Skopt
        so that Skopt Optimizers can "maximize" this value,
        as it minimizes by default.
        """
        skopt_trial_info = self._live_trial_mapping.pop(trial_id)
        if result:
            self._skopt_opt.tell(skopt_trial_info, -result[self._reward_attr])

    def _num_live_trials(self):
        return len(self._live_trial_mapping)
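To make the wrapped protocol concrete, here is a minimal sketch of the raw skopt ask/tell loop that ``SkOptSearch`` delegates to, with the sign flip from ``on_trial_complete`` shown explicitly. The bounds and the toy objective are illustrative, not part of this commit:

```python
from skopt import Optimizer

# Same search space as the example: width in [0, 20], height in [-100, 100].
opt = Optimizer([(0, 20), (-100, 100)])

best = float("inf")
for _ in range(10):
    width, height = opt.ask()                     # next point to evaluate
    reward = -(height - 14)**2 + abs(width - 3)   # increasing-is-better metric
    opt.tell([width, height], -reward)            # negate: skopt minimizes
    best = min(best, -reward)

print("best (negated) objective:", best)
```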
4 changes: 4 additions & 0 deletions test/jenkins_tests/run_multi_node_tests.sh
@@ -377,6 +377,10 @@ docker run --rm --shm-size=${SHM_SIZE} --memory=${MEMORY_SIZE} $DOCKER_SHA \
    python /ray/python/ray/tune/examples/genetic_example.py \
    --smoke-test

docker run --rm --shm-size=${SHM_SIZE} --memory=${MEMORY_SIZE} $DOCKER_SHA \
    python /ray/python/ray/tune/examples/skopt_example.py \
    --smoke-test

docker run --rm --shm-size=${SHM_SIZE} --memory=${MEMORY_SIZE} $DOCKER_SHA \
    python /ray/python/ray/rllib/examples/multiagent_cartpole.py --num-iters=2

