
implementing bayesopt solver

claesenm committed Feb 24, 2016
1 parent 3cc27b6 commit 35aca1664a3ad9ecfce6972725d5a7f56e90cd10
Showing with 179 additions and 3 deletions.
  1. +2 −0 docs/global.rst
  2. +18 −3 notebooks/local-optima.ipynb
  3. +151 −0 optunity/solvers/BayesOpt.py
  4. +8 −0 optunity/solvers/__init__.py
@@ -20,6 +20,7 @@
.. |randomsearch| replace:: :doc:`/user/solvers/random_search`
.. |nelder-mead| replace:: :doc:`/user/solvers/nelder-mead`
.. |tpe| replace:: :doc:`/user/solvers/TPE`
.. |bayesopt| replace:: :doc:`/user/solvers/BayesOpt`
.. |sobol| replace:: :doc:`/user/solvers/sobol`

.. |api-solvers| replace:: :doc:`optunity.solvers`
@@ -29,6 +30,7 @@
.. |api-randomsearch| replace:: :class:`optunity.solvers.RandomSearch`
.. |api-nelder-mead| replace:: :class:`optunity.solvers.NelderMead`
.. |api-tpe| replace:: :class:`optunity.solvers.TPE`
.. |api-bayesopt| replace:: :class:`optunity.solvers.BayesOpt`
.. |api-sobol| replace:: :class:`optunity.solvers.Sobol`

.. |api-constraints| replace:: :doc:`/api/optunity.constraints`
@@ -1,7 +1,7 @@
{
"metadata": {
"name": "",
"signature": "sha256:46f7452c0690209a05fe44af99097d1dd62a8dd1174c27c41a4d57399f9485db"
"signature": "sha256:bead76f00855e8b414cd92f5b2b202def7b8c7920455a5ef21d87dd3331e860a"
},
"nbformat": 3,
"nbformat_minor": 0,
@@ -36,7 +36,7 @@
"import math\n",
"import pandas\n",
"\n",
"%matplotlib inline\n",
"#%matplotlib inline\n",
"from matplotlib import pylab as plt\n",
"from mpl_toolkits.mplot3d import Axes3D"
],
@@ -124,7 +124,14 @@
{
"cell_type": "code",
"collapsed": false,
"input": [],
"input": [
"@optunity.cross_validated(x=data, y=labels, num_folds=5, regenerate_folds=True)\n",
"def svm_rbf_tuned_auroc(x_train, y_train, x_test, y_test, logC, logGamma):\n",
" model = sklearn.svm.SVC(C=10 ** logC, gamma=10 ** logGamma).fit(x_train, y_train)\n",
" decision_values = model.decision_function(x_test)\n",
" auc = optunity.metrics.roc_auc(y_test, decision_values)\n",
" return auc"
],
"language": "python",
"metadata": {},
"outputs": [],
@@ -468,6 +475,14 @@
"source": [
"Clearly, this response surface is filled with local minima. This is a general observation in automated hyperparameter optimization, and is one of the key reasons we need robust solvers. If there were no local minima, a simple gradient-descent-like solver would do the trick."
]
},
{
"cell_type": "code",
"collapsed": false,
"input": [],
"language": "python",
"metadata": {},
"outputs": []
}
],
"metadata": {}
@@ -0,0 +1,151 @@
#! /usr/bin/env python

# Copyright (c) 2014 KU Leuven, ESAT-STADIUS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither name of copyright holders nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from .solver_registry import register_solver
from .util import Solver, _copydoc
import functools

import random

_numpy_available = True
try:
    import numpy as np
except ImportError:
    _numpy_available = False

_bayesopt_available = True
try:
    import bayesopt
except ImportError:
    _bayesopt_available = False

class BayesOpt(Solver):
    """
    .. include:: /global.rst

    This solver provides an interface to BayesOpt, as described in [BO2014]_.
    It uses BayesOpt in the back-end and exposes its optimizer with uniform priors
    over the box constraints.

    Please refer to |bayesopt| for details about this algorithm.

    .. [BO2014] Martinez-Cantin, Ruben. "BayesOpt: A Bayesian optimization library
        for nonlinear optimization, experimental design and bandits."
        The Journal of Machine Learning Research 15.1 (2014): 3735-3739.
    """

    def __init__(self, num_evals=100, seed=None, **kwargs):
        """
        Initialize the BayesOpt solver.

        :param num_evals: number of permitted function evaluations
        :type num_evals: int
        :param seed: the random seed to be used
        :type seed: int
        :param kwargs: box constraints for each hyperparameter
        :type kwargs: {'name': [lb, ub], ...}

        """
        if not _bayesopt_available:
            raise ImportError('This solver requires bayesopt but it is missing.')
        if not _numpy_available:
            raise ImportError('This solver requires NumPy but it is missing.')

        self._seed = seed
        self._bounds = kwargs
        self._num_evals = num_evals
        # hyperparameters are sorted by name so the bound vectors line up with
        # the argument order used in optimize()
        self._lb = np.array([b[0] for _, b in sorted(kwargs.items())])
        self._ub = np.array([b[1] for _, b in sorted(kwargs.items())])

    @staticmethod
    def suggest_from_box(num_evals, **kwargs):
        """
        Create a configuration for this solver from box constraints,
        and verify that a solver can be instantiated from it.

        >>> s = BayesOpt.suggest_from_box(30, x=[0, 1], y=[-1, 0], z=[-1, 1])
        >>> solver = BayesOpt(**s) #doctest:+SKIP

        """
        d = dict(kwargs)
        d['num_evals'] = num_evals
        return d

    @property
    def seed(self):
        return self._seed

    @property
    def bounds(self):
        return self._bounds

    @property
    def lb(self):
        return self._lb

    @property
    def ub(self):
        return self._ub

    @property
    def num_evals(self):
        return self._num_evals

    @_copydoc(Solver.optimize)
    def optimize(self, f, maximize=True, pmap=map):

        # fall back to a random seed when none was given
        seed = self.seed if self.seed else random.randint(0, 9999999999)
        params = {'n_iterations': self.num_evals,
                  'random_seed': seed}
        n_dimensions = len(self.lb)

        # bayesopt minimizes, so negate the objective when maximizing;
        # arguments arrive as a vector ordered by sorted hyperparameter name
        if maximize:
            def obj(args):
                kwargs = dict(zip(sorted(self.bounds.keys()), args))
                return -f(**kwargs)

        else:
            def obj(args):
                kwargs = dict(zip(sorted(self.bounds.keys()), args))
                return f(**kwargs)

        mvalue, x_out, error = bayesopt.optimize(obj, n_dimensions,
                                                 self.lb, self.ub, params)
        best = dict(zip(sorted(self.bounds.keys()), x_out))
        return best, None


# BayesOpt is a simple wrapper around the bayesopt library's solver
if _bayesopt_available and _numpy_available:
    BayesOpt = register_solver('BayesOpt', 'Bayesian optimization',
                               ['BayesOpt: Bayesian optimization using the BayesOpt library']
                               )(BayesOpt)
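
Once registered, the solver can be selected by name through optunity's top-level API. A minimal usage sketch with an illustrative objective and box constraints (neither is part of the commit):

import optunity

def f(x, y):
    # toy objective with a single optimum at (0.3, -0.5)
    return -(x - 0.3) ** 2 - (y + 0.5) ** 2

# the bayesopt and numpy packages must be importable,
# otherwise 'BayesOpt' is never registered and this call fails
pars, details, _ = optunity.maximize(f, num_evals=50, solver_name='BayesOpt',
                                     x=[0, 1], y=[-1, 1])
print(pars)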
@@ -41,6 +41,7 @@
* :class:`CMA_ES`
* :class:`TPE`
* :class:`Sobol`
* :class:`BayesOpt`
.. warning::
    :class:`CMA_ES` requires DEAP_ and NumPy_.
@@ -54,6 +55,12 @@
.. _Hyperopt: http://jaberg.github.io/hyperopt/
.. _NumPy: http://www.numpy.org
.. warning::
    :class:`BayesOpt` requires BayesOpt_ and NumPy_.

.. _BayesOpt: http://rmcantin.bitbucket.org/html/
.. _NumPy: http://www.numpy.org
.. moduleauthor:: Marc Claesen
"""
@@ -65,3 +72,4 @@
from .CMAES import CMA_ES
from .TPE import TPE
from .Sobol import Sobol
from .BayesOpt import BayesOpt
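
With this import in place, the new solver is expected to appear in the registry when its dependencies are available. A quick check, assuming bayesopt and NumPy are installed:

import optunity
print(optunity.available_solvers())  # should now list 'BayesOpt' among the registered solvers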
