Skip to content
This repository has been archived by the owner on Feb 23, 2023. It is now read-only.

HyperparameterOptions class to make life easier in some use cases. #175

Open
ahundt opened this issue Mar 29, 2018 · 0 comments
Open

HyperparameterOptions class to make life easier in some use cases. #175

ahundt opened this issue Mar 29, 2018 · 0 comments

Comments

@ahundt
Copy link

ahundt commented Mar 29, 2018

I found the class below made working with 2d numpy arrays that configure optimization choices much simpler for my use cases. You simply add the parameters you want to it, then when you get a callback with a configuration it will convert your settings back to the actual format you need automatically. Would you be interested in a pull request, and if so where should I put it?

import os
import sys
import copy
import six
import json
import GPy
import GPyOpt
import numpy as np


class HyperparameterOptions(object):
    """Define a GPyOpt hyperparameter search space and decode its callbacks.

    Parameters are registered with ``add_param``; string and bool domains
    are converted to integer index domains the Bayesian optimizer can
    handle, and ``params_to_args`` converts an optimizer configuration row
    back into the keyword arguments needed for the actual function call.
    """

    def __init__(self, verbose=1):
        # Maps each parameter name to its metadata dict; 'current_index'
        # tracks the next free column in the optimizer parameter array.
        self.index_dict = {'current_index': 0}
        # Search space in the GPyOpt domain format (list of dicts).
        self.search_space = []
        self.verbose = verbose

    def add_param(self, name, domain, domain_type='discrete', enable=True, required=True, default=None):
        """Register one hyperparameter.

        # Arguments

            name: keyword argument name used when calling the model.
            domain: list of allowed values (discrete) or bounds (continuous).
            domain_type: GPyOpt domain type, e.g. 'discrete' or 'continuous'.
            enable: if True this parameter is part of the hyperparameter
                search; if False it is only recorded so params_to_args()
                can supply its default.
            required: this parameter must be passed to the model even when
                disabled (uses `default`).
            default: default value supplied when disabled but required.
        """
        # Lazy (re-)initialization keeps instances built before these
        # attributes existed (or set to None) working.
        if self.search_space is None:
            self.search_space = []
        if self.index_dict is None:
            self.index_dict = {'current_index': 0}
        if 'current_index' not in self.index_dict:
            self.index_dict['current_index'] = 0

        numerical_domain = domain
        needs_reverse_lookup = False
        lookup_as = float
        # Convert string/bool domains to domains of integer indexes the
        # optimizer can handle; remember how to map indexes back.
        if domain_type == 'discrete':
            if isinstance(domain, list) and isinstance(domain[0], str):
                numerical_domain = list(range(len(domain)))
                lookup_as = str
                needs_reverse_lookup = True
            elif isinstance(domain, list) and isinstance(domain[0], bool):
                numerical_domain = list(range(len(domain)))
                lookup_as = bool
                needs_reverse_lookup = True
            elif isinstance(domain, list) and isinstance(domain[0], float):
                lookup_as = float
            else:
                lookup_as = int

        if enable:
            # Only enabled parameters occupy a column in the optimizer
            # array and appear in the search space.
            param_index = self.index_dict['current_index']
            self.index_dict['current_index'] += 1
            self.search_space += [{
                'name': name,
                'type': domain_type,
                'domain': numerical_domain}]
        else:
            # BUG FIX: previously disabled parameters were never recorded,
            # so params_to_args() could not supply their default when
            # required. Record them without a search-space column.
            param_index = None
            lookup_as = None

        self.index_dict[name] = {
            'name': name,
            'type': domain_type,
            'lookup_as': lookup_as,
            'enable': enable,
            'required': required,
            'default': default,
            'index': param_index,
            # Keep the original (non-numerical) domain for reverse lookup.
            'domain': domain,
            'needs_reverse_lookup': needs_reverse_lookup}

    def params_to_args(self, x):
        """ Convert GPyOpt Bayesian Optimizer params back into function call arguments

        # Arguments

            x: the callback parameter of the GPyOpt Bayesian Optimizer,
                a 1d or 2d numpy array with one column per enabled param.

        # Returns

            dict of keyword arguments, including defaults for parameters
            that are disabled but required.
        """
        if len(x.shape) == 1:
            # if we get a 1d array convert it to 2d so we are consistent
            x = np.expand_dims(x, axis=0)
        # x is a funky 2d numpy array, so we convert it back to normal parameters
        kwargs = {}
        for key, opt_dict in self.index_dict.items():
            if key == 'current_index':
                continue

            if opt_dict['enable']:
                arg_name = opt_dict['name']
                optimizer_param_column = opt_dict['index']
                # BUG FIX: use >= so a 0-based column index equal to the
                # array width is also rejected instead of raising a raw
                # IndexError below.
                if optimizer_param_column >= x.shape[-1]:
                    raise ValueError(
                        'Attempting to access optimizer_param_column ' +
                        str(optimizer_param_column) +
                        ' outside parameter bounds ' + str(x.shape) +
                        ' of optimizer array with index dict: ' +
                        str(self.index_dict) + ' and array x: ' + str(x))
                # .item() extracts the scalar explicitly; implicit
                # size-1-array -> scalar conversion is deprecated in numpy.
                param_value = x[:, optimizer_param_column].item()
                if opt_dict['type'] == 'discrete' and opt_dict['needs_reverse_lookup']:
                    # the value is an integer indexing into the original domain
                    domain_value = opt_dict['domain'][int(param_value)]
                    value = opt_dict['lookup_as'](domain_value)
                else:
                    # the value is a param to use directly
                    value = opt_dict['lookup_as'](param_value)

                kwargs[arg_name] = value
            elif opt_dict['required']:
                kwargs[opt_dict['name']] = opt_dict['default']
        return kwargs

    def get_domain(self):
        """ Get the hyperparameter search space in the gpyopt domain format.
        """
        return self.search_space
@ahundt ahundt changed the title Hyperparameter Options class to make life easier in some use cases. HyperparameterOptions class to make life easier in some use cases. Mar 29, 2018
Sign up for free to subscribe to this conversation on GitHub. Already have an account? Sign in.
Labels
None yet
Projects
None yet
Development

No branches or pull requests

1 participant