-
Notifications
You must be signed in to change notification settings - Fork 332
/
random_search.py
136 lines (115 loc) · 4.07 KB
/
random_search.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
# -*- coding: utf-8 -*-
"""
Hyperparameter random search.
Compares the relative performance of combinations of randomly generated
hyperparameter values in optimizing a specified objective function.
User provides lists of bounds for the uniform random value generation of
'c1', 'c2', and 'w', and the random integer value generation of 'k'.
Combinations of values are generated for the number of iterations specified,
and the generated grid of combinations is used in the search method to find
the optimal parameters for the objective function. The search method default
returns the minimum objective function score and hyperparameters that yield
the minimum score, yet maximum score can also be evaluated.
>>> options = {'c1': [1, 5],
'c2': [6, 10],
'w' : [2, 5],
'k' : [11, 15],
'p' : 1}
>>> g = RandomSearch(LocalBestPSO, n_particles=40, dimensions=20,
                options=options, objective_func=sphere, iters=10,
                n_selection_iters=100)
>>> best_score, best_options = g.search()
>>> best_score
1.41978545901
>>> best_options['c1']
1.543556887693
>>> best_options['c2']
9.504769054771
"""
# Import from __future__
from __future__ import absolute_import, print_function, with_statement
# Import modules
import numpy as np
from past.builtins import xrange
# Import from pyswarms
# Import from package
from pyswarms.utils.search.base_search import SearchBase
class RandomSearch(SearchBase):
    """Search of optimal performance on selected objective function
    over combinations of randomly selected hyperparameter values
    within specified bounds for specified number of selection iterations.
    """

    def assertions(self):
        """Assertion method to check :code:`n_selection_iters` input

        Raises
        ------
        TypeError
            When :code:`n_selection_iters` is not of type int
        """
        # Run the base-class checks first (options, bounds, etc.)
        super(RandomSearch, self).assertions()
        # Check type of n_selection_iters parameter
        if not isinstance(self.n_selection_iters, int):
            raise TypeError(
                "Parameter `n_selection_iters` must be of " "type int"
            )

    def __init__(
        self,
        optimizer,
        n_particles,
        dimensions,
        options,
        objective_func,
        iters,
        n_selection_iters,
        bounds=None,
        velocity_clamp=(0, 1),
    ):
        """Initialize the Search

        Attributes
        ----------
        n_selection_iters: int
            number of iterations of random parameter selection
        """
        # Assign n_selection_iters before the base __init__ so any
        # base-class hooks can see it
        self.n_selection_iters = n_selection_iters
        # Delegate the shared search attributes to the base class
        super(RandomSearch, self).__init__(
            optimizer,
            n_particles,
            dimensions,
            options,
            objective_func,
            iters,
            bounds=bounds,
            velocity_clamp=velocity_clamp,
        )
        # Validate the freshly-assigned attribute
        self.assertions()

    def generate_grid(self):
        """Generate the grid of hyperparameter value combinations.

        Draws ``n_selection_iters`` random values for ``'c1'``, ``'c2'``
        and ``'w'`` (uniform floats within the user-supplied bounds) and
        for ``'k'`` (random integers), while holding ``'p'`` constant
        across all combinations.

        Returns
        -------
        list of dict
            One dict per selection iteration, with keys
            ``'c1'``, ``'c2'``, ``'w'``, ``'k'`` and ``'p'``.
        """
        # Copy so that self.options is left untouched by the pop below
        options = dict(self.options)
        params = {}
        # Remove 'p' to hold it as a constant in the parameter combinations
        p = options.pop("p")
        params["p"] = [p] * self.n_selection_iters
        # Assign generators based on parameter type.
        # NOTE: np.random.randint samples from the half-open interval
        # [low, high), so the upper bound given for 'k' is excluded.
        param_generators = {
            "c1": np.random.uniform,
            "c2": np.random.uniform,
            "w": np.random.uniform,
            "k": np.random.randint,
        }
        # Generate n_selection_iters random values for 'c1', 'c2', 'w', 'k'
        for name, bounds in options.items():
            params[name] = param_generators[name](
                *bounds, size=self.n_selection_iters
            )
        # One dict of hyperparameters per selection iteration
        return [
            {key: params[key][i] for key in params}
            for i in range(self.n_selection_iters)
        ]