/
globalGA.py
129 lines (110 loc) · 3.7 KB
/
globalGA.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
from __future__ import absolute_import
from builtins import map
from builtins import range
from builtins import object
import abc
import copy
import random
from .technique import SearchTechnique
from opentuner.search import technique
class GlobalEvolutionaryTechnique(SearchTechnique):
  """
  Evolutionary search over whole configurations: parents are chosen by the
  subclass-provided select(), optionally recombined by crossover, then
  mutated parameter-by-parameter.
  """

  def __init__(self,
               mutation_rate = 0.1,
               crossover_rate = 0.0,
               must_mutate_count = 1,
               crossover_strength = 0.1,
               *pargs, **kwargs):
    super(GlobalEvolutionaryTechnique, self).__init__(*pargs, **kwargs)
    # probability of mutating each parameter beyond the forced ones
    self.mutation_rate = mutation_rate
    # probability of selecting two parents (crossover) instead of one
    self.crossover_rate = crossover_rate
    # this many randomly-ordered parameters are always mutated
    self.must_mutate_count = must_mutate_count
    # fraction of parameters copied from the second parent in crossover
    self.crossover_strength = crossover_strength

  @classmethod
  def get_hyper_parameters(cls):
    """Names of the tunable hyper-parameters of this technique."""
    return ['mutation_rate', 'crossover_rate', 'must_mutate_count',
            'crossover_strength']

  def desired_configuration(self):
    """
    Build the next configuration to test via selection, optional crossover,
    and mutation.  Implicitly returns None (propose nothing) when ten
    successive mutation attempts all hash-collide with a parent.
    """
    #TODO: set limit value
    parents = [copy.deepcopy(p) for p in self.selection()]
    parent_hashes = [self.manipulator.hash_config(p) for p in parents]
    cfg = self.crossover(parents) if len(parents) > 1 else parents[0]
    for _attempt in range(10):  # retry budget for duplicate-of-parent results
      self.mutation(cfg)
      if self.manipulator.hash_config(cfg) not in parent_hashes:
        return cfg
    # every retry reproduced a parent; fall through (returns None)

  def mutation(self, cfg):
    """
    Mutate cfg in place: the first must_mutate_count parameters (in random
    order) always mutate, the rest mutate with probability mutation_rate.
    """
    params = self.manipulator.parameters(cfg)
    random.shuffle(params)
    for idx, param in enumerate(params):
      # short-circuit keeps the random draw only for the optional tail
      if idx < self.must_mutate_count or random.random() < self.mutation_rate:
        self.mutate_param(cfg, param)

  def mutate_param(self, cfg, param):
    """Mutate a single parameter of cfg in place (uniform randomization)."""
    param.op1_randomize(cfg)

  def crossover(self, cfgs):
    """Return a copy of the first parent with a random crossover_strength
    fraction of parameters overwritten from the second parent."""
    first, second = cfgs
    child = self.manipulator.copy(first)
    params = self.manipulator.parameters(first)
    random.shuffle(params)
    take = int(self.crossover_strength * len(params))
    for param in params[:take]:
      param.set_value(child, param.get_value(second))
    return child

  def selection(self):
    """Return the list of parent configurations to breed from."""
    count = 2 if random.random() < self.crossover_rate else 1
    return [self.select() for _ in range(count)]

  @abc.abstractmethod
  def select(self):
    """Return a single parent configuration (subclass responsibility)."""
    return None
class GreedySelectionMixin(object):
  """
  EvolutionaryTechnique mixin that always breeds from the best known
  configuration (falling back to a random one before any 'OK' result exists).
  """

  def select(self):
    """Return the best-so-far configuration data, or a random config when
    no successful result has been recorded yet."""
    best = self.driver.best_result
    if best is not None and best.state == 'OK':
      return best.configuration.data
    return self.manipulator.random()
class NormalMutationMixin(object):
  """
  EvolutionaryTechnique mixin that mutates primitive parameters with a
  normal-distribution step of width sigma; non-primitive parameters are
  mutated by a randomly chosen manipulator function.
  """

  def __init__(self, sigma = 0.1, *pargs, **kwargs):
    super(NormalMutationMixin, self).__init__(*pargs, **kwargs)
    # standard deviation of the normal mutation applied to primitives
    self.sigma = sigma

  def mutate_param(self, cfg, param):
    """Mutate a single parameter of cfg in place."""
    if not param.is_primitive():
      # opaque parameter: apply one of its own manipulator callables
      mutator = random.choice(param.manipulators(cfg))
      mutator(cfg)
      return
    param.op1_normal_mutation(cfg, self.sigma)
class UniformGreedyMutation(GreedySelectionMixin, GlobalEvolutionaryTechnique):
  """Greedy parent selection combined with the base uniform (op1_randomize)
  per-parameter mutation."""
  pass
class NormalGreedyMutation(NormalMutationMixin, GreedySelectionMixin, GlobalEvolutionaryTechnique):
  """Greedy parent selection combined with normal-distribution mutation of
  primitive parameters (NormalMutationMixin precedes GreedySelectionMixin in
  the MRO, so its mutate_param is used)."""
  pass
technique.register(NormalGreedyMutation( crossover_rate=0.5, crossover_strength=0.2, name='GGA'))