Skip to content

Commit

Permalink
create new class to handle all conversions between hyperactive and gfo
Browse files Browse the repository at this point in the history
  • Loading branch information
SimonBlanke committed Jan 15, 2021
1 parent 5f3278e commit 5db63de
Show file tree
Hide file tree
Showing 2 changed files with 112 additions and 51 deletions.
86 changes: 86 additions & 0 deletions hyperactive/hyper_gradient_trafo.py
@@ -0,0 +1,86 @@
# Author: Simon Blanke
# Email: simon.blanke@yahoo.com
# License: MIT License

import numpy as np
import pandas as pd


class Converter:
    """Translate between parameter dicts, value vectors, and index positions
    for a discrete search space.

    The search space maps each parameter name to a sequence of allowed
    values.  Subclasses are expected to set ``search_space_values`` (the
    list of those per-dimension value sequences) before the
    position/value conversion methods are used.
    """

    def __init__(self, search_space):
        self.search_space = search_space
        self.para_names = list(self.search_space.keys())

    def value2position(self, value):
        """Map a vector of raw values to the nearest index in each dimension."""
        positions = [
            int(np.abs(value[dim] - dim_values).argmin())
            for dim, dim_values in enumerate(self.search_space_values)
        ]
        return np.array(positions).astype(int)

    def value2para(self, value):
        """Zip a value vector back into a ``{name: value}`` parameter dict."""
        return dict(zip(self.para_names, value))

    def position2value(self, position):
        """Look up the raw value stored at each per-dimension index."""
        values = [
            dim_values[position[dim]]
            for dim, dim_values in enumerate(self.search_space_values)
        ]
        return np.array(values)

    def para2value(self, para):
        """Flatten a parameter dict into a value vector ordered by para_names."""
        return np.array([para[name] for name in self.para_names])


class HyperGradientTrafo(Converter):
    """Converter between Hyperactive's value-based search space and
    Gradient-Free-Optimizers' position-based (index) search space.

    On top of the value/position conversions inherited from ``Converter``,
    this class translates the ``initialize`` dict and the
    ``memory_warm_start`` results DataFrame into their position-based
    equivalents.
    """

    def __init__(self, search_space):
        super().__init__(search_space)
        self.search_space_values = list(self.search_space.values())

        # Position space: each dimension becomes the index range of its values.
        self.search_space_positions = {
            key: np.array(range(len(values)))
            for key, values in search_space.items()
        }

    def trafo_initialize(self, initialize):
        """Return a copy of ``initialize`` with any ``"warm_start"`` parameter
        dicts converted from raw values to index positions.

        A shallow copy is returned so the caller's dict (often a shared
        mutable default argument) is not modified in place.
        """
        initialize = dict(initialize)
        if "warm_start" in initialize:
            warm_start_gfo = []
            for warm_start_ in initialize["warm_start"]:
                # BUG FIX: these conversions were called via ``self.trafo.*``,
                # but no ``trafo`` attribute exists on this class — the
                # methods are inherited from ``Converter`` and live on ``self``.
                value = self.para2value(warm_start_)
                position = self.value2position(value)
                pos_para = self.value2para(position)

                warm_start_gfo.append(pos_para)

            initialize["warm_start"] = warm_start_gfo

        return initialize

    def trafo_memory_warm_start(self, results):
        """Convert a results DataFrame of raw parameter values into one of
        index positions, carrying the ``"score"`` column over unchanged.

        ``None`` is passed through unchanged so callers can forward an
        absent warm start without a guard.
        """
        if results is None:
            return results

        # Each raw value is replaced by its index in that dimension's
        # value list (assumes search-space values support ``.index``).
        df_positions_dict = {
            para_name: [
                self.search_space[para_name].index(value)
                for value in results[para_name].values
            ]
            for para_name in self.para_names
        }

        results_new = pd.DataFrame(df_positions_dict)
        results_new["score"] = results["score"]

        return results_new
77 changes: 26 additions & 51 deletions hyperactive/optimizers.py
Expand Up @@ -22,6 +22,8 @@
EnsembleOptimizer as _EnsembleOptimizer,
)

from .hyper_gradient_trafo import HyperGradientTrafo


class DictClass:
def __init__(self):
Expand All @@ -46,43 +48,22 @@ def init(
self, search_space, initialize={"grid": 8, "random": 4, "vertices": 8}
):
self.search_space = search_space
self.optimizer_hyper_ss = self._OptimizerClass(
search_space, initialize
)

search_space_positions = {}
for key in search_space.keys():
search_space_positions[key] = np.array(
range(len(search_space[key]))
)
self.trafo = HyperGradientTrafo(search_space)


initialize = self._warm_start_conv(initialize)
initialize = self.trafo.trafo_initialize(initialize)
search_space_positions = self.trafo.search_space_positions

self.optimizer = self._OptimizerClass(
search_space_positions, initialize, **self.opt_params
)
self.search_space_positions = search_space_positions

self.conv = self.optimizer.conv

def print_info(self, *args):
self.optimizer.print_info(*args)

def _warm_start_conv(self, initialize):
if "warm_start" in list(initialize.keys()):
warm_start = initialize["warm_start"]
warm_start_gfo = []
for warm_start_ in warm_start:
value = self.optimizer_hyper_ss.conv.para2value(warm_start_)
position = self.optimizer_hyper_ss.conv.value2position(value)
pos_para = self.optimizer_hyper_ss.conv.value2para(position)

warm_start_gfo.append(pos_para)

initialize["warm_start"] = warm_start_gfo

return initialize

def _process_results(self):
results_dict = {}

Expand All @@ -97,20 +78,27 @@ def _process_results(self):
diff_list = np.setdiff1d(self.positions.columns, self.results.columns)
self.results[diff_list] = self.positions[diff_list]

def _values2positions_dataframe(self, results):
para_names = list(self.search_space.keys())
search_space_values = list(self.search_space.values())

df_positions_dict = {}
for para_name in para_names:
list1_values = list(results[para_name].values)
list1_positions = [self.search_space[para_name].index(value) for value in list1_values]
df_positions_dict[para_name] = list1_positions
def _convert_args2gfo(self, memory_warm_start):
memory_warm_start = self.trafo.trafo_memory_warm_start(memory_warm_start)

return memory_warm_start

def _convert_results2hyper(self):
self.eval_time = np.array(self.optimizer.eval_times).sum()
self.iter_time = np.array(self.optimizer.iter_times).sum()

value = self.trafo.para2value(
self.optimizer.best_para
)
self.position = self.trafo.position2value(value)
best_para = self.trafo.value2para(self.position)

results_new = pd.DataFrame(df_positions_dict)
results_new["score"] = results["score"]
self.best_para = best_para
self.best_score =self.optimizer.best_score
self.positions =self.optimizer.results

return results_new
self._process_results()


def search(
Expand All @@ -130,8 +118,7 @@ def search(
random_state=None,
nth_process=None,
):
# if memory_warm_start is not None:
# memory_warm_start = self._values2positions_dataframe(memory_warm_start)
memory_warm_start = self._convert_args2gfo(memory_warm_start)

self.optimizer.search(
objective_function,
Expand All @@ -145,20 +132,8 @@ def search(
nth_process,
)

self.eval_time = np.array(self.optimizer.eval_times).sum()
self.iter_time = np.array(self.optimizer.iter_times).sum()

value = self.optimizer_hyper_ss.conv.para2value(
self.optimizer.best_para
)
position = self.optimizer_hyper_ss.conv.position2value(value)
best_para = self.optimizer_hyper_ss.conv.value2para(position)

self.best_para = best_para
self.best_score = self.optimizer.best_score
self.positions = self.optimizer.results
self._convert_results2hyper()

self._process_results()


class HillClimbingOptimizer(_BaseOptimizer_):
Expand Down

0 comments on commit 5db63de

Please sign in to comment.