Permalink
Browse files

Fixed halting conditions

Previously the training runs had to be completed before M-LOOP would
halt. This led to unintuitive behavior when the halting conditions
were met early in the optimization process.

M-LOOP now halts immediately when any of the halting conditions are
met.
  • Loading branch information...
1 parent 1897106 commit cfa5748ebb82d1ac7d23eaab5042e4b056881984 @michaelhush committed Nov 4, 2016
Showing with 55 additions and 29 deletions.
  1. +1 −1 examples/gaussian_process_complete_config.txt
  2. +28 −18 mloop/controllers.py
  3. +0 −1 mloop/launchers.py
  4. +2 −9 mloop/learners.py
  5. +24 −0 mloop/utilities.py
@@ -2,7 +2,7 @@
#---------------------------------
#General options
-max_num_runs = 500 #number of planned runs
+max_num_runs = 100 #number of planned runs
target_cost = 0.1 #cost to beat
#Gaussian process options
View
@@ -334,6 +334,8 @@ def optimize(self):
self._start_up()
self._optimization_routine()
log.info('Controller finished. Closing down M-LOOP. Please wait a moment...')
+ except ControllerInterrupt:
+ self.log.warning('Controller ended by interruption.')
except (KeyboardInterrupt,SystemExit):
log.warning('!!! Do not give the interrupt signal again !!! \n M-LOOP stopped with keyboard interupt or system exit. Please wait at least 1 minute for the threads to safely shut down. \n ')
log.warning('Closing down controller.')
@@ -392,22 +394,19 @@ def _optimization_routine(self):
Runs controller main loop. Gives parameters to experiment and saves costs returned.
'''
self.log.debug('Start controller loop.')
- try:
+ self.log.info('Run:' + str(self.num_in_costs +1))
+ next_params = self._first_params()
+ self._put_params_and_out_dict(next_params)
+ self.save_archive()
+ self._get_cost_and_in_dict()
+ while self.check_end_conditions():
self.log.info('Run:' + str(self.num_in_costs +1))
- next_params = self._first_params()
+ next_params = self._next_params()
self._put_params_and_out_dict(next_params)
self.save_archive()
self._get_cost_and_in_dict()
- while self.check_end_conditions():
- self.log.info('Run:' + str(self.num_in_costs +1))
- next_params = self._next_params()
- self._put_params_and_out_dict(next_params)
- self.save_archive()
- self._get_cost_and_in_dict()
- self.log.debug('End controller loop.')
- except ControllerInterrupt:
- self.log.warning('Controller ended by interruption.')
-
+ self.log.debug('End controller loop.')
+
def _first_params(self):
'''
Checks queue to get first parameters.
@@ -619,7 +618,7 @@ def __init__(self, interface,
self.new_params_event = self.gp_learner.new_params_event
self.remaining_kwargs = self.gp_learner.remaining_kwargs
self.generation_num = self.gp_learner.generation_num
-
+
def _put_params_and_out_dict(self, params):
'''
Override _put_params_and_out_dict function, used when the training learner creates parameters. Makes the defualt param_type the training type and sets last_training_run_flag.
@@ -678,8 +677,18 @@ def _optimization_routine(self):
'''
#Run the training runs using the standard optimization routine. Adjust the number of max_runs
self.log.debug('Starting training optimization.')
- super(GaussianProcessController,self)._optimization_routine()
-
+ self.log.info('Run:' + str(self.num_in_costs +1))
+ next_params = self._first_params()
+ self._put_params_and_out_dict(next_params)
+ self.save_archive()
+ self._get_cost_and_in_dict()
+ while (self.num_in_costs < self.num_training_runs) and self.check_end_conditions():
+ self.log.info('Run:' + str(self.num_in_costs +1))
+ next_params = self._next_params()
+ self._put_params_and_out_dict(next_params)
+ self.save_archive()
+ self._get_cost_and_in_dict()
+
if self.check_end_conditions():
#Start last training run
self.log.info('Run:' + str(self.num_in_costs +1))
@@ -690,10 +699,11 @@ def _optimization_routine(self):
self.new_params_event.set()
self.save_archive()
self._get_cost_and_in_dict()
-
+ self.log.debug('End training runs.')
+
gp_consec = 0
- gp_count = 0
-
+ gp_count = 0
+
while self.check_end_conditions():
self.log.info('Run:' + str(self.num_in_costs +1))
if gp_consec==self.generation_num or (self.no_delay and self.gp_learner_params_queue.empty()):
View
@@ -27,7 +27,6 @@ def launch_from_file(config_filename,
except (IOError, OSError):
print('Unable to open M-LOOP configuration file:' + repr(config_filename))
raise
-
file_kwargs.update(kwargs)
#Main run sequence
#Create interface and extract unused keywords
View
@@ -927,10 +927,7 @@ def __init__(self,
self.length_scale = np.squeeze(np.array(self.training_dict['length_scale']))
self.length_scale_history = list(self.training_dict['length_scale_history'])
self.noise_level = float(self.training_dict['noise_level'])
- if isinstance(self.training_dict['noise_level_history'], np.ndarray):
- self.noise_level_history = list(np.squeeze(self.training_dict['noise_level_history']))
- else:
- self.noise_level_history = list( self.training_dict['noise_level_history'])
+ self.noise_level_history = mlu.safe_cast_to_list(self.training_dict['noise_level_history'])
#Counters
self.costs_count = int(self.training_dict['costs_count'])
@@ -942,11 +939,7 @@ def __init__(self,
self.all_costs = np.squeeze(np.array(self.training_dict['all_costs'], dtype=float))
self.all_uncers = np.squeeze(np.array(self.training_dict['all_uncers'], dtype=float))
- if isinstance(self.training_dict['bad_run_indexs'], np.ndarray):
- self.bad_run_indexs = list(np.squeeze(self.training_dict['bad_run_indexs']))
- else:
- self.bad_run_indexs = list(self.training_dict['bad_run_indexs'])
-
+ self.bad_run_indexs = mlu.safe_cast_to_list(self.training_dict['bad_run_indexs'])
#Derived properties
self.best_cost = float(self.training_dict['best_cost'])
View
@@ -173,6 +173,30 @@ def check_file_type_supported(file_type):
'''
return file_type == 'mat' or 'txt' or 'pkl'
+def safe_cast_to_list(in_array):
+ '''
+ Attempts to safely cast a numpy array to a list, if not a numpy array just casts to list on the object.
+
+ Args:
+ in_array (array or equivalent): The array (or otherwise) to be converted to a list.
+
+ Returns:
+ list : List of elements from in_array
+
+ '''
+
+ if isinstance(in_array, np.ndarray):
+ t_array = np.squeeze(in_array)
+ if t_array.shape == ():
+ out_list = [t_array[()]]
+ else:
+ out_list = list(t_array)
+ else:
+ out_list = list(in_array)
+
+ return out_list
+
+
class NullQueueListener():
'''
Shell class with start and stop functions that do nothing. Queue listener is not implemented in python 2. Current fix is to simply use the multiprocessing class to pipe straight to the cmd line if running on python 2. This is class is just a placeholder.

0 comments on commit cfa5748

Please sign in to comment.