diff --git a/gplearn/genetic.py b/gplearn/genetic.py
index 10f3e570..6a57672e 100644
--- a/gplearn/genetic.py
+++ b/gplearn/genetic.py
@@ -481,7 +481,11 @@ def fit(self, X, y, sample_weight=None):
         if isinstance(self, TransformerMixin):
             # Find the best individuals in the final generation
             fitness = np.array(fitness)
-            hall_of_fame = fitness.argsort()[:self.hall_of_fame]
+            if self._metric.greater_is_better:
+                hall_of_fame = fitness.argsort()[:self.hall_of_fame]
+            else:
+                hall_of_fame = fitness.argsort()[(self.population_size -
+                                                  self.hall_of_fame):]
             evaluation = np.array([gp.execute(X) for gp in
                                    [self._programs[-1][i] for
                                     i in hall_of_fame]])
diff --git a/gplearn/tests/test_genetic.py b/gplearn/tests/test_genetic.py
index f0639252..8a83a6f5 100644
--- a/gplearn/tests/test_genetic.py
+++ b/gplearn/tests/test_genetic.py
@@ -1022,14 +1022,16 @@ def test_warm_start():
     assert_equal(cold_program, warm_program)
 
 
-def test_customizied_regressor_metrics():
+def test_customized_regressor_metrics():
     """Check whether parameter greater_is_better works fine"""
     x_data = rng.uniform(-1, 1, 100).reshape(50, 2)
     y_true = x_data[:, 0] ** 2 + x_data[:, 1] ** 2
 
-    est_gp = SymbolicRegressor(metric='mean absolute error', stopping_criteria=0.000001, random_state=415,
-                               parsimony_coefficient=0.001, verbose=0, init_method='full', init_depth=(2, 4))
+    est_gp = SymbolicRegressor(metric='mean absolute error',
+                               stopping_criteria=0.000001, random_state=415,
+                               parsimony_coefficient=0.001, init_method='full',
+                               init_depth=(2, 4))
     est_gp.fit(x_data, y_true)
     formula = est_gp.__str__()
     assert_equal("add(mul(X1, X1), mul(X0, X0))", formula, True)
 
@@ -1037,13 +1039,15 @@
     def neg_mean_absolute_error(y, y_pred, sample_weight):
         return -1 * mean_absolute_error(y, y_pred, sample_weight)
 
-    customizied_fitness = make_fitness(neg_mean_absolute_error, greater_is_better=True)
+    customizied_fitness = make_fitness(neg_mean_absolute_error,
+                                       greater_is_better=True)
 
-    c_est_gp = SymbolicRegressor(metric=customizied_fitness, stopping_criteria=-0.000001, random_state=415,
-                                 parsimony_coefficient=0.001, verbose=0, init_method='full', init_depth=(2, 4))
+    c_est_gp = SymbolicRegressor(metric=customizied_fitness,
+                                 stopping_criteria=-0.000001, random_state=415,
+                                 parsimony_coefficient=0.001, verbose=0,
+                                 init_method='full', init_depth=(2, 4))
     c_est_gp.fit(x_data, y_true)
     c_formula = c_est_gp.__str__()
-    assert_equal("add(mul(X1, X1), mul(X0, X0))", c_formula, True)