Skip to content

Commit

Permalink
Fix test_automl_feature_selection. Update feature selection components to save number_features and n_jobs
Browse files Browse the repository at this point in the history
  • Loading branch information
dsherry committed May 28, 2020
1 parent b93bc78 commit 0a9a863
Show file tree
Hide file tree
Showing 4 changed files with 7 additions and 5 deletions.
1 change: 1 addition & 0 deletions evalml/automl/auto_search_base.py
Expand Up @@ -245,6 +245,7 @@ def search(self, X, y, data_checks=None, feature_types=None, raise_errors=True,
try:
current_batch_pipelines = automl_algorithm.next_batch()
except StopIteration:
logger.info('AutoML Algorithm out of recommendations, ending')
break
pipeline = current_batch_pipelines.pop(0)
parameters = pipeline.parameters
Expand Down
Expand Up @@ -20,7 +20,8 @@ def __init__(self, number_features=None, n_estimators=10, max_depth=None,
if number_features:
max_features = max(1, int(percent_features * number_features))
parameters = {"percent_features": percent_features,
"threshold": threshold}
"threshold": threshold,
"number_features": number_features}
estimator = SKRandomForestClassifier(random_state=random_state,
n_estimators=n_estimators,
max_depth=max_depth,
Expand Down
Expand Up @@ -20,7 +20,8 @@ def __init__(self, number_features=None, n_estimators=10, max_depth=None,
if number_features:
max_features = max(1, int(percent_features * number_features))
parameters = {"percent_features": percent_features,
"threshold": threshold}
"threshold": threshold,
"number_features": number_features}
estimator = SKRandomForestRegressor(random_state=random_state,
n_estimators=n_estimators,
max_depth=max_depth,
Expand Down
5 changes: 2 additions & 3 deletions evalml/tests/automl_tests/test_autobase.py
Expand Up @@ -306,7 +306,7 @@ def test_automl_str_no_param_search():

@patch('evalml.pipelines.BinaryClassificationPipeline.score')
@patch('evalml.pipelines.BinaryClassificationPipeline.fit')
@patch('evalml.automl.auto_search_base.get_pipelines')
@patch('evalml.automl.automl_algorithm.automl_algorithm.get_pipelines')
def test_automl_feature_selection(mock_get_pipelines, mock_fit, mock_score, X_y):
X, y = X_y
mock_score.return_value = {'Log Loss Binary': 1.0}
Expand All @@ -321,14 +321,13 @@ def fit(self, X, y):
mock_get_pipelines.return_value = allowed_pipelines
start_iteration_callback = MagicMock()
automl = AutoClassificationSearch(max_pipelines=2, start_iteration_callback=start_iteration_callback)
assert automl.possible_pipelines == allowed_pipelines
automl.search(X, y)

assert start_iteration_callback.call_count == 2
proposed_parameters = start_iteration_callback.call_args[0][1]
print(start_iteration_callback.call_args[0])
assert proposed_parameters.keys() == {'RF Classifier Select From Model', 'Logistic Regression Classifier'}
assert proposed_parameters['RF Classifier Select From Model']['number_features'] == X.shape[1]
assert proposed_parameters['RF Classifier Select From Model']['n_jobs'] == -1


@patch('evalml.tuners.random_search_tuner.RandomSearchTuner.is_search_space_exhausted')
Expand Down

0 comments on commit 0a9a863

Please sign in to comment.