Adding pytest.raises to tests that expect a PipelineScoreError.
freddyaboulton committed Jul 20, 2020
1 parent e7e054c commit 3e63c68
Showing 1 changed file with 12 additions and 0 deletions.
evalml/tests/pipeline_tests/test_pipelines.py (12 additions, 0 deletions)
@@ -622,6 +622,9 @@ def test_score_regression_objective_error(mock_predict, mock_fit, mock_objective
     clf = make_mock_regression_pipeline()
     clf.fit(X, y)
     objective_names = ['r2', 'mse']
+    # Use pytest.raises to make sure the test fails if the error is not raised.
+    with pytest.raises(PipelineScoreError):
+        _ = clf.score(X, y, objective_names)
     try:
         _ = clf.score(X, y, objective_names)
     except PipelineScoreError as e:
@@ -642,6 +645,9 @@ def test_score_binary_objective_error(mock_predict, mock_fit, mock_objective_sco
     clf = make_mock_binary_pipeline()
     clf.fit(X, y)
     objective_names = ['f1', 'precision']
+    # Use pytest.raises to make sure the test fails if the error is not raised.
+    with pytest.raises(PipelineScoreError):
+        _ = clf.score(X, y, objective_names)
     try:
         _ = clf.score(X, y, objective_names)
     except PipelineScoreError as e:
@@ -661,6 +667,9 @@ def test_score_multiclass_objective_error(mock_predict, mock_fit, mock_objective
     clf = make_mock_multiclass_pipeline()
     clf.fit(X, y)
     objective_names = ['f1_micro', 'precision_micro']
+    # Use pytest.raises to make sure the test fails if the error is not raised.
+    with pytest.raises(PipelineScoreError):
+        _ = clf.score(X, y, objective_names)
     try:
         _ = clf.score(X, y, objective_names)
     except PipelineScoreError as e:
@@ -780,6 +789,9 @@ class MockPipelineNone(BinaryClassificationPipeline):
 def test_score_with_objective_that_requires_predict_proba(mock_predict, dummy_regression_pipeline_class, X_y_binary):
     X, y = X_y_binary
     mock_predict.return_value = np.array([1] * 100)
+    # Use pytest.raises to make sure the test fails if the error is not raised.
+    with pytest.raises(PipelineScoreError):
+        dummy_regression_pipeline_class(parameters={}).score(X, y, ['precision', 'auc'])
     try:
         dummy_regression_pipeline_class(parameters={}).score(X, y, ['precision', 'auc'])
     except PipelineScoreError as e:
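For context on the pattern added above: a bare try/except block passes silently when the expected exception is never raised, while pytest.raises fails the test in that case. Below is a minimal, self-contained sketch of the same idiom; the ScoreError class, the score helper, and the test names are hypothetical stand-ins for evalml's PipelineScoreError and pipeline scoring, not the project's actual code.

    import pytest

    class ScoreError(Exception):
        """Hypothetical stand-in for an error type like PipelineScoreError."""

    def score(objective_names):
        # Hypothetical scorer that rejects an empty list of objectives.
        if not objective_names:
            raise ScoreError("no objectives to score")
        return {name: 1.0 for name in objective_names}

    def test_score_errors_on_empty_objectives():
        # Fails the test if ScoreError is NOT raised inside the block.
        with pytest.raises(ScoreError):
            score([])

    def test_score_error_contents():
        # pytest.raises can also capture the exception for inspection,
        # much like the try/except blocks in the tests above.
        with pytest.raises(ScoreError) as exc_info:
            score([])
        assert "no objectives" in str(exc_info.value)

Pairing pytest.raises with the existing try/except blocks keeps the detailed checks on the error's contents while guaranteeing that the score call actually raises.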
