
MNT Use check_scalar in AdaBoostRegressor #21605

Merged
22 changes: 21 additions & 1 deletion sklearn/ensemble/_weight_boosting.py
@@ -1078,9 +1078,29 @@ def fit(self, X, y, sample_weight=None):
self : object
Fitted AdaBoostRegressor estimator.
"""
# Validate scalar parameters
check_scalar(
self.n_estimators,
"n_estimators",
target_type=numbers.Integral,
min_val=1,
include_boundaries="left",
)

check_scalar(
self.learning_rate,
"learning_rate",
target_type=numbers.Real,
min_val=0,
include_boundaries="neither",
)
Member

Should I delete this one?

We can move both of these checks into BaseWeightBoosting, and replace the check for self.learning_rate:

if self.learning_rate <= 0:
raise ValueError("learning_rate must be greater than zero")

@genvalen (Contributor Author), Dec 10, 2021

Thank you. In that case I will remove the checks I added to AdaBoostClassifier too. They are the exact same checks as what I am about to move into BaseWeightBoosting.


# Check loss
if self.loss not in ("linear", "square", "exponential"):
raise ValueError("loss must be 'linear', 'square', or 'exponential'")
raise ValueError(
"loss must be 'linear', 'square', or 'exponential'"
f" Got {self.loss!r} instead."
)

# Fit
return super().fit(X, y, sample_weight)
27 changes: 27 additions & 0 deletions sklearn/ensemble/tests/test_weight_boosting.py
@@ -569,6 +569,33 @@ def test_adaboost_classifier_params_validation(params, err_type, err_msg):
AdaBoostClassifier(**params).fit(X, y_class)


@pytest.mark.parametrize(
# Test that it gives proper exception on deficient input.
"params, err_type, err_msg",
[
({"n_estimators": -1}, ValueError, "n_estimators == -1, must be >= 1"),
({"n_estimators": 0}, ValueError, "n_estimators == 0, must be >= 1"),
(
{"n_estimators": 1.5},
TypeError,
"n_estimators must be an instance of <class 'numbers.Integral'>,"
" not <class 'float'>",
),
({"learning_rate": -1}, ValueError, "learning_rate == -1, must be > 0."),
({"learning_rate": 0}, ValueError, "learning_rate == 0, must be > 0."),
(
{"loss": "unknown"},
ValueError,
"loss must be 'linear', 'square', or 'exponential'",
),
],
)
def test_adaboost_regressor_params_validation(params, err_type, err_msg):
"""Check the parameters validation in `AdaBoostRegressor`."""
with pytest.raises(err_type, match=err_msg):
AdaBoostRegressor(**params).fit(X, y_regr)


@pytest.mark.parametrize("algorithm", ["SAMME", "SAMME.R"])
def test_adaboost_consistent_predict(algorithm):
# check that predict_proba and predict give consistent results
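
For reference, a standalone snippet (an editorial illustration, not part of the PR) that reproduces the kind of messages check_scalar raises for the deficient n_estimators values used in test_adaboost_regressor_params_validation above; the parametrized regexes are matched against messages of this shape:

import numbers

from sklearn.utils import check_scalar

for bad_value in (-1, 0, 1.5):
    try:
        check_scalar(
            bad_value,
            "n_estimators",
            target_type=numbers.Integral,
            min_val=1,
            include_boundaries="left",
        )
    except (TypeError, ValueError) as exc:
        # e.g. "n_estimators == -1, must be >= 1" for out-of-range integers,
        # or a TypeError naming numbers.Integral for the float input.
        print(exc)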