Skip to content

Commit

Permalink
feat: add learning_rate to AdaBoost classifier and regressor. (#251)
Browse files Browse the repository at this point in the history
Closes #167.

Adds learning_rate parameter to AdaBoost classifier and regressor.

<!-- Please provide a summary of changes in this pull request, ensuring
all changes are explained. -->

---------

Co-authored-by: Lars Reimann <mail@larsreimann.com>
  • Loading branch information
alex-senger and lars-reimann committed Apr 28, 2023
1 parent e642d1d commit 7f74440
Show file tree
Hide file tree
Showing 4 changed files with 62 additions and 8 deletions.
18 changes: 14 additions & 4 deletions src/safeds/ml/classical/classification/_ada_boost.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,22 @@


class AdaBoost(Classifier):
"""Ada Boost classification."""
"""Ada Boost classification.
def __init__(self) -> None:
Parameters
----------
learning_rate : float
Weight applied to each classifier at each boosting iteration.
A higher learning rate increases the contribution of each classifier.
"""

def __init__(self, learning_rate: float = 1.0) -> None:
    """Create an untrained Ada Boost classifier.

    Parameters
    ----------
    learning_rate : float
        Weight applied to each classifier at each boosting iteration.
        A higher learning rate increases the contribution of each classifier.
        Must be strictly positive. Defaults to 1.0.

    Raises
    ------
    ValueError
        If `learning_rate` is not positive.
    """
    # Validate before touching any state, so a failed construction leaves
    # no partially initialized attributes behind.
    if learning_rate <= 0:
        raise ValueError("learning_rate must be positive.")

    # Set lazily by fit(); None means "not trained yet".
    self._wrapped_classifier: sk_AdaBoostClassifier | None = None
    self._feature_names: list[str] | None = None
    self._target_name: str | None = None
    self._learning_rate = learning_rate

def fit(self, training_set: TaggedTable) -> AdaBoost:
"""
Expand All @@ -41,10 +51,10 @@ def fit(self, training_set: TaggedTable) -> AdaBoost:
LearningError
If the training data contains invalid values or if the training failed.
"""
wrapped_classifier = sk_AdaBoostClassifier()
wrapped_classifier = sk_AdaBoostClassifier(learning_rate=self._learning_rate)
fit(wrapped_classifier, training_set)

result = AdaBoost()
result = AdaBoost(learning_rate=self._learning_rate)
result._wrapped_classifier = wrapped_classifier
result._feature_names = training_set.features.column_names
result._target_name = training_set.target.name
Expand Down
18 changes: 14 additions & 4 deletions src/safeds/ml/classical/regression/_ada_boost.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,22 @@


class AdaBoost(Regressor):
"""Ada Boost regression."""
"""Ada Boost regression.
def __init__(self) -> None:
Parameters
----------
learning_rate : float
Weight applied to each regressor at each boosting iteration.
A higher learning rate increases the contribution of each regressor.
"""

def __init__(self, learning_rate: float = 1.0) -> None:
    """Create an untrained Ada Boost regressor.

    Parameters
    ----------
    learning_rate : float
        Weight applied to each regressor at each boosting iteration.
        A higher learning rate increases the contribution of each regressor.
        Must be strictly positive. Defaults to 1.0.

    Raises
    ------
    ValueError
        If `learning_rate` is not positive.
    """
    # Validate before touching any state, so a failed construction leaves
    # no partially initialized attributes behind.
    if learning_rate <= 0:
        raise ValueError("learning_rate must be positive.")

    # Set lazily by fit(); None means "not trained yet".
    self._wrapped_regressor: sk_AdaBoostRegressor | None = None
    self._feature_names: list[str] | None = None
    self._target_name: str | None = None
    # NOTE(review): public attribute, whereas the classifier stores it as
    # private `_learning_rate` — consider aligning the two in a follow-up
    # (the regressor test asserts on the public name, so kept as-is here).
    self.learning_rate = learning_rate

def fit(self, training_set: TaggedTable) -> AdaBoost:
"""
Expand All @@ -41,10 +51,10 @@ def fit(self, training_set: TaggedTable) -> AdaBoost:
LearningError
If the training data contains invalid values or if the training failed.
"""
wrapped_regressor = sk_AdaBoostRegressor()
wrapped_regressor = sk_AdaBoostRegressor(learning_rate=self.learning_rate)
fit(wrapped_regressor, training_set)

result = AdaBoost()
result = AdaBoost(learning_rate=self.learning_rate)
result._wrapped_regressor = wrapped_regressor
result._feature_names = training_set.features.column_names
result._target_name = training_set.target.name
Expand Down
17 changes: 17 additions & 0 deletions tests/safeds/ml/classical/classification/test_ada_boost.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import pytest
from safeds.data.tabular.containers import Table
from safeds.ml.classical.classification import AdaBoost


def test_should_throw_value_error_if_learning_rate_is_non_positive() -> None:
    """A non-positive learning rate must be rejected at construction time."""
    expected_message = "learning_rate must be positive."
    with pytest.raises(ValueError, match=expected_message):
        AdaBoost(learning_rate=-1)


def test_should_give_learning_rate_to_sklearn() -> None:
    """The learning rate given to the wrapper must reach the fitted sklearn model."""
    training_set = Table.from_dict({"col1": [1, 2, 3, 4], "col2": [1, 2, 3, 4]})
    tagged_table = training_set.tag_columns("col1")

    # Fixed misleading local name: this is a classifier test, but the
    # original called the fitted model `regressor`.
    classifier = AdaBoost(learning_rate=2).fit(tagged_table)
    assert classifier._wrapped_classifier is not None
    assert classifier._wrapped_classifier.learning_rate == classifier._learning_rate
17 changes: 17 additions & 0 deletions tests/safeds/ml/classical/regression/test_ada_boost.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import pytest
from safeds.data.tabular.containers import Table
from safeds.ml.classical.regression import AdaBoost


def test_should_throw_value_error_if_learning_rate_is_non_positive() -> None:
    """A non-positive learning rate must be rejected at construction time."""
    expected_message = "learning_rate must be positive."
    with pytest.raises(ValueError, match=expected_message):
        AdaBoost(learning_rate=-1)


def test_should_give_learning_rate_to_sklearn() -> None:
    """The learning rate given to the wrapper must reach the fitted sklearn model."""
    data = Table.from_dict({"col1": [1, 2, 3, 4], "col2": [1, 2, 3, 4]})
    tagged = data.tag_columns("col1")

    regressor = AdaBoost(learning_rate=2).fit(tagged)
    wrapped = regressor._wrapped_regressor
    assert wrapped is not None
    assert wrapped.learning_rate == regressor.learning_rate

0 comments on commit 7f74440

Please sign in to comment.