Updates to support Python 3.7
yrajas committed Dec 17, 2023
1 parent b2869dc commit 2fe1d2f
Showing 2 changed files with 26 additions and 13 deletions.
18 changes: 5 additions & 13 deletions raimitigations/automitigator/evaluator.py
@@ -14,21 +14,20 @@ class Evaluator:
 
     def __init__(self, automl_args=None) -> None:
         self.automl_args = automl_args
+        self.pipeline_steps = []
         self.pipeline = None
 
     def _pipeline_append(self, step):
         """
         Append a step to the pipeline.
         Note: This is done to support older versions of sklearn < 1.1 (required to support python 3.7)
-        which doesn't support an empty pipeline to be initialized.
+        which doesn't support an empty pipeline to be initialized nor appending a step to an existing pipeline
+        without an estimator at the end.
         :param sklearn.pipeline.Pipeline pipeline: The pipeline to add the step to
         :param step: The step to add
         """
-        if self.pipeline is None:
-            self.pipeline = Pipeline([step])
-        else:
-            self.pipeline.steps.append(step)
+        self.pipeline_steps.append(step)
 
     def evaluate(self, train_x, train_y, search_config):
         """
@@ -52,8 +51,6 @@ def evaluate(train_x, train_y, search_config):
         cohort = search_space[amd.cohort_key]
         if cohort == amd.all_cohort:
             return self.mitigate_full_dataset(train_x, train_y, search_space)
-        else:
-            raise ValueError(f"Unknown cohort type {cohort}")
 
     def _process_feature_selector(self, selector_type):
         """
@@ -131,8 +128,6 @@ def mitigate_full_dataset(self, train_x, train_y, search_space):
                 self._process_feature_selector(config[amd.mitigation_type_key])
             elif mitigation_name == amd.no_mitigation:
                 continue
-            else:
-                raise ValueError(f"Unknown mitigation {mitigation_name}")
 
         fit_results = self._fit_model(train_x, train_y)
         fit_results["search_space"] = search_space
@@ -153,8 +148,6 @@ def _process_imputer(self, imputer_type):
             self._pipeline_append((amd.iterative_imputer, dp.IterativeDataImputer()))
         elif imputer_name == amd.knn_imputer:
             self._pipeline_append((amd.knn_imputer, dp.KNNDataImputer()))
-        else:
-            raise ValueError(f"Unknown imputer {imputer_name}")
 
     def _process_scaler(self, scaler_type):
         """
@@ -177,8 +170,6 @@ def _process_scaler(self, scaler_type):
             self._pipeline_append((amd.normalize_scaler, dp.DataNormalizer()))
         elif scaler_name == amd.minmax_scaler:
             self._pipeline_append((amd.minmax_scaler, dp.DataMinMaxScaler()))
-        else:
-            raise ValueError(f"Unknown scaler {scaler_name}")
 
     def _fit_model(self, train_x, train_y):
         """
@@ -197,6 +188,7 @@ def _fit_model(self, train_x, train_y):
         """
         automl = AutoML(**self.automl_args)
         self._pipeline_append(("automl", automl))
+        self.pipeline = Pipeline(self.pipeline_steps)
 
         try:
             self.pipeline.fit(train_x, train_y)
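The change in evaluator.py defers building the scikit-learn Pipeline: mitigation steps are collected in a plain list and only wrapped in a Pipeline once the final AutoML estimator has been appended, which is what sklearn < 1.1 (the newest line that still runs on Python 3.7) requires, since it rejects a Pipeline without a final estimator and does not support appending steps afterwards. A minimal sketch of the same pattern, using stand-in step names and transformers rather than anything from this repository:

from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import make_classification

# Collect (name, step) tuples in a plain list first; per the commit's note,
# sklearn < 1.1 rejects a Pipeline built without a final estimator and does
# not support appending a step to an existing pipeline.
steps = []
steps.append(("scaler", StandardScaler()))
steps.append(("model", LogisticRegression()))

# Only now, with the estimator in place, is the Pipeline constructed and fit.
pipeline = Pipeline(steps)
X, y = make_classification(n_samples=50, n_features=5, random_state=0)
pipeline.fit(X, y)

Building the Pipeline once at fit time keeps _pipeline_append trivial and avoids version-dependent behaviour of mutating Pipeline.steps.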
21 changes: 21 additions & 0 deletions test/automitigator/automitigator_test.py
@@ -44,6 +44,27 @@ def _test_mitigation(train_x, train_y, test_x, mitigation):
 
     assert autoMitigator._pipeline is not None
 
+# -----------------------------------
+def test_duplicate_mitigations(df_breast_cancer, label_name_bc):
+    train_x, test_x, train_y, test_y = _prepare_data(df_breast_cancer, label_name_bc)
+
+    config = {
+        'search_space':
+        {
+            'cohort': 'all',
+            'mitigations': { 'action0': {'type': 0, 'strategy': 0, 'name': 'rebalancer'},
+                             'action1': {'type': 0, 'strategy': 0, 'name': 'rebalancer'}}
+        }
+    }
+
+    tune_args = {'points_to_evaluate':[config]}
+    autoMitigator = AutoMitigator(max_mitigations=2, num_samples=1, use_ray=False, tune_args=tune_args)
+    autoMitigator.fit(train_x, train_y)
+    _ = autoMitigator.predict(test_x)
+
+    assert autoMitigator._pipeline is not None
+
 # -----------------------------------
 def test_auto_mitigator_starting_points(df_breast_cancer, label_name_bc):
     train_x, test_x, train_y, test_y = _prepare_data(df_breast_cancer, label_name_bc)
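The new test pins a search space in which two actions resolve to the same 'rebalancer' mitigation and feeds it to the tuner through points_to_evaluate, so that exact duplicate combination is evaluated rather than left to random sampling. One plausible motivation, an assumption on my part rather than something stated in the commit, is that scikit-learn pipelines require unique step names, so duplicate mitigations have to be handled deliberately; a small illustration of that constraint with stand-in transformers:

from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import make_classification

X, y = make_classification(n_samples=50, n_features=5, random_state=0)

# Two steps registered under the same name: Pipeline validation rejects
# duplicate step names when fit() is called, raising a ValueError along
# the lines of "Names provided are not unique".
duplicated = Pipeline([
    ("rebalancer", StandardScaler()),   # stand-ins for the real mitigations
    ("rebalancer", MinMaxScaler()),
    ("model", LogisticRegression()),
])
try:
    duplicated.fit(X, y)
except ValueError as err:
    print(err)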
