Skip to content

Commit

Permalink
Add tests using the weighted Manhattan distance as a regularizer
Browse files Browse the repository at this point in the history
  • Loading branch information
andreArtelt committed Jul 16, 2019
1 parent 83d1f74 commit a79a375
Show file tree
Hide file tree
Showing 3 changed files with 33 additions and 2 deletions.
10 changes: 10 additions & 0 deletions tests/sklearn/test_sklearn_softmaxregression.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from sklearn.linear_model import LogisticRegression

from ceml.sklearn import generate_counterfactual
from ceml.backend.jax.costfunctions import LMadCost


def test_softmaxregression():
Expand All @@ -26,13 +27,22 @@ def test_softmaxregression():
x_orig = X_test[1:4][0,:]
assert model.predict([x_orig]) == 2

# Create weighted manhattan distance cost function
md = np.median(X_train, axis=0)
mad = np.median(np.abs(X_train - md), axis=0)
regularization_mad = LMadCost(x_orig, mad)

# Compute counterfactual
features_whitelist = None

x_cf, y_cf, delta = generate_counterfactual(model, x_orig, 0, features_whitelist=features_whitelist, regularization="l1", C=1.0, optimizer="bfgs", return_as_dict=False)
assert y_cf == 0
assert model.predict(np.array([x_cf])) == 0

x_cf, y_cf, delta = generate_counterfactual(model, x_orig, 0, features_whitelist=features_whitelist, regularization=regularization_mad, C=1.0, optimizer="bfgs", return_as_dict=False)
assert y_cf == 0
assert model.predict(np.array([x_cf])) == 0

x_cf, y_cf, delta = generate_counterfactual(model, x_orig, 0, features_whitelist=features_whitelist, regularization="l1", C=1.0, optimizer="cg", return_as_dict=False)
assert y_cf == 0
assert model.predict(np.array([x_cf])) == 0
Expand Down
12 changes: 11 additions & 1 deletion tests/tfkeras/test_tfkeras_softmaxregression.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from sklearn.metrics import accuracy_score

from ceml.tfkeras import generate_counterfactual
from ceml.backend.tensorflow.costfunctions import NegLogLikelihoodCost
from ceml.backend.tensorflow.costfunctions import NegLogLikelihoodCost, LMadCost
from ceml.model import ModelWithLoss


Expand Down Expand Up @@ -61,6 +61,11 @@ def get_loss(self, y_target, pred=None):
x_orig = X_test[1,:]
assert model.predict(np.array([x_orig])) == 1

# Create weighted manhattan distance cost function
md = np.median(X_train, axis=0)
mad = np.median(np.abs(X_train - md), axis=0)
regularization_mad = LMadCost(x_orig, mad)

# Compute counterfactual
features_whitelist = None

Expand All @@ -70,6 +75,11 @@ def get_loss(self, y_target, pred=None):
assert y_cf == 0
assert model.predict(np.array([x_cf])) == 0

optimizer = "bfgs"
x_cf, y_cf, delta = generate_counterfactual(model, x_orig, y_target=0, features_whitelist=features_whitelist, regularization=regularization_mad, C=0.01, optimizer=optimizer, optimizer_args=optimizer_args, return_as_dict=False)
assert y_cf == 0
assert model.predict(np.array([x_cf])) == 0

optimizer = "nelder-mead"
x_cf, y_cf, delta = generate_counterfactual(model, x_orig, y_target=0, features_whitelist=features_whitelist, regularization="l1", C=0.01, optimizer=optimizer, optimizer_args=optimizer_args, return_as_dict=False)
assert y_cf == 0
Expand Down
13 changes: 12 additions & 1 deletion tests/torch/test_torch_softmaxregression.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from sklearn.metrics import accuracy_score

from ceml.torch import generate_counterfactual
from ceml.backend.torch.costfunctions import NegLogLikelihoodCost
from ceml.backend.torch.costfunctions import NegLogLikelihoodCost, LMadCost
from ceml.model import ModelWithLoss


Expand Down Expand Up @@ -74,6 +74,11 @@ def get_loss(self, y_target, pred=None):
x_orig = X_test[1,:]
assert model.predict(torch.from_numpy(np.array([x_orig]))).numpy() == 1

# Create weighted manhattan distance cost function
md = np.median(X_train, axis=0)
mad = np.median(np.abs(X_train - md), axis=0)
regularization_mad = LMadCost(torch.from_numpy(x_orig), torch.from_numpy(mad))

# Compute counterfactual
features_whitelist = None

Expand All @@ -83,6 +88,12 @@ def get_loss(self, y_target, pred=None):
assert y_cf == 0
assert model.predict(torch.from_numpy(np.array([x_cf], dtype=np.float32))).numpy() == 0

optimizer = "bfgs"
optimizer_args = {"max_iter": 1000, "args": {"lr": 0.9, "momentum": 0.9}}
x_cf, y_cf, delta = generate_counterfactual(model, x_orig, y_target=0, features_whitelist=features_whitelist, regularization=regularization_mad, C=0.001, optimizer=optimizer, optimizer_args=optimizer_args, return_as_dict=False)
assert y_cf == 0
assert model.predict(torch.from_numpy(np.array([x_cf], dtype=np.float32))).numpy() == 0

optimizer = "nelder-mead"
x_cf, y_cf, delta = generate_counterfactual(model, x_orig, y_target=0, features_whitelist=features_whitelist, regularization="l1", C=0.001, optimizer=optimizer, optimizer_args=optimizer_args, return_as_dict=False)
assert y_cf == 0
Expand Down

0 comments on commit a79a375

Please sign in to comment.