Commit

Increase test coverage
andreArtelt committed Dec 2, 2019
1 parent 37a33e7 commit bc1bae4
Showing 7 changed files with 59 additions and 6 deletions.
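
Most of the new assertions follow a single pattern: each ceml.sklearn counterfactual wrapper is constructed around an incompatible scikit-learn estimator and is expected to raise a TypeError (the softmax regression test additionally expects a ValueError for multi_class="ovr"). A minimal, self-contained sketch of that pattern, reusing the LdaCounterfactual case from the diff below; the test function name is illustrative only:

import pytest
import sklearn.linear_model

from ceml.sklearn import LdaCounterfactual

def test_lda_rejects_unsupported_model():
    # Constructing the wrapper around a non-LDA model should fail fast with a TypeError.
    with pytest.raises(TypeError):
        LdaCounterfactual(sklearn.linear_model.LogisticRegression())
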
16 changes: 15 additions & 1 deletion tests/sklearn/test_sklearn_isolationforest.py
@@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
import numpy as np
np.random.seed(42)
import pytest
import sklearn
from sklearn.ensemble import IsolationForest
from sklearn.datasets import make_blobs

@@ -31,4 +33,16 @@ def test_isolationforest():

x_cf, y_cf, _ = generate_counterfactual(model, x, y_target=y_target, return_as_dict=False)
assert y_cf == y_target
assert model.predict(np.array([x_cf])) == y_target

cf = generate_counterfactual(model, x, y_target=y_target, return_as_dict=True)
assert cf["y_cf"] == y_target
assert model.predict(np.array([cf["x_cf"]])) == y_target

# Other stuff
from ceml.sklearn import IsolationForest as IsolationForestCf
model_cf = IsolationForestCf(model)
assert model.predict([x]) == model_cf.predict(x)

with pytest.raises(TypeError):
IsolationForestCf(sklearn.linear_model.LogisticRegression())
8 changes: 7 additions & 1 deletion tests/sklearn/test_sklearn_lda.py
@@ -4,6 +4,7 @@

import numpy as np
np.random.seed(42)
import pytest
import sklearn
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
@@ -83,4 +84,9 @@ def test_lda():
x_cf, y_cf, delta = generate_counterfactual(model, x_orig, 0, features_whitelist=features_whitelist, regularization=None, optimizer="nelder-mead", return_as_dict=False)
assert y_cf == 0
assert model.predict(np.array([x_cf])) == 0
assert all([True if i in features_whitelist else delta[i] == 0. for i in range(x_orig.shape[0])])

# Other stuff
from ceml.sklearn import LdaCounterfactual
with pytest.raises(TypeError):
LdaCounterfactual(sklearn.linear_model.LogisticRegression())
8 changes: 7 additions & 1 deletion tests/sklearn/test_sklearn_linearregression.py
@@ -4,6 +4,7 @@

import numpy as np
np.random.seed(42)
import pytest
import sklearn
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split
@@ -87,4 +88,9 @@ def test_linearregression():
x_cf, y_cf, delta = generate_counterfactual(model, x_orig, y_target, done=y_target_done, features_whitelist=features_whitelist, regularization=None, optimizer="nelder-mead", return_as_dict=False)
assert y_target_done(y_cf)
assert y_target_done(model.predict(np.array([x_cf])))
assert all([True if i in features_whitelist else delta[i] == 0. for i in range(x_orig.shape[0])])

# Other stuff
from ceml.sklearn import LinearRegressionCounterfactual
with pytest.raises(TypeError):
LinearRegressionCounterfactual(sklearn.naive_bayes.GaussianNB())
6 changes: 6 additions & 0 deletions tests/sklearn/test_sklearn_lvq.py
@@ -4,12 +4,14 @@

import numpy as np
np.random.seed(42)
import pytest
import sklearn
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn_lvq import GlvqModel, GmlvqModel, LgmlvqModel

from ceml.sklearn import generate_counterfactual
from ceml.sklearn import LVQ as LvqCf


def test_glvq():
@@ -81,6 +83,10 @@ def test_glvq():
assert model.predict(np.array([x_cf])) == 0
assert all([True if i in features_whitelist else delta[i] == 0. for i in range(x_orig.shape[0])])

# Other stuff
with pytest.raises(TypeError):
LvqCf(sklearn.linear_model.LogisticRegression())


def test_gmlvq():
# Load data
8 changes: 7 additions & 1 deletion tests/sklearn/test_sklearn_naivebayes.py
@@ -4,6 +4,7 @@

import numpy as np
np.random.seed(42)
import pytest
import sklearn
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
@@ -113,4 +114,9 @@ def test_gaussiannaivebayes():
x_cf, y_cf, delta = generate_counterfactual(model, x_orig, y_target=0, features_whitelist=features_whitelist, optimizer="mp", return_as_dict=False)
assert y_cf == 0
print(model.predict_proba(np.array([x_cf])))
assert model.predict(np.array([x_cf])) == 0

# Other stuff
from ceml.sklearn import GaussianNbCounterfactual
with pytest.raises(TypeError):
GaussianNbCounterfactual(sklearn.linear_model.LogisticRegression())
8 changes: 7 additions & 1 deletion tests/sklearn/test_sklearn_qda.py
@@ -4,6 +4,7 @@

import numpy as np
np.random.seed(42)
import pytest
import sklearn
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
@@ -107,4 +108,9 @@ def test_qda():

x_cf, y_cf, delta = generate_counterfactual(model, x_orig, y_target=0, features_whitelist=features_whitelist, optimizer="mp", return_as_dict=False)
assert y_cf == 0
assert model.predict(np.array([x_cf])) == 0

# Other stuff
from ceml.sklearn import QdaCounterfactual
with pytest.raises(TypeError):
QdaCounterfactual(sklearn.linear_model.LogisticRegression())
11 changes: 10 additions & 1 deletion tests/sklearn/test_sklearn_softmaxregression.py
@@ -4,6 +4,7 @@

import numpy as np
np.random.seed(42)
import pytest
import sklearn
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
@@ -156,4 +157,12 @@ def test_softmaxregression():

x_cf, y_cf, delta = generate_counterfactual(model, x_orig, 0, features_whitelist=features_whitelist, regularization="l1", optimizer="mp", return_as_dict=False)
assert y_cf == 0
assert model.predict(np.array([x_cf])) == 0

# Other stuff
from ceml.sklearn import SoftmaxCounterfactual
with pytest.raises(TypeError):
SoftmaxCounterfactual(sklearn.linear_model.LinearRegression())

with pytest.raises(ValueError):
SoftmaxCounterfactual(LogisticRegression(multi_class="ovr"))
