Skip to content

Commit

Permalink
code coverage
Browse files Browse the repository at this point in the history
  • Loading branch information
sdpython committed Jul 13, 2020
1 parent e768031 commit bf5f146
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 9 deletions.
3 changes: 2 additions & 1 deletion mlinsights/mlmodel/ml_featurizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,8 @@ def wrap_predict_sklearn(X, fct, many):
"""
isv = is_vector(X)
if many == isv:
raise ValueError("Inconsistency X is a single vector, many is True")
raise ValueError( # pragma: no cover
"Inconsistency X is a single vector, many is True")
if isv:
X = [X]
y = fct(X)
Expand Down
10 changes: 4 additions & 6 deletions mlinsights/mlmodel/quantile_mlpregressor.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from sklearn.neural_network._base import DERIVATIVES, LOSS_FUNCTIONS
try:
from sklearn.neural_network._multilayer_perceptron import BaseMultilayerPerceptron
except ImportError:
except ImportError: # pragma: no cover
# scikit-learn < 0.22.
from sklearn.neural_network.multilayer_perceptron import BaseMultilayerPerceptron
from sklearn.metrics import mean_absolute_error
Expand All @@ -35,10 +35,9 @@ def float_sign(a):
    "Returns 1 if *a > 1e-8*, -1 if *a < -1e-8*, otherwise 0"
if a > 1e-8:
return 1.
elif a < -1e-8:
if a < -1e-8:
return -1.
else:
return 0.
return 0.


EXTENDED_LOSS_FUNCTIONS = {'absolute_loss': absolute_loss}
Expand Down Expand Up @@ -87,8 +86,7 @@ def _modify_loss_derivatives(self, last_deltas):
"""
if self.loss == 'absolute_loss':
return DERIVATIVE_LOSS_FUNCTIONS['absolute_loss'](last_deltas)
else:
return last_deltas
return last_deltas # pragma: no cover

def _backprop(self, X, y, activations, deltas, coef_grads,
intercept_grads):
Expand Down
4 changes: 2 additions & 2 deletions mlinsights/mlmodel/transfer_transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,11 +45,11 @@ def __init__(self, estimator, method=None, copy_estimator=True,
elif hasattr(estimator, "predict"):
method = "predict"
else:
raise AttributeError(
raise AttributeError( # pragma: no cover
"Cannot find a method transform, predict_proba, decision_function, "
"predict in object {}".format(type(estimator)))
if not hasattr(estimator, method):
raise AttributeError(
raise AttributeError( # pragma: no cover
"Cannot find method '{}' in object {}".format(
method, type(estimator)))
self.method = method
Expand Down

0 comments on commit bf5f146

Please sign in to comment.