pylint
sdpython committed Jan 1, 2021
1 parent a846f64 commit 17ed580
Showing 3 changed files with 5 additions and 3 deletions.
2 changes: 2 additions & 0 deletions appveyor.yml
@@ -10,6 +10,8 @@ init:

 install:
 - "%PYTHON%\\python -m pip install --upgrade pip"
+- "%PYTHON%\\python -m pip install pymyinstall"
+- "%PYTHON%\\Scripts\\pymy_install3 numpy"
 - "%PYTHON%\\Scripts\\pip install torch==1.7.1+cpu torchvision==0.8.2+cpu torchaudio===0.7.2 -f https://download.pytorch.org/whl/torch_stable.html"
 - "%PYTHON%\\Scripts\\pip install -r requirements.txt"
 build: off
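The install step above keeps CPU-only PyTorch wheels (the +cpu build tags resolved through the torch_stable wheel index). As an aside not present in the commit, a minimal sketch of a CI sanity check for that environment could look like the following; it only relies on the documented torch version string and CUDA flag.

    # Illustrative only -- not part of this commit.
    # Confirm the CPU-only wheels pinned in appveyor.yml were installed.
    import torch

    assert torch.__version__.startswith("1.7.1")   # e.g. "1.7.1+cpu"
    assert not torch.cuda.is_available()           # CPU-only builds ship without CUDA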
2 changes: 1 addition & 1 deletion mlinsights/__init__.py
@@ -4,7 +4,7 @@
 @brief Module *mlinsights*.
 Look for insights for machine learned models.
 """
-__version__ = "0.3.520"
+__version__ = "0.3.521"
 __author__ = "Xavier Dupré"
 __github__ = "https://github.com/sdpython/mlinsights"
 __url__ = "http://www.xavierdupre.fr/app/mlinsights/helpsphinx/index.html"
4 changes: 2 additions & 2 deletions mlinsights/mlmodel/quantile_mlpregressor.py
@@ -147,7 +147,7 @@ def _backprop(self, X, y, activations, deltas, coef_grads,
         deltas[last] = self._modify_loss_derivatives(deltas[last])
 
         # Compute gradient for the last layer
-        temp = self._compute_loss_grad(
+        temp = self._compute_loss_grad(  # pylint: disable=E1111
             last, n_samples, activations, deltas, coef_grads, intercept_grads)
         if temp is None:
             # recent version of scikit-learn
@@ -173,7 +173,7 @@ def _backprop(self, X, y, activations, deltas, coef_grads,
             inplace_derivative = DERIVATIVES[self.activation]
             inplace_derivative(activations[i], deltas[i - 1])
 
-            coef_grads, intercept_grads = self._compute_loss_grad(
+            coef_grads, intercept_grads = self._compute_loss_grad(  # pylint: disable=E1111
                 i - 1, n_samples, activations, deltas, coef_grads,
                 intercept_grads)
 
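For context on the two disabled warnings: pylint's E1111 (assignment-from-no-return) fires here because recent scikit-learn releases fill coef_grads and intercept_grads in place and return None from _compute_loss_grad, while older releases return the gradient arrays, hence the `if temp is None` fallback in the code above. The sketch below only illustrates that compatibility pattern; it is not code from the repository, and the helper name call_loss_grad is hypothetical.

    # Hypothetical helper illustrating the pattern guarded by the diff above.
    def call_loss_grad(model, layer, n_samples, activations, deltas,
                       coef_grads, intercept_grads):
        temp = model._compute_loss_grad(  # pylint: disable=E1111
            layer, n_samples, activations, deltas, coef_grads, intercept_grads)
        if temp is None:
            # recent scikit-learn: gradients were updated in place
            return coef_grads, intercept_grads
        # older scikit-learn: the gradients are returned as a tuple
        return temp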
