Skip to content
Permalink
Browse files

Satisfy pylint (#252)

* Move NeuralNetClassifier and NeuralNetRegressor to their own modules.

* Adjust imports to reflect the new module structure.
* Deprecate the old import locations.

* Refactor net tests to reflect new module structure.

* Minor non-functional changes in test_net.py.

* Non-functional changes to satisfy pylint.

* Increase the maximum line length to 88 in pylintrc.

We might one day switch to 'black', which uses 88 as its default line length.
  • Loading branch information...
benjamin-work authored and ottonemo committed Jun 22, 2018
1 parent c08d502 commit 00f53ecf9455df702c7fe686e238ac7641567e03
Showing with 25 additions and 19 deletions.
  1. +2 −2 pylintrc
  2. +12 −10 skorch/__init__.py
  3. +1 −3 skorch/callbacks/logging.py
  4. +4 −2 skorch/callbacks/scoring.py
  5. +2 −0 skorch/dataset.py
  6. +1 −0 skorch/net.py
  7. +1 −1 skorch/tests/test_net.py
  8. +2 −1 skorch/utils.py
@@ -134,7 +134,7 @@ dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_)

# Maximum number of characters on a single line.
# DEFAULT: max-line-length=80
max-line-length=80
max-line-length=88

# Regexp for a line that is allowed to be longer than the limit.
# DEFAULT: ignore-long-lines=^\s*(# )?<?https?://\S+>?$
@@ -212,7 +212,7 @@ method-rgx=^(?:(?P<exempt>__[a-z0-9_]+__|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z

# Regular expression matching correct attribute names
# DEFAULT: attr-rgx=[a-z_][a-z0-9_]{2,30}$
attr-rgx=^_{0,2}[a-z][a-z0-9_]*$
attr-rgx=^_{0,2}[a-zX][a-z0-9_]*$

# Regular expression matching correct argument names
# DEFAULT: argument-rgx=[a-z_][a-z0-9_]{2,30}$
@@ -3,13 +3,23 @@
import pkg_resources
from pkg_resources import parse_version

from .history import History
from .net import NeuralNet
from .classifier import NeuralNetClassifier
from .regressor import NeuralNetRegressor
from . import callbacks


MIN_TORCH_VERSION = '0.4.0'

try:
# pylint: disable=wrong-import-position
import torch
except ModuleNotFoundError:
raise ModuleNotFoundError("No module named 'torch', and skorch depends on PyTorch (aka 'torch'). "
'Visit https://pytorch.org/ for installation instructions')
raise ModuleNotFoundError(
"No module named 'torch', and skorch depends on PyTorch "
"(aka 'torch'). "
"Visit https://pytorch.org/ for installation instructions.")

torch_version = pkg_resources.get_distribution('torch').version
if parse_version(torch_version) < parse_version(MIN_TORCH_VERSION):
@@ -18,14 +28,6 @@
raise ImportWarning(msg.format(req=MIN_TORCH_VERSION, installed=torch_version))


from .history import History
from .net import NeuralNet
from .classifier import NeuralNetClassifier
from .regressor import NeuralNetRegressor

from . import callbacks


__all__ = [
'History',
'NeuralNet',
@@ -134,11 +134,9 @@ def _sorted_keys(self, keys):

def _yield_keys_formatted(self, row):
colors = cycle([color.value for color in Ansi if color != color.ENDC])
color = next(colors)
for key in self._sorted_keys(row.keys()):
for key, color in zip(self._sorted_keys(row.keys()), colors):
formatted = self.format_row(row, key, color=color)
yield key, formatted
color = next(colors)

def table(self, row):
headers = []
@@ -75,7 +75,9 @@ def __init__(
self.target_extractor = target_extractor
self.use_caching = use_caching

# pylint: disable=protected-access
def _get_name(self):
"""Find name of scoring function."""
if self.name is not None:
return self.name
if self.scoring_ is None:
@@ -94,7 +96,7 @@ def initialize(self):
self.name_ = self._get_name()
return self

# pylint: disable=unused-parameters
# pylint: disable=attribute-defined-outside-init,arguments-differ
def on_train_begin(self, net, X, y, **kwargs):
self.X_indexing_ = check_indexing(X)
self.y_indexing_ = check_indexing(y)
@@ -292,7 +294,7 @@ def initialize(self):
self._initialize_cache()
return self

# pylint: disable=arguments-differ
# pylint: disable=arguments-differ,unused-argument
def on_epoch_begin(self, net, dataset_train, dataset_valid, **kwargs):
self._initialize_cache()

@@ -59,6 +59,7 @@ def uses_placeholder_y(ds):


class Dataset(torch.utils.data.Dataset):
# pylint: disable=anomalous-backslash-in-string
"""General dataset wrapper that can be used in conjunction with
PyTorch :class:`~torch.utils.data.DataLoader`.
@@ -127,6 +128,7 @@ def __len__(self):
return self._len

def transform(self, X, y):
# pylint: disable=anomalous-backslash-in-string
"""Additional transformations on ``X`` and ``y``.
By default, they are cast to PyTorch :class:`~torch.Tensor`\s.
@@ -1232,6 +1232,7 @@ def set_params(self, **kwargs):
return self

def _set_params_callback(self, **params):
"""Special handling for setting params on callbacks."""
# model after sklearn.utils._BaseCompostion._set_params
# 1. All steps
if 'callbacks' in params:
@@ -1402,7 +1402,7 @@ def test_accumulator_that_returns_last_value(
side_effect = []

class SGD3Calls(torch.optim.SGD):
def step(self, closure):
def step(self, closure=None):
for _ in range(3):
loss = super().step(closure)
side_effect.append(float(loss))
@@ -118,6 +118,7 @@ def flatten(arr):
yield item


# pylint: disable=unused-argument
def _indexing_none(data, i):
return None

@@ -369,7 +370,7 @@ def noop(*args, **kwargs):
@contextmanager
def open_file_like(f, mode):
"""Wrapper for opening a file"""
new_fd = isinstance(f, str) or isinstance(f, pathlib.Path)
new_fd = isinstance(f, (str, pathlib.Path))
if new_fd:
f = open(f, mode)
try:

0 comments on commit 00f53ec

Please sign in to comment.
You can’t perform that action at this time.