Skip to content

Commit

Permalink
update pylint (#630)
Browse files Browse the repository at this point in the history
* unpin pylint, remove setuptools constraint

* bump cache

* install simpleeval in noxfile

* re-pin pylint

* fix lint

* nox uses setuptools < 58.0.0
  • Loading branch information
cosmicBboy committed Sep 19, 2021
1 parent 554f822 commit c47a380
Show file tree
Hide file tree
Showing 10 changed files with 31 additions and 37 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci-tests.yml
Expand Up @@ -17,7 +17,7 @@ env:
DEFAULT_PYTHON: 3.8
CI: "true"
# Increase this value to reset cache if environment.yml has not changed
CACHE_VERSION: 4
CACHE_VERSION: 5

jobs:

Expand Down
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Expand Up @@ -41,7 +41,7 @@ repos:
args: ["--line-length=79"]

- repo: https://github.com/pycqa/pylint
rev: v2.10.2
rev: v2.11.1
hooks:
- id: pylint
args: ["--disable=import-error"]
Expand Down
3 changes: 1 addition & 2 deletions environment.yml
Expand Up @@ -26,13 +26,12 @@ dependencies:
- isort >= 5.7.0
- codecov
- mypy >= 0.902 # mypy no longer bundle stubs for third-party libraries
- pylint = 2.10.2
- pylint = 2.11.1  # note: no "v" prefix — conda match specs compare plain version strings; the v-prefixed form is a git tag name and only belongs in .pre-commit-config.yaml's `rev:`
- pytest
- pytest-cov
- pytest-xdist
- pytest-asyncio
- xdoctest
- setuptools < 58.0.0
- nox = 2020.12.31 # pinning due to UnicodeDecodeError, see https://github.com/pandera-dev/pandera/pull/504/checks?check_run_id=2841360122
- importlib_metadata # required if python < 3.8

Expand Down
4 changes: 2 additions & 2 deletions pandera/hypotheses.py
Expand Up @@ -210,8 +210,8 @@ def _relationships(self, relationship: Union[str, Callable]):
relationship = self.RELATIONSHIPS[relationship]
elif not callable(relationship):
raise ValueError(
"expected relationship to be str or callable, found %s"
% type(relationship)
"expected relationship to be str or callable, found "
f"{type(relationship)}"
)
return relationship

Expand Down
6 changes: 3 additions & 3 deletions pandera/io.py
Expand Up @@ -270,7 +270,7 @@ def from_yaml(yaml_schema):
:returns: dataframe schema.
"""
try:
with Path(yaml_schema).open("r") as f:
with Path(yaml_schema).open("r", encoding="utf-8") as f:
serialized_schema = yaml.safe_load(f)
except (TypeError, OSError):
serialized_schema = yaml.safe_load(yaml_schema)
Expand All @@ -290,7 +290,7 @@ def _write_yaml(obj, stream):
return yaml.safe_dump(obj, stream=stream, sort_keys=False)

try:
with Path(stream).open("w") as f:
with Path(stream).open("w", encoding="utf-8") as f:
_write_yaml(statistics, f)
except (TypeError, OSError):
return _write_yaml(statistics, stream)
Expand Down Expand Up @@ -437,7 +437,7 @@ def to_script(dataframe_schema, path_or_buf=None):
if path_or_buf is None:
return formatted_script

with Path(path_or_buf).open("w") as f:
with Path(path_or_buf).open("w", encoding="utf-8") as f:
f.write(formatted_script)


Expand Down
41 changes: 19 additions & 22 deletions pandera/schemas.py
Expand Up @@ -13,7 +13,7 @@
import numpy as np
import pandas as pd

from . import constants, errors
from . import errors
from . import strategies as st
from .checks import Check
from .deprecations import deprecate_pandas_dtype
Expand Down Expand Up @@ -278,9 +278,9 @@ def dtypes(self) -> Dict[str, DataType]:
]
if regex_columns:
warnings.warn(
"Schema has columns specified as regex column names: %s "
"Use the `get_dtypes` to get the datatypes for these "
"columns." % regex_columns,
"Schema has columns specified as regex column names: "
f"{regex_columns}. Use the `get_dtypes` to get the datatypes "
"for these columns.",
UserWarning,
)
return {n: c.dtype for n, c in self.columns.items() if not c.regex}
Expand Down Expand Up @@ -460,11 +460,10 @@ def validate(

if self._is_inferred:
warnings.warn(
"This %s is an inferred schema that hasn't been "
f"This {type(self)} is an inferred schema that hasn't been "
"modified. It's recommended that you refine the schema "
"by calling `add_columns`, `remove_columns`, or "
"`update_columns` before using it to validate data."
% type(self),
"`update_columns` before using it to validate data.",
UserWarning,
)

Expand Down Expand Up @@ -1744,10 +1743,9 @@ def validate(

if self._is_inferred:
warnings.warn(
"This %s is an inferred schema that hasn't been "
f"This {type(self)} is an inferred schema that hasn't been "
"modified. It's recommended that you refine the schema "
"by calling `set_checks` before using it to validate data."
% type(self),
"by calling `set_checks` before using it to validate data.",
UserWarning,
)

Expand All @@ -1771,10 +1769,9 @@ def validate(
)

if self.name is not None and series.name != self._name:
msg = "Expected %s to have name '%s', found '%s'" % (
type(self),
self._name,
series.name,
msg = (
f"Expected {type(self)} to have name '{self._name}', found "
f"'{series.name}'"
)
error_handler.collect_error(
"wrong_field_name",
Expand All @@ -1790,9 +1787,10 @@ def validate(
if not self._nullable:
nulls = series.isna()
if sum(nulls) > 0:
msg = "non-nullable series '%s' contains null values: %s" % (
series.name,
series[nulls].head(constants.N_FAILURE_CASES).to_dict(),
failed = series[nulls]
msg = (
f"non-nullable series '{series.name}' contains null "
f"values:\n{failed}"
)
error_handler.collect_error(
"series_contains_nulls",
Expand All @@ -1811,11 +1809,10 @@ def validate(
if self._unique:
duplicates = series.duplicated()
if any(duplicates):
msg = "series '%s' contains duplicate values: %s" % (
series.name,
series[duplicates]
.head(constants.N_FAILURE_CASES)
.to_dict(),
failed = series[duplicates]
msg = (
f"series '{series.name}' contains duplicate values:\n"
f"{series[duplicates]}"
)
error_handler.collect_error(
"series_contains_duplicates",
Expand Down
3 changes: 1 addition & 2 deletions requirements-dev.txt
Expand Up @@ -17,13 +17,12 @@ black >= 20.8b1
isort >= 5.7.0
codecov
mypy >= 0.902
pylint == 2.10.2
pylint == 2.11.1
pytest
pytest-cov
pytest-xdist
pytest-asyncio
xdoctest
setuptools < 58.0.0
nox == 2020.12.31
importlib_metadata
sphinx
Expand Down
1 change: 0 additions & 1 deletion setup.py
Expand Up @@ -44,7 +44,6 @@
"typing_inspect >= 0.6.0",
"wrapt",
"pyarrow",
"setuptools < 58.0.0",
],
extras_require=extras_require,
python_requires=">=3.7",
Expand Down
2 changes: 1 addition & 1 deletion tests/core/test_decorators.py
Expand Up @@ -819,7 +819,7 @@ class Meta(type):
@check_output(Schema.to_schema())
@check_input(Schema.to_schema(), "df1")
@check_io(df1=Schema.to_schema(), out=Schema.to_schema())
async def regular_meta_coroutine(
async def regular_meta_coroutine( # pylint: disable=no-self-use
cls,
df1: DataFrame[Schema],
) -> DataFrame[Schema]:
Expand Down
4 changes: 2 additions & 2 deletions tests/core/test_schemas.py
Expand Up @@ -160,15 +160,15 @@ def test_dataframe_schema_strict_regex() -> None:
{"foo_*": Column(int, regex=True)},
strict=True,
)
df = pd.DataFrame({"foo_%d" % i: range(10) for i in range(5)})
df = pd.DataFrame({f"foo_{i}": range(10) for i in range(5)})

assert isinstance(schema.validate(df), pd.DataFrame)

# Raise a SchemaError if schema is strict and a regex pattern yields
# no matches
with pytest.raises(errors.SchemaError):
schema.validate(
pd.DataFrame({"bar_%d" % i: range(10) for i in range(5)})
pd.DataFrame({f"bar_{i}": range(10) for i in range(5)})
)


Expand Down

0 comments on commit c47a380

Please sign in to comment.