Skip to content

Commit

Permalink
DEV/STYLE: use ruff for linting (#50160)
Browse files Browse the repository at this point in the history
* Test ruff for linting

* further updates

* add pre-commit

* fixup

* restore flake (still needed for validate_docstrings)

* hey, all the per-file-ignores can be removed!

* Update pandas/tests/io/test_html.py

Co-authored-by: Joris Van den Bossche <jorisvandenbossche@gmail.com>

* remove outdated per-file-ignores

* update to .216, set target version

* sync version with environment (todo - remove later)

* CI didn't start, empty commit to retry?

* remove ruff from environment.yml

* back to 0.0.215

Co-authored-by: MarcoGorelli <>
Co-authored-by: Marco Edward Gorelli <33491632+MarcoGorelli@users.noreply.github.com>
  • Loading branch information
jorisvandenbossche and MarcoGorelli committed Jan 10, 2023
1 parent 05e9359 commit db8af0e
Show file tree
Hide file tree
Showing 23 changed files with 135 additions and 349 deletions.
24 changes: 4 additions & 20 deletions .pre-commit-config.yaml
Expand Up @@ -15,6 +15,10 @@ default_stages: [
ci:
autofix_prs: false
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.215
hooks:
- id: ruff
- repo: https://github.com/MarcoGorelli/absolufy-imports
rev: v0.3.1
hooks:
Expand Down Expand Up @@ -66,13 +70,6 @@ repos:
--linelength=88,
'--filter=-readability/casting,-runtime/int,-build/include_subdir,-readability/fn_size'
]
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
hooks:
- id: flake8
additional_dependencies: &flake8_dependencies
- flake8==6.0.0
- flake8-bugbear==22.7.1
- repo: https://github.com/pycqa/pylint
rev: v2.15.9
hooks:
Expand Down Expand Up @@ -117,12 +114,6 @@ repos:
rev: v0.6.7
hooks:
- id: sphinx-lint
- repo: https://github.com/asottile/yesqa
rev: v1.4.0
hooks:
- id: yesqa
additional_dependencies: *flake8_dependencies
stages: [manual]
- repo: local
hooks:
# NOTE: we make `black` a local hook because if it's installed from
Expand Down Expand Up @@ -326,13 +317,6 @@ repos:
files: ^(environment.yml|requirements-dev.txt)$
pass_filenames: false
additional_dependencies: [pyyaml, toml]
- id: sync-flake8-versions
name: Check flake8 version is synced across flake8, yesqa, and environment.yml
language: python
entry: python scripts/sync_flake8_versions.py
files: ^(\.pre-commit-config\.yaml|environment\.yml)$
pass_filenames: false
additional_dependencies: [pyyaml, toml]
- id: title-capitalization
name: Validate correct capitalization among titles in documentation
entry: python scripts/validate_rst_title_capitalization.py
Expand Down
2 changes: 1 addition & 1 deletion doc/source/development/contributing_codebase.rst
Expand Up @@ -43,7 +43,7 @@ Pre-commit
----------

Additionally, :ref:`Continuous Integration <contributing.ci>` will run code formatting checks
like ``black``, ``flake8``,
like ``black``, ``ruff``,
``isort``, and ``cpplint`` and more using `pre-commit hooks <https://pre-commit.com/>`_
Any warnings from these checks will cause the :ref:`Continuous Integration <contributing.ci>` to fail; therefore,
it is helpful to run the check yourself before submitting code. This
Expand Down
3 changes: 1 addition & 2 deletions environment.yml
Expand Up @@ -78,12 +78,11 @@ dependencies:
- black=22.10.0
- cpplint
- flake8=6.0.0
- flake8-bugbear=22.7.1 # used by flake8, find likely bugs
- isort>=5.2.1 # check that imports are in the right order
- mypy=0.991
- pre-commit>=2.15.0
- pycodestyle # used by flake8
- pyupgrade
- ruff=0.0.215

# documentation
- gitpython # obtain contributors from git for whatsnew
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/computation/pytables.py
Expand Up @@ -242,7 +242,7 @@ def stringify(value):
return TermValue(v, v, kind)
elif kind == "bool":
if isinstance(v, str):
v = not v.strip().lower() in [
v = v.strip().lower() not in [
"false",
"f",
"no",
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/generic.py
Expand Up @@ -10811,7 +10811,7 @@ def _accum_func(

if axis == 1:
return self.T._accum_func(
name, func, axis=0, skipna=skipna, *args, **kwargs
name, func, axis=0, skipna=skipna, *args, **kwargs # noqa: B026
).T

def block_accum_func(blk_values):
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/reshape/merge.py
Expand Up @@ -10,6 +10,7 @@
from typing import (
TYPE_CHECKING,
Hashable,
Literal,
Sequence,
cast,
)
Expand All @@ -31,7 +32,6 @@
DtypeObj,
IndexLabel,
JoinHow,
Literal,
MergeHow,
Shape,
Suffixes,
Expand Down
2 changes: 1 addition & 1 deletion pandas/io/formats/latex.py
Expand Up @@ -54,7 +54,7 @@ def _split_into_full_short_caption(
return full_caption, short_caption


class RowStringConverter(ABC):
class RowStringConverter:
r"""Converter for dataframe rows into LaTeX strings.
Parameters
Expand Down
2 changes: 1 addition & 1 deletion pandas/io/formats/style.py
Expand Up @@ -2253,7 +2253,7 @@ def set_sticky(
"props": props + "top:0px; z-index:2;",
}
]
if not self.index.names[0] is None:
if self.index.names[0] is not None:
styles[0]["props"] = (
props + f"top:0px; z-index:2; height:{pixel_size}px;"
)
Expand Down
8 changes: 4 additions & 4 deletions pandas/io/parsers/python_parser.py
Expand Up @@ -1017,12 +1017,12 @@ def _rows_to_cols(self, content: list[list[Scalar]]) -> list[np.ndarray]:
content_len = len(content)
content = []

for (i, l) in iter_content:
actual_len = len(l)
for (i, _content) in iter_content:
actual_len = len(_content)

if actual_len > col_len:
if callable(self.on_bad_lines):
new_l = self.on_bad_lines(l)
new_l = self.on_bad_lines(_content)
if new_l is not None:
content.append(new_l)
elif self.on_bad_lines in (
Expand All @@ -1035,7 +1035,7 @@ def _rows_to_cols(self, content: list[list[Scalar]]) -> list[np.ndarray]:
if self.on_bad_lines == self.BadLineHandleMethod.ERROR:
break
else:
content.append(l)
content.append(_content)

for row_num, actual_len in bad_lines:
msg = (
Expand Down
21 changes: 19 additions & 2 deletions pandas/plotting/_matplotlib/core.py
Expand Up @@ -455,6 +455,7 @@ def generate(self) -> None:
self._post_plot_logic_common(ax, self.data)
self._post_plot_logic(ax, self.data)

@abstractmethod
def _args_adjust(self) -> None:
pass

Expand Down Expand Up @@ -664,6 +665,7 @@ def _post_plot_logic_common(self, ax, data):
else: # pragma no cover
raise ValueError

@abstractmethod
def _post_plot_logic(self, ax, data) -> None:
"""Post process for each axes. Overridden in child classes"""

Expand Down Expand Up @@ -1278,6 +1280,9 @@ def _make_plot(self):
err_kwds["ecolor"] = scatter.get_facecolor()[0]
ax.errorbar(data[x].values, data[y].values, linestyle="none", **err_kwds)

def _args_adjust(self) -> None:
pass


class HexBinPlot(PlanePlot):
@property
Expand Down Expand Up @@ -1310,6 +1315,9 @@ def _make_plot(self) -> None:
def _make_legend(self) -> None:
pass

def _args_adjust(self) -> None:
pass


class LinePlot(MPLPlot):
_default_rot = 0
Expand Down Expand Up @@ -1469,6 +1477,9 @@ def _update_stacker(cls, ax: Axes, stacking_id, values) -> None:
elif (values <= 0).all():
ax._stacker_neg_prior[stacking_id] += values

def _args_adjust(self) -> None:
pass

def _post_plot_logic(self, ax: Axes, data) -> None:
from matplotlib.ticker import FixedLocator

Expand Down Expand Up @@ -1573,6 +1584,9 @@ def _plot( # type: ignore[override]
res = [rect]
return res

def _args_adjust(self) -> None:
pass

def _post_plot_logic(self, ax: Axes, data) -> None:
LinePlot._post_plot_logic(self, ax, data)

Expand Down Expand Up @@ -1855,5 +1869,8 @@ def blank_labeler(label, value):

# leglabels is used for legend labels
leglabels = labels if labels is not None else idx
for p, l in zip(patches, leglabels):
self._append_legend_handles_labels(p, l)
for _patch, _leglabel in zip(patches, leglabels):
self._append_legend_handles_labels(_patch, _leglabel)

def _post_plot_logic(self, ax: Axes, data) -> None:
pass
8 changes: 4 additions & 4 deletions pandas/tests/arithmetic/test_datetime64.py
Expand Up @@ -201,13 +201,13 @@ def test_nat_comparisons(
expected,
):
box = index_or_series
l, r = pair
lhs, rhs = pair
if reverse:
# add lhs / rhs switched data
l, r = r, l
lhs, rhs = rhs, lhs

left = Series(l, dtype=dtype)
right = box(r, dtype=dtype)
left = Series(lhs, dtype=dtype)
right = box(rhs, dtype=dtype)

result = op(left, right)

Expand Down
2 changes: 1 addition & 1 deletion pandas/tests/frame/test_constructors.py
Expand Up @@ -2469,7 +2469,7 @@ def test_dict_nocopy(
if (
using_array_manager
and not copy
and not (any_numpy_dtype in (tm.STRING_DTYPES + tm.BYTES_DTYPES))
and any_numpy_dtype not in tm.STRING_DTYPES + tm.BYTES_DTYPES
):
# TODO(ArrayManager) properly honor copy keyword for dict input
td.mark_array_manager_not_yet_implemented(request)
Expand Down
4 changes: 2 additions & 2 deletions pandas/tests/indexes/period/test_indexing.py
Expand Up @@ -778,8 +778,8 @@ def test_contains_freq_mismatch(self):
rng = period_range("2007-01", freq="M", periods=10)

assert Period("2007-01", freq="M") in rng
assert not Period("2007-01", freq="D") in rng
assert not Period("2007-01", freq="2M") in rng
assert Period("2007-01", freq="D") not in rng
assert Period("2007-01", freq="2M") not in rng

def test_contains_nat(self):
# see gh-13582
Expand Down
2 changes: 1 addition & 1 deletion pandas/tests/io/formats/test_format.py
Expand Up @@ -134,7 +134,7 @@ def has_horizontally_truncated_repr(df):
return False
# Make sure each row has this ... in the same place
r = repr(df)
for ix, l in enumerate(r.splitlines()):
for ix, _ in enumerate(r.splitlines()):
if not r.split()[cand_col] == "...":
return False
return True
Expand Down
6 changes: 2 additions & 4 deletions pandas/tests/io/test_html.py
Expand Up @@ -405,10 +405,8 @@ def test_invalid_table_attrs(self, banklist_data):
url, match="First Federal Bank of Florida", attrs={"id": "tasdfable"}
)

def _bank_data(self, path, *args, **kwargs):
return self.read_html(
path, match="Metcalf", attrs={"id": "table"}, *args, **kwargs
)
def _bank_data(self, path, **kwargs):
return self.read_html(path, match="Metcalf", attrs={"id": "table"}, **kwargs)

@pytest.mark.slow
def test_multiindex_header(self, banklist_data):
Expand Down
26 changes: 13 additions & 13 deletions pandas/tests/plotting/test_datetimelike.py
Expand Up @@ -1021,10 +1021,10 @@ def test_time(self):
# verify tick labels
ticks = ax.get_xticks()
labels = ax.get_xticklabels()
for t, l in zip(ticks, labels):
m, s = divmod(int(t), 60)
for _tick, _label in zip(ticks, labels):
m, s = divmod(int(_tick), 60)
h, m = divmod(m, 60)
rs = l.get_text()
rs = _label.get_text()
if len(rs) > 0:
if s != 0:
xp = time(h, m, s).strftime("%H:%M:%S")
Expand All @@ -1045,10 +1045,10 @@ def test_time_change_xlim(self):
# verify tick labels
ticks = ax.get_xticks()
labels = ax.get_xticklabels()
for t, l in zip(ticks, labels):
m, s = divmod(int(t), 60)
for _tick, _label in zip(ticks, labels):
m, s = divmod(int(_tick), 60)
h, m = divmod(m, 60)
rs = l.get_text()
rs = _label.get_text()
if len(rs) > 0:
if s != 0:
xp = time(h, m, s).strftime("%H:%M:%S")
Expand All @@ -1062,10 +1062,10 @@ def test_time_change_xlim(self):
# check tick labels again
ticks = ax.get_xticks()
labels = ax.get_xticklabels()
for t, l in zip(ticks, labels):
m, s = divmod(int(t), 60)
for _tick, _label in zip(ticks, labels):
m, s = divmod(int(_tick), 60)
h, m = divmod(m, 60)
rs = l.get_text()
rs = _label.get_text()
if len(rs) > 0:
if s != 0:
xp = time(h, m, s).strftime("%H:%M:%S")
Expand All @@ -1086,13 +1086,13 @@ def test_time_musec(self):
# verify tick labels
ticks = ax.get_xticks()
labels = ax.get_xticklabels()
for t, l in zip(ticks, labels):
m, s = divmod(int(t), 60)
for _tick, _label in zip(ticks, labels):
m, s = divmod(int(_tick), 60)

us = round((t - int(t)) * 1e6)
us = round((_tick - int(_tick)) * 1e6)

h, m = divmod(m, 60)
rs = l.get_text()
rs = _label.get_text()
if len(rs) > 0:
if (us % 1000) != 0:
xp = time(h, m, s, us).strftime("%H:%M:%S.%f")
Expand Down
8 changes: 4 additions & 4 deletions pandas/util/_doctools.py
Expand Up @@ -77,9 +77,9 @@ def plot(self, left, right, labels: Iterable[str] = (), vertical: bool = True):
# left
max_left_cols = max(self._shape(df)[1] for df in left)
max_left_rows = max(self._shape(df)[0] for df in left)
for i, (l, label) in enumerate(zip(left, labels)):
for i, (_left, _label) in enumerate(zip(left, labels)):
ax = fig.add_subplot(gs[i, 0:max_left_cols])
self._make_table(ax, l, title=label, height=1.0 / max_left_rows)
self._make_table(ax, _left, title=_label, height=1.0 / max_left_rows)
# right
ax = plt.subplot(gs[:, max_left_cols:])
self._make_table(ax, right, title="Result", height=1.05 / vcells)
Expand All @@ -90,10 +90,10 @@ def plot(self, left, right, labels: Iterable[str] = (), vertical: bool = True):
gs = gridspec.GridSpec(1, hcells)
# left
i = 0
for df, label in zip(left, labels):
for df, _label in zip(left, labels):
sp = self._shape(df)
ax = fig.add_subplot(gs[0, i : i + sp[1]])
self._make_table(ax, df, title=label, height=height)
self._make_table(ax, df, title=_label, height=height)
i += sp[1]
# right
ax = plt.subplot(gs[0, i:])
Expand Down

0 comments on commit db8af0e

Please sign in to comment.