Commit

Autoformat

tqa236 committed Mar 7, 2024
1 parent bcff983 commit d8010f0
Showing 307 changed files with 810 additions and 932 deletions.
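
Most of the 307 files follow a handful of mechanical patterns: a blank line is inserted after the module docstring, bare "..." stub bodies are pulled up onto the signature line, long assignment right-hand sides are wrapped in parentheses, and multiple context managers are grouped in a parenthesized with statement. These changes appear consistent with the stable style of Black 24, though the formatter is not named anywhere in the commit. The toy module below is a hypothetical sketch (not code from this diff) showing the first two patterns in their post-format form:

"""Toy module illustrating the post-format style (hypothetical example)."""

from typing import Literal, overload


# The module docstring above is followed by a single blank line, and each
# @overload stub keeps its bare "..." body on the signature line.
@overload
def fmt(value: int, *, as_text: Literal[True]) -> str: ...
@overload
def fmt(value: int, *, as_text: Literal[False] = ...) -> int: ...


def fmt(value: int, *, as_text: bool = False):
    return str(value) if as_text else value


print(fmt(3), fmt(3, as_text=True))  # prints: 3 3
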
1 change: 1 addition & 0 deletions asv_bench/benchmarks/indexing.py
@@ -3,6 +3,7 @@
lower-level methods directly on Index and subclasses, see index_object.py,
indexing_engine.py, and index_cached.py
"""

from datetime import datetime
import warnings

1 change: 1 addition & 0 deletions asv_bench/benchmarks/libs.py
@@ -5,6 +5,7 @@
If a PR does not edit anything in _libs/, then it is unlikely that the
benchmarks will be affected.
"""

import numpy as np

from pandas._libs.lib import (
1 change: 1 addition & 0 deletions asv_bench/benchmarks/package.py
@@ -1,6 +1,7 @@
"""
Benchmarks for pandas at the package-level.
"""

import subprocess
import sys

1 change: 1 addition & 0 deletions asv_bench/benchmarks/period.py
@@ -2,6 +2,7 @@
Period benchmarks with non-tslibs dependencies. See
benchmarks.tslibs.period for benchmarks that rely only on tslibs.
"""

from pandas import (
    DataFrame,
    Period,
1 change: 1 addition & 0 deletions asv_bench/benchmarks/tslibs/offsets.py
@@ -2,6 +2,7 @@
offsets benchmarks that rely only on tslibs. See benchmarks.offset for
offsets benchmarks that rely on other parts of pandas.
"""

from datetime import datetime

import numpy as np
1 change: 1 addition & 0 deletions asv_bench/benchmarks/tslibs/resolution.py
@@ -17,6 +17,7 @@
df.loc[key] = (val.average, val.stdev)
"""

import numpy as np

try:
1 change: 1 addition & 0 deletions asv_bench/benchmarks/tslibs/timedelta.py
@@ -2,6 +2,7 @@
Timedelta benchmarks that rely only on tslibs. See benchmarks.timedeltas for
Timedelta benchmarks that rely on other parts of pandas.
"""

import datetime

import numpy as np
1 change: 1 addition & 0 deletions asv_bench/benchmarks/tslibs/tslib.py
@@ -15,6 +15,7 @@
val = %timeit -o tr.time_ints_to_pydatetime(box, size, tz)
df.loc[key] = (val.average, val.stdev)
"""

from datetime import (
    timedelta,
    timezone,
1 change: 1 addition & 0 deletions doc/make.py
@@ -11,6 +11,7 @@
$ python make.py html
$ python make.py latex
"""

import argparse
import csv
import importlib
1 change: 1 addition & 0 deletions pandas/_config/__init__.py
@@ -5,6 +5,7 @@
importing `dates` and `display` ensures that keys needed by _libs
are initialized.
"""

__all__ = [
"config",
"detect_console_encoding",
6 changes: 2 additions & 4 deletions pandas/_config/config.py
@@ -688,15 +688,13 @@ def _build_option_description(k: str) -> str:
@overload
def pp_options_list(
    keys: Iterable[str], *, width: int = ..., _print: Literal[False] = ...
) -> str:
    ...
) -> str: ...


@overload
def pp_options_list(
    keys: Iterable[str], *, width: int = ..., _print: Literal[True]
) -> None:
    ...
) -> None: ...


def pp_options_list(
1 change: 1 addition & 0 deletions pandas/_config/dates.py
@@ -1,6 +1,7 @@
"""
config for datetime formatting
"""

from __future__ import annotations

from pandas._config import config as cf
1 change: 1 addition & 0 deletions pandas/_config/localization.py
@@ -3,6 +3,7 @@
Name `localization` is chosen to avoid overlap with builtin `locale` module.
"""

from __future__ import annotations

from contextlib import contextmanager
1 change: 1 addition & 0 deletions pandas/_testing/_hypothesis.py
@@ -1,6 +1,7 @@
"""
Hypothesis data generator helpers.
"""

from datetime import datetime

from hypothesis import strategies as st
1 change: 1 addition & 0 deletions pandas/_testing/compat.py
@@ -1,6 +1,7 @@
"""
Helpers for sharing tests between DataFrame/Series
"""

from __future__ import annotations

from typing import TYPE_CHECKING
30 changes: 10 additions & 20 deletions pandas/_typing.py
@@ -140,30 +140,22 @@

class SequenceNotStr(Protocol[_T_co]):
    @overload
    def __getitem__(self, index: SupportsIndex, /) -> _T_co:
        ...
    def __getitem__(self, index: SupportsIndex, /) -> _T_co: ...

    @overload
    def __getitem__(self, index: slice, /) -> Sequence[_T_co]:
        ...
    def __getitem__(self, index: slice, /) -> Sequence[_T_co]: ...

    def __contains__(self, value: object, /) -> bool:
        ...
    def __contains__(self, value: object, /) -> bool: ...

    def __len__(self) -> int:
        ...
    def __len__(self) -> int: ...

    def __iter__(self) -> Iterator[_T_co]:
        ...
    def __iter__(self) -> Iterator[_T_co]: ...

    def index(self, value: Any, start: int = ..., stop: int = ..., /) -> int:
        ...
    def index(self, value: Any, start: int = ..., stop: int = ..., /) -> int: ...

    def count(self, value: Any, /) -> int:
        ...
    def count(self, value: Any, /) -> int: ...

    def __reversed__(self) -> Iterator[_T_co]:
        ...
    def __reversed__(self) -> Iterator[_T_co]: ...


ListLike = Union[AnyArrayLike, SequenceNotStr, range]
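
The SequenceNotStr rewrite above is purely cosmetic: a bare "..." body is an ordinary expression statement, so placing it on the signature line produces the same AST. A small standalone check (hypothetical, not part of the diff):

import ast

# Parse both spellings of a stub and compare the function bodies.
old = ast.parse("def f() -> int:\n    ...\n").body[0]
new = ast.parse("def f() -> int: ...").body[0]
print(ast.dump(old.body[0]) == ast.dump(new.body[0]))  # True
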
@@ -317,13 +309,11 @@ def flush(self) -> Any:


class ReadPickleBuffer(ReadBuffer[bytes], Protocol):
    def readline(self) -> bytes:
        ...
    def readline(self) -> bytes: ...


class WriteExcelBuffer(WriteBuffer[bytes], Protocol):
    def truncate(self, size: int | None = ...) -> int:
        ...
    def truncate(self, size: int | None = ...) -> int: ...


class ReadCsvBuffer(ReadBuffer[AnyStr_co], Protocol):
6 changes: 3 additions & 3 deletions pandas/_version.py
@@ -358,9 +358,9 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces[
                "error"
            ] = f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
            pieces["error"] = (
                f"tag '{full_tag}' doesn't start with prefix '{tag_prefix}'"
            )
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix) :]

3 changes: 2 additions & 1 deletion pandas/api/__init__.py
@@ -1,4 +1,5 @@
""" public toolkit API """
"""public toolkit API"""

from pandas.api import (
    extensions,
    indexers,
1 change: 1 addition & 0 deletions pandas/arrays/__init__.py
@@ -3,6 +3,7 @@
See :ref:`extending.extension-types` for more.
"""

from pandas.core.arrays import (
    ArrowExtensionArray,
    ArrowStringArray,
1 change: 1 addition & 0 deletions pandas/compat/__init__.py
@@ -7,6 +7,7 @@
Other items:
* platform checker
"""

from __future__ import annotations

import os
6 changes: 2 additions & 4 deletions pandas/compat/_optional.py
@@ -91,8 +91,7 @@ def import_optional_dependency(
    min_version: str | None = ...,
    *,
    errors: Literal["raise"] = ...,
) -> types.ModuleType:
    ...
) -> types.ModuleType: ...


@overload
@@ -102,8 +101,7 @@ def import_optional_dependency(
    min_version: str | None = ...,
    *,
    errors: Literal["warn", "ignore"],
) -> types.ModuleType | None:
    ...
) -> types.ModuleType | None: ...


def import_optional_dependency(
3 changes: 2 additions & 1 deletion pandas/compat/numpy/__init__.py
@@ -1,4 +1,5 @@
""" support numpy compatibility across versions """
"""support numpy compatibility across versions"""

import warnings

import numpy as np
7 changes: 3 additions & 4 deletions pandas/compat/numpy/function.py
@@ -15,6 +15,7 @@
methods that are spread throughout the codebase. This module will make it
easier to adjust to future upstream changes in the analogous numpy signatures.
"""

from __future__ import annotations

from typing import (
@@ -179,13 +180,11 @@ def validate_argsort_with_ascending(ascending: bool | int | None, args, kwargs)


@overload
def validate_clip_with_axis(axis: ndarray, args, kwargs) -> None:
    ...
def validate_clip_with_axis(axis: ndarray, args, kwargs) -> None: ...


@overload
def validate_clip_with_axis(axis: AxisNoneT, args, kwargs) -> AxisNoneT:
    ...
def validate_clip_with_axis(axis: AxisNoneT, args, kwargs) -> AxisNoneT: ...


def validate_clip_with_axis(
1 change: 1 addition & 0 deletions pandas/compat/pickle_compat.py
@@ -1,6 +1,7 @@
"""
Pickle compatibility to pandas version 1.0
"""

from __future__ import annotations

import contextlib
2 changes: 1 addition & 1 deletion pandas/compat/pyarrow.py
@@ -1,4 +1,4 @@
""" support pyarrow compatibility across versions """
"""support pyarrow compatibility across versions"""

from __future__ import annotations

1 change: 1 addition & 0 deletions pandas/conftest.py
@@ -17,6 +17,7 @@
- Dtypes
- Misc
"""

from __future__ import annotations

from collections import abc
1 change: 1 addition & 0 deletions pandas/core/_numba/kernels/mean_.py
@@ -6,6 +6,7 @@
Mirrors pandas/_libs/window/aggregation.pyx
"""

from __future__ import annotations

from typing import TYPE_CHECKING
1 change: 1 addition & 0 deletions pandas/core/_numba/kernels/min_max_.py
@@ -6,6 +6,7 @@
Mirrors pandas/_libs/window/aggregation.pyx
"""

from __future__ import annotations

from typing import TYPE_CHECKING
1 change: 1 addition & 0 deletions pandas/core/_numba/kernels/sum_.py
@@ -6,6 +6,7 @@
Mirrors pandas/_libs/window/aggregation.pyx
"""

from __future__ import annotations

from typing import (
1 change: 1 addition & 0 deletions pandas/core/_numba/kernels/var_.py
@@ -6,6 +6,7 @@
Mirrors pandas/_libs/window/aggregation.pyx
"""

from __future__ import annotations

from typing import TYPE_CHECKING
1 change: 1 addition & 0 deletions pandas/core/accessor.py
@@ -4,6 +4,7 @@
that can be mixed into or pinned onto other pandas classes.
"""

from __future__ import annotations

from typing import (
1 change: 1 addition & 0 deletions pandas/core/algorithms.py
@@ -2,6 +2,7 @@
Generic data algorithms. This module is experimental at the moment and not
intended for public consumption
"""

from __future__ import annotations

import decimal
7 changes: 4 additions & 3 deletions pandas/core/apply.py
@@ -1300,9 +1300,10 @@ def apply_with_numba(self) -> dict[int, Any]:

        # Convert from numba dict to regular dict
        # Our isinstance checks in the df constructor don't pass for numbas typed dict
        with set_numba_data(self.obj.index) as index, set_numba_data(
            self.columns
        ) as columns:
        with (
            set_numba_data(self.obj.index) as index,
            set_numba_data(self.columns) as columns,
        ):
            res = dict(nb_func(self.values, columns, index))

        return res
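
The pandas/core/apply.py hunk above switches to a parenthesized with statement, a form Python supports natively from 3.10 onward for grouping several context managers. A self-contained sketch using contextlib.nullcontext as a stand-in for set_numba_data (which is not reproduced here):

from contextlib import nullcontext

# Both managers are entered together; the trailing comma after the last
# manager keeps one manager per line under the new formatting.
with (
    nullcontext("index") as index,
    nullcontext("columns") as columns,
):
    print(index, columns)
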
1 change: 1 addition & 0 deletions pandas/core/array_algos/masked_reductions.py
@@ -2,6 +2,7 @@
masked_reductions.py is for reduction algorithms using a mask-based approach
for missing values.
"""

from __future__ import annotations

from typing import (
1 change: 1 addition & 0 deletions pandas/core/array_algos/putmask.py
@@ -1,6 +1,7 @@
"""
EA-compatible analogue to np.putmask
"""

from __future__ import annotations

from typing import (
1 change: 1 addition & 0 deletions pandas/core/array_algos/replace.py
@@ -1,6 +1,7 @@
"""
Methods used by Block.replace and related methods.
"""

from __future__ import annotations

import operator
6 changes: 2 additions & 4 deletions pandas/core/array_algos/take.py
@@ -41,8 +41,7 @@ def take_nd(
    axis: AxisInt = ...,
    fill_value=...,
    allow_fill: bool = ...,
) -> np.ndarray:
    ...
) -> np.ndarray: ...


@overload
@@ -52,8 +51,7 @@ def take_nd(
    axis: AxisInt = ...,
    fill_value=...,
    allow_fill: bool = ...,
) -> ArrayLike:
    ...
) -> ArrayLike: ...


def take_nd(