Do not ignore missing imports in mypy run by default (#2187)
With the global config `ignore_missing_imports` enabled, if mypy does not detect a `py.typed` file for a dependency, it falls back to stubs, but only if those are available. Otherwise it silently assumes `Any` for everything imported from that dependency, meaning no type checking at all.
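As a minimal sketch of what that global setting hid (the dependency name here is hypothetical), consider how mypy treats an unresolved import:

```python
# With a global ignore_missing_imports = true, mypy resolves any import it
# cannot find to Any, so the annotation below is never actually checked.
import some_untyped_dep  # hypothetical package with no stubs and no py.typed

count: int = some_untyped_dep.row_count()  # row_count() is typed Any, so this
                                           # passes even if it returns a str
```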

In this PR:
* remove the global `ignore_missing_imports`
* add the `pandas-stubs` dependency
* for dependencies that produced errors and for which we could not easily find stubs, add them to the ignore list (see `pyproject.toml`); we can iterate on this in the future
* make changes to the code base to fix type errors that previously went unnoticed due to this global config
zundertj committed Dec 28, 2021
1 parent 75395bc commit f6d7a18
Showing 6 changed files with 30 additions and 7 deletions.
2 changes: 2 additions & 0 deletions py-polars/build.requirements.txt
@@ -30,3 +30,5 @@ sphinxcontrib-napoleon
 commonmark==0.9.1
 numpydoc==1.1.0
 
+# Stub files
+pandas-stubs==1.2.0.39
20 changes: 19 additions & 1 deletion py-polars/polars/convert.py
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence, Union
+from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence, Union, overload
 
 import numpy as np
 
@@ -210,6 +210,24 @@ def from_arrow(
         raise ValueError(f"Expected Arrow Table or Array, got {type(a)}.")
 
 
+@overload
+def from_pandas(
+    df: "pd.DataFrame",
+    rechunk: bool = True,
+    nan_to_none: bool = True,
+) -> DataFrame:
+    ...
+
+
+@overload
+def from_pandas(
+    df: Union["pd.Series", "pd.DatetimeIndex"],
+    rechunk: bool = True,
+    nan_to_none: bool = True,
+) -> Series:
+    ...
+
+
 def from_pandas(
     df: Union["pd.DataFrame", "pd.Series", "pd.DatetimeIndex"],
     rechunk: bool = True,
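A short usage sketch (variable names are illustrative) of what these overloads buy at call sites: mypy can now pick the precise return type from the argument type instead of reporting a `Union`:

```python
import pandas as pd
import polars as pl

pd_df = pd.DataFrame({"a": [1, 2, 3]})
pd_s = pd.Series([1, 2, 3], name="a")

df = pl.from_pandas(pd_df)  # mypy now infers polars DataFrame
s = pl.from_pandas(pd_s)    # mypy now infers polars Series

# Without the overloads, both calls shared a single Union return type,
# forcing explicit annotations or `# type: ignore` at call sites, as the
# test change further below illustrates.
```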
2 changes: 1 addition & 1 deletion py-polars/polars/datatypes.py
@@ -8,7 +8,7 @@
 except ImportError:  # pragma: no cover
     _PYARROW_AVAILABLE = False
 
-from _ctypes import _SimpleCData
+from _ctypes import _SimpleCData  # type: ignore
 
 
 class DataType:
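`_ctypes` is a private CPython module that ships no type information, so a line-level `# type: ignore` is the narrowest fix here. A sketch contrasting it with the module-wide alternative used elsewhere in this PR:

```python
# Line-level: silences only this import; without the comment, mypy (no
# longer globally ignoring missing imports) reports something like
# "Cannot find implementation or library stub for module named '_ctypes'".
from _ctypes import _SimpleCData  # type: ignore

# The module-wide alternative is the [[tool.mypy.overrides]] table in
# pyproject.toml (below), which this PR uses for pyarrow, matplotlib,
# fsspec, connectorx, and polars.polars.
```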
4 changes: 2 additions & 2 deletions py-polars/polars/internals/construction.py
@@ -218,11 +218,11 @@ def _pandas_series_to_arrow(
         arr = pa.compute.cast(arr, pa.int64())
         return pa.compute.cast(arr, pa.timestamp("ms"))
     elif dtype == "object" and len(values) > 0:
-        if isinstance(values.iloc[0], str):
+        if isinstance(values.values[0], str):
             return pa.array(values, pa.large_utf8(), from_pandas=nan_to_none)
 
         # array is null array, we set to a float64 array
-        if values.iloc[0] is None and min_len is not None:
+        if values.values[0] is None and min_len is not None:
             return pa.nulls(min_len, pa.float64())
         else:
             return pa.array(values, from_pandas=nan_to_none)
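Both spellings read the first element of the Series; `.values[0]` just goes through the underlying NumPy array. The motivation is presumably that, with pandas-stubs installed, mypy accepts indexing the plain ndarray where it objected to the newly typed `.iloc`. A quick equivalence sketch:

```python
import pandas as pd

values = pd.Series(["x", None], dtype="object")

# .iloc[0] and .values[0] return the same element; .values is the raw
# ndarray backing the Series, so indexing it carries looser static types.
assert values.iloc[0] == values.values[0] == "x"
```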
5 changes: 4 additions & 1 deletion py-polars/pyproject.toml
@@ -22,9 +22,12 @@ connectorx = ["connectorx"]
 profile = "black"
 
 [tool.mypy]
-ignore_missing_imports = true
 disallow_untyped_defs = true
 files = ["polars", "tests"]
 
+[[tool.mypy.overrides]]
+module = ["pyarrow.*", "polars.polars", "matplotlib.*", "fsspec.*", "connectorx"]
+ignore_missing_imports = true
+
 [tool.coverage.report]
 exclude_lines = ["pragma: no cover", "@overload", "@tp.overload"]
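With the per-module overrides in place, missing stubs are tolerated only for the listed modules; any other unresolved import now fails the run. A sketch of the effect (the last module name is hypothetical):

```python
import pyarrow      # listed in the override: missing stubs are tolerated
import fsspec       # likewise tolerated, though its types remain Any
import mystery_dep  # hypothetical untyped package: mypy now errors with
                    # "Cannot find implementation or library stub ..."
```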
4 changes: 2 additions & 2 deletions py-polars/tests/test_interop.py
@@ -10,8 +10,8 @@
 
 def test_from_pandas_datetime() -> None:
     ts = datetime.datetime(2021, 1, 1, 20, 20, 20, 20)
-    s = pd.Series([ts, ts])
-    tmp: pl.DataFrame = pl.from_pandas(s.to_frame("a"))  # type: ignore
+    pl_s = pd.Series([ts, ts])
+    tmp = pl.from_pandas(pl_s.to_frame("a"))
     s = tmp["a"]
     assert s.dt.hour()[0] == 20
     assert s.dt.minute()[0] == 20
