Skip to content

Commit

Permalink
Add DataFrame constructor to docs (#2318)
Browse files · Browse the repository at this point in the history
  • Loading branch information
zundertj committed Jan 8, 2022
1 parent 07b350e commit eee62da
Show file tree
Hide file tree
Showing 4 changed files with 16 additions and 12 deletions.
2 changes: 2 additions & 0 deletions py-polars/docs/source/reference/dataframe.rst
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ DataFrame

Constructor
-----------
.. autosummary::
:toctree: api/

DataFrame

Expand Down
8 changes: 4 additions & 4 deletions py-polars/polars/internals/frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,7 @@ def _from_pandas(
)

@staticmethod
def read_csv(
def _read_csv(
file: Union[str, BinaryIO, bytes],
has_header: bool = True,
columns: Optional[Union[List[int], List[str]]] = None,
Expand Down Expand Up @@ -548,7 +548,7 @@ def read_csv(
return self

@staticmethod
def read_parquet(
def _read_parquet(
file: Union[str, BinaryIO],
columns: Optional[Union[List[int], List[str]]] = None,
n_rows: Optional[int] = None,
Expand Down Expand Up @@ -588,7 +588,7 @@ def read_parquet(
return self

@staticmethod
def read_ipc(
def _read_ipc(
file: Union[str, BinaryIO],
columns: Optional[Union[List[int], List[str]]] = None,
n_rows: Optional[int] = None,
Expand Down Expand Up @@ -629,7 +629,7 @@ def read_ipc(
return self

@staticmethod
def read_json(file: Union[str, BytesIO]) -> "DataFrame":
def _read_json(file: Union[str, BytesIO]) -> "DataFrame":
"""
Read into a DataFrame from JSON format.
Expand Down
8 changes: 4 additions & 4 deletions py-polars/polars/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -384,7 +384,7 @@ def read_csv(
}

with _prepare_file_arg(file, **storage_options) as data:
df = DataFrame.read_csv(
df = DataFrame._read_csv(
file=data,
has_header=has_header,
columns=columns if columns else projection,
Expand Down Expand Up @@ -697,7 +697,7 @@ def read_ipc(
tbl = pa.feather.read_table(data, memory_map=memory_map, columns=columns)
return DataFrame._from_arrow(tbl)

return DataFrame.read_ipc(
return DataFrame._read_ipc(
data,
columns=columns,
n_rows=n_rows,
Expand Down Expand Up @@ -772,7 +772,7 @@ def read_parquet(
)
)

return DataFrame.read_parquet(
return DataFrame._read_parquet(
source_prep, columns=columns, n_rows=n_rows, parallel=parallel
)

Expand All @@ -786,7 +786,7 @@ def read_json(source: Union[str, BytesIO]) -> DataFrame:
source
Path to a file or a file like object.
"""
return DataFrame.read_json(source)
return DataFrame._read_json(source)


def read_sql(
Expand Down
10 changes: 6 additions & 4 deletions py-polars/tests/test_df.py
Original file line number Diff line number Diff line change
Expand Up @@ -633,13 +633,15 @@ def test_file_buffer() -> None:
f = BytesIO()
f.write(b"1,2,3,4,5,6\n7,8,9,10,11,12")
f.seek(0)
df = pl.DataFrame.read_csv(f, has_header=False)
df = pl.read_csv(f, has_header=False)
assert df.shape == (2, 6)
f.seek(0)

f = BytesIO()
f.write(b"1,2,3,4,5,6\n7,8,9,10,11,12")
f.seek(0)
# check that this does not fail in the TryClone and Length impls in file.rs
with pytest.raises(RuntimeError) as e:
df.read_parquet(f)
pl.read_parquet(f)
assert "Invalid Parquet file" in str(e.value)


Expand Down Expand Up @@ -872,7 +874,7 @@ def test_read_csv_categorical() -> None:
f = BytesIO()
f.write(b"col1,col2,col3,col4,col5,col6\n'foo',2,3,4,5,6\n'bar',8,9,10,11,12")
f.seek(0)
df = pl.DataFrame.read_csv(f, has_header=True, dtypes={"col1": pl.Categorical})
df = pl.read_csv(f, has_header=True, dtypes={"col1": pl.Categorical})
assert df["col1"].dtype == pl.Categorical


Expand Down

0 comments on commit eee62da

Please sign in to comment.