fix: conform epoch_seconds impls to expression return type
cpcloud committed Mar 7, 2022
1 parent 8467f15 commit 18a70f1
Showing 4 changed files with 9 additions and 6 deletions.
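For context: the "expression return type" in the title is the dtype that ibis itself declares for epoch_seconds, which each backend below is made to honor. A minimal sketch for checking it, assuming the ibis API of this era (the unbound table is hypothetical):

import ibis

# hypothetical unbound table, used only to inspect the declared dtype
t = ibis.table([("timestamp_col", "timestamp")], name="t")
expr = t.timestamp_col.epoch_seconds()

print(expr.type())  # expected: int32, the type the backends now return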
ibis/backends/pandas/execution/temporal.py (5 additions, 1 deletion)
@@ -55,7 +55,11 @@ def execute_extract_millisecond_series(op, data, **kwargs):
 
 @execute_node.register(ops.ExtractEpochSeconds, (pd.Timestamp, pd.Series))
 def execute_epoch_seconds(op, data, **kwargs):
-    return data.astype('int64') // int(1e9)
+    # older versions of dask do not have a view method, so use astype
+    # instead
+    convert = getattr(data, "view", data.astype)
+    series = convert(np.int64)
+    return (series // 1_000_000_000).astype(np.int32)
 
 
 @execute_node.register(
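The view-or-astype fallback above can be tried outside ibis. A minimal sketch, assuming a pandas of this era where Series.view still exists (the sample timestamps are made up):

import numpy as np
import pandas as pd

data = pd.Series(pd.to_datetime(["1970-01-01 00:00:01", "2022-03-07 12:00:00"]))

# prefer .view, fall back to .astype for objects (e.g. older dask Series)
# that do not provide it
convert = getattr(data, "view", data.astype)
seconds = (convert(np.int64) // 1_000_000_000).astype(np.int32)

print(seconds.tolist())  # [1, 1646654400]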
ibis/backends/postgres/registry.py (1 addition, 1 deletion)
@@ -694,7 +694,7 @@ def _day_of_week_name(t, expr):
     ops.ExtractDay: _extract('day'),
     ops.ExtractDayOfYear: _extract('doy'),
     ops.ExtractQuarter: _extract('quarter'),
-    ops.ExtractEpochSeconds: _extract('epoch', sa.BigInteger),
+    ops.ExtractEpochSeconds: _extract('epoch', sa.Integer),
     ops.ExtractWeekOfYear: _extract('week'),
     ops.ExtractHour: _extract('hour'),
     ops.ExtractMinute: _extract('minute'),
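_extract itself is not shown in this diff; the sketch below only illustrates, with plain SQLAlchemy, the narrower cast that passing sa.Integer instead of sa.BigInteger produces (the column name is hypothetical):

import sqlalchemy as sa

ts = sa.column("timestamp_col")

# an 'epoch' extract wrapped in a 32-bit integer cast
expr = sa.cast(sa.extract("epoch", ts), sa.Integer)
print(expr)  # CAST(EXTRACT(epoch FROM timestamp_col) AS INTEGER)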
ibis/backends/pyspark/compiler.py (1 addition, 1 deletion)
@@ -1373,7 +1373,7 @@ def _extract_component_from_datetime(
 ):
     op = expr.op()
     date_col = t.translate(op.arg, scope, timecontext)
-    return extract_fn(date_col)
+    return extract_fn(date_col).cast('integer')
 
 
 @compiles(ops.ExtractYear)
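The concrete extract_fn is not visible in this hunk; the sketch below just shows the effect of the added .cast('integer') on a Spark column, using unix_timestamp as a stand-in that yields a 64-bit long by default:

from pyspark.sql import SparkSession, functions as F

spark = SparkSession.builder.master("local[1]").getOrCreate()
df = spark.createDataFrame([("2022-03-07 12:00:00",)], ["ts"])

epoch = F.unix_timestamp(F.to_timestamp("ts"))  # LongType without the cast
df.select(epoch.cast("integer").alias("epoch_seconds")).printSchema()
# root
#  |-- epoch_seconds: integer (nullable = true)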
ibis/backends/tests/test_temporal.py (2 additions, 3 deletions)
@@ -64,14 +64,13 @@ def test_timestamp_extract_epoch_seconds(backend, alltypes, df):
     expr = alltypes.timestamp_col.epoch_seconds()
     result = expr.execute()
 
-    result = result.astype(backend.epoch_seconds_return_type)
     expected = backend.default_series_rename(
-        df.timestamp_col.astype("int64") // 1_000_000_000
+        (df.timestamp_col.view("int64") // 1_000_000_000).astype("int32")
     )
     backend.assert_series_equal(result, expected)
 
 
-@pytest.mark.notimpl(["datafusion"])
+@pytest.mark.notimpl(["datafusion", "sqlite"])
 def test_timestamp_extract_week_of_year(backend, alltypes, df):
     expr = alltypes.timestamp_col.week_of_year()
     result = expr.execute()
