6 changes: 4 additions & 2 deletions pandas/core/ops/dispatch.py
@@ -71,8 +71,10 @@ def should_series_dispatch(left, right, op):
         # numpy integer dtypes as timedelta64 dtypes in this scenario
         return True
 
-    if is_datetime64_dtype(ldtype) and is_object_dtype(rdtype):
-        # in particular case where right is an array of DateOffsets
+    if (is_datetime64_dtype(ldtype) and is_object_dtype(rdtype)) or (
+        is_datetime64_dtype(rdtype) and is_object_dtype(ldtype)
+    ):
+        # in particular case where one is an array of DateOffsets
         return True
 
     return False
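
For orientation, a minimal user-level sketch of the case the widened check targets (illustrative only, not part of the diff, and assuming a pandas build that includes this change): with the symmetric condition, a DataFrame holding an object-dtype column of DateOffsets dispatches to column-wise Series ops whether it sits on the left or the right of a datetime64 DataFrame.

# Illustrative sketch, not from the PR: both operand orders now hit the
# DateOffset branch of should_series_dispatch, so the op is applied
# column by column. A PerformanceWarning about non-vectorized DateOffset
# addition is still expected.
import pandas as pd

dates = pd.DataFrame({"ts": pd.to_datetime(["2000-01-15", "2000-02-15"])})
offsets = pd.DataFrame({"ts": [pd.offsets.MonthEnd(), pd.offsets.Day(2)]})

print(dates + offsets)   # datetime64 left, object (DateOffsets) right
print(offsets + dates)   # object left, datetime64 right -- the newly covered case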
12 changes: 12 additions & 0 deletions pandas/tests/arithmetic/conftest.py
@@ -17,6 +17,18 @@ def id_func(x):
 
 
 # ------------------------------------------------------------------
+@pytest.fixture(
+    params=[
+        ("foo", None, None),
+        ("Egon", "Venkman", None),
+        ("NCC1701D", "NCC1701D", "NCC1701D"),
+    ]
+)
+def names(request):
+    """
+    A 3-tuple of names, the first two for operands, the last for a result.
+    """
+    return request.param
 
 
 @pytest.fixture(params=[1, np.array(1, dtype=np.int64)])
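To show how the shared fixture is meant to be consumed, here is a hypothetical test (illustrative only; `test_add_propagates_name` is not part of this PR). pytest injects the fixture by argument name, so the body runs once per 3-tuple.

# Hypothetical test living under pandas/tests/arithmetic/, shown only to
# illustrate the fixture contract: names[0] and names[1] name the operands,
# names[2] is the expected result name (None when the operand names differ).
import pandas as pd
import pandas._testing as tm


def test_add_propagates_name(names):
    left = pd.Series([1, 2], name=names[0])
    right = pd.Series([3, 4], name=names[1])

    result = left + right

    expected = pd.Series([4, 6], name=names[2])
    tm.assert_series_equal(result, expected)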
81 changes: 26 additions & 55 deletions pandas/tests/arithmetic/test_datetime64.py
@@ -1438,64 +1438,41 @@ def test_dt64arr_add_sub_DateOffset(self, box_with_array):
tm.assert_equal(result, exp)
tm.assert_equal(result2, exp)

# TODO: __sub__, __rsub__
def test_dt64arr_add_mixed_offset_array(self, box_with_array):
# GH#10699
# array of offsets
s = DatetimeIndex([Timestamp("2000-1-1"), Timestamp("2000-2-1")])
s = tm.box_expected(s, box_with_array)

warn = None if box_with_array is pd.DataFrame else PerformanceWarning
with tm.assert_produces_warning(warn):
other = pd.Index([pd.offsets.DateOffset(years=1), pd.offsets.MonthEnd()])
other = tm.box_expected(other, box_with_array)
result = s + other
exp = DatetimeIndex([Timestamp("2001-1-1"), Timestamp("2000-2-29")])
exp = tm.box_expected(exp, box_with_array)
tm.assert_equal(result, exp)

# same offset
other = pd.Index(
@pytest.mark.parametrize(
"other",
[
np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)]),
np.array([pd.offsets.DateOffset(years=1), pd.offsets.MonthEnd()]),
np.array( # matching offsets
[pd.offsets.DateOffset(years=1), pd.offsets.DateOffset(years=1)]
)
other = tm.box_expected(other, box_with_array)
result = s + other
exp = DatetimeIndex([Timestamp("2001-1-1"), Timestamp("2001-2-1")])
exp = tm.box_expected(exp, box_with_array)
tm.assert_equal(result, exp)

# TODO: overlap with test_dt64arr_add_mixed_offset_array?
def test_dt64arr_add_sub_offset_ndarray(self, tz_naive_fixture, box_with_array):
),
],
)
@pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
@pytest.mark.parametrize("box_other", [True, False])
def test_dt64arr_add_sub_offset_array(
self, tz_naive_fixture, box_with_array, box_other, op, other
):
# GH#18849
# GH#10699 array of offsets

tz = tz_naive_fixture
dti = pd.date_range("2017-01-01", periods=2, tz=tz)
dtarr = tm.box_expected(dti, box_with_array)

other = np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)])
expected = DatetimeIndex([op(dti[n], other[n]) for n in range(len(dti))])
expected = tm.box_expected(expected, box_with_array)

if box_other:
other = tm.box_expected(other, box_with_array)

warn = PerformanceWarning
if box_with_array is pd.DataFrame and tz is not None:
if box_with_array is pd.DataFrame and not (tz is None and not box_other):
warn = None

with tm.assert_produces_warning(warn):
res = dtarr + other
expected = DatetimeIndex(
[dti[n] + other[n] for n in range(len(dti))], name=dti.name, freq="infer"
)
expected = tm.box_expected(expected, box_with_array)
tm.assert_equal(res, expected)

with tm.assert_produces_warning(warn):
res2 = other + dtarr
tm.assert_equal(res2, expected)
res = op(dtarr, other)

with tm.assert_produces_warning(warn):
res = dtarr - other
expected = DatetimeIndex(
[dti[n] - other[n] for n in range(len(dti))], name=dti.name, freq="infer"
)
expected = tm.box_expected(expected, box_with_array)
tm.assert_equal(res, expected)

@pytest.mark.parametrize(
@@ -1905,9 +1882,9 @@ def test_dt64_mul_div_numeric_invalid(self, one, dt64_series):
 
     # TODO: parametrize over box
     @pytest.mark.parametrize("op", ["__add__", "__radd__", "__sub__", "__rsub__"])
-    @pytest.mark.parametrize("tz", [None, "Asia/Tokyo"])
-    def test_dt64_series_add_intlike(self, tz, op):
+    def test_dt64_series_add_intlike(self, tz_naive_fixture, op):
         # GH#19123
+        tz = tz_naive_fixture
         dti = pd.DatetimeIndex(["2016-01-02", "2016-02-03", "NaT"], tz=tz)
         ser = Series(dti)
 
@@ -2376,12 +2353,9 @@ def test_ufunc_coercions(self):
         tm.assert_index_equal(result, exp)
         assert result.freq == exp.freq
 
-    @pytest.mark.parametrize(
-        "names", [("foo", None, None), ("baz", "bar", None), ("bar", "bar", "bar")]
-    )
-    @pytest.mark.parametrize("tz", [None, "America/Chicago"])
-    def test_dti_add_series(self, tz, names):
+    def test_dti_add_series(self, tz_naive_fixture, names):
         # GH#13905
+        tz = tz_naive_fixture
         index = DatetimeIndex(
             ["2016-06-28 05:30", "2016-06-28 05:31"], tz=tz, name=names[0]
         )
@@ -2403,9 +2377,6 @@ def test_dti_add_series(self, tz, names):
         tm.assert_index_equal(result4, expected)
 
     @pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
-    @pytest.mark.parametrize(
-        "names", [(None, None, None), ("foo", "bar", None), ("foo", "foo", "foo")]
-    )
     def test_dti_addsub_offset_arraylike(
         self, tz_naive_fixture, names, op, index_or_series
     ):
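A side note on the `op` parametrization used in these tests: `roperator.radd` is pandas' reflected-add helper, so parametrizing over `operator.add`, `roperator.radd` and `operator.sub` lets one test body cover `x + y`, `y + x` and `x - y`. A minimal sketch of the idea (the local `radd` below mirrors the internal helper and is defined here only for illustration):

import operator


def radd(left, right):
    # same shape as pandas' internal roperator.radd: apply + with the
    # arguments swapped, i.e. the reflected operation
    return right + left


assert operator.add(2, 3) == 5   # 2 + 3
assert radd(2, 3) == 5           # 3 + 2
assert operator.sub(5, 3) == 2   # 5 - 3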
31 changes: 2 additions & 29 deletions pandas/tests/arithmetic/test_timedelta64.py
@@ -1195,18 +1195,10 @@ def test_td64arr_sub_td64_array(self, box_with_array):
         result = tdarr - tdi
         tm.assert_equal(result, expected)
 
-    @pytest.mark.parametrize(
-        "names",
-        [
-            (None, None, None),
-            ("Egon", "Venkman", None),
-            ("NCC1701D", "NCC1701D", "NCC1701D"),
-        ],
-    )
     def test_td64arr_add_sub_tdi(self, box, names):
         # GH#17250 make sure result dtype is correct
         # GH#19043 make sure names are propagated correctly
-        if box is pd.DataFrame and names[1] == "Venkman":
+        if box is pd.DataFrame and names[1] != names[0]:
             pytest.skip(
                 "Name propagation for DataFrame does not behave like "
                 "it does for Index/Series"
@@ -1307,12 +1299,9 @@ def test_td64arr_sub_timedeltalike(self, two_hours, box_with_array):
     # ------------------------------------------------------------------
     # __add__/__sub__ with DateOffsets and arrays of DateOffsets
 
-    @pytest.mark.parametrize(
-        "names", [(None, None, None), ("foo", "bar", None), ("foo", "foo", "foo")]
-    )
     def test_td64arr_add_offset_index(self, names, box):
         # GH#18849, GH#19744
-        if box is pd.DataFrame and names[1] == "bar":
+        if box is pd.DataFrame and names[1] != names[0]:
             pytest.skip(
                 "Name propagation for DataFrame does not behave like "
                 "it does for Index/Series"
@@ -2041,14 +2030,6 @@ def test_td64arr_div_numeric_array(self, box_with_array, vector, any_real_dtype)
         with pytest.raises(TypeError, match=pattern):
             vector.astype(object) / tdser
 
-    @pytest.mark.parametrize(
-        "names",
-        [
-            (None, None, None),
-            ("Egon", "Venkman", None),
-            ("NCC1701D", "NCC1701D", "NCC1701D"),
-        ],
-    )
     def test_td64arr_mul_int_series(self, box_df_fail, names):
         # GH#19042 test for correct name attachment
         box = box_df_fail  # broadcasts along wrong axis, but doesn't raise
@@ -2078,14 +2059,6 @@ def test_td64arr_mul_int_series(self, box_df_fail, names):
         tm.assert_equal(result, expected)
 
     # TODO: Should we be parametrizing over types for `ser` too?
-    @pytest.mark.parametrize(
-        "names",
-        [
-            (None, None, None),
-            ("Egon", "Venkman", None),
-            ("NCC1701D", "NCC1701D", "NCC1701D"),
-        ],
-    )
     def test_float_series_rdiv_td64arr(self, box_with_array, names):
         # GH#19042 test for correct name attachment
         # TODO: the direct operation TimedeltaIndex / Series still
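Finally, a note on the `tm.box_expected` calls that appear throughout these tests: it is a private testing helper that re-wraps the same values in whichever container a test's `box` parameter names (Index, Series, DataFrame, or array), so a single expected value can be compared against every box. A rough sketch of its use (private API, subject to change):

import pandas as pd
import pandas._testing as tm  # private testing helpers, not public API

dti = pd.date_range("2017-01-01", periods=2)

as_series = tm.box_expected(dti, pd.Series)     # same values as a Series
as_frame = tm.box_expected(dti, pd.DataFrame)   # same values inside a DataFrame

print(type(as_series), type(as_frame))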