Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
47 changes: 47 additions & 0 deletions asv_bench/benchmarks/coarsen.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import numpy as np

import xarray as xr

from . import randn

# Sizes chosen to test the coarsen padding optimization: one pair of
# dimensions that forces a pad step, and one pair that avoids it entirely.
nx_padded = 4003  # Not divisible by 10 - requires padding
ny_padded = 4007  # Not divisible by 10 - requires padding

nx_exact = 4000  # Divisible by 10 - no padding needed
ny_exact = 4000  # Divisible by 10 - no padding needed

window = 10  # coarsening window applied along both x and y


class Coarsen:
    """Benchmarks comparing ``coarsen()`` on arrays that do and do not need padding.

    Both variants use ``boundary="pad"`` so the identical code path is timed;
    for the exact-multiple array the pad step is a no-op.
    """

    def setup(self, *args, **kwargs):
        def build(nx, ny):
            # 2D random DataArray with simple integer coordinates.
            return xr.DataArray(
                randn((nx, ny)),
                dims=("x", "y"),
                coords={"x": np.arange(nx), "y": np.arange(ny)},
            )

        # Case 1: both dimension sizes are not multiples of `window`.
        self.da_padded = build(nx_padded, ny_padded)
        # Case 2: both dimension sizes divide evenly by `window`.
        self.da_exact = build(nx_exact, ny_exact)

    def time_coarsen_with_padding(self):
        """Coarsen 2D array where both dimensions require padding."""
        self.da_padded.coarsen(x=window, y=window, boundary="pad").mean()

    def time_coarsen_no_padding(self):
        """Coarsen 2D array where dimensions are exact multiples (no padding)."""
        self.da_exact.coarsen(x=window, y=window, boundary="pad").mean()

    def peakmem_coarsen_with_padding(self):
        """Peak memory for coarsening with padding on both dimensions."""
        self.da_padded.coarsen(x=window, y=window, boundary="pad").mean()

    def peakmem_coarsen_no_padding(self):
        """Peak memory for coarsening without padding."""
        self.da_exact.coarsen(x=window, y=window, boundary="pad").mean()
2 changes: 2 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,8 @@ Performance
- Speedup and reduce memory usage of :py:func:`concat`. Magnitude of improvement scales
with size of the concatenation dimension. By `Deepak Cherian <https://github.com/dcherian>`_.
:issue:`10864` :pull:`10866`.
- Speedup and reduce memory usage when coarsening along multiple dimensions.
By `Deepak Cherian <https://github.com/dcherian>`_. :pull:`10921`.

Documentation
~~~~~~~~~~~~~
Expand Down
11 changes: 6 additions & 5 deletions xarray/core/variable.py
Original file line number Diff line number Diff line change
Expand Up @@ -2280,6 +2280,7 @@ def coarsen_reshape(self, windows, boundary, side):
)

variable = self
pad_widths = {}
for d, window in windows.items():
# trim or pad the object
size = variable.shape[self._get_axis_num(d)]
Expand All @@ -2300,16 +2301,16 @@ def coarsen_reshape(self, windows, boundary, side):
pad = window * n - size
if pad < 0:
pad += window
if side[d] == "left":
pad_width = {d: (0, pad)}
else:
pad_width = {d: (pad, 0)}
variable = variable.pad(pad_width, mode="constant")
elif pad == 0:
continue
pad_widths[d] = (0, pad) if side[d] == "left" else (pad, 0)
else:
raise TypeError(
f"{boundary[d]} is invalid for boundary. Valid option is 'exact', "
"'trim' and 'pad'"
)
if pad_widths:
variable = variable.pad(pad_widths, mode="constant")

shape = []
axes = []
Expand Down
Loading