Keep temp and dest region arrays the same shape in merge
Resolves #2202
Sean Gillies committed Jun 11, 2021
1 parent 1a21888 commit 0aeb2bc
Showing 4 changed files with 72 additions and 24 deletions.
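
The essence of the change: the block read from a source ("temp") can come out one row or column larger than the destination region it is copied into when window shapes and offsets are rounded independently, so the fix trims temp to the region's shape before copying. A minimal sketch of that idea in plain NumPy (hypothetical array sizes, not the actual merge internals):

```python
import numpy as np

# Destination mosaic and the region a source block should land in.
dest = np.zeros((1, 5, 5))
region = dest[:, 2:5, 2:5]  # 3 x 3 region near the mosaic edge

# Block read from the source; rounding has left it one pixel too large.
temp = np.ma.masked_array(np.ones((1, 4, 4)), mask=np.zeros((1, 4, 4), bool))

# Trim temp to the region's shape so element-wise copying is valid (issue #2202),
# then copy only where the region still holds nodata and temp is unmasked,
# mirroring the spirit of merge's "first" method.
temp = temp[:, : region.shape[1], : region.shape[2]]
np.copyto(region, temp.data, where=(region == 0) & ~np.ma.getmaskarray(temp))
print(dest)
```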
5 changes: 5 additions & 0 deletions CHANGES.txt
@@ -1,6 +1,11 @@
Changes
=======

1.2.5 (TBD)
-----------

- Prevent a rare merge error (#2202).

1.2.4 (2021-05-31)
------------------

2 changes: 1 addition & 1 deletion rasterio/__init__.py
@@ -40,7 +40,7 @@
import rasterio.path

__all__ = ['band', 'open', 'pad', 'Env']
__version__ = "1.2.4"
__version__ = "1.2.5dev"
__gdal_version__ = gdal_version()

# Rasterio attaches NullHandler to the 'rasterio' logger and its
49 changes: 27 additions & 22 deletions rasterio/merge.py
@@ -70,6 +70,7 @@ def merge(
output_count=None,
resampling=Resampling.nearest,
method="first",
target_aligned_pixels=False,
dst_path=None,
dst_kwds=None,
):
@@ -217,29 +218,26 @@ def nullcontext(obj):
ys.extend([bottom, top])
dst_w, dst_s, dst_e, dst_n = min(xs), min(ys), max(xs), max(ys)

logger.debug("Output bounds: %r", (dst_w, dst_s, dst_e, dst_n))
output_transform = Affine.translation(dst_w, dst_n)
logger.debug("Output transform, before scaling: %r", output_transform)

# Resolution/pixel size
if not res:
res = first_res
elif not np.iterable(res):
res = (res, res)
elif len(res) == 1:
res = (res[0], res[0])
output_transform *= Affine.scale(res[0], -res[1])
logger.debug("Output transform, after scaling: %r", output_transform)

if target_aligned_pixels:
dst_w = math.floor(dst_w / res[0]) * res[0]
dst_e = math.ceil(dst_e / res[0]) * res[0]
dst_s = math.floor(dst_s / res[1]) * res[1]
dst_n = math.ceil(dst_n / res[1]) * res[1]

# Compute output array shape. We guarantee it will cover the output
# bounds completely
output_width = int(math.ceil((dst_e - dst_w) / res[0]))
output_height = int(math.ceil((dst_n - dst_s) / res[1]))
output_width = int(round((dst_e - dst_w) / res[0]))
output_height = int(round((dst_n - dst_s) / res[1]))

# Adjust bounds to fit
dst_e, dst_s = output_transform * (output_width, output_height)
logger.debug("Output width: %d, height: %d", output_width, output_height)
logger.debug("Adjusted bounds: %r", (dst_w, dst_s, dst_e, dst_n))
output_transform = Affine.translation(dst_w, dst_n) * Affine.scale(res[0], -res[1])

if dtype is not None:
dt = dtype
@@ -314,23 +312,29 @@ def nullcontext(obj):
)

# 4. Read data in source window into temp
src_window = src_window.round_shape(pixel_precision=0)
dst_window = dst_window.round_shape(pixel_precision=0)
trows, tcols = dst_window.height, dst_window.width
temp_shape = (src_count, trows, tcols)
temp = src.read(
src_window_rnd_shp = src_window.round_shape(pixel_precision=0)
dst_window_rnd_shp = dst_window.round_shape(pixel_precision=0)
dst_window_rnd_off = dst_window_rnd_shp.round_offsets(pixel_precision=0)
temp_height, temp_width = (
dst_window_rnd_off.height,
dst_window_rnd_off.width,
)
temp_shape = (src_count, temp_height, temp_width)
temp_src = src.read(
out_shape=temp_shape,
window=src_window,
window=src_window_rnd_shp,
boundless=False,
masked=True,
indexes=indexes,
resampling=resampling,
)

# 5. Copy elements of temp into dest
dst_window = dst_window.round_offsets(pixel_precision=0)
roff, coff = dst_window.row_off, dst_window.col_off
region = dest[:, roff:roff + trows, coff:coff + tcols]
roff, coff = (
max(0, dst_window_rnd_off.row_off),
max(0, dst_window_rnd_off.col_off),
)
region = dest[:, roff : roff + temp_height, coff : coff + temp_width]

if math.isnan(nodataval):
region_mask = np.isnan(region)
@@ -339,8 +343,9 @@
else:
region_mask = region == nodataval

# Ensure common shape, resolving issue #2202.
temp = temp_src[:, : region.shape[1], : region.shape[2]]
temp_mask = np.ma.getmask(temp)

copyto(region, temp, region_mask, temp_mask, index=idx, roff=roff, coff=coff)

if dst_path is None:
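
For context on the grid arithmetic in the hunk above, a standalone illustration (made-up bounds and resolution, using the affine package that rasterio depends on) of how the new output size computation with round() differs from math.ceil() when the requested bounds sit a fraction of a pixel off the grid:

```python
import math
from affine import Affine

# Made-up mosaic bounds and resolution, about 0.4 pixel short of a whole grid cell.
dst_w, dst_s, dst_e, dst_n = 11.0, 47.0, 13.0004, 49.0004
res = (0.001, 0.001)

# round() yields 2000 columns for this span, while math.ceil() would yield 2001,
# inflating the output grid by a full pixel.
output_width = int(round((dst_e - dst_w) / res[0]))
output_height = int(round((dst_n - dst_s) / res[1]))
assert output_width == 2000 and math.ceil((dst_e - dst_w) / res[0]) == 2001

output_transform = Affine.translation(dst_w, dst_n) * Affine.scale(res[0], -res[1])
print(output_width, output_height, output_transform)
```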
40 changes: 39 additions & 1 deletion tests/test_merge.py
@@ -1,5 +1,8 @@
"""Tests of rasterio.merge"""

import boto3
from hypothesis import given, settings
from hypothesis.strategies import floats
import numpy
import pytest

Expand Down Expand Up @@ -58,11 +61,46 @@ def test_issue2163():
with rasterio.open("tests/data/float_raster_with_nodata.tif") as src:
data = src.read()
result, transform = merge([src])
assert numpy.allclose(data, result)
assert numpy.allclose(data, result[:, : data.shape[1], : data.shape[2]])


def test_unsafe_casting():
"""Demonstrate fix for issue 2179"""
with rasterio.open("tests/data/float_raster_with_nodata.tif") as src:
result, transform = merge([src], dtype="uint8", nodata=0.0)
assert not result.any() # this is why it's called "unsafe".


@pytest.mark.skipif(
not (boto3.Session()._session.get_credentials()),
reason="S3 raster access requires credentials",
)
@pytest.mark.network
@pytest.mark.slow
@settings(deadline=None, max_examples=5)
@given(
dx=floats(min_value=-0.05, max_value=0.05),
dy=floats(min_value=-0.05, max_value=0.05),
)
def test_issue2202(dx, dy):
import rasterio.merge
from shapely import wkt
from shapely.affinity import translate

aoi = wkt.loads(
r"POLYGON((11.09 47.94, 11.06 48.01, 11.12 48.11, 11.18 48.11, 11.18 47.94, 11.09 47.94))"
)
aoi = translate(aoi, dx, dy)

with rasterio.Env(AWS_NO_SIGN_REQUEST=True,):
ds = [
rasterio.open(i)
for i in [
"/vsis3/copernicus-dem-30m/Copernicus_DSM_COG_10_N47_00_E011_00_DEM/Copernicus_DSM_COG_10_N47_00_E011_00_DEM.tif",
"/vsis3/copernicus-dem-30m/Copernicus_DSM_COG_10_N48_00_E011_00_DEM/Copernicus_DSM_COG_10_N48_00_E011_00_DEM.tif",
]
]
aux_array, aux_transform = rasterio.merge.merge(datasets=ds, bounds=aoi.bounds)
from rasterio.plot import show

show(aux_array)
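
For readers without S3 credentials, a hypothetical local variant of the same scenario can exercise the fractional-window path the fix addresses: merge a single local test raster with bounds nudged off the pixel grid by a fraction of the resolution. The file path and offsets below are illustrative assumptions, not part of the committed test:

```python
import rasterio
from rasterio.merge import merge

# Assumes the rasterio source tree's test data is available.
with rasterio.open("tests/data/float_raster_with_nodata.tif") as src:
    left, bottom, right, top = src.bounds
    xres, yres = src.res
    # Shift the requested bounds by 0.3 of a pixel so the destination windows
    # come out with fractional shapes and offsets.
    arr, transform = merge(
        [src],
        bounds=(left + 0.3 * xres, bottom, right, top - 0.3 * yres),
    )
    print(arr.shape, transform)
```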
