Merge pull request #4890 from kmaehashi/add-return-indices-maxpool
Implement `return_indices` option to `F.max_pooling_2d`
hvy committed Jun 18, 2018
2 parents 3586b84 + 522c43a commit 1fa83b7
Showing 2 changed files with 67 additions and 7 deletions.
24 changes: 21 additions & 3 deletions chainer/functions/pooling/max_pooling_2d.py
@@ -338,7 +338,8 @@ def _forward_gpu_compute_indexes_again(self, inputs):
         return y,


-def max_pooling_2d(x, ksize, stride=None, pad=0, cover_all=True):
+def max_pooling_2d(x, ksize, stride=None, pad=0, cover_all=True,
+                   return_indices=False):
     """Spatial max pooling function.

     This function acts similarly to :class:`~functions.Convolution2D`, but
@@ -356,9 +357,26 @@ def max_pooling_2d(x, ksize, stride=None, pad=0, cover_all=True):
             ``pad=p`` and ``pad=(p, p)`` are equivalent.
         cover_all (bool): If ``True``, all spatial locations are pooled into
             some output pixels. It may make the output size larger.
+        return_indices (bool): If ``True``, pooling indices array is returned
+            together with the output variable. The returned indices are
+            expected for use by :func:`chainer.functions.upsampling_2d`.
+            Note that cuDNN will not be used for this function if
+            ``return_indices`` is set to ``True``, as cuDNN does not return
+            indices information.

     Returns:
-        ~chainer.Variable: Output variable.
+        ~chainer.Variable or tuple:
+            When ``return_indices`` is ``False`` (default), returns the output
+            variable.
+            When ``True``, returns the tuple of the output variable and
+            pooling indices (`ndarray`). Pooling indices will be on the same
+            device as the input.

     """
-    return MaxPooling2D(ksize, stride, pad, cover_all).apply((x,))[0]
+    func = MaxPooling2D(ksize, stride, pad, cover_all)
+    if return_indices:
+        with chainer.using_config('use_cudnn', 'never'):
+            out = func.apply((x,))[0]
+        return out, func.indexes
+
+    return func.apply((x,))[0]
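For context, a minimal usage sketch of the new option (a sketch against this branch, not part of the diff; the 4x4 input and 2x2 window mirror the test below):

    import numpy as np

    import chainer.functions as F

    x = np.arange(2 * 3 * 4 * 4, dtype=np.float32).reshape(2, 3, 4, 4)

    # Default behaviour is unchanged: a single Variable is returned.
    y = F.max_pooling_2d(x, ksize=2, cover_all=False)

    # With return_indices=True, a (Variable, ndarray) pair comes back;
    # cuDNN is bypassed on this path, as the docstring above notes.
    y, indices = F.max_pooling_2d(x, ksize=2, cover_all=False,
                                  return_indices=True)
    print(y.shape, indices.shape)  # (2, 3, 2, 2) for both

Per the docstring, the returned indices are intended to be passed to :func:`chainer.functions.upsampling_2d` to undo the pooling.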
Second changed file (the `max_pooling_2d` test module): 46 additions & 4 deletions
@@ -72,16 +72,16 @@ def setUp(self):
     def forward_cpu(self, inputs):
         x, = inputs
         expect = numpy.empty(self.output_shape, dtype=self.dtype)
-        for k in six.moves.range(2):
+        for i in six.moves.range(2):
             for c in six.moves.range(3):
-                xx = x[k, c]
+                xx = x[i, c]
                 if self.cover_all:
-                    expect[k, c] = numpy.array([
+                    expect[i, c] = numpy.array([
                         [xx[0:2, 0:2].max(), xx[0:2, 1:3].max()],
                         [xx[1:4, 0:2].max(), xx[1:4, 1:3].max()],
                         [xx[3:4, 0:2].max(), xx[3:4, 1:3].max()]])
                 else:
-                    expect[k, c] = numpy.array([
+                    expect[i, c] = numpy.array([
                         [xx[0:2, 0:2].max(), xx[0:2, 1:3].max()],
                         [xx[1:4, 0:2].max(), xx[1:4, 1:3].max()]])
         return expect,
@@ -224,4 +224,46 @@ def test_call_cudnn_backward(self):
         self.assertEqual(func.called, expect)


+class TestMaxPooling2DIndices(unittest.TestCase):
+    def setUp(self):
+        self.x = numpy.arange(
+            2 * 3 * 4 * 4, dtype=numpy.float32).reshape(2, 3, 4, 4)
+        numpy.random.shuffle(self.x)
+
+    def _check(self, x):
+        out, indices = functions.max_pooling_2d(
+            x, 2, cover_all=False, return_indices=True)
+        assert isinstance(out, chainer.Variable)
+        assert isinstance(out.array, type(x))
+        assert isinstance(indices, type(x))
+        assert indices.shape == out.array.shape
+
+        # Calculate expected indices.
+        expect = numpy.zeros(indices.shape, dtype=indices.dtype)
+        for i in six.moves.range(2):
+            for c in six.moves.range(3):
+                xx = x[i, c]
+                expect[i, c] = numpy.array([
+                    [xx[0:2, 0:2].ravel().argmax(),
+                     xx[0:2, 2:4].ravel().argmax()],
+                    [xx[2:4, 0:2].ravel().argmax(),
+                     xx[2:4, 2:4].ravel().argmax()],
+                ])
+        if out.xp is not numpy:
+            expect = cuda.to_gpu(expect)
+        assert (expect == indices).all()
+
+    def test_cpu(self):
+        self._check(self.x)
+
+    @attr.gpu
+    @attr.cudnn
+    def test_gpu(self):
+        x = cuda.to_gpu(self.x)
+        with chainer.using_config('use_cudnn', 'never'):
+            self._check(x)
+        with chainer.using_config('use_cudnn', 'always'):
+            self._check(x)
+
+
 testing.run_module(__name__, __file__)
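As the expected-index computation in `_check` shows, each entry of the returned indices is the argmax position within the corresponding pooling window after flattening it. A tiny illustration (values made up for this note):

    import numpy as np

    window = np.array([[1., 9.],
                       [4., 2.]])
    # The pooled value is window.max() == 9.0; the stored index is the
    # position of that maximum in the flattened 2x2 window: 1 (row 0, col 1).
    assert window.ravel().argmax() == 1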
