Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
6 changed files
with
305 additions
and
22 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,127 @@ | ||
import six | ||
|
||
from chainer import cuda | ||
from chainer import function_node | ||
from chainer.utils import type_check | ||
|
||
|
||
class Repeat(function_node.FunctionNode):

    """Function node that repeats elements of its single input array.

    ``repeats`` is normalized to a tuple of non-negative ints at
    construction time; ``axis`` is forwarded verbatim to ``xp.repeat``.
    """

    def __init__(self, repeats, axis=None):
        if isinstance(repeats, six.integer_types):
            repeats = (repeats,)
        elif not (isinstance(repeats, tuple) and all(
                isinstance(r, six.integer_types) for r in repeats)):
            raise TypeError('repeats must be int or tuple of ints')
        self.repeats = repeats

        if any(r < 0 for r in self.repeats):
            raise ValueError('all elements in repeats must be zero or larger')

        self.axis = axis

    def check_type_forward(self, in_types):
        type_check.expect(in_types.size() == 1)

    def forward(self, inputs):
        self.retain_inputs((0,))
        x, = inputs
        xp = cuda.get_array_module(x)
        # A single repeat count (or axis=None) is passed as a scalar so the
        # backend handles the uniform-repeat fast path.
        if self.axis is None or len(self.repeats) == 1:
            reps = self.repeats[0]
        else:
            reps = self.repeats
        return xp.repeat(x, reps, self.axis),

    def backward(self, indexes, grad_outputs):
        x, = self.get_retained_inputs()
        grad_fn = RepeatGrad(self.repeats, self.axis, x.shape, x.dtype)
        return grad_fn.apply(grad_outputs)
|
||
|
||
class RepeatGrad(function_node.FunctionNode):

    """Gradient of :class:`Repeat`.

    Sums the slices of the output gradient that originate from the same
    input element, producing a gradient with the original input shape.
    """

    def __init__(self, repeats, axis, in_shape, in_dtype):
        # ``repeats`` is already a validated tuple of non-negative ints
        # (normalized by Repeat.__init__).
        self.repeats = repeats
        self.axis = axis
        self.in_shape = in_shape
        self.in_dtype = in_dtype

    def forward(self, inputs):
        gy, = inputs
        xp = cuda.get_array_module(gy)
        repeats = self.repeats
        axis = self.axis

        if len(gy) == 0:
            # Every repeat count was zero, so nothing in gy maps back to x;
            # the gradient is all zeros.
            gx = xp.zeros(self.in_shape, self.in_dtype)
            return gx,
        elif axis is None:
            # Flattened uniform repeat: consecutive groups of repeats[0]
            # elements all came from one input element.
            gx = gy.reshape(-1, repeats[0]).sum(axis=1).reshape(self.in_shape)
            return gx,
        elif len(repeats) == 1:
            # Uniform repeat along one axis: expose the repeat dimension and
            # reduce over it.
            shape = list(self.in_shape)
            shape[axis:axis + 1] = [-1, repeats[0]]
            gx = gy.reshape(shape).sum(axis=axis + 1)
            return gx,

        # Per-element repeat counts: accumulate each repeated run separately.
        # NOTE(review): this path assumes a non-negative ``axis``; a negative
        # axis would build an empty slice prefix — confirm callers normalize.
        gx = xp.zeros(self.in_shape, self.in_dtype)
        head = tuple(slice(None) for _ in six.moves.range(self.axis))
        pos = 0
        for i, r in enumerate(repeats):
            # Index with tuples: basic indexing by a *list* of slices is
            # deprecated since NumPy 1.15 and rejected by modern NumPy.
            src = head + (slice(pos, pos + r),)
            dst = head + (slice(i, i + 1),)
            gx[dst] = gy[src].sum(axis=self.axis, keepdims=True)
            pos += r
        return gx,

    def backward(self, indexes, grad_outputs):
        # The gradient of summing repeated slices is repeating again.
        return Repeat(self.repeats, self.axis).apply(grad_outputs)
|
||
|
||
def repeat(x, repeats, axis=None):
    """Construct an array by repeating a given array.

    Args:
        x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
        :class:`cupy.ndarray`):
            Input variable.
        repeats (:class:`int` or :class:`tuple` of :class:`int` s):
            The number of times which each element of ``x`` is repeated.
        axis (:class:`int`):
            The axis along which to repeat values.

    Returns:
        ~chainer.Variable: The repeated output Variable.

    .. admonition:: Example

        >>> x = np.array([0, 1, 2])
        >>> x.shape
        (3,)
        >>> y = F.repeat(x, 2)
        >>> y.shape
        (6,)
        >>> y.data
        array([0, 0, 1, 1, 2, 2])
        >>> x = np.array([[1,2], [3,4]])
        >>> x.shape
        (2, 2)
        >>> y = F.repeat(x, 3, axis=1)
        >>> y.shape
        (2, 6)
        >>> y.data
        array([[1, 1, 1, 2, 2, 2],
               [3, 3, 3, 4, 4, 4]])
        >>> y = F.repeat(x, (1, 2), axis=0)
        >>> y.shape
        (3, 2)
        >>> y.data
        array([[1, 2],
               [3, 4],
               [3, 4]])

    """
    y, = Repeat(repeats, axis).apply((x,))
    return y
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
113 changes: 113 additions & 0 deletions
113
tests/chainer_tests/functions_tests/array_tests/test_repeat.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,113 @@ | ||
import unittest | ||
|
||
import numpy | ||
|
||
from chainer import cuda | ||
from chainer import functions | ||
from chainer import gradient_check | ||
from chainer import testing | ||
from chainer.testing import attr | ||
|
||
|
||
@testing.parameterize(*testing.product({
    'shape_repeats_axis': [
        (2, 0, None),
        (2, 1, None),
        (2, 2, None),
        (2, 2, 0),
        ((3, 2), (2,), 0),
        ((3, 2), 2, 0),
        ((3, 2), 2, 1),
        ((3, 2), (3, 4, 3), 0),
        ((3, 2), (3, 2), 1),
        ((3, 2, 3), (3, 2, 1), 0),
        ((3, 2, 3), (3, 4), 1),
        ((3, 2, 3), (3, 2, 1), 2),
        ((3, 4, 3, 2), 3, 1),
        ((3, 4, 3, 2), (2, 2, 3, 3), 1),
    ],
    'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
class TestRepeat(unittest.TestCase):

    """Forward, backward, and double-backward tests for ``F.repeat``."""

    def setUp(self):
        self.in_shape, self.repeats, self.axis = self.shape_repeats_axis
        dtype = self.dtype
        self.x = numpy.random.uniform(-1, 1, self.in_shape).astype(dtype)
        # Use numpy.repeat as the reference to size the upstream gradient.
        out_shape = numpy.repeat(self.x, self.repeats, self.axis).shape
        self.gy = numpy.random.uniform(-1, 1, out_shape).astype(dtype)
        self.ggx = numpy.random.uniform(
            -1, 1, self.in_shape).astype(dtype)

        if dtype == numpy.float16:
            # float16 needs looser tolerances.
            self.check_forward_options = {'atol': 5e-4, 'rtol': 5e-3}
            self.check_backward_options = {
                'dtype': numpy.float64, 'atol': 2 ** -4, 'rtol': 2 ** -4}
        else:
            self.check_forward_options = {}
            self.check_backward_options = {'dtype': numpy.float64}

    def check_forward(self, x_data):
        y = functions.repeat(x_data, self.repeats, self.axis)
        expected = numpy.repeat(self.x, self.repeats, self.axis)
        self.assertEqual(y.dtype, expected.dtype)
        testing.assert_allclose(
            y.data, expected, **self.check_forward_options)

    def test_forward_cpu(self):
        self.check_forward(self.x)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x))

    def check_backward(self, x_data, y_grad):
        gradient_check.check_backward(
            lambda x: functions.repeat(x, self.repeats, self.axis),
            x_data, y_grad, **self.check_backward_options)

    def test_backward_cpu(self):
        self.check_backward(self.x, self.gy)

    @attr.gpu
    def test_backward_gpu(self):
        self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))

    def check_double_backward(self, x_data, y_grad, x_grad_grad):
        def f(x):
            # Square the output so the second derivative is non-trivial.
            y = functions.repeat(x, self.repeats, self.axis)
            return y * y

        gradient_check.check_double_backward(
            f, x_data, y_grad, x_grad_grad, **self.check_backward_options)

    def test_double_backward_cpu(self):
        self.check_double_backward(self.x, self.gy, self.ggx)

    @attr.gpu
    def test_double_backward_gpu(self):
        self.check_double_backward(
            cuda.to_gpu(self.x), cuda.to_gpu(self.gy),
            cuda.to_gpu(self.ggx))
|
||
|
||
@testing.parameterize(*testing.product({
    'repeats': [-1, (-1, -1)],
    'axis': [-1],
}))
class TestRepeatValueError(unittest.TestCase):

    """Negative repeat counts must be rejected with ``ValueError``."""

    def test_value_error(self):
        data = numpy.random.uniform(-1, 1, (2,)).astype(numpy.float32)
        with self.assertRaises(ValueError):
            functions.repeat(data, self.repeats, self.axis)
|
||
|
||
class TestRepeatTypeError(unittest.TestCase):

    """A non-int, non-tuple ``repeats`` must be rejected with ``TypeError``."""

    def test_type_error(self):
        data = numpy.random.uniform(-1, 1, (2,)).astype(numpy.float32)
        with self.assertRaises(TypeError):
            functions.repeat(data, 'a')
|
||
|
||
testing.run_module(__name__, __file__) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters