Unified bilinear_interp op Python interface specification (#10925)
* unify UpsamplingBilinear2d interface specification

* unify UpsamplingBilinear2d interface specification

* fix name conventions

* small fix about computation order
baiyf authored and qingqing01 committed May 25, 2018
1 parent 391c274 commit 1ba2581
Showing 3 changed files with 35 additions and 9 deletions.
4 changes: 2 additions & 2 deletions doc/fluid/api/layers.rst
@@ -1003,9 +1003,9 @@ dice_loss
 .. autofunction:: paddle.fluid.layers.dice_loss
     :noindex:
 
-bilinear_interp
+upsampling_bilinear2d
 ____
 
-.. autofunction:: paddle.fluid.layers.bilinear_interp
+.. autofunction:: paddle.fluid.layers.upsampling_bilinear2d
     :noindex:

34 changes: 29 additions & 5 deletions python/paddle/fluid/layers/nn.py
@@ -81,7 +81,7 @@
     'label_smooth',
     'roi_pool',
     'dice_loss',
-    'bilinear_interp',
+    'upsampling_bilinear2d',
 ]


@@ -3917,8 +3917,10 @@ def dice_loss(input, label, epsilon=0.00001):
     return reduce_mean(dice_score)
 
 
-def bilinear_interp(input, out_h, out_w, name=None):
+def upsampling_bilinear2d(input, out_shape=None, scale=None, name=None):
     """
+    The mathematical meaning of upsampling_bilinear2d is also called
+    Bilinear interpolation.
     Bilinear interpolation is an extension of linear interpolation for
     interpolating functions of two variables (e.g. H-direction and
     W-direction in this layer) on a rectilinear 2D grid.
@@ -3930,8 +3932,13 @@ def bilinear_interp(input, out_h, out_w, name=None):
         input (Variable): The input tensor of bilinear interpolation,
                           This is a 4-D tensor of the shape
                           (num_batches, channels, in_h, in_w).
-        out_h (int): output height of bilinear interpolation layer.
-        out_w (int): output width of bilinear interpolation layer.
+        out_shape(list|tuple|None): Output shape of bilinear interpolation
+                                    layer, the shape is (out_h, out_w).
+                                    Default: None
+        scale(int|None): The multiplier for the input height or width.
+                         At least one of out_shape or scale must be set.
+                         And out_shape has a higher priority than scale.
+                         Default: None
         name(str|None): A name for this layer(optional). If set None, the layer
                         will be named automatically.
@@ -3942,10 +3949,27 @@ def bilinear_interp(input, out_h, out_w, name=None):
     Examples:
         .. code-block:: python
-            out = fluid.layers.bilinear_interp(input, out_h=12, out_w=12)
+            out = fluid.layers.bilinear_interp(input, out_shape=[12, 12])
     """
+    if out_shape is None and scale is None:
+        raise ValueError("One of out_shape and scale must not be None")
     helper = LayerHelper('bilinear_interp', **locals())
     dtype = helper.input_dtype()
 
+    def _is_list_or_turple_(data):
+        return (isinstance(data, list) or isinstance(data, tuple))
+
+    if out_shape is not None:
+        if not (_is_list_or_turple_(out_shape) and len(out_shape) == 2):
+            raise ValueError('out_shape should be a list or tuple ',
+                             'with length 2, (out_h, out_w).')
+        out_shape = list(map(int, out_shape))
+        out_h = out_shape[0]
+        out_w = out_shape[1]
+    else:
+        out_h = int(input.shape[2] * scale)
+        out_w = int(input.shape[3] * scale)
+
     out = helper.create_tmp_variable(dtype)
     helper.append_op(
         type="bilinear_interp",
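The new docstring describes the op as bilinear interpolation along the H and W directions of a (num_batches, channels, in_h, in_w) tensor. The NumPy snippet below is a minimal illustrative sketch of that resizing scheme, written for this page rather than taken from the commit; it assumes an align-corners style coordinate mapping, which may differ from the exact sampling convention used by the bilinear_interp kernel.

import numpy as np

def bilinear_resize_nchw(x, out_h, out_w):
    """Resize a (N, C, H, W) array with bilinear interpolation.

    Illustrative sketch only; the coordinate mapping is assumed and may
    not match the bilinear_interp op exactly.
    """
    n, c, in_h, in_w = x.shape
    # Map each output row/column back to a fractional source coordinate.
    h_ratio = (in_h - 1) / (out_h - 1) if out_h > 1 else 0.0
    w_ratio = (in_w - 1) / (out_w - 1) if out_w > 1 else 0.0
    out = np.empty((n, c, out_h, out_w), dtype=x.dtype)
    for i in range(out_h):
        src_h = i * h_ratio
        h0 = int(np.floor(src_h))
        h1 = min(h0 + 1, in_h - 1)
        dh = src_h - h0
        for j in range(out_w):
            src_w = j * w_ratio
            w0 = int(np.floor(src_w))
            w1 = min(w0 + 1, in_w - 1)
            dw = src_w - w0
            # Linear interpolation in the W direction, then in the H direction.
            top = (1 - dw) * x[:, :, h0, w0] + dw * x[:, :, h0, w1]
            bottom = (1 - dw) * x[:, :, h1, w0] + dw * x[:, :, h1, w1]
            out[:, :, i, j] = (1 - dh) * top + dh * bottom
    return out

# Example: upsample a 2x3 feature map to 4x6.
x = np.arange(6, dtype=np.float32).reshape(1, 1, 2, 3)
print(bilinear_resize_nchw(x, 4, 6).shape)  # (1, 1, 4, 6)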
6 changes: 4 additions & 2 deletions python/paddle/fluid/tests/unittests/test_layers.py
@@ -369,11 +369,13 @@ def test_roi_pool(self):
             self.assertIsNotNone(output)
         print(str(program))
 
-    def test_bilinear_interp(self):
+    def test_upsampling_bilinear2d(self):
         program = Program()
         with program_guard(program):
             x = layers.data(name='x', shape=[3, 9, 6], dtype="float32")
-            output = layers.bilinear_interp(x, 12, 12)
+            output = layers.upsampling_bilinear2d(x, out_shape=[12, 12])
             self.assertIsNotNone(output)
+            output = layers.upsampling_bilinear2d(x, scale=3)
+            self.assertIsNotNone(output)
         print(str(program))

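As a worked illustration of the out_shape/scale rules quoted above: the test's data layer has shape [3, 9, 6], so with the implicit batch dimension the input is (N, 3, 9, 6), i.e. in_h=9 and in_w=6. The helper below is hypothetical (not part of the commit) and only mirrors the precedence logic from nn.py to show the resulting target sizes.

def resolve_target_size(in_h, in_w, out_shape=None, scale=None):
    # Mirrors the docstring's rules: at least one of out_shape and scale
    # must be given, and out_shape takes priority over scale.
    if out_shape is None and scale is None:
        raise ValueError("One of out_shape and scale must not be None")
    if out_shape is not None:
        out_h, out_w = map(int, out_shape)
    else:
        out_h, out_w = int(in_h * scale), int(in_w * scale)
    return out_h, out_w

# Matches the two calls in the test above (in_h=9, in_w=6):
print(resolve_target_size(9, 6, out_shape=[12, 12]))  # (12, 12)
print(resolve_target_size(9, 6, scale=3))             # (27, 18)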
