Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

python wrap for resize layer #1853

Closed
wants to merge 15 commits into from
5 changes: 5 additions & 0 deletions doc/api/v2/config/layer.rst
Original file line number Diff line number Diff line change
Expand Up @@ -278,6 +278,11 @@ seq_reshape
.. autoclass:: paddle.v2.layer.seq_reshape
:noindex:

resize
-----------
.. autoclass:: paddle.v2.layer.resize
:noindex:

Math Layers
===========

Expand Down
35 changes: 35 additions & 0 deletions python/paddle/trainer_config_helpers/layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@
'bilinear_interp_layer',
'trans_layer',
'rotate_layer',
'resize_layer',
'sum_to_one_norm_layer',
'get_output_layer',
'LayerType',
Expand Down Expand Up @@ -168,6 +169,7 @@ class LayerType(object):
SCALING_LAYER = 'scaling'
TRANS_LAYER = 'trans'
ROTATE_LAYER = 'rotate'
RESIZE_LAYER = 'resize'
OUT_PROD_LAYER = 'out_prod'
FEATURE_MAP_EXPAND_LAYER = 'featmap_expand'

Expand Down Expand Up @@ -1919,6 +1921,39 @@ def rotate_layer(input, height, width, name=None, layer_attr=None):
size=l.config.size)


@wrap_name_default()
@layer_support()
def resize_layer(input, size, name=None, layer_attr=None):
    """
    A layer for resizing a minibatch matrix from (input_batch_num x input_size)
    to (output_batch_num x size). The output_batch_num will be reset to
    ((input_batch_num x input_size) / size), so input_batch_num x input_size
    is presumably required to be divisible by size — confirm against the
    C++ ResizeLayer implementation.

    The example usage is:

    .. code-block:: python

       resize = resize_layer(input=layer, size=size)

    :param input: Input layer.
    :type input: LayerOutput
    :param size: The size of each output item in the minibatch.
    :type size: int
    :param name: Layer name.
    :type name: basestring
    :param layer_attr: extra layer attributes.
    :type layer_attr: ExtraLayerAttribute
    :return: LayerOutput object.
    :rtype: LayerOutput
    """
    # Register the layer in the global config; the returned proxy carries
    # the metadata (name, type, parents, size) used by downstream layers.
    Layer(
        name=name,
        type=LayerType.RESIZE_LAYER,
        size=size,
        inputs=[input.name],
        **ExtraAttr.to_kwargs(layer_attr))
    return LayerOutput(name, LayerType.RESIZE_LAYER, parents=[input], size=size)


@wrap_name_default()
@layer_support()
def cos_sim(a, b, scale=1, size=1, name=None, layer_attr=None):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,6 @@ last_first_seq test_expand_layer test_ntm_layers test_hsigmoid
img_layers img_trans_layers util_layers simple_rnn_layers unused_layers test_cost_layers
test_rnn_group shared_fc shared_lstm shared_gru test_cost_layers_with_weight
test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops
test_seq_concat_reshape test_pad test_smooth_l1 test_multiplex_layer)
test_seq_concat_reshape test_pad test_smooth_l1 test_multiplex_layer test_resize)

export whole_configs=(test_split_datasource)
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
type: "nn"
layers {
name: "input"
type: "data"
size: 300
active_type: ""
height: 100
width: 3
}
layers {
name: "__resize_layer_0__"
type: "resize"
size: 100
active_type: ""
inputs {
input_layer_name: "input"
}
}
input_layer_names: "input"
output_layer_names: "__resize_layer_0__"
sub_models {
name: "root"
layer_names: "input"
layer_names: "__resize_layer_0__"
input_layer_names: "input"
output_layer_names: "__resize_layer_0__"
is_recurrent_layer_group: false
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
import re
import getopt

from paddle.trainer.config_parser import *
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

请问这个文件的修改是必须的么?

Copy link
Collaborator Author

@pengwangucla pengwangucla Jun 11, 2017

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Done



def main(print_whole_config, globals, locals):
'''
Expand All @@ -33,7 +35,7 @@ def main(print_whole_config, globals, locals):
functionstr = functionstr + " " + line

cmdstr = cmdstr + importstr + """def configs():\n""" + functionstr
#cmdstr = cmdstr + """def configs():\n""" + importstr + functionstr

if print_whole_config:
cmdstr = cmdstr + """print parse_config(configs, "")"""
else:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Trainer-config test for resize_layer: parsed output is compared against the
# golden protobuf (test_resize.protostr) by the config-helpers test driver.
from paddle.trainer_config_helpers import *

settings(batch_size=1)

# 300-dim input (height 100 x width 3); resize to 100 per item, which
# multiplies the batch dimension by 3 (300 / 100) per the layer's contract.
data = data_layer(name='input', size=300, height=100, width=3)
resize = resize_layer(input=data, size=100)

outputs(resize)