diff --git a/paddle/operators/pool_cudnn_op.cu b/paddle/operators/pool_cudnn_op.cu
index bc29be18e76fd..8d0741dccc1fd 100644
--- a/paddle/operators/pool_cudnn_op.cu
+++ b/paddle/operators/pool_cudnn_op.cu
@@ -43,6 +43,7 @@ class PoolCudnnOpKernel : public framework::OpKernel<T> {
     std::vector<int> paddings = ctx.Attr<std::vector<int>>("paddings");
     if (ctx.Attr<bool>("globalPooling")) {
       for (size_t i = 0; i < ksize.size(); ++i) {
+        paddings[i] = 0;
         ksize[i] = static_cast<int>(input->dims()[i + 2]);
       }
     }
@@ -97,8 +98,10 @@ class PoolCudnnGradOpKernel : public framework::OpKernel<T> {
     std::vector<int> paddings = ctx.Attr<std::vector<int>>("paddings");
 
     if (ctx.Attr<bool>("globalPooling")) {
-      for (size_t i = 0; i < ksize.size(); ++i)
+      for (size_t i = 0; i < ksize.size(); ++i) {
+        paddings[i] = 0;
         ksize[i] = static_cast<int>(input->dims()[i + 2]);
+      }
     }
 
     const T *input_data = input->data<T>();
diff --git a/paddle/operators/pool_op.cc b/paddle/operators/pool_op.cc
index c4ab29e4d5f7c..4d75c11bc8130 100644
--- a/paddle/operators/pool_op.cc
+++ b/paddle/operators/pool_op.cc
@@ -39,8 +39,10 @@ void PoolOp::InferShape(framework::InferShapeContext *ctx) const {
   if (ctx->Attrs().Get<bool>("globalPooling")) {
     ksize.resize(static_cast<size_t>(in_x_dims.size()) - 2);
-    for (size_t i = 0; i < ksize.size(); ++i)
+    for (size_t i = 0; i < ksize.size(); ++i) {
+      paddings[i] = 0;
       ksize[i] = static_cast<int>(in_x_dims[i + 2]);
+    }
   }
 
   PADDLE_ENFORCE(in_x_dims.size() - ksize.size() == 2U,
@@ -84,15 +86,16 @@ Pool2dOpMaker::Pool2dOpMaker(framework::OpProto *proto,
       "(string), pooling type, can be \"max\" for max-pooling "
       "and \"avg\" for average-pooling.")
       .InEnum({"max", "avg"});
-  AddAttr<std::vector<int>>(
-      "ksize",
-      "(vector ), the pooling window size(height, width) of pooling operator."
-      "If globalPooling = true, ksize is ignored and need not be "
-      "specified.");  // TODO(Chengduo): Add checker. (Currently,
+  AddAttr<std::vector<int>>("ksize",
+                            "(vector ), the pooling window size(height, width) "
+                            "of pooling operator."
+                            "If globalPooling = true, ksize and paddings will "
+                            "be ignored.");  // TODO(Chengduo): Add checker.
+                                             // (Currently,
   // TypedAttrChecker don't support vector type.)
   AddAttr<bool>("globalPooling",
                 "(bool default: false), whether to use the global pooling."
-                "If globalPooling = true, ksize is ignored.")
+                "If globalPooling = true, ksize and paddings will be ignored.")
       .SetDefault(false);
   AddAttr<std::vector<int>>(
       "strides",
@@ -101,7 +104,8 @@ Pool2dOpMaker::Pool2dOpMaker(framework::OpProto *proto,
   // TypedAttrChecker don't support vector type.)
   AddAttr<std::vector<int>>(
       "paddings",
-      "(vector defalut:{0,0}), paddings(height, width) of pooling operator.")
+      "(vector default:{0,0}), paddings(height, width) of pooling operator."
+      "If globalPooling = true, paddings and ksize will be ignored.")
       .SetDefault({0, 0});  // TODO(Chengduo): Add checker. (Currently,
   // TypedAttrChecker don't support vector type.)
@@ -145,25 +149,28 @@ Pool3dOpMaker::Pool3dOpMaker(framework::OpProto *proto,
       "(string), pooling type, can be \"max\" for max-pooling "
       "and \"avg\" for average-pooling.")
       .InEnum({"max", "avg"});
-  AddAttr<std::vector<int>>(
-      "ksize",
-      "(vector ), the pooling window size(depth, height, width) of pooling "
-      "operator."
-      "If globalPooling = true, ksize is ignored and need not be "
-      "specified.");  // TODO(Chengduo): Add checker. (Currently,
-  // TypedAttrChecker don't support vector type.)
+  AddAttr<std::vector<int>>("ksize",
+                            "(vector ), the pooling window size(depth, height, "
+                            "width) of pooling "
+                            "operator."
+                            "If globalPooling = true, ksize and paddings will "
+                            "be ignored.");  // TODO(Chengduo): Add checker.
+                                             // (Currently,
+  // TypedAttrChecker don't support vector type.)
   AddAttr<bool>("globalPooling",
                 "(bool default: false), whether to use the global pooling."
-                "If globalPooling = true, ksize is ignored.")
+                "If globalPooling = true, ksize and paddings will be ignored.")
       .SetDefault(false);
   AddAttr<std::vector<int>>("strides",
                             "(vector, default:{1,1,1}), strides(depth, height, "
                             "width) of pooling operator.")
       .SetDefault({1, 1, 1});  // TODO(Chengduo): Add checker. (Currently,
   // TypedAttrChecker don't support vector type.)
-  AddAttr<std::vector<int>>("paddings",
-                            "(vector defalut:{0,0,0}), paddings(depth, height, "
-                            "width) of pooling operator.")
+  AddAttr<std::vector<int>>(
+      "paddings",
+      "(vector default:{0,0,0}), paddings(depth, height, "
+      "width) of pooling operator."
+      "If globalPooling = true, ksize and paddings will be ignored.")
       .SetDefault({0, 0, 0});  // TODO(Chengduo): Add checker. (Currently,
   // TypedAttrChecker don't support vector type.)
diff --git a/paddle/operators/pool_op.h b/paddle/operators/pool_op.h
index ba8edc9cf60bc..d9d445f6a6257 100644
--- a/paddle/operators/pool_op.h
+++ b/paddle/operators/pool_op.h
@@ -63,6 +63,7 @@ class PoolKernel : public framework::OpKernel<T> {
     std::vector<int> paddings = context.Attr<std::vector<int>>("paddings");
     if (context.Attr<bool>("globalPooling")) {
       for (size_t i = 0; i < ksize.size(); ++i) {
+        paddings[i] = 0;
         ksize[i] = static_cast<int>(in_x->dims()[i + 2]);
       }
     }
@@ -103,6 +104,7 @@ class PoolKernel : public framework::OpKernel<T> {
                                         paddings, pool_process);
         }
       } break;
+      default: { PADDLE_THROW("Pool op only supports 2D and 3D input."); }
     }
   }
 };
@@ -123,8 +125,10 @@ class PoolGradKernel : public framework::OpKernel<T> {
     std::vector<int> paddings = context.Attr<std::vector<int>>("paddings");
 
     if (context.Attr<bool>("globalPooling")) {
-      for (size_t i = 0; i < ksize.size(); ++i)
+      for (size_t i = 0; i < ksize.size(); ++i) {
+        paddings[i] = 0;
         ksize[i] = static_cast<int>(in_x->dims()[i + 2]);
+      }
     }
 
     if (in_x_grad) {
@@ -164,6 +168,7 @@ class PoolGradKernel : public framework::OpKernel<T> {
                                          *out_grad, ksize, strides, paddings, pool_process);
         }
       } break;
+      default: { PADDLE_THROW("Pool op only supports 2D and 3D input."); }
     }
   }
 }
diff --git a/paddle/operators/pool_with_index_op.cc b/paddle/operators/pool_with_index_op.cc
index ea21845751bee..95e896e7cc33b 100644
--- a/paddle/operators/pool_with_index_op.cc
+++ b/paddle/operators/pool_with_index_op.cc
@@ -46,8 +46,10 @@ class MaxPoolWithIndexOp : public framework::OperatorWithKernel {
     if (ctx->Attrs().Get<bool>("globalPooling")) {
       ksize.resize(static_cast<size_t>(in_x_dims.size()) - 2);
-      for (size_t i = 0; i < ksize.size(); ++i)
+      for (size_t i = 0; i < ksize.size(); ++i) {
+        paddings[i] = 0;
         ksize[i] = static_cast<int>(in_x_dims[i + 2]);
+      }
     }
 
     PADDLE_ENFORCE(in_x_dims.size() - ksize.size() == 2U,
@@ -87,31 +89,33 @@ class MaxPool2dWithIndexOpMaker : public framework::OpProtoAndCheckerMaker {
       : OpProtoAndCheckerMaker(proto, op_checker) {
     AddInput(
         "X",
-        "(Tensor) The input tensor of pooling operator. "
+        "(Tensor), the input tensor of pooling operator. "
         "The format of input tensor is NCHW. Where N is batch size, C is the "
         "number of channels, H and W is the height and width of image.");
     AddOutput("Out",
-              "(Tensor) The output tensor of pooling operator."
+              "(Tensor), the output tensor of pooling operator."
              "The format of output tensor is also NCHW."
              "Where N is batch size, C is "
             "the number of channels, H and W is the height and "
             "width of image.");
     AddOutput("Mask",
-              "(Tensor) The Mask tensor of pooling operator."
+              "(Tensor), the Mask tensor of pooling operator."
              "The format of output tensor is also NCHW."
"Where N is batch size, C is the number of channels, H and W " "is the height and width of image." "The value in it is the index in current feature map"); - AddAttr>( - "ksize", - "(vector ), the pooling window size(height, width) of pooling operator." - "If globalPooling = true, ksize is ignored and need not be " - "specified."); // TODO(Chengduo): Add checker. (Currently, + AddAttr>("ksize", + "(vector ), the pooling window size(height, " + "width) of pooling operator." + "If globalPooling = true, ksize and paddings " + "will be ignored."); // TODO(Chengduo): Add + // checker. (Currently, // TypedAttrChecker don't support vector type.) - AddAttr("globalPooling", - "(bool default: false), whether to use the global pooling." - "If globalPooling = true, ksize is ignored.") + AddAttr( + "globalPooling", + "(bool default: false), whether to use the global pooling." + "If globalPooling = true, ksize and paddings will be ignored.") .SetDefault(false); AddAttr>( "strides", @@ -120,7 +124,8 @@ class MaxPool2dWithIndexOpMaker : public framework::OpProtoAndCheckerMaker { // TypedAttrChecker don't support vector type.) AddAttr>( "paddings", - "(vector defalut:{0,0}), paddings(height, width) of pooling operator.") + "(vector defalut:{0, 0}), paddings(height, width) of pooling operator." + "If globalPooling = true, paddings and will be ignored.") .SetDefault({0, 0}); // TODO(Chengduo): Add checker. (Currently, // TypedAttrChecker don't support vector type.) @@ -153,42 +158,46 @@ class MaxPool3dWithIndexOpMaker : public framework::OpProtoAndCheckerMaker { : OpProtoAndCheckerMaker(proto, op_checker) { AddInput( "X", - "(Tensor) The input tensor of pooling operator. " + "(Tensor), the input tensor of pooling operator. " "The format of input tensor is NCDHW. Where N is batch size, C is " "the number of channels, D, H and W is the depth, height and width of " "image."); AddOutput("Out", - "(Tensor) The output tensor of pooling operator." + "(Tensor), the output tensor of pooling operator." "The format of output tensor is also NCDHW." "Where N is batch size, C is " "the number of channels, D, H and W is the depth, height and " "width of image."); AddOutput("Mask", - "(Tensor) The Mask tensor of pooling operator." + "(Tensor), the Mask tensor of pooling operator." "The format of output tensor is also NCDHW." "Where N is batch size, C is the number of channels, D, H and W " "is the depth, height and width of image." "The value in it is the index in current feature map"); - AddAttr>( - "ksize", - "(vector ), the pooling window size(depth, height, width) of pooling " - "operator." - "If globalPooling = true, ksize is ignored and need not be " - "specified."); // TODO(Chengduo): Add checker. (Currently, + AddAttr>("ksize", + "(vector), the pooling window size(depth, " + "height, width) of pooling " + "operator." + "If globalPooling = true, ksize and paddings " + "will be ignored."); // TODO(Chengduo): Add + // checker. (Currently, // TypedAttrChecker don't support vector type.) - AddAttr("globalPooling", - "(bool default: false), whether to use the global pooling." - "If globalPooling = true, ksize is ignored.") + AddAttr( + "globalPooling", + "(bool default: false), whether to use the global pooling." + "If globalPooling = true, ksize and paddings will be ignored.") .SetDefault(false); AddAttr>("strides", "(vector, default:{1,1,1}), strides(depth, " "height, width) of pooling operator.") .SetDefault({1, 1, 1}); // TODO(Chengduo): Add checker. (Currently, // TypedAttrChecker don't support vector type.) 
- AddAttr>("paddings", - "(vector defalut:{0,0,0}), paddings(depth, " - "height, width) of pooling operator.") + AddAttr>( + "paddings", + "(vector defalut:{0,0,0}), paddings(depth, " + "height, width) of pooling operator." + "If globalPooling = true, paddings and ksize will be ignored.") .SetDefault({0, 0, 0}); // TODO(Chengduo): Add checker. (Currently, // TypedAttrChecker don't support vector type.) diff --git a/paddle/operators/pool_with_index_op.h b/paddle/operators/pool_with_index_op.h index 01b961ca8295f..48627740435b7 100644 --- a/paddle/operators/pool_with_index_op.h +++ b/paddle/operators/pool_with_index_op.h @@ -37,6 +37,7 @@ class MaxPoolWithIndexKernel : public framework::OpKernel { std::vector paddings = context.Attr>("paddings"); if (context.Attr("globalPooling")) { for (size_t i = 0; i < ksize.size(); ++i) { + paddings[i] = 0; ksize[i] = static_cast(in_x->dims()[i + 2]); } } @@ -54,6 +55,7 @@ class MaxPoolWithIndexKernel : public framework::OpKernel { pool3d_forward(context.device_context(), *in_x, *out, *mask, ksize, strides, paddings); } break; + default: { PADDLE_THROW("Pool op only supports 2D and 3D input."); } } } }; @@ -72,6 +74,7 @@ class MaxPoolWithIndexGradKernel : public framework::OpKernel { std::vector paddings = context.Attr>("paddings"); if (context.Attr("globalPooling")) { for (size_t i = 0; i < ksize.size(); ++i) { + paddings[i] = 0; ksize[i] = static_cast(in_x_grad->dims()[i + 2]); } } @@ -95,6 +98,7 @@ class MaxPoolWithIndexGradKernel : public framework::OpKernel { pool3d_backward(context.device_context(), *in_x_grad, *out_grad, *mask, ksize, strides, paddings); } break; + default: { PADDLE_THROW("Pool op only supports 2D and 3D input."); } } } } diff --git a/python/paddle/v2/framework/tests/test_pool2d_op.py b/python/paddle/v2/framework/tests/test_pool2d_op.py index f04de8133ad3b..c93469e11994c 100644 --- a/python/paddle/v2/framework/tests/test_pool2d_op.py +++ b/python/paddle/v2/framework/tests/test_pool2d_op.py @@ -49,9 +49,12 @@ def setUp(self): self.init_test_case() self.init_op_type() self.init_pool_type() + if self.global_pool: + self.paddings = [0 for _ in range(len(self.paddings))] input = np.random.random(self.shape).astype("float32") output = self.pool2D_forward_naive(input, self.ksize, self.strides, - self.paddings, self.global_pool) + self.paddings, + self.global_pool).astype("float32") self.inputs = {'X': input} self.attrs = { diff --git a/python/paddle/v2/framework/tests/test_pool3d_op.py b/python/paddle/v2/framework/tests/test_pool3d_op.py index d62fbee9746c5..416f0df7cd27f 100644 --- a/python/paddle/v2/framework/tests/test_pool3d_op.py +++ b/python/paddle/v2/framework/tests/test_pool3d_op.py @@ -54,10 +54,13 @@ def avg_pool3D_forward_naive(x, ksize, strides, paddings=[0, 0], global_pool=0): class TestPool3d_Op(OpTest): def setUp(self): - self.initTestCase() + self.init_test_case() + if self.global_pool: + self.paddings = [0 for _ in range(len(self.paddings))] input = np.random.random(self.shape).astype("float32") output = self.pool3D_forward_naive(input, self.ksize, self.strides, - self.paddings, self.global_pool) + self.paddings, + self.global_pool).astype("float32") self.inputs = {'X': input} self.attrs = { @@ -77,7 +80,7 @@ def test_check_grad(self): if self.pool_type != "max": self.check_grad(set(['X']), 'Out', max_relative_error=0.07) - def initTestCase(self): + def init_test_case(self): self.global_pool = True self.op_type = "pool3d" self.pool_type = "avg" @@ -89,7 +92,7 @@ def initTestCase(self): class TestCase1(TestPool3d_Op): - 
+    def init_test_case(self):
         self.global_pool = False
         self.op_type = "pool3d"
         self.pool_type = "avg"
@@ -101,7 +104,7 @@ def initTestCase(self):
 
 
 class TestCase2(TestPool3d_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = False
         self.op_type = "pool3d"
         self.pool_type = "avg"
@@ -113,7 +116,7 @@ def initTestCase(self):
 
 
 class TestCase3(TestPool3d_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = True
         self.op_type = "pool3d"
         self.pool_type = "max"
@@ -125,7 +128,7 @@ def initTestCase(self):
 
 
 class TestCase4(TestPool3d_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = False
         self.op_type = "pool3d"
         self.pool_type = "max"
@@ -137,7 +140,7 @@ def initTestCase(self):
 
 
 class TestCase5(TestPool3d_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = False
         self.op_type = "pool3d"
         self.pool_type = "max"
diff --git a/python/paddle/v2/framework/tests/test_pool_max_op.py b/python/paddle/v2/framework/tests/test_pool_max_op.py
index f0f8aa6089c74..cc1a867761142 100644
--- a/python/paddle/v2/framework/tests/test_pool_max_op.py
+++ b/python/paddle/v2/framework/tests/test_pool_max_op.py
@@ -3,11 +3,7 @@
 from op_test import OpTest
 
 
-def max_pool3D_forward_naive(x,
-                             ksize,
-                             strides,
-                             paddings=[0, 0, 0],
-                             global_pool=0):
+def max_pool3D_forward_naive(x, ksize, strides, paddings, global_pool=0):
     N, C, D, H, W = x.shape
 
     if global_pool == 1:
@@ -44,7 +40,7 @@ def max_pool3D_forward_naive(x,
     return out, mask
 
 
-def max_pool2D_forward_naive(x, ksize, strides, paddings=[0, 0], global_pool=0):
+def max_pool2D_forward_naive(x, ksize, strides, paddings, global_pool=0):
     N, C, H, W = x.shape
 
     if global_pool == 1:
@@ -77,10 +73,14 @@ def max_pool2D_forward_naive(x, ksize, strides, paddings=[0, 0], global_pool=0):
 
 
 class TestMaxPoolWithIndex_Op(OpTest):
     def setUp(self):
-        self.initTestCase()
+        self.init_test_case()
+        if self.global_pool:
+            self.paddings = [0 for _ in range(len(self.paddings))]
         input = np.random.random(self.shape).astype("float32")
         output, mask = self.pool_forward_naive(input, self.ksize, self.strides,
                                                self.paddings, self.global_pool)
+        output = output.astype("float32")
+        mask = mask.astype("float32")
         self.attrs = {
             'strides': self.strides,
@@ -98,7 +98,7 @@ def test_check_output(self):
 
     # def test_check_grad(self):
     #     self.check_grad(set(['X']), ['Out'], max_relative_error=0.07)
 
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = True
         self.index = "max_pool3d_with_index"
         self.op_type = "%s" % self.index
@@ -110,7 +110,7 @@ def initTestCase(self):
 
 
 class TestCase1(TestMaxPoolWithIndex_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = True
         self.op_type = "max_pool3d_with_index"
         self.pool_forward_naive = max_pool3D_forward_naive
@@ -121,7 +121,7 @@ def initTestCase(self):
 
 
 class TestCase2(TestMaxPoolWithIndex_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = False
         self.op_type = "max_pool3d_with_index"
         self.pool_forward_naive = max_pool3D_forward_naive
@@ -132,7 +132,7 @@ def initTestCase(self):
 
 
 class TestCase3(TestMaxPoolWithIndex_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = False
         self.op_type = "max_pool3d_with_index"
         self.pool_forward_naive = max_pool3D_forward_naive
@@ -143,7 +143,7 @@ def initTestCase(self):
 
 
 class TestCase4(TestMaxPoolWithIndex_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = True
         self.op_type = "max_pool3d_with_index"
         self.pool_forward_naive = max_pool3D_forward_naive
@@ -154,7 +154,7 @@ def initTestCase(self):
 
 
 class TestCase5(TestMaxPoolWithIndex_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = True
         self.op_type = "max_pool3d_with_index"
         self.pool_forward_naive = max_pool3D_forward_naive
@@ -165,7 +165,7 @@ def initTestCase(self):
 
 
 class TestCase6(TestMaxPoolWithIndex_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = False
         self.op_type = "max_pool2d_with_index"
         self.pool_forward_naive = max_pool2D_forward_naive
@@ -176,7 +176,7 @@ def initTestCase(self):
 
 
 class TestCase7(TestMaxPoolWithIndex_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = False
         self.op_type = "max_pool2d_with_index"
         self.pool_forward_naive = max_pool2D_forward_naive
@@ -187,7 +187,7 @@ def initTestCase(self):
 
 
 class TestCase8(TestMaxPoolWithIndex_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = True
         self.op_type = "max_pool2d_with_index"
         self.pool_forward_naive = max_pool2D_forward_naive
@@ -198,7 +198,7 @@ def initTestCase(self):
 
 
 class TestCase9(TestMaxPoolWithIndex_Op):
-    def initTestCase(self):
+    def init_test_case(self):
         self.global_pool = True
         self.op_type = "max_pool2d_with_index"
         self.pool_forward_naive = max_pool2D_forward_naive
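
For reference, the behaviour this patch makes consistent across the C++/cuDNN kernels, InferShape, and the Python tests can be summarized in a small standalone Python sketch. The helper name normalize_pool_attrs below is hypothetical (illustration only, not a Paddle API): with globalPooling = true the effective ksize becomes the full spatial extent of the input and the user-supplied paddings are forced to zero, so each channel is reduced to a single output value.

# Illustrative sketch only -- mirrors the globalPooling handling added in this
# patch; "normalize_pool_attrs" is a hypothetical name, not part of Paddle.
def normalize_pool_attrs(input_shape, ksize, paddings, global_pool):
    """Return the effective ksize/paddings the pooling kernels actually use."""
    if global_pool:
        # ksize covers the whole spatial extent (H, W or D, H, W) ...
        ksize = [int(d) for d in input_shape[2:]]
        # ... and any user-supplied padding is ignored (forced to 0).
        paddings = [0 for _ in range(len(paddings))]
    return ksize, paddings


# Example: 2-D global pooling over an NCHW input of shape (2, 3, 5, 5).
ksize, paddings = normalize_pool_attrs((2, 3, 5, 5), [3, 3], [1, 1], True)
assert ksize == [5, 5] and paddings == [0, 0]  # output is 1 x 1 per channel

Zeroing the paddings on both sides is what keeps the naive NumPy reference implementations in the tests and the C++ kernels in agreement whenever global pooling is requested.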