Shugeo pad fix (#6191)
* Added a constant pad value for constant padding with the pad op.

* Fixed the constant and linear cases for the pad op.

* Fixed 1D padding.
shugeo authored and raver119 committed Aug 23, 2018
1 parent 6c9b7e6 commit c0bc6c7
Showing 4 changed files with 77 additions and 30 deletions.
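
For orientation, here is a minimal usage sketch of the constant-pad path this commit enables, modeled on the new Pad_SGO_Test_1 test added below; the header paths, the using-directive, and the main() scaffolding are assumptions for illustration, not part of the commit:

// Sketch only: header paths and scaffolding are assumed, not taken from this commit.
#include <NDArray.h>
#include <ops/declarable/CustomOperations.h>

using namespace nd4j;

int main() {
    // 1-D input of five ones; a {1, 2} paddings array requests one extra element on each side.
    NDArray<double> in({1., 1., 1., 1., 1.});
    NDArray<double> pad('c', {1, 2}, {1., 1.});

    nd4j::ops::pad<double> op;
    // Integer arg 0 selects CONSTANT mode; the scalar T-argument (10.0) is the new pad value.
    auto res = op.execute({&in, &pad}, {10.0}, {0});
    // Per the test below, the result should be {10., 1., 1., 1., 1., 1., 10.}.
    res->at(0)->printIndexedBuffer("padded");
    delete res;
    return 0;
}
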
14 changes: 9 additions & 5 deletions libnd4j/include/ops/declarable/generic/transforms/pad.cpp
@@ -42,10 +42,15 @@ CUSTOM_OP_IMPL(pad, 2, 1, false, 0, 1) {
// input validation
std::string expectedPaddingsShape = ShapeUtils<T>::shapeAsString({rank, 2});
std::string currentPaddingsShape = ShapeUtils<T>::shapeAsString(paddings);
REQUIRE_TRUE(expectedPaddingsShape == currentPaddingsShape, 0, "PAD op: wrong shape of paddings array, expected is %s, but got %s instead !", expectedPaddingsShape.c_str(), currentPaddingsShape.c_str());
T padValue = T(0.f);
// in case of REFLECT and SYMMETRIC modes paddings must obey additional shape requirements
// REFLECT case
if(argI->at(0) == 1)
if (argI->at(0) == 0) { // CONSTANT mode
if (!block.getTArguments()->empty())
padValue = T_ARG(0);
}
else if(argI->at(0) == 1)
for(int dim=0; dim < rank; ++dim)
REQUIRE_TRUE((*paddings)(dim,0) <= (input->shapeOf()[dim]-1) && (*paddings)(dim,1) <= (input->shapeOf()[dim]-1), 0, "PAD op: wrong content of paddings array for REFLECT mode !");
// SYMMETRIC case
@@ -55,11 +60,10 @@ CUSTOM_OP_IMPL(pad, 2, 1, false, 0, 1) {
// CONSTANT->0, REFLECT->1, SYMMETRIC->2
REQUIRE_TRUE(!(argI->at(0) < 0 || argI->at(0) > 2), 0, "PAD op: unknown padding mode, there are only three possible legal values -> 0,1,2, but got %i instead !", argI->at(0));


std::vector<int> dimensions(input->rankOf());
std::iota(dimensions.begin(), dimensions.end(), 0); // fill with 0, 1, ... rank-1

helpers::recursiveLoopForPad<T>(argI->at(0), *input, *paddings, *output, dimensions, 0, 0, 0);
helpers::recursiveLoopForPad<T>(argI->at(0), *input, *paddings, *output, dimensions, 0, 0, 0, padValue);

return Status::OK();
}
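
As the validation above requires, the paddings input must have shape {rank, 2}: row d carries the {left, right} amounts for dimension d, and the single integer argument selects the mode (CONSTANT->0, REFLECT->1, SYMMETRIC->2). A hedged rank-2 illustration, not from the commit; the values and variable names are made up for the example:

// Illustration only: a rank-2 input needs a {2, 2} paddings array.
NDArray<double> in2d('c', {2, 3}, {1., 2., 3., 4., 5., 6.});
NDArray<double> paddings('c', {2, 2}, {1., 1., 2., 2.});   // one row above/below, two columns left/right

nd4j::ops::pad<double> op;
auto res = op.execute({&in2d, &paddings}, {0.5}, {0});      // CONSTANT mode with pad value 0.5
// The padded result should have shape {2 + 1 + 1, 3 + 2 + 2} = {4, 7}.
delete res;
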
73 changes: 49 additions & 24 deletions libnd4j/include/ops/declarable/helpers/cpu/transforms.cpp
@@ -187,7 +187,7 @@ void randomShuffle(NDArray<T>& input, NDArray<T>& output, nd4j::random::RandomBu
////////////////////////////////////////////////////////////////////////
// initial values of inIdx, outIdx, dim must be equal to zero
template<typename T>
void recursiveLoopForPad(const int mode, NDArray<T>& input, const NDArray<T>& paddings, NDArray<T>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx ) {
void recursiveLoopForPad(const int mode, NDArray<T>& input, const NDArray<T>& paddings, NDArray<T>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx, T padValue ) {

int leftOffset;
// dimensions are array of input dimensions, it is sorted by increasing order
@@ -206,33 +206,42 @@ void recursiveLoopForPad(const int mode, NDArray<T>& input, const NDArray<T>& pa
tadIn.createOffsets();
NDArray<T> subArrIn(input.getBuffer(), tadIn.tadOnlyShapeInfo, output.getWorkspace());
// these indices take into account recursion and always point to actual tads numbers
outIdx = outIdx*output.sizeAt(dim+1);
inIdx = inIdx*input.sizeAt(dim+1);
if (input.rankOf() > 1 && output.rankOf() > 1) {// only for non-vector cases
outIdx = outIdx * output.sizeAt(dim + 1);
inIdx = inIdx * input.sizeAt(dim + 1);
}
// current input tad number, we add to it unity in a loop
int k = -1;
// loop through current dimension
for(int i = 0; i < output.sizeAt(dim); ++i) {
// corresponds to outer range (relevant indices are absent in input)
if(i < (int)paddings(dim,0) || i >= (input.sizeAt(dim) + (int)paddings(dim,0)))
leftOffset = (int)paddings(dim, 0);
if(i < leftOffset || i >= (input.sizeAt(dim) + leftOffset))
continue;

// increase input tads number
++k;
// recursion condition allows for the fact that tad can't reduce to scalar
if(dim < input.rankOf()-2)
recursiveLoopForPad(mode, input, paddings, output, dimensions, dim+1, inIdx + k, outIdx + i);
else {
if(dim < input.rankOf() - 2)
recursiveLoopForPad(mode, input, paddings, output, dimensions, dim + 1, inIdx + k, outIdx + i, padValue);
else if (paddings.sizeAt(0) > dim + 1){
leftOffset = (int)paddings(dim + 1, 0);
// shift buffers pointers to actual element position
subArrOut.setBuffer(output.getBuffer() + tadOut.tadOffsets[outIdx + i]);
subArrIn.setBuffer (input.getBuffer() + tadIn.tadOffsets[inIdx + i - (int)paddings(dim,0)]);
leftOffset = (int)paddings(dim+1,0);
if (output.rankOf() > 1) {
subArrOut.setBuffer(output.getBuffer() + tadOut.tadOffsets[outIdx + i]);
subArrIn.setBuffer(input.getBuffer() + tadIn.tadOffsets[inIdx + i - (int) paddings(dim, 0)]);
}
else {
subArrOut(i) = subArrIn(i - leftOffset);
}
// most inner loop, corresponds to last dim = rank-1
switch (mode) {
case 0: // CONSTANT mode
for(int j = 0; j < subArrOut.lengthOf(); ++j)
if(j < leftOffset || j >= (subArrIn.lengthOf() + leftOffset) ) // firstly fill with zeros outer ranges
subArrOut.putIndexedScalar(j, (T)0.);
else
subArrOut.putIndexedScalar(j, subArrIn.getIndexedScalar(j - leftOffset)); // fill middle with elements of input array
if(j < leftOffset || j >= (subArrIn.lengthOf() + leftOffset) ) // firstly fill with zeros outer ranges
subArrOut(j) = (T)0.;
else
subArrOut(j) = subArrIn(j - leftOffset); // fill middle with elements of input array
break;

case 1: // REFLECT mode
@@ -253,21 +262,37 @@ void recursiveLoopForPad(const int mode, NDArray<T>& input, const NDArray<T>& pa
subArrOut.putIndexedScalar(j, subArrIn.getIndexedScalar(subArrOut.lengthOf() - j));
break;
}
}
else {

if (mode == 0 && input.rankOf() < 2)
subArrOut(i) = subArrIn(i - leftOffset); // fill middle with elements of input array
}
}

// populate sub-array formed previously
leftOffset = (int)paddings(dim,0);
switch (mode) {
case 0: // CONSTANT mode
for(int j = 1; j <= leftOffset; ++j) { // fill left side with zeros
subArrOut.setBuffer(output.getBuffer() + tadOut.tadOffsets[outIdx + leftOffset - j]);
subArrOut.assign((T)0.);
for(int j = 1; j <= leftOffset; ++j) {
// fill left side with padValue
if (output.rankOf() > 1) {
subArrOut.setBuffer(output.getBuffer() + tadOut.tadOffsets[outIdx + leftOffset - j]);
subArrOut.assign(padValue);
}
else {
subArrOut(j - 1) = padValue;
}
}
// output.printIndexedBuffer("Output at");
for(int j = (output.sizeAt(dim) - leftOffset); j < output.sizeAt(dim); ++j) { // fill right side with padValue
subArrOut.setBuffer(output.getBuffer() + tadOut.tadOffsets[outIdx + j]);
subArrOut.assign((T)0.);
}
if (output.rankOf() > 1) {
subArrOut.setBuffer(output.getBuffer() + tadOut.tadOffsets[outIdx + j]);
subArrOut.assign(padValue);
}
else {
subArrOut(j) = padValue;
}
}
break;

case 1: // REFLECT mode
@@ -981,9 +1006,9 @@ template void randomShuffle<float>(NDArray<float>& input, NDArray<float>& output
template void randomShuffle<float16>(NDArray<float16>& input, NDArray<float16>& output, nd4j::random::RandomBuffer& rng, const bool isInplace);
template void randomShuffle<double>(NDArray<double>& input, NDArray<double>& output, nd4j::random::RandomBuffer& rng, const bool isInplace);

template void recursiveLoopForPad<float>(const int mode, NDArray<float>& input, const NDArray<float>& paddings, NDArray<float>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx);
template void recursiveLoopForPad<float16>(const int mode, NDArray<float16>& input, const NDArray<float16>& paddings, NDArray<float16>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx);
template void recursiveLoopForPad<double>(const int mode, NDArray<double>& input, const NDArray<double>& paddings, NDArray<double>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx);
template void recursiveLoopForPad<float>(const int mode, NDArray<float>& input, const NDArray<float>& paddings, NDArray<float>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx, float padValue);
template void recursiveLoopForPad<float16>(const int mode, NDArray<float16>& input, const NDArray<float16>& paddings, NDArray<float16>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx, float16 padValue);
template void recursiveLoopForPad<double>(const int mode, NDArray<double>& input, const NDArray<double>& paddings, NDArray<double>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx, double padValue);

template void invertPermutation<float>(const NDArray<float>& input, NDArray<float>& output);
template void invertPermutation<float16>(const NDArray<float16>& input, NDArray<float16>& output);
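
To summarize what the new rank-1 branches in recursiveLoopForPad compute for CONSTANT mode: the left paddings(0,0) slots and the right paddings(0,1) slots receive padValue, and the middle copies the input. A simplified stand-alone sketch of that behavior, not the actual recursive helper:

// Simplified illustration of the 1-D CONSTANT case; not the helper itself.
#include <vector>

std::vector<double> pad1dConstant(const std::vector<double>& in, int left, int right, double padValue) {
    std::vector<double> out(in.size() + left + right, padValue);  // pre-fill everything with padValue
    for (size_t i = 0; i < in.size(); ++i)
        out[left + i] = in[i];                                    // copy the input into the middle
    return out;
}
// pad1dConstant({1, 1, 1, 1, 1}, 1, 1, 10.0) yields {10, 1, 1, 1, 1, 1, 10},
// matching the new Pad_SGO_Test_1 expectation below.
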
2 changes: 1 addition & 1 deletion libnd4j/include/ops/declarable/helpers/transforms.h
@@ -42,7 +42,7 @@ namespace helpers {

// auxiliary function which serves for recursion purpose and is used in pad operation
template<typename T>
void recursiveLoopForPad(const int mode, NDArray<T>& input, const NDArray<T>& paddings, NDArray<T>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx);
void recursiveLoopForPad(const int mode, NDArray<T>& input, const NDArray<T>& paddings, NDArray<T>& output, std::vector<int> dimensions, int dim, int inIdx, int outIdx, T padValue);

template<typename T>
void invertPermutation(const NDArray<T>& input, NDArray<T>& output);
18 changes: 18 additions & 0 deletions libnd4j/tests_cpu/layers_tests/DeclarableOpsTests10.cpp
@@ -133,6 +133,24 @@ TEST_F(DeclarableOpsTests10, Test_Size_at_1) {
delete result;
}

////////////////////////////////////////////////////////////////////////////////
TEST_F(DeclarableOpsTests10, Pad_SGO_Test_1) {

NDArray<double> in({1., 1., 1., 1., 1.});
// NDArray<double> pad('c', {1, 2}, {1., 1.});// = Nd4j.create(new double[]{1, 1}, new long[]{1, 2});
NDArray<double> pad('c', {1, 2}, {1., 1.});
// NDArray<double> value(10.0);

NDArray<double> exp({10., 1., 1., 1., 1., 1., 10.});

nd4j::ops::pad<double> op;

auto res = op.execute({&in, &pad}, {10.0}, {0});
ASSERT_EQ(res->status(), ND4J_STATUS_OK);
ASSERT_TRUE(exp.equalsTo(res->at(0)));
delete res;
}
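
Since pad.cpp initializes padValue to T(0.f) and only overrides it when a T-argument is present, omitting the scalar argument should fall back to zero padding. A hypothetical companion check in the same style, not part of this commit:

// Hypothetical variant (not in this commit): empty T-args leave padValue at its T(0.f) default.
TEST_F(DeclarableOpsTests10, Pad_SGO_Test_1_default_value_sketch) {

    NDArray<double> in({1., 1., 1., 1., 1.});
    NDArray<double> pad('c', {1, 2}, {1., 1.});
    NDArray<double> exp({0., 1., 1., 1., 1., 1., 0.});

    nd4j::ops::pad<double> op;

    auto res = op.execute({&in, &pad}, {}, {0});   // no pad value supplied -> constant padding with zeros
    ASSERT_EQ(res->status(), ND4J_STATUS_OK);
    ASSERT_TRUE(exp.equalsTo(res->at(0)));
    delete res;
}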

////////////////////////////////////////////////////////////////////////////////
TEST_F(DeclarableOpsTests10, Unique_SGO_Test_1) {
NDArray<double> input({3., 4., 3., 1., 3., 0., 2., 4., 2., 4.});
