Shugeo determinant fix (#6110)
* Shugeo dynamic bp (#6106)

* Added dynamic_partition_bp op and helpers. Initial revision.

* Implementation of dynamic_partition_bp and test.

* Fixed implementation bugs.

* Minor fixes for dynamic_partition_bp and its test.

* Modified dynamic_partition_bp op and tests.

* Modified dynamic_stitch invocation with dynamic_partition_bp op.

* Tried another implementation approach.

* Final version of dynamic_partition_bp op.

* Refactored revision of dynamic_partition_bp op.

* The first working revision.

* Fixed LUP determinant calculation.

* Removed leftover debug printfs.

* Eliminated some debug messages and unneeded tests.
shugeo authored and raver119 committed Aug 10, 2018
1 parent 16e7c80 commit 1654c01
Showing 4 changed files with 74 additions and 44 deletions.
7 changes: 4 additions & 3 deletions libnd4j/include/ops/declarable/helpers/cpu/lup.cpp
@@ -121,7 +121,9 @@ namespace helpers {
if( pivotValue != T(0.0) ) {
swapRows(compoundMatrix.get(), pivot, i);
swapRows(permutationMatrix.get(), pivot, i);
- swapCount++;
+ if (pivot != i)
+     swapCount++;

for( int j = i + 1; j < rowNum; j++ ) {
(*compoundMatrix)(j, i) /= (*compoundMatrix)(i, i);
for( int k = i + 1; k < rowNum; k++ ) {
@@ -138,8 +140,7 @@ namespace helpers {
// nd4j_printf("Compound matrix diag %i %f.\n", e, (*compoundMatrix)(e, e));
determinant *= (*compoundMatrix)(e, e);
}
- if (0 == swapCount % 2) determinant = -determinant;
+ if (swapCount % 2) determinant = -determinant;
if (compound != nullptr)
*compound = *compoundMatrix;
if (permutation != nullptr)
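With partial pivoting the factorization is P*A = L*U, so det(A) = (-1)^swapCount * prod(U(i,i)), where swapCount counts only the row exchanges that actually happened. The previous code incremented the counter even when the pivot row was already in place and then negated the product on an even count; the fix guards the increment with pivot != i and negates on an odd count. Below is a minimal standalone sketch of the corrected rule (plain std::vector matrices and a made-up helper name, not the nd4j helper itself), checked against the 3x3 input used in MatrixDeterminant_3 further down, whose determinant is -54.

#include <cstdio>
#include <cmath>
#include <utility>
#include <vector>

// Illustration only: LU with partial pivoting, accumulating the determinant
// sign from the number of row swaps that were actually performed.
static double lupDeterminant(std::vector<std::vector<double>> a) {
    const int n = static_cast<int>(a.size());
    int swapCount = 0;
    for (int i = 0; i < n; ++i) {
        // choose the row with the largest |value| in column i as the pivot
        int pivot = i;
        for (int r = i + 1; r < n; ++r)
            if (std::fabs(a[r][i]) > std::fabs(a[pivot][i])) pivot = r;
        if (a[pivot][i] == 0.0) return 0.0;      // singular matrix
        if (pivot != i) {                        // count only real exchanges
            std::swap(a[pivot], a[i]);
            ++swapCount;
        }
        for (int j = i + 1; j < n; ++j) {
            a[j][i] /= a[i][i];
            for (int k = i + 1; k < n; ++k)
                a[j][k] -= a[j][i] * a[i][k];
        }
    }
    double determinant = 1.0;
    for (int e = 0; e < n; ++e) determinant *= a[e][e];
    if (swapCount % 2) determinant = -determinant;   // odd swap count flips the sign
    return determinant;
}

int main() {
    // same matrix as MatrixDeterminant_3; prints det = -54
    std::vector<std::vector<double>> x = {{3, 1, 2}, {3, 4, 5}, {6, 7, 3}};
    std::printf("det = %f\n", lupDeterminant(x));
    return 0;
}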
66 changes: 66 additions & 0 deletions libnd4j/tests_cpu/layers_tests/DeclarableOpsTests6.cpp
@@ -802,6 +802,72 @@ TEST_F(DeclarableOpsTests6, MatrixDeterminant_2) {
delete result;
}

////////////////////////////////////////////////////////////////////////////////
TEST_F(DeclarableOpsTests6, MatrixDeterminant_3) {

NDArray<double> x('c', {1, 3, 3}, {3.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 3.0});
NDArray<double> exp({-54.0});

nd4j::ops::matrix_determinant<double> op;
auto result = op.execute({&x}, {}, {});

ASSERT_EQ(ND4J_STATUS_OK, result->status());

auto z = result->at(0);
//z->printIndexedBuffer("Output ");
//exp.printIndexedBuffer("Expected ");

ASSERT_TRUE(exp.isSameShape(z));
ASSERT_TRUE(exp.equalsTo(z));

delete result;
}
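// Sanity check: cofactor expansion along the first row of the matrix above gives
// 3*(4*3 - 5*7) - 1*(3*3 - 5*6) + 2*(3*7 - 4*6) = -69 + 21 - 6 = -54.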

////////////////////////////////////////////////////////////////////////////////
TEST_F(DeclarableOpsTests6, MatrixDeterminant_4) {

NDArray<double> x('c', {1, 3, 3}, {12.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 13.0});
NDArray<double> exp({189.0});

nd4j::ops::matrix_determinant<double> op;
auto result = op.execute({&x}, {}, {});

ASSERT_EQ(ND4J_STATUS_OK, result->status());

auto z = result->at(0);
//z->printIndexedBuffer("Output ");
//exp.printIndexedBuffer("Expected ");

ASSERT_TRUE(exp.isSameShape(z));
ASSERT_TRUE(exp.equalsTo(z));

delete result;
}
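// Sanity check: 12*(4*13 - 5*7) - 1*(3*13 - 5*6) + 2*(3*7 - 4*6) = 204 - 9 - 6 = 189.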

////////////////////////////////////////////////////////////////////////////////
TEST_F(DeclarableOpsTests6, MatrixDeterminant_5) {

NDArray<double> x('c', {1, 4, 4});
NDArray<double> exp({-16.0});
x.linspace(1);
x(5) = 4.0;
x(12) = 12.0;

nd4j::ops::matrix_determinant<double> op;
auto result = op.execute({&x}, {}, {});

ASSERT_EQ(ND4J_STATUS_OK, result->status());

auto z = result->at(0);
//z->printIndexedBuffer("Output ");
//exp.printIndexedBuffer("Expected ");

ASSERT_TRUE(exp.isSameShape(z));
ASSERT_TRUE(exp.equalsTo(z));

delete result;
}
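// Sanity check: with c-order (row-major) flat indexing, x(5) is element [1][1] and x(12) is [3][0],
// so the rows are {1,2,3,4}, {5,4,7,8}, {9,10,11,12}, {12,14,15,16}; Gaussian elimination gives det = -16.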

////////////////////////////////////////////////////////////////////////////////
TEST_F(DeclarableOpsTests6, MatrixInverse_1) {

4 changes: 2 additions & 2 deletions libnd4j/tests_cpu/layers_tests/DeclarableOpsTests8.cpp
@@ -2998,7 +2998,7 @@ NDArray<double> exp('c', {3,3,5,5}, {

ASSERT_EQ(Status::OK(), results->status());
// ASSERT_TRUE(exp.isSameShape(out));
out->printIndexedBuffer("LRN BP out");
// out->printIndexedBuffer("LRN BP out");
// exp.printIndexedBuffer("LRN exp");
ASSERT_TRUE(exp.equalsTo(out));

@@ -3024,7 +3024,7 @@ TEST_F(DeclarableOpsTests8, LrnTest_BP_2) {

ASSERT_EQ(Status::OK(), results->status());
// ASSERT_TRUE(exp.isSameShape(out));
out->printIndexedBuffer("LRN BP out");
// out->printIndexedBuffer("LRN BP out");
// exp.printIndexedBuffer("LRN exp");
// ASSERT_TRUE(exp.equalsTo(out));

41 changes: 2 additions & 39 deletions libnd4j/tests_cpu/layers_tests/DeclarableOpsTests9.cpp
@@ -1246,8 +1246,8 @@ TEST_F(DeclarableOpsTests9, cumprod_bp_check_1) {

x.linspace(1);

- const OpArgsHolder<double> argsHolderFF({&x}, {}, {});
- const OpArgsHolder<double> argsHolderBP({&x, &gradO}, {}, {});
+ const OpArgsHolder<double> argsHolderFF({&x}, {}, {0, 0});
+ const OpArgsHolder<double> argsHolderBP({&x, &gradO}, {}, {0, 0});

nd4j::ops::cumprod<double> opFF;
nd4j::ops::cumprod_bp<double> opBP;
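The two integer arguments added here ({0, 0}) are the exclusive and reverse flags of cumprod, the same pair that the cumprod_test3 test removed further down passes as {exclusive, reverse}; both are 0, i.e. a plain forward cumulative product. For a row [1, 2, 3, 4, 5], cumprod gives [1, 2, 6, 24, 120]; with exclusive=1 it gives [1, 1, 2, 6, 24]; with reverse=1 it gives [120, 120, 60, 20, 5], the values visible in that test's commented-out expFF/expTF/expFT arrays.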
@@ -1420,37 +1420,6 @@ TEST_F(DeclarableOpsTests9, cumprod_test2) {
ASSERT_TRUE(isGradCorrect);
}

////////////////////////////////////////////////////////////////////////////////
TEST_F(DeclarableOpsTests9, cumprod_test3) {

NDArray<double> inputC('c', {2, 2});
NDArray<double> axis(1.);

// NDArray<double> expFF('c', {3, 5}, {1., 2., 6., 24., 120., 6., 42., 336., 3024., 30240.,11., 132.,1716., 24024.,360360.});
// NDArray<double> expTF('c', {3, 5}, {1, 1, 2, 6, 24,1, 6, 42, 336, 3024,1, 11, 132, 1716, 24024});

// NDArray<double> expFT('c', {3, 5}, {120, 120, 60, 20, 5,30240, 5040, 720, 90, 10,360360, 32760, 2730, 210, 15}); //+++
// NDArray<double> expTT('c', {3, 5}, {120, 60, 20, 5, 1,5040, 720, 90, 10, 1,32760, 2730, 210, 15, 1});
NDArray<double> gradO('c', {2, 2});

int exclusive, reverse;

//************************************//
exclusive = 0; reverse = 0;
inputC.linspace(1);
// const OpArgsHolder<double> argsHolderFF({&inputC, &axis}, {}, {exclusive, reverse});
// const OpArgsHolder<double> argsHolderBP({&inputC, &axis, &gradO}, {}, {exclusive, reverse});

nd4j::ops::cumprod<double> opFF;
// nd4j::ops::cumprod_bp<double> opBP;
auto res = opFF.execute({&inputC, &axis}, {}, {exclusive, reverse});
// const bool isGradCorrect = GradCheck::checkGrad(opFF, opBP, argsHolderFF, argsHolderBP);
ASSERT_TRUE(res->status() == ND4J_STATUS_OK);
res->at(0)->printIndexedBuffer("Cumulative product of 4 ints");
// ASSERT_TRUE(isGradCorrect);
delete res;
}

////////////////////////////////////////////////////////////////////////////////
TEST_F(DeclarableOpsTests9, prelu_test1) {

@@ -2154,17 +2123,11 @@ TEST_F(DeclarableOpsTests9, Dynamic_Partition_BP_1) {

nd4j::ops::dynamic_partition<double> op1;
auto res1 = op1.execute({&x, &y}, {}, {3});
for (size_t e = 0; e < res1->size(); ++e) {
res1->at(e)->printIndexedBuffer("RES1");
res1->at(e)->printShapeInfo("RES1");
}

nd4j::ops::dynamic_partition_bp<double> op2;
auto res2 = op2.execute({&x, &y, res1->at(0), res1->at(1), res1->at(2)}, {}, {3});
ASSERT_TRUE(res2->status() == ND4J_STATUS_OK);
ASSERT_TRUE(res2->size() == 2);
res2->at(0)->printIndexedBuffer("PARTITION");
res2->at(1)->printIndexedBuffer("INDICES");
delete res1;
delete res2;
}
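dynamic_partition splits the input x into several sub-arrays according to the index array y, so its backward pass has to route each partition's upstream gradient back to the position the forward pass took it from; the op also returns a second output for the index input, which is what the size-2 assertion above checks. The following is a rough 1-D illustration of the value-gradient part (a toy loop with made-up data, not the nd4j implementation):

#include <cstdio>
#include <vector>

int main() {
    // hypothetical toy data: 5 input elements routed into 3 partitions
    std::vector<double> x = {1, 2, 3, 4, 5};        // forward input (values themselves unused here)
    std::vector<int>    y = {0, 2, 1, 0, 2};        // partition index for each element

    // pretend upstream gradients for the three partition outputs
    std::vector<std::vector<double>> gradOut = {{10, 40}, {30}, {20, 50}};

    // backward: walk the input in order and take the next unconsumed gradient
    // from the partition this element was sent to in the forward pass
    std::vector<double> gradIn(x.size(), 0.0);
    std::vector<size_t> cursor(gradOut.size(), 0);
    for (size_t e = 0; e < x.size(); ++e)
        gradIn[e] = gradOut[y[e]][cursor[y[e]]++];

    for (double g : gradIn) std::printf("%g ", g);  // prints: 10 20 30 40 50
    std::printf("\n");
    return 0;
}

In effect this is the dynamic_stitch pattern referred to in the commit message ("Modified dynamic_stitch invocation with dynamic_partition_bp op").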
