Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Implemented log_abs_determinant op. #6689

Merged
merged 17 commits into from Dec 17, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Expand Up @@ -19,10 +19,10 @@
//

#include <op_boilerplate.h>
#if NOT_EXCLUDED(OP_matrix_determinant)

#include <ops/declarable/CustomOperations.h>
#include <ops/declarable/helpers/lup.h>

#if NOT_EXCLUDED(OP_matrix_determinant)
namespace nd4j {
namespace ops {
CUSTOM_OP_IMPL(matrix_determinant, 1, 1, false, 0, 0) {
Expand Down Expand Up @@ -60,9 +60,53 @@ namespace nd4j {
// Type constraints for matrix_determinant: the input may be of any data type,
// but the determinant is always produced as a floating-point value.
// (The scrape carried both the removed `->setSameMode(true);` diff line and its
// replacement, which left a dangling `->...` after a terminated statement;
// this is the valid post-diff form.)
DECLARE_TYPES(matrix_determinant) {
getOpDescriptor()
->setAllowedInputTypes(nd4j::DataType::ANY)
->setAllowedOutputTypes({ALL_FLOATS});
}
}
}

#endif
#endif

#if NOT_EXCLUDED(OP_log_matrix_determinant)
namespace nd4j {
namespace ops {
// Type constraints for log_matrix_determinant: any input data type is
// accepted, but the log|det| result is always a floating-point type.
DECLARE_TYPES(log_matrix_determinant) {
getOpDescriptor()
->setAllowedInputTypes(nd4j::DataType::ANY)
->setAllowedOutputTypes({ALL_FLOATS});
}

// log_matrix_determinant: computes log(|det(M)|) for each trailing M x M
// matrix of the input tensor.
//
//   input 0  - tensor of shape (..., M, M); rank >= 2, last two dims equal
//   output 0 - tensor of shape (...) holding log|det| of each M x M matrix
//
// Fixed typos in the validation messages ("should not less than" -> "should
// not be less than", "dimmensions" -> "dimensions").
CUSTOM_OP_IMPL(log_matrix_determinant, 1, 1, false, 0, 0) {
auto input = INPUT_VARIABLE(0);
auto output = OUTPUT_VARIABLE(0);

// validate: at least one 2D matrix, with square trailing dimensions
REQUIRE_TRUE(input->rankOf() >=2, 0, "log_matrix_determinant: The rank of input array should not be less than 2, but %i is given", input->rankOf());
REQUIRE_TRUE(input->sizeAt(-1) == input->sizeAt(-2), 0, "log_matrix_determinant: The last two dimensions should be equal, but %i and %i are given", input->sizeAt(-1), input->sizeAt(-2));

// all numeric work is delegated to the shared LUP-decomposition helper
return helpers::log_abs_determinant(input, output);
}

// Shape function for log_matrix_determinant: the two trailing matrix
// dimensions collapse to a single value, so input (..., M, M) -> output (...).
// NOTE(review): the output shape keeps the INPUT data type via
// ArrayOptions::dataType(inShape), while DECLARE_TYPES restricts outputs to
// ALL_FLOATS — confirm integer inputs are converted upstream.
DECLARE_SHAPE_FN(log_matrix_determinant) {
auto inShape = inputShape->at(0);

Nd4jLong* determinantShape;
int targetRank = shape::rank(inShape) - 2; // last two dimensions will be reduced to scalar

if (targetRank == 0) { // scalar only
determinantShape = ShapeBuilders::createScalarShapeInfo(ArrayOptions::dataType(inShape), block.getWorkspace());
}
else if (targetRank == 1) { // vector
determinantShape = ShapeBuilders::createVectorShapeInfo(ArrayOptions::dataType(inShape), shape::sizeAt(inShape, 0), block.getWorkspace());
}
else { // only two last dimensions are excluded
// build a rank-(N-2) shape buffer from the leading dims, preserving the
// input's memory ordering ('c' vs 'f')
ALLOCATE(determinantShape, block.getWorkspace(), shape::shapeInfoLength(targetRank), Nd4jLong);
if (shape::order(inShape) == 'c')
shape::shapeBuffer(targetRank, ArrayOptions::dataType(inShape), shape::shapeOf(inShape), determinantShape);
else
shape::shapeBufferFortran(targetRank, ArrayOptions::dataType(inShape), shape::shapeOf(inShape), determinantShape);
}
return SHAPELIST(determinantShape);
}
}
}
#endif
15 changes: 15 additions & 0 deletions libnd4j/include/ops/declarable/headers/parity_ops.h
Expand Up @@ -874,6 +874,21 @@ namespace nd4j {
DECLARE_CUSTOM_OP(matrix_determinant, 1, 1, false, 0, 0);
#endif

/**
 * log_matrix_determinant op.
 *
 * input params:
 *    0 - the tensor with dimension (x * y * z * ::: * M * M)
 *
 * return value:
 *    tensor with dimension (x * y * z * ::: *) with the log of the absolute
 *    determinant for all M x M matrices
 */

#if NOT_EXCLUDED(OP_log_matrix_determinant)
DECLARE_CUSTOM_OP(log_matrix_determinant, 1, 1, false, 0, 0);
#endif

/**
* matrix_inverse op. - make inverse for all 2D square matrices found in the input tensor
*
Expand Down
36 changes: 29 additions & 7 deletions libnd4j/include/ops/declarable/helpers/cpu/lup.cpp
Expand Up @@ -111,7 +111,7 @@ namespace helpers {
const int rowNum = input->rows();
const int columnNum = input->columns();

T determinant = (T)1.0;
NDArray determinant = NDArrayFactory::create<T>(1.f);
std::unique_ptr<NDArray> compoundMatrix(input->dup()); // copy
std::unique_ptr<NDArray> permutationMatrix(input->dupUninitialized()); //put identity
permutationMatrix->setIdentity();
Expand All @@ -138,7 +138,6 @@ namespace helpers {
swapCount++;

for( int j = i + 1; j < rowNum; j++ ) {

compoundMatrix->p(j, i, compoundMatrix->e<T>(j, i) / compoundMatrix->e<T>(i, i));
for( int k = i + 1; k < rowNum; k++ ) {
T arg = compoundMatrix->e<T>(j, i) * compoundMatrix->e<T>(i, k);
Expand All @@ -159,9 +158,7 @@ namespace helpers {
*compound = *compoundMatrix;
if (permutation != nullptr)
*permutation = *permutationMatrix;


return NDArrayFactory::create<T>(determinant, input->getWorkspace());
return determinant;
}

BUILD_SINGLE_TEMPLATE(template NDArray _lup, (NDArray* input, NDArray* output, NDArray* permutation), FLOAT_TYPES);
Expand All @@ -174,7 +171,7 @@ namespace helpers {
Nd4jLong n = input->sizeAt(-1);
Nd4jLong n2 = n * n;

auto matrix = NDArrayFactory::create('c', {n, n}, input->dataType(), input->getWorkspace()); //, block.getWorkspace());
auto matrix = NDArrayFactory::create(input->ordering(), {n, n}, input->dataType(), input->getWorkspace()); //, block.getWorkspace());
//#pragma omp parallel for if(output->lengthOf() > Environment::getInstance()->elementwiseThreshold()) schedule(static)
for (int e = 0; e < output->lengthOf(); e++) {
for (int k = e * n2, row = 0; k < (e + 1) * n2; ++k, ++row) {
Expand All @@ -193,6 +190,31 @@ namespace helpers {
BUILD_SINGLE_SELECTOR(input->dataType(), return _determinant, (input, output), FLOAT_TYPES);
}

// Computes log(|det|) for every trailing n x n matrix of `input`, writing one
// scalar per matrix into `output`.
//
//   input  - tensor viewed as a batch of n x n matrices (last two dims)
//   output - pre-allocated tensor with one element per matrix
//   returns ND4J_STATUS_OK
template <typename T>
int log_abs_determinant_(NDArray* input, NDArray* output) {

    Nd4jLong n = input->sizeAt(-1);
    Nd4jLong n2 = n * n;

    // scratch matrix reused for every batch element
    NDArray matrix = NDArrayFactory::create(input->ordering(), {n, n}, input->dataType(), input->getWorkspace());
    for (int e = 0; e < output->lengthOf(); e++) {
        // copy the e-th n x n slice into the scratch matrix
        for (int k = e * n2, row = 0; k < (e + 1) * n2; ++k, ++row) {
            matrix.p(row, input->e<T>(k));
        }
        // LUP decomposition yields the determinant as a scalar NDArray
        NDArray det = _lup<T>(&matrix, (NDArray*)nullptr, (NDArray*)nullptr);
        // Write log|det| unconditionally: for a singular matrix |det| == 0 and
        // IEEE log(0) yields -inf, which is the correct result. The previous
        // guard `if (det != 0)` skipped the write and left the output element
        // uninitialized for singular matrices.
        output->p(e, nd4j::math::nd4j_log<T,T>(nd4j::math::nd4j_abs(det.t<T>(0))));
    }

    return ND4J_STATUS_OK;
}

BUILD_SINGLE_TEMPLATE(template int log_abs_determinant_, (NDArray* input, NDArray* output), FLOAT_TYPES);

// Dispatches on the input data type; only floating-point types are supported.
int log_abs_determinant(NDArray* input, NDArray* output) {
    BUILD_SINGLE_SELECTOR(input->dataType(), return log_abs_determinant_, (input, output), FLOAT_TYPES);
}

template <typename T>
static int _inverse(NDArray* input, NDArray* output) {
Expand Down Expand Up @@ -302,4 +324,4 @@ namespace helpers {

}
}
}
}
1 change: 1 addition & 0 deletions libnd4j/include/ops/declarable/helpers/lup.h
Expand Up @@ -30,6 +30,7 @@ namespace helpers {
T lup(NDArray* input, NDArray* compound, NDArray* permutation);

int determinant(NDArray* input, NDArray* output);
int log_abs_determinant(NDArray* input, NDArray* output);

int inverse(NDArray* input, NDArray* output);

Expand Down
21 changes: 21 additions & 0 deletions libnd4j/tests_cpu/layers_tests/DeclarableOpsTests6.cpp
Expand Up @@ -836,6 +836,27 @@ TEST_F(DeclarableOpsTests6, MatrixDeterminant_6) {
delete result;
}

////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
// log_matrix_determinant on a batch of two 3x3 diagonal matrices:
// log|(-3)*4*(-3)| = log(36) and log|4*(-3)*4| = log(48).
TEST_F(DeclarableOpsTests6, LogMatrixDeterminant_1) {
    auto input = NDArrayFactory::create<double>('c', {2, 3, 3}, {-3.0, 0.0, 0.0, 0.0, 4.0, 0.0, 0.0, 0.0, -3.0, 4.0, 0.0, 0.0, 0.0, -3.0, 0.0, 0.0, 0.0, 4.0});
    auto expected = NDArrayFactory::create<double>({3.58351893845611, 3.871201010907891});

    nd4j::ops::log_matrix_determinant op;
    auto results = op.execute({&input}, {}, {});
    ASSERT_EQ(ND4J_STATUS_OK, results->status());

    auto output = results->at(0);
    ASSERT_TRUE(expected.isSameShape(output));
    ASSERT_TRUE(expected.equalsTo(output));

    delete results;
}

////////////////////////////////////////////////////////////////////////////////
TEST_F(DeclarableOpsTests6, MatrixInverse_1) {

Expand Down