
Add a unit test for softmax
prigoyal committed Feb 24, 2018
1 parent 29861a5 commit 7c9b722
Showing 1 changed file with 80 additions and 0 deletions.

test/test_execution_engine.cc (+80 −0)
@@ -45,6 +45,86 @@ struct ATenCompilationUnitTest : public ::testing::Test {
}
};
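
// All four softmax variants below compute a numerically stable softmax over
// each row of a 32x16 CUDA tensor:
//
//   maxVal(n) = max over d of I(n, d)
//   expsum(n) = sum over d of exp(I(n, d) - maxVal(n))
//   O(n, d)   = exp(I(n, d) - maxVal(n)) / expsum(n)
//
// Subtracting the per-row max before exponentiating avoids overflow in exp;
// the variants differ only in how they stage the two reductions.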

TEST_F(ATenCompilationUnitTest, DISABLED_SoftmaxA) {
  at::Tensor a = at::CUDA(at::kFloat).rand({32, 16});
  std::vector<at::Tensor> inputs = {a};
  std::vector<at::Tensor> outputs;

  // Tensor dependencies must strictly form a DAG.
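  // Here tmp is written by max=, read to compute O, then rewritten by +=!;
  // this write-read-write chain on a single temporary appears to violate the
  // DAG requirement, which is presumably why the test is DISABLED.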
  Check(
      R"(
def softmax(float(N, D) I) -> (O, tmp) {
tmp(n) max= I(n, d)
O(n, d) = exp(I(n, d) - tmp(n))
tmp(n) +=! O(n, d)
O(n, d) = O(n, d) / tmp(n)
}
)",
"softmax",
tc::MappingOptions::makeNaiveMappingOptions(),
inputs,
outputs);
}

TEST_F(ATenCompilationUnitTest, DISABLED_SoftmaxB) {
  at::Tensor a = at::CUDA(at::kFloat).rand({32, 16});
  std::vector<at::Tensor> inputs = {a};
  std::vector<at::Tensor> outputs;

  // Tensor dependencies must strictly form a DAG.
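  // Unlike SoftmaxA, the second reduction writes a fresh temporary (tmp1), so
  // each tensor is fully produced before it is consumed and the dependencies
  // do form a DAG. The test is nevertheless still DISABLED in this commit.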
  Check(
      R"(
def softmax(float(N, D) I) -> (O, tmp, tmp1) {
tmp(n) max=! I(n, d)
O(n, d) = exp(I(n, d) - tmp(n))
tmp1(n) +=! O(n, d)
O(n, d) = O(n, d) / tmp1(n)
}
)",
"softmax",
tc::MappingOptions::makeNaiveMappingOptions(),
inputs,
outputs);
}

TEST_F(ATenCompilationUnitTest, DISABLED_SoftmaxC) {
  at::Tensor a = at::CUDA(at::kFloat).rand({32, 16});
  std::vector<at::Tensor> inputs = {a};
  std::vector<at::Tensor> outputs;

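  // Fused variant: both reductions are explicitly initialized (max=!, +=!)
  // and the exponential is recomputed when writing O. Still DISABLED here.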
  Check(
      R"(
def softmax(float(N, D) I) -> (O, expsum, maxVal) {
maxVal(n) max=! I(n, d)
expsum(n) +=! exp(I(n, d) - maxVal(n))
O(n, d) = exp(I(n, d) - maxVal(n)) / expsum(n)
}
)",
"softmax",
tc::MappingOptions::makeNaiveMappingOptions(),
inputs,
outputs);
}

TEST_F(ATenCompilationUnitTest, SoftmaxD) {
  at::Tensor a = at::CUDA(at::kFloat).rand({32, 16});
  std::vector<at::Tensor> inputs = {a};
  std::vector<at::Tensor> outputs;

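  // Same as SoftmaxC except the max reduction is not explicitly initialized
  // (max= rather than max=!); this is the only variant enabled in this commit.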
  Check(
      R"(
def softmax(float(N, D) I) -> (O, expsum, maxVal) {
maxVal(n) max= I(n, d)
expsum(n) +=! exp(I(n, d) - maxVal(n))
O(n, d) = exp(I(n, d) - maxVal(n)) / expsum(n)
}
)",
"softmax",
tc::MappingOptions::makeNaiveMappingOptions(),
inputs,
outputs);
}
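
// A minimal host-side reference for the softmax above -- an illustrative
// sketch, assuming row-major contiguous data and that <algorithm> and <cmath>
// are available in this translation unit.
static void referenceSoftmax(const float* in, float* out, int N, int D) {
  for (int n = 0; n < N; ++n) {
    const float* row = in + n * D;
    // Row max for numerical stability (same role as maxVal above).
    float maxVal = *std::max_element(row, row + D);
    // Sum of shifted exponentials (same role as expsum above).
    float expsum = 0.0f;
    for (int d = 0; d < D; ++d) {
      expsum += std::exp(row[d] - maxVal);
    }
    for (int d = 0; d < D; ++d) {
      out[n * D + d] = std::exp(row[d] - maxVal) / expsum;
    }
  }
}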

TEST_F(ATenCompilationUnitTest, Concat) {
  at::Tensor a = at::CUDA(at::kFloat).rand({32, 16});
  at::Tensor b = at::CUDA(at::kFloat).rand({32, 16});