Skip to content

Commit

Permalink
Add more eager global tests (#7818)
Browse files Browse the repository at this point in the history
* test(SignOp): add eager global test

* test(SoftPlusOp): add eager global test

* test(SliceOp): add eager global test and fix 0-size backward bug

* remove softplus test because it duplicates an existing one

* refine test

Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
  • Loading branch information
wyg1997 and mergify[bot] committed May 10, 2022
1 parent 02498a3 commit 050c6e2
Show file tree
Hide file tree
Showing 3 changed files with 122 additions and 2 deletions.
1 change: 1 addition & 0 deletions oneflow/user/kernels/slice_kernel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -205,6 +205,7 @@ class SliceGradKernel final : public user_op::OpKernel, public user_op::CudaGrap
user_op::Tensor* dx_tensor = ctx->Tensor4ArgNameAndIndex("dx", 0);
size_t dx_byte_size = dx_tensor->shape().elem_cnt() * sizeof(T);
Memset<device_type>(ctx->stream(), dx_tensor->mut_dptr<T>(), 0, dx_byte_size);
if (dy_tensor->shape().elem_cnt() == 0) { return; }
SliceParams params = ConstructSliceParams(ctx, dx_tensor, dy_tensor);
SliceKernelUtil<device_type, T>::Backward(ctx->stream(), params, dy_tensor->dptr<T>(),
dx_tensor->mut_dptr<T>());
Expand Down
42 changes: 42 additions & 0 deletions python/oneflow/test/modules/test_consistent_sign.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import unittest

import oneflow as flow
from oneflow.test_utils.automated_test_util import *
import oneflow.unittest


@autotest(n=1, check_graph=False)
def _test_sign_impl(test_case, ndim, placement, sbp):
    """Apply torch.sign to a random global tensor and return the result so the
    autotest harness can compare oneflow against pytorch."""
    # Dimensions are multiples of 8 — presumably so every sbp can split the
    # tensor evenly across ranks (TODO confirm against all_sbp semantics).
    shape = [random(1, 3) * 8 for _ in range(ndim)]
    tensor = random_tensor(ndim, *shape).to_global(placement=placement, sbp=sbp)
    return torch.sign(tensor)


class TestSign(flow.unittest.TestCase):
    """Eager global test for the sign op."""

    @globaltest
    def test_sign(test_case):
        # One random ndim is drawn per placement; every valid sbp signature
        # for that ndim is then exercised.
        for pl in all_placement():
            num_dims = random(1, 4).to(int).value()
            for sbp_sig in all_sbp(pl, max_dim=num_dims):
                _test_sign_impl(test_case, num_dims, pl, sbp_sig)


# Allow running this test file directly as a script.
if __name__ == "__main__":
    unittest.main()
81 changes: 79 additions & 2 deletions python/oneflow/test/modules/test_consistent_slice.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,80 @@
"""

import unittest
from collections import OrderedDict

import numpy as np
import oneflow as flow
import oneflow.unittest

from oneflow.test_utils.automated_test_util import *


def _check_forward_and_backward(test_case, input, of_out, torch_out):
    """Assert that oneflow and pytorch agree on both the output and the
    input gradient.

    ``input`` is the dual wrapper holding the oneflow and pytorch input
    tensors; ``of_out``/``torch_out`` are the corresponding slice outputs.
    """
    # Forward: the outputs must match elementwise.
    torch_out_np = torch_out.cpu().detach().numpy()
    test_case.assertTrue(np.array_equal(of_out.numpy(), torch_out_np))

    # Backward: push a sum-reduction gradient through both frameworks and
    # compare the gradients accumulated on the inputs.
    of_out.sum().backward()
    torch_out.sum().backward()
    of_grad_np = input.oneflow.grad.numpy()
    torch_grad_np = input.pytorch.grad.cpu().detach().numpy()
    test_case.assertTrue(np.array_equal(of_grad_np, torch_grad_np))


def _test_slice_random_data(test_case, placement, sbp):
    """Slice a random 2-d global tensor via flow.slice and compare against
    the equivalent pytorch basic-indexing expression ``[:, 0:5:2]``."""
    shape = [random(1, 2) * 8 for _ in range(2)]
    dual_input = random_tensor(2, *shape)
    global_x = dual_input.to_global(placement=placement, sbp=sbp)
    # [None, None, None] keeps dim 0 whole; [0, 5, 2] is start:stop:step on dim 1.
    of_out = flow.slice(
        global_x.oneflow, slice_tup_list=[[None, None, None], [0, 5, 2]]
    )
    torch_out = global_x.pytorch[:, 0:5:2]

    _check_forward_and_backward(test_case, dual_input, of_out, torch_out)


def _test_slice_empty(test_case, placement, sbp):
    """Slice out an empty range (``3:3``) on dim 0 — presumably exercising
    the 0-size backward path (TODO confirm vs. slice_kernel fix)."""
    shape = [random(1, 2) * 8 for _ in range(2)]
    dual_input = random_tensor(2, *shape)
    global_x = dual_input.to_global(placement=placement, sbp=sbp)
    # start == stop produces a tensor with 0 elements along dim 0.
    of_out = flow.slice(
        global_x.oneflow, slice_tup_list=[[3, 3, 1], [None, None, None]]
    )
    torch_out = global_x.pytorch[3:3:1, :]

    _check_forward_and_backward(test_case, dual_input, of_out, torch_out)


def _test_slice_1dim(test_case, placement, sbp):
    """Index a single row (``x[2]``), dropping one dimension, and compare
    oneflow against pytorch."""
    shape = [random(1, 2) * 8 for _ in range(2)]
    dual_input = random_tensor(2, *shape)
    global_x = dual_input.to_global(placement=placement, sbp=sbp)
    of_out = global_x.oneflow[2]
    torch_out = global_x.pytorch[2]

    _check_forward_and_backward(test_case, dual_input, of_out, torch_out)


def _test_negative_index(test_case, placement, sbp):
    """Slice with negative start/stop indices (``[-1:-6:1, :]``) and compare
    oneflow against pytorch."""
    shape = [random(1, 2) * 8 for _ in range(2)]
    dual_input = random_tensor(2, *shape)
    global_x = dual_input.to_global(placement=placement, sbp=sbp)
    of_out = global_x.oneflow[-1:-6:1, :]
    torch_out = global_x.pytorch[-1:-6:1, :]

    _check_forward_and_backward(test_case, dual_input, of_out, torch_out)


def _test_slice_ellipsis_type(test_case, placement, sbp):
    """Slice with an Ellipsis (``[..., :]``) and compare oneflow against
    pytorch."""
    shape = [random(1, 2) * 8 for _ in range(2)]
    dual_input = random_tensor(2, *shape)
    global_x = dual_input.to_global(placement=placement, sbp=sbp)
    of_out = global_x.oneflow[..., :]
    torch_out = global_x.pytorch[..., :]

    _check_forward_and_backward(test_case, dual_input, of_out, torch_out)


def _test_logical_slice(test_case, placement, sbp):
x = random_tensor(2, 8, 8, requires_grad=False).oneflow
x_numpy = x.detach().cpu().numpy()
Expand All @@ -46,6 +111,18 @@ def _test_logical_slice_with_bool(test_case, placement, sbp):
test_case.assertTrue(np.array_equal(y.numpy(), x_numpy[0:1:1]))


class TestSlice(flow.unittest.TestCase):
    """Eager global tests for tensor slicing."""

    @globaltest
    def test_slice(test_case):
        # Run every slice scenario for each (placement, sbp) combination,
        # in the same order as the individual helpers are defined.
        scenarios = (
            _test_slice_random_data,
            _test_slice_empty,
            _test_slice_1dim,
            _test_negative_index,
            _test_slice_ellipsis_type,
        )
        for pl in all_placement():
            for sbp_sig in all_sbp(pl, max_dim=2):
                for run_scenario in scenarios:
                    run_scenario(test_case, pl, sbp_sig)


class TestLogicalSlice(flow.unittest.TestCase):
@globaltest
def test_logical_slice(test_case):
Expand Down

0 comments on commit 050c6e2

Please sign in to comment.