From 33bde90744ba34f8d5860bbe0453e0f9de21278e Mon Sep 17 00:00:00 2001
From: hanrui1sensetime <83800577+hanrui1sensetime@users.noreply.github.com>
Date: Tue, 21 Dec 2021 18:03:50 +0800
Subject: [PATCH] [Fix] Fix unittest of ncnn. (#309)

* fix test_pytorch_functions

* fix test_mmocr_models
---
 .../test_mmocr/test_mmocr_models.py           |  8 +++--
 tests/test_pytorch/test_pytorch_functions.py  | 29 +++++++++----------
 2 files changed, 19 insertions(+), 18 deletions(-)

diff --git a/tests/test_codebase/test_mmocr/test_mmocr_models.py b/tests/test_codebase/test_mmocr/test_mmocr_models.py
index ec02e41bd..f10b29268 100644
--- a/tests/test_codebase/test_mmocr/test_mmocr_models.py
+++ b/tests/test_codebase/test_mmocr/test_mmocr_models.py
@@ -165,7 +165,7 @@ def test_bidirectionallstm(backend: Backend):
         wrapped_model=wrapped_model,
         model_inputs=rewrite_inputs,
         deploy_cfg=deploy_cfg,
-        run_with_backend=False)
+        run_with_backend=True)
     if is_backend_output:
         model_output = model_outputs.cpu().numpy()
         rewrite_output = rewrite_outputs[0].cpu().numpy()
@@ -200,7 +200,8 @@ def test_simple_test_of_single_stage_text_detector(backend: Backend):
     rewrite_outputs, is_backend_output = get_rewrite_outputs(
         wrapped_model=wrapped_model,
         model_inputs=rewrite_inputs,
-        deploy_cfg=deploy_cfg)
+        deploy_cfg=deploy_cfg,
+        run_with_backend=True)
 
     if is_backend_output:
         rewrite_outputs = rewrite_outputs[0]
@@ -254,7 +255,8 @@ def test_crnndecoder(backend: Backend, rnn_flag: bool):
         wrapped_model=wrapped_model,
         model_inputs=rewrite_inputs,
         deploy_cfg=deploy_cfg,
-        run_with_backend=False)
+        run_with_backend=True)
+    rewrite_outputs = [rewrite_outputs[-1]]
     if is_backend_output:
         for model_output, rewrite_output in zip(model_outputs,
                                                 rewrite_outputs):
diff --git a/tests/test_pytorch/test_pytorch_functions.py b/tests/test_pytorch/test_pytorch_functions.py
index c1e37fd12..406b876b0 100644
--- a/tests/test_pytorch/test_pytorch_functions.py
+++ b/tests/test_pytorch/test_pytorch_functions.py
@@ -42,7 +42,7 @@ def test_get_attribute():
     def model_func(tensor):
         x = tensor.size()
         assert isinstance(x[0], int) and not isinstance(x[0], torch.Tensor)
-        return x[0] * tensor
+        return torch.tensor(x)
 
     input = torch.zeros([1, 2, 3, 4])
     wrapped_func = WrapFunction(model_func)
@@ -50,7 +50,7 @@ def model_func(tensor):
         wrapped_func,
         model_inputs={'tensor': input},
         deploy_cfg=deploy_cfg_ncnn,
-        run_with_backend=False)
+        run_with_backend=True)
     assert rewrite_outputs is not None, 'Got unexpected rewrite '
     'outputs: {}'.format(rewrite_outputs)
 
@@ -71,9 +71,9 @@ def group_norm_caller(input):
         wrapped_func,
         model_inputs={'input': input},
         deploy_cfg=deploy_cfg_ncnn,
-        run_with_backend=False)
+        run_with_backend=True)
 
-    assert np.allclose(model_output, rewrite_output, rtol=1e-03, atol=1e-05)
+    assert np.allclose(model_output, rewrite_output[0], rtol=1e-03, atol=1e-05)
 
 
 @backend_checker(Backend.NCNN)
@@ -89,9 +89,9 @@ def interpolate_caller(*arg, **kwargs):
         wrapped_func,
         model_inputs={'input': input},
         deploy_cfg=deploy_cfg_ncnn,
-        run_with_backend=False)
+        run_with_backend=True)
 
-    assert np.allclose(model_output, rewrite_output, rtol=1e-03, atol=1e-05)
+    assert np.allclose(model_output, rewrite_output[0], rtol=1e-03, atol=1e-05)
 
 
 @backend_checker(Backend.NCNN)
@@ -109,9 +109,9 @@ def linear_caller(*arg, **kwargs):
         wrapped_func,
         model_inputs={'input': input},
         deploy_cfg=deploy_cfg_ncnn,
-        run_with_backend=False)
+        run_with_backend=True)
 
-    assert np.allclose(model_output, rewrite_output, rtol=1e-03, atol=1e-05)
+    assert np.allclose(model_output, rewrite_output[0], rtol=1e-03, atol=1e-05)
 
 
 @backend_checker(Backend.TENSORRT)
@@ -127,10 +127,10 @@ def model_func(input):
 
     deploy_cfg = get_trt_config(['output'], [1])
 
-    rewrite_output, is_backend_ouptut = get_rewrite_outputs(
+    rewrite_output, is_backend_output = get_rewrite_outputs(
         wrapped_func, model_inputs={'input': input}, deploy_cfg=deploy_cfg)
 
-    if is_backend_ouptut:
+    if is_backend_output:
         rewrite_output = rewrite_output[0].detach().cpu()
 
     assert np.allclose(
@@ -145,7 +145,7 @@ def test_size_of_tensor_static():
     def model_func(input):
         x = torch.Tensor.size(input)
         assert isinstance(x[0], int) and not isinstance(x[0], torch.Tensor)
-        return x[0] * input
+        return torch.tensor(x)
 
     input = torch.zeros([1, 2, 3, 4])
     wrapped_func = WrapFunction(model_func)
@@ -153,7 +153,7 @@ def model_func(input):
         wrapped_func,
         model_inputs={'input': input},
         deploy_cfg=deploy_cfg_ncnn,
-        run_with_backend=False)
+        run_with_backend=True)
     assert rewrite_outputs is not None, 'Got unexpected rewrite '
     'outputs: {}'.format(rewrite_outputs)
 
@@ -181,9 +181,8 @@ def model_func(input):
             wrapped_func,
             model_inputs={'input': TestTopk.input},
            deploy_cfg=deploy_cfg_ncnn,
-            run_with_backend=False)
-
-        assert np.allclose(model_output, output[1], rtol=1e-03, atol=1e-05)
+            run_with_backend=True)
+        assert np.allclose(model_output, output[0], rtol=1e-03, atol=1e-05)
 
     @backend_checker(Backend.TENSORRT)
     @pytest.mark.parametrize('k', [1, 3, 4])