Skip to content

Commit

Permalink
[UnitTests][Contrib] Enable contrib tensorrt/coreml unit tests (#8902)
Browse files Browse the repository at this point in the history
* [UnitTests][CoreML] Marked test_annotate as a known failure.

The unit tests in `test_coreml_codegen.py` haven't run in the CI
lately, so this test wasn't caught before.  (See tracking issue #8901.)

- Added `pytest.mark.xfail` mark to `test_annotate`.

- Added `tvm.testing.requires_package` decorator, which can mark tests
  as requiring a specific python package to be available.  Switched
  from `pytest.importorskip('coremltools')` to
  `requires_package('coremltools')` in `test_coreml_codegen.py` so
  that all tests would explicitly show up as skipped in the report.

- Added `uses_gpu` tag to all tests in `test_coreml_codegen.py`, since
  only ci_gpu has coremltools installed.  In the future, if the ci_cpu
  image has coremltools installed, this mark can be removed.

* [Pytest][TensorRT] Mark the TensorRT tests with tvm.testing.requires_cuda

Previously, the tests had an early bailout if tensorrt was disabled,
or if there was no cuda device present.  However, the tests were not
marked with `pytest.mark.gpu` and so they didn't run during
`task_python_integration_gpuonly.sh`.  This commit adds the
`requires_cuda` mark, and maintains the same behavior of testing the
tensorrt compilation steps if compilation is enabled, and running the
results if tensorrt is enabled.

In addition, some of the tests result in failures when run.  These
have been marked with `pytest.mark.xfail`, and are being tracked in
issue #8901.
  • Loading branch information
Lunderberg committed Sep 2, 2021
1 parent 7deebc6 commit aac0754
Show file tree
Hide file tree
Showing 4 changed files with 357 additions and 239 deletions.
1 change: 1 addition & 0 deletions python/tvm/testing/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from .utils import known_failing_targets, requires_cuda, requires_cudagraph
from .utils import requires_gpu, requires_llvm, requires_rocm, requires_rpc
from .utils import requires_tensorcore, requires_metal, requires_micro, requires_opencl
from .utils import requires_package
from .utils import identity_after, terminate_self

from ._ffi_api import nop, echo, device_test, run_check_signal, object_use_count
Expand Down
44 changes: 44 additions & 0 deletions python/tvm/testing/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -774,7 +774,51 @@ def requires_rpc(*args):
return _compose(args, _requires_rpc)


def requires_package(*packages):
    """Mark a test as requiring python packages to run.

    If the packages listed are not available, tests marked with
    `requires_package` will appear in the pytest results as being skipped.
    This is equivalent to using ``foo = pytest.importorskip('foo')`` inside
    the test body, but surfaces the skip in the collected test report
    rather than bailing out mid-test.

    Parameters
    ----------
    packages : List[str]
        The python packages that should be available for the test to
        run.

    Returns
    -------
    wrapper : Callable[[Callable], Callable]
        A decorator that applies one ``pytest.mark.skipif`` per listed
        package to a unit test function.
    """

    def has_package(package):
        # Availability is probed eagerly, at decoration time.  The
        # imported module reference is intentionally discarded; only
        # importability matters here.
        try:
            __import__(package)
            return True
        except ImportError:
            return False

    marks = [
        pytest.mark.skipif(not has_package(package), reason=f"Cannot import '{package}'")
        for package in packages
    ]

    def wrapper(func):
        for mark in marks:
            func = mark(func)
        return func

    return wrapper


def parametrize_targets(*args):

"""Parametrize a test over a specific set of targets.
Use this decorator when you want your test to be run over a
Expand Down
28 changes: 27 additions & 1 deletion tests/python/contrib/test_coreml_codegen.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,12 @@
from unittest import mock

import tvm
import tvm.testing
from tvm import relay
from tvm.relay import transform
from tvm.contrib.target import coreml as _coreml

pytest.importorskip("coremltools")
requires_coremltools = tvm.testing.requires_package("coremltools")


def _has_xcode():
Expand Down Expand Up @@ -88,6 +89,11 @@ def _create_graph_annotated():
return mod


@pytest.mark.xfail(
reason="Currently failing test. See tracking issue https://github.com/apache/tvm/issues/8901"
)
@tvm.testing.uses_gpu
@requires_coremltools
def test_annotate():
mod = _create_graph()
mod = transform.AnnotateTarget("coremlcompiler")(mod)
Expand All @@ -98,6 +104,8 @@ def test_annotate():


@pytest.mark.skipif(not _has_xcode(), reason="Xcode is not available")
@tvm.testing.uses_gpu
@requires_coremltools
def test_compile_and_run():
dev = tvm.cpu()
target = "llvm"
Expand Down Expand Up @@ -136,6 +144,8 @@ def _construct_model(func, m1, m2):
fcompile(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_add():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -144,6 +154,8 @@ def test_add():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_multiply():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -152,6 +164,8 @@ def test_multiply():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_clip():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -160,6 +174,8 @@ def test_clip():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_batch_flatten():
shape = (10, 10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -168,6 +184,8 @@ def test_batch_flatten():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_expand_dims():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -180,6 +198,8 @@ def test_expand_dims():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_relu():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -188,6 +208,8 @@ def test_relu():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_softmax():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -196,6 +218,8 @@ def test_softmax():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_conv2d():
x = relay.var("x", shape=(1, 3, 224, 224))
w = relay.const(np.zeros((16, 3, 3, 3), dtype="float32"))
Expand All @@ -204,6 +228,8 @@ def test_conv2d():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_global_avg_pool2d():
shape = (10, 10, 10, 10)
x = relay.var("x", shape=shape)
Expand Down
Loading

0 comments on commit aac0754

Please sign in to comment.