[UnitTests][Contrib] Enable contrib tensorrt/coreml unit tests #8902

Merged 2 commits on Sep 2, 2021
1 change: 1 addition & 0 deletions python/tvm/testing/__init__.py
@@ -24,6 +24,7 @@
from .utils import known_failing_targets, requires_cuda, requires_cudagraph
from .utils import requires_gpu, requires_llvm, requires_rocm, requires_rpc
from .utils import requires_tensorcore, requires_metal, requires_micro, requires_opencl
from .utils import requires_package
from .utils import identity_after, terminate_self

from ._ffi_api import nop, echo, device_test, run_check_signal, object_use_count
44 changes: 44 additions & 0 deletions python/tvm/testing/utils.py
@@ -774,7 +774,51 @@ def requires_rpc(*args):
return _compose(args, _requires_rpc)


def requires_package(*packages):
"""Mark a test as requiring python packages to run.

If the packages listed are not available, tests marked with
`requires_package` will appear in the pytest results as being skipped.
This is equivalent to using ``foo = pytest.importorskip('foo')`` inside
the test body.

Parameters
----------
packages : List[str]

The Python packages that must be importable for the test to run.

Returns
-------
mark : pytest mark

The pytest mark to be applied to unit tests that require the
listed packages.
"""

def has_package(package):
try:
__import__(package)
return True
except ImportError:
return False

marks = [
pytest.mark.skipif(not has_package(package), reason=f"Cannot import '{package}'")
for package in packages
]

def wrapper(func):
for mark in marks:
func = mark(func)
return func

return wrapper


def parametrize_targets(*args):
"""Parametrize a test over a specific set of targets.

Use this decorator when you want your test to be run over a
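
Usage note: the new decorator can be applied as follows. This is a minimal sketch, where "somepackage" and the test name are hypothetical, used purely for illustration and not part of this PR:

import tvm.testing

# Build the mark once at module scope; requires_package returns a
# decorator that attaches one pytest.mark.skipif per listed package.
requires_somepackage = tvm.testing.requires_package("somepackage")

@requires_somepackage
def test_needs_somepackage():
    # The import is safe here: when it would fail, the skipif mark added
    # by requires_package keeps pytest from running this body at all,
    # reporting the test as skipped instead.
    import somepackage
    assert somepackage is not None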
28 changes: 27 additions & 1 deletion tests/python/contrib/test_coreml_codegen.py
@@ -19,11 +19,12 @@
from unittest import mock

import tvm
import tvm.testing
from tvm import relay
from tvm.relay import transform
from tvm.contrib.target import coreml as _coreml

pytest.importorskip("coremltools")
requires_coremltools = tvm.testing.requires_package("coremltools")


def _has_xcode():
@@ -88,6 +89,11 @@ def _create_graph_annotated():
return mod


@pytest.mark.xfail(
reason="Currently failing test. See tracking issue https://github.com/apache/tvm/issues/8901"
)
@tvm.testing.uses_gpu
@requires_coremltools
def test_annotate():
mod = _create_graph()
mod = transform.AnnotateTarget("coremlcompiler")(mod)
@@ -98,6 +104,8 @@ def test_annotate():


@pytest.mark.skipif(not _has_xcode(), reason="Xcode is not available")
@tvm.testing.uses_gpu
@requires_coremltools
def test_compile_and_run():
dev = tvm.cpu()
target = "llvm"
@@ -136,6 +144,8 @@ def _construct_model(func, m1, m2):
fcompile(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_add():
shape = (10, 10)
x = relay.var("x", shape=shape)
@@ -144,6 +154,8 @@ def test_add():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_multiply():
shape = (10, 10)
x = relay.var("x", shape=shape)
@@ -152,6 +164,8 @@ def test_multiply():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_clip():
shape = (10, 10)
x = relay.var("x", shape=shape)
@@ -160,6 +174,8 @@ def test_clip():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_batch_flatten():
shape = (10, 10, 10)
x = relay.var("x", shape=shape)
@@ -168,6 +184,8 @@ def test_batch_flatten():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_expand_dims():
shape = (10, 10)
x = relay.var("x", shape=shape)
@@ -180,6 +198,8 @@ def test_expand_dims():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_relu():
shape = (10, 10)
x = relay.var("x", shape=shape)
@@ -188,6 +208,8 @@ def test_relu():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_softmax():
shape = (10, 10)
x = relay.var("x", shape=shape)
@@ -196,6 +218,8 @@ def test_softmax():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_conv2d():
x = relay.var("x", shape=(1, 3, 224, 224))
w = relay.const(np.zeros((16, 3, 3, 3), dtype="float32"))
@@ -204,6 +228,8 @@ def test_conv2d():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_global_avg_pool2d():
shape = (10, 10, 10, 10)
x = relay.var("x", shape=shape)
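
For reference, the pattern repeated throughout this file is to build the mark once at module scope and stack it with the other TVM testing decorators on each test. A condensed sketch of one such test follows; the body is illustrative rather than the exact test code, which hands func to _construct_model to exercise the CoreML codegen path:

import tvm
import tvm.testing
from tvm import relay

# Module-level mark, created once and reused by every test below.
requires_coremltools = tvm.testing.requires_package("coremltools")

@tvm.testing.uses_gpu
@requires_coremltools
def test_add_sketch():
    # Build a simple Relay function; skipped entirely when coremltools
    # cannot be imported, instead of failing at import time.
    shape = (10, 10)
    x = relay.var("x", shape=shape)
    y = relay.var("y", shape=shape)
    func = relay.Function([x, y], relay.add(x, y))
    assert isinstance(func, relay.Function)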
523 changes: 285 additions & 238 deletions tests/python/contrib/test_tensorrt.py

Large diffs are not rendered by default.