[FIX,MICROTVM] Skip microtvm tests if microtvm is not built (apache#6693)
tkonolige authored and trevor-m committed Dec 4, 2020
1 parent 3cd547a commit 0c74497
Showing 2 changed files with 30 additions and 1 deletion.
17 changes: 17 additions & 0 deletions python/tvm/testing.py
@@ -617,6 +617,23 @@ def requires_llvm(*args):
    return _compose(args, _requires_llvm)


def requires_micro(*args):
    """Mark a test as requiring microTVM to run.

    Parameters
    ----------
    f : function
        Function to mark
    """
    _requires_micro = [
        pytest.mark.skipif(
            tvm.support.libinfo().get("USE_MICRO", "OFF") != "ON",
            reason="MicroTVM support not enabled. Set USE_MICRO=ON in config.cmake to enable.",
        )
    ]
    return _compose(args, _requires_micro)


def _target_to_requirement(target):
    # mapping from target to decorator
    if target.startswith("cuda"):
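The new requires_micro decorator follows the same build-flag-gated skip pattern as requires_llvm above: it reads the USE_MICRO flag from tvm.support.libinfo() and attaches a pytest skipif mark when the flag is not "ON". A standalone sketch of the same pattern, independent of TVM's _compose helper (the requires_feature name and the hard-coded flag value below are illustrative, not part of this commit):

import pytest


def requires_feature(flag_value):
    """Return a pytest mark that skips a test unless the build flag is ON."""
    return pytest.mark.skipif(
        flag_value != "ON",
        reason="Feature not enabled at build time.",
    )


# Hypothetical usage: in practice flag_value would come from the library's build metadata.
@requires_feature("OFF")
def test_feature_specific_behavior():
    assert True  # skipped entirely when the flag is not "ON"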
14 changes: 13 additions & 1 deletion tests/python/unittest/test_crt.py
@@ -28,7 +28,6 @@

import tvm
import tvm.relay
import tvm.micro

from tvm.topi.util import get_const_tuple
from tvm.topi.testing import conv2d_nchw_python
@@ -84,8 +83,11 @@ def _make_ident_sess(workspace):
    return _make_sess_from_op(workspace, "ident", sched, [A, B])


@tvm.testing.requires_micro
def test_compile_runtime():
    """Test compiling the on-device runtime."""
    import tvm.micro

    workspace = tvm.micro.Workspace()

    with _make_add_sess(workspace) as sess:
Expand All @@ -101,8 +103,12 @@ def test_compile_runtime():
assert (C_data.asnumpy() == np.array([6, 7])).all()


@tvm.testing.requires_micro
def test_reset():
"""Test when the remote end resets during a session."""
import tvm.micro
from tvm.micro import transport

workspace = tvm.micro.Workspace()

with _make_add_sess(workspace) as sess:
Expand All @@ -113,8 +119,11 @@ def test_reset():
pass


@tvm.testing.requires_micro
def test_graph_runtime():
"""Test use of the graph runtime with microTVM."""
import tvm.micro

workspace = tvm.micro.Workspace()
relay_mod = tvm.parser.fromtext(
"""
@@ -143,8 +152,11 @@ def @main(%a : Tensor[(1, 2), uint8], %b : Tensor[(1, 2), uint8]) {
        assert (out.asnumpy() == np.array([6, 10])).all()


@tvm.testing.requires_micro
def test_std_math_functions():
    """Verify that standard math functions can be used."""
    import tvm.micro

    workspace = tvm.micro.Workspace()
    A = tvm.te.placeholder((2,), dtype="float32", name="A")
    B = tvm.te.compute(A.shape, lambda i: tvm.te.exp(A[i]), name="B")
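To check locally whether a given TVM build has microTVM enabled, and therefore whether these tests will run or be skipped, the same build metadata the decorator consults can be queried directly. A minimal sketch, assuming a TVM build recent enough to expose libinfo():

import tvm.support

# Prints "ON" when config.cmake was configured with USE_MICRO=ON, "OFF" otherwise.
print(tvm.support.libinfo().get("USE_MICRO", "OFF"))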
