Fix dilated convolution & refactor & add tests
Jorn Tuyls committed Mar 30, 2021
1 parent 2070cd8 commit 2bbffc7
Showing 17 changed files with 1,632 additions and 1,645 deletions.
638 changes: 356 additions & 282 deletions python/pyxir/graph/ops/l2_convolution.py

Large diffs are not rendered by default.
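The 638-line l2_convolution.py diff is collapsed above, but it carries the headline fix for dilated convolution. For orientation, this is the standard output-size rule such code must implement: a kernel of size k with dilation d spans (k - 1) * d + 1 input positions. A minimal sketch of that arithmetic (generic convolution math, not code from this commit; all names are illustrative):

    def conv2d_out_size(in_size, kernel, stride, pad_begin, pad_end, dilation):
        # A dilated kernel covers (kernel - 1) * dilation + 1 input positions.
        effective_kernel = (kernel - 1) * dilation + 1
        return (in_size + pad_begin + pad_end - effective_kernel) // stride + 1

    # A 3x3 kernel with dilation 2 acts like a 5x5 kernel, so "same" output
    # at stride 1 needs 2 pixels of padding on each side:
    assert conv2d_out_size(28, 3, 1, 2, 2, 2) == 28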

5 changes: 4 additions & 1 deletion python/pyxir/target_registry.py
@@ -60,7 +60,10 @@ def check_target(self, target: str):
         """ Check whether the target exists """
         if not self.is_target(target):
             # Try importing it on the fly
-            importlib.import_module("pyxir.contrib.target." + target.split("-")[0])
+            try:
+                importlib.import_module("pyxir.contrib.target." + target.split("-")[0])
+            except ModuleNotFoundError:
+                pass
         if not self.is_target(target):
             raise ValueError("Unknown target: {}, registered targets"
                              " are: {}"
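The guard changes the failure mode rather than the lookup: when the derived contrib module is absent, check_target now falls through to the second is_target check and raises the explicit "Unknown target" ValueError instead of an unexplained ModuleNotFoundError. A standalone sketch of the same pattern (the target name is illustrative):

    import importlib

    def load_target_module(target: str) -> None:
        # e.g. "DPUCZDX8G-zcu104" -> "pyxir.contrib.target.DPUCZDX8G"
        module = "pyxir.contrib.target." + target.split("-")[0]
        try:
            importlib.import_module(module)
        except ModuleNotFoundError:
            pass  # caller re-checks registration and raises a clearer error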
88 changes: 41 additions & 47 deletions tests/unit/frontend/tvm/relay_tools/test_relay_l0_other.py
@@ -34,16 +34,13 @@
 
 from pyxir.shapes import TupleShape, TensorShape
 
-class TestRelayL0Other(unittest.TestCase):
 
+class TestRelayL0Other(unittest.TestCase):
     @unittest.skipIf(skip, "Could not import TVM and/or TVM frontend")
     def test_var_constant(self):
-        var = relay.var(
-            "var",
-            relay.TensorType((-1, 4, 2, 2), "int64")
-        )
+        var = relay.var("var", relay.TensorType((-1, 4, 2, 2), "int64"))
 
-        const = relay.expr.const(np.array([1, -1], dtype=np.int64), 'int64')
+        const = relay.expr.const(np.array([1, -1], dtype=np.int64), "int64")
 
         net = relay.add(var, const)
 
@@ -56,14 +53,14 @@ def test_var_constant(self):
 
         layers = xgraph.get_layers()
 
-        assert layers[0].type[0] == 'Input'
-        assert isinstance(layers[0].attrs['dtype'], str)
-        assert layers[0].attrs['dtype'] == 'int64'
-        assert 'relay_id' in layers[0].attrs
+        assert layers[0].type[0] == "Input"
+        assert isinstance(layers[0].attrs["dtype"], str)
+        assert layers[0].attrs["dtype"] == "int64"
+        assert "relay_id" in layers[0].attrs
 
-        assert layers[1].type[0] == 'BiasAdd'
+        assert layers[1].type[0] == "BiasAdd"
         assert layers[1].shapes == [-1, 4, 2, 2]
-        assert 'relay_id' in layers[1].attrs
+        assert "relay_id" in layers[1].attrs
 
     @unittest.skipIf(skip, "Could not import TVM and/or TVM frontend")
     def test_tuple(self):
@@ -79,17 +76,17 @@
         xg = xf_relay.from_relay(mod, {})
         layers = xg.get_layers()
 
-        assert layers[0].type[0] == 'Input'
-        assert isinstance(layers[0].attrs['dtype'], str)
-        assert layers[0].attrs['dtype'] == 'int64'
-        assert 'relay_id' in layers[0].attrs
+        assert layers[0].type[0] == "Input"
+        assert isinstance(layers[0].attrs["dtype"], str)
+        assert layers[0].attrs["dtype"] == "int64"
+        assert "relay_id" in layers[0].attrs
 
-        assert layers[1].type[0] == 'Input'
-        assert isinstance(layers[0].attrs['dtype'], str)
-        assert layers[0].attrs['dtype'] == 'int64'
-        assert 'relay_id' in layers[0].attrs
+        assert layers[1].type[0] == "Input"
+        assert isinstance(layers[0].attrs["dtype"], str)
+        assert layers[0].attrs["dtype"] == "int64"
+        assert "relay_id" in layers[0].attrs
 
-        assert layers[2].type[0] == 'Tuple'
+        assert layers[2].type[0] == "Tuple"
         assert layers[2].shapes == TupleShape([[-1, 4, 2, 2], [-1, 3, 2, 2]])
 
     @unittest.skipIf(skip, "Could not import TVM and/or TVM frontend")
@@ -109,29 +106,26 @@ def test_tuple_get_item(self):
 
         assert len(layers) == 4
 
-        assert layers[0].type[0] == 'Input'
-        assert isinstance(layers[0].attrs['dtype'], str)
-        assert layers[0].attrs['dtype'] == 'int64'
-        assert 'relay_id' in layers[0].attrs
+        assert layers[0].type[0] == "Input"
+        assert isinstance(layers[0].attrs["dtype"], str)
+        assert layers[0].attrs["dtype"] == "int64"
+        assert "relay_id" in layers[0].attrs
 
-        assert layers[1].type[0] == 'Input'
-        assert isinstance(layers[0].attrs['dtype'], str)
-        assert layers[0].attrs['dtype'] == 'int64'
-        assert 'relay_id' in layers[0].attrs
+        assert layers[1].type[0] == "Input"
+        assert isinstance(layers[0].attrs["dtype"], str)
+        assert layers[0].attrs["dtype"] == "int64"
+        assert "relay_id" in layers[0].attrs
 
-        assert layers[2].type[0] == 'Tuple'
+        assert layers[2].type[0] == "Tuple"
         assert layers[2].shapes == TupleShape([[-1, 4, 2, 2], [-1, 3, 2, 2]])
 
-        assert layers[3].type[0] == 'TupleGetItem'
-        assert layers[3].attrs['index'] == 0
+        assert layers[3].type[0] == "TupleGetItem"
+        assert layers[3].attrs["index"] == 0
         assert layers[3].shapes == TensorShape([-1, 4, 2, 2])
 
     @unittest.skipIf(skip, "Could not import TVM and/or TVM frontend")
     def test_relay_op(self):
-        data = relay.var(
-            "data",
-            relay.TensorType((-1, 4, 2, 2), "float32")
-        )
+        data = relay.var("data", relay.TensorType((-1, 4, 2, 2), "float32"))
 
         net = relay.std(data, axis=1, keepdims=False, exclude=False)
 
@@ -144,23 +138,23 @@ def test_relay_op(self):
 
         layers = xgraph.get_layers()
 
-        assert layers[0].type[0] == 'Input'
+        assert layers[0].type[0] == "Input"
 
-        assert layers[1].type[0] == 'Mean'
+        assert layers[1].type[0] == "Mean"
         assert layers[1].shapes == [-1, 1, 2, 2]
         # assert isinstance(layers[1].attrs['relay_id'], list)
-        assert layers[1].attrs['axes'] == [1]
-        assert layers[1].attrs['keepdims'] is True
+        assert layers[1].attrs["axes"] == [1]
+        assert layers[1].attrs["keepdims"] is True
 
-        assert layers[2].type[0] == 'RelayOp'
+        assert layers[2].type[0] == "RelayOp"
         assert layers[2].shapes == [-1, 2, 2]
         # assert isinstance(layers[2].attrs['relay_id'], list)
-        assert layers[2].attrs['relay_shape'] == [-1, 2, 2]
-        assert layers[2].attrs['dtype'] == 'float32'
-        assert layers[2].attrs['axis'] == '[1]'
-        assert layers[2].attrs['keepdims'] == '0'
-        assert layers[2].attrs['exclude'] == '0'
+        assert layers[2].attrs["relay_shape"] == [-1, 2, 2]
+        assert layers[2].attrs["dtype"] == "float32"
+        assert layers[2].attrs["axis"] == "[1]"
+        assert layers[2].attrs["keepdims"] == "0"
+        assert layers[2].attrs["exclude"] == "0"
 
-        assert layers[3].type[0] == 'Sqrt'
+        assert layers[3].type[0] == "Sqrt"
         assert layers[3].shapes == [-1, 2, 2]
         # assert isinstance(layers[3].attrs['relay_id'], list)
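test_relay_op pins down the generic fallback path: relay.std has no native XGraph layer, so it arrives as a Mean layer, a variance-like op wrapped as a generic "RelayOp" with stringified Relay attributes, and a Sqrt layer. A numpy sketch of what the asserted chain computes (the lowering steps are inferred from the asserted layer types, not taken from this diff):

    import numpy as np

    x = np.random.rand(2, 4, 2, 2).astype(np.float32)
    mean = x.mean(axis=1, keepdims=True)  # "Mean" layer, keepdims -> (2, 1, 2, 2)
    var = ((x - mean) ** 2).mean(axis=1)  # generic "RelayOp" layer, (2, 2, 2)
    std = np.sqrt(var)                    # "Sqrt" layer
    assert np.allclose(std, x.std(axis=1), atol=1e-6)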
121 changes: 52 additions & 69 deletions tests/unit/frontend/tvm/relay_tools/test_relay_l10_temporary.py
@@ -35,17 +35,12 @@
 
 
 class TestRelayL10TemporaryOperationConversions(unittest.TestCase):
-
     @unittest.skipIf(skip, "Could not import TVM and/or TVM frontend")
     def test_nn_adaptive_avg_pool2d_1(self):
         warnings.filterwarnings("ignore")
-        data = relay.var(
-            "data",
-            relay.TensorType((-1, 4, 5, 5), "float32")
-        )
+        data = relay.var("data", relay.TensorType((-1, 4, 5, 5), "float32"))
 
-        net = relay.nn.adaptive_avg_pool2d(
-            data, output_size=(3, 3), layout='NCHW')
+        net = relay.nn.adaptive_avg_pool2d(data, output_size=(3, 3), layout="NCHW")
 
         net = relay.Function(relay.analysis.free_vars(net), net)
 
@@ -55,27 +50,23 @@ def test_nn_adaptive_avg_pool2d_1(self):
 
         layers = xgraph.get_layers()
 
-        assert layers[0].type[0] == 'Input'
-        assert layers[1].type[0] == 'Pooling'
+        assert layers[0].type[0] == "Input"
+        assert layers[1].type[0] == "Pooling"
         assert layers[1].shapes.tolist() == [-1, 4, 3, 3]
-        assert layers[1].attrs['padding'] == [[0, 0], [0, 0], [0, 0], [0, 0]]
-        assert layers[1].attrs['insize'] == [5, 5]
-        assert layers[1].attrs['outsize'] == [3, 3]
-        assert layers[1].attrs['data_layout'] == 'NCHW'
-        assert layers[1].attrs['strides'] == [1, 1]
-        assert layers[1].attrs['kernel_size'] == [3, 3]
-        assert layers[1].attrs['pool_type'] == 'Avg'
+        assert layers[1].attrs["padding"] == [[0, 0], [0, 0], [0, 0], [0, 0]]
+        assert layers[1].attrs["insize"] == [5, 5]
+        assert layers[1].attrs["outsize"] == [3, 3]
+        assert layers[1].attrs["data_layout"] == "NCHW"
+        assert layers[1].attrs["strides"] == [1, 1]
+        assert layers[1].attrs["kernel_size"] == [3, 3]
+        assert layers[1].attrs["pool_type"] == "Avg"
 
     @unittest.skipIf(skip, "Could not import TVM and/or TVM frontend")
     def test_nn_adaptive_avg_pool2d_2(self):
         warnings.filterwarnings("ignore")
-        data = relay.var(
-            "data",
-            relay.TensorType((-1, 4, 6, 6), "float32")
-        )
+        data = relay.var("data", relay.TensorType((-1, 4, 6, 6), "float32"))
 
-        net = relay.nn.adaptive_avg_pool2d(
-            data, output_size=(3, 3), layout='NCHW')
+        net = relay.nn.adaptive_avg_pool2d(data, output_size=(3, 3), layout="NCHW")
 
         net = relay.Function(relay.analysis.free_vars(net), net)
 
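Taken together, the four adaptive_avg_pool2d tests fix how adaptive average pooling is rewritten as ordinary average pooling. A hypothetical reconstruction of the parameter derivation, inferred purely from the asserted strides and kernel sizes (not copied from pyxir internals):

    def adaptive_avg_pool_params(in_size: int, out_size: int):
        # Stride/kernel choice consistent with all four tests in this file.
        stride = in_size // out_size
        kernel = in_size - (out_size - 1) * stride
        return stride, kernel

    assert adaptive_avg_pool_params(5, 3) == (1, 3)  # test 1
    assert adaptive_avg_pool_params(6, 3) == (2, 2)  # test 2
    assert adaptive_avg_pool_params(6, 6) == (1, 1)  # test 3
    assert adaptive_avg_pool_params(5, 1) == (5, 5)  # test 4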
@@ -85,27 +76,23 @@ def test_nn_adaptive_avg_pool2d_2(self):
 
         layers = xgraph.get_layers()
 
-        assert layers[0].type[0] == 'Input'
-        assert layers[1].type[0] == 'Pooling'
+        assert layers[0].type[0] == "Input"
+        assert layers[1].type[0] == "Pooling"
         assert layers[1].shapes.tolist() == [-1, 4, 3, 3]
-        assert layers[1].attrs['padding'] == [[0, 0], [0, 0], [0, 0], [0, 0]]
-        assert layers[1].attrs['insize'] == [6, 6]
-        assert layers[1].attrs['outsize'] == [3, 3]
-        assert layers[1].attrs['data_layout'] == 'NCHW'
-        assert layers[1].attrs['strides'] == [2, 2]
-        assert layers[1].attrs['kernel_size'] == [2, 2]
-        assert layers[1].attrs['pool_type'] == 'Avg'
+        assert layers[1].attrs["padding"] == [[0, 0], [0, 0], [0, 0], [0, 0]]
+        assert layers[1].attrs["insize"] == [6, 6]
+        assert layers[1].attrs["outsize"] == [3, 3]
+        assert layers[1].attrs["data_layout"] == "NCHW"
+        assert layers[1].attrs["strides"] == [2, 2]
+        assert layers[1].attrs["kernel_size"] == [2, 2]
+        assert layers[1].attrs["pool_type"] == "Avg"
 
     @unittest.skipIf(skip, "Could not import TVM and/or TVM frontend")
     def test_nn_adaptive_avg_pool2d_3(self):
         warnings.filterwarnings("ignore")
-        data = relay.var(
-            "data",
-            relay.TensorType((-1, 6, 6, 4), "float32")
-        )
+        data = relay.var("data", relay.TensorType((-1, 6, 6, 4), "float32"))
 
-        net = relay.nn.adaptive_avg_pool2d(
-            data, output_size=(6, 6), layout='NHWC')
+        net = relay.nn.adaptive_avg_pool2d(data, output_size=(6, 6), layout="NHWC")
 
         net = relay.Function(relay.analysis.free_vars(net), net)
 
@@ -115,30 +102,26 @@ def test_nn_adaptive_avg_pool2d_3(self):
 
         layers = xgraph.get_layers()
 
-        assert layers[0].type[0] == 'Input'
+        assert layers[0].type[0] == "Input"
         assert layers[0].shapes.tolist() == [-1, 6, 6, 4]
-        assert layers[1].type[0] == 'Transpose'
+        assert layers[1].type[0] == "Transpose"
         assert layers[1].shapes.tolist() == [-1, 4, 6, 6]
-        assert layers[2].type[0] == 'Pooling'
+        assert layers[2].type[0] == "Pooling"
         assert layers[2].shapes.tolist() == [-1, 4, 6, 6]
-        assert layers[2].attrs['padding'] == [[0, 0], [0, 0], [0, 0], [0, 0]]
-        assert layers[2].attrs['insize'] == [6, 6]
-        assert layers[2].attrs['outsize'] == [6, 6]
-        assert layers[2].attrs['data_layout'] == 'NCHW'
-        assert layers[2].attrs['strides'] == [1, 1]
-        assert layers[2].attrs['kernel_size'] == [1, 1]
-        assert layers[2].attrs['pool_type'] == 'Avg'
+        assert layers[2].attrs["padding"] == [[0, 0], [0, 0], [0, 0], [0, 0]]
+        assert layers[2].attrs["insize"] == [6, 6]
+        assert layers[2].attrs["outsize"] == [6, 6]
+        assert layers[2].attrs["data_layout"] == "NCHW"
+        assert layers[2].attrs["strides"] == [1, 1]
+        assert layers[2].attrs["kernel_size"] == [1, 1]
+        assert layers[2].attrs["pool_type"] == "Avg"
 
     @unittest.skipIf(skip, "Could not import TVM and/or TVM frontend")
     def test_nn_adaptive_avg_pool2d_4(self):
         warnings.filterwarnings("ignore")
-        data = relay.var(
-            "data",
-            relay.TensorType((-1, 5, 5, 4), "float32")
-        )
+        data = relay.var("data", relay.TensorType((-1, 5, 5, 4), "float32"))
 
-        net = relay.nn.adaptive_avg_pool2d(
-            data, output_size=(1, 1), layout='NHWC')
+        net = relay.nn.adaptive_avg_pool2d(data, output_size=(1, 1), layout="NHWC")
 
         net = relay.Function(relay.analysis.free_vars(net), net)
 
@@ -148,17 +131,17 @@ def test_nn_adaptive_avg_pool2d_4(self):
 
         layers = xgraph.get_layers()
 
-        assert layers[0].type[0] == 'Input'
-        assert layers[1].type[0] == 'Transpose'
-        assert layers[2].type[0] == 'Pooling'
+        assert layers[0].type[0] == "Input"
+        assert layers[1].type[0] == "Transpose"
+        assert layers[2].type[0] == "Pooling"
         assert layers[2].shapes.tolist() == [-1, 4, 1, 1]
-        assert layers[2].attrs['padding'] == [[0, 0], [0, 0], [0, 0], [0, 0]]
-        assert layers[2].attrs['insize'] == [5, 5]
-        assert layers[2].attrs['outsize'] == [1, 1]
-        assert layers[2].attrs['data_layout'] == 'NCHW'
-        assert layers[2].attrs['strides'] == [5, 5]
-        assert layers[2].attrs['kernel_size'] == [5, 5]
-        assert layers[2].attrs['pool_type'] == 'Avg'
+        assert layers[2].attrs["padding"] == [[0, 0], [0, 0], [0, 0], [0, 0]]
+        assert layers[2].attrs["insize"] == [5, 5]
+        assert layers[2].attrs["outsize"] == [1, 1]
+        assert layers[2].attrs["data_layout"] == "NCHW"
+        assert layers[2].attrs["strides"] == [5, 5]
+        assert layers[2].attrs["kernel_size"] == [5, 5]
+        assert layers[2].attrs["pool_type"] == "Avg"
 
     @unittest.skipIf(skip, "Could not import TVM and/or TVM frontend")
     def test_slice_like(self):
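Tests 3 and 4 above also assert a Transpose layer in front of the pooling: NHWC inputs are canonicalized to NCHW, and the Pooling layer's data_layout is always "NCHW". A small numpy illustration of that axis permutation:

    import numpy as np

    x_nhwc = np.zeros((1, 6, 6, 4), np.float32)
    x_nchw = x_nhwc.transpose(0, 3, 1, 2)  # N,H,W,C -> N,C,H,W
    assert x_nchw.shape == (1, 4, 6, 6)    # the asserted Transpose output shape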
@@ -172,9 +155,9 @@ def test_slice_like(self):
         xgraph = xf_relay.from_relay(mod, {})
         layers = xgraph.get_layers()
 
-        assert layers[0].type[0] == 'Constant'
-        assert layers[1].type[0] == 'Constant'
-        assert layers[2].type[0] == 'AnyOp'
+        assert layers[0].type[0] == "Constant"
+        assert layers[1].type[0] == "Constant"
+        assert layers[2].type[0] == "AnyOp"
         assert layers[2].shapes == [1, 4, 3, 3]
 
         data = relay.expr.const(np.ones((1, 6, 4, 4), np.float32))
@@ -187,7 +170,7 @@ def test_slice_like(self):
         xgraph = xf_relay.from_relay(mod, {})
         layers = xgraph.get_layers()
 
-        assert layers[0].type[0] == 'Constant'
-        assert layers[1].type[0] == 'Constant'
-        assert layers[2].type[0] == 'AnyOp'
+        assert layers[0].type[0] == "Constant"
+        assert layers[1].type[0] == "Constant"
+        assert layers[2].type[0] == "AnyOp"
         assert layers[2].shapes == [1, 6, 3, 3]
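Both slice_like cases land as a generic "AnyOp" whose output keeps the data tensor's shape except on the sliced axes, which are cropped to the reference tensor's extents. A numpy sketch of the second case, assuming (the setup is collapsed above) the op slices on axes (2, 3):

    import numpy as np

    data = np.ones((1, 6, 4, 4), np.float32)
    like_hw = (3, 3)                              # H, W of the reference tensor
    out = data[:, :, : like_hw[0], : like_hw[1]]  # crop axes 2 and 3 only
    assert out.shape == (1, 6, 3, 3)              # the asserted AnyOp shape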