[TVMC] Improve --desired-layouts functionality #14272

Merged · 7 commits · Mar 16, 2023
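Summary of the change: --desired-layout previously accepted a single value restricted to NCHW or NHWC. It now accepts one or more layout strings, and each entry may pair a data layout with a kernel layout as DATA:KERNEL, e.g. 'NHWC:HWIO'. A single layout is applied to every operator targeted by --desired-layout-ops; with several operators, either one layout in total or exactly one layout per operator must be given, and any other count raises TVMCException. For example, '--desired-layout-ops nn.conv2d qnn.conv2d --desired-layout NHWC:HWIO NCHW' requests NHWC with HWIO kernels for nn.conv2d and NCHW with the default kernel layout for qnn.conv2d.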
python/tvm/driver/tvmc/transform.py (33 additions, 9 deletions)

@@ -103,15 +103,17 @@ def convert_to_mixed_precision(mod, ops=None, calculation_type="float16", acc_ty
raise TVMCException("Error converting mixed precision : {0}".format(str(err)))


def convert_graph_layout(mod, desired_layout, ops=None):
def convert_graph_layout(mod, desired_layouts, ops=None):
"""Alter the layout of the input graph.

Parameters
----------
mod : tvm.IRModule
The relay module to convert.
desired_layout : str
The layout to convert to.
desired_layouts : list[str]
The layouts to convert to.
Expects either a single element or one str per operator.
Can be only data layouts or combination of both, e.g. NHWC:HWIO
ops : list
List of operators to be layout converted.

@@ -123,7 +125,27 @@ def convert_graph_layout(mod, desired_layout, ops=None):
     if ops is None:
         ops = ["nn.conv2d", "nn.conv2d_transpose", "qnn.conv2d"]

-    desired_layouts = {op: [desired_layout, "default"] for op in ops}
+    if not isinstance(desired_layouts, list):
+        # For backwards compatibility
+        assert isinstance(desired_layouts, str)
+        desired_layouts = [desired_layouts]
+
+    if len(desired_layouts) != len(ops):
+        if len(desired_layouts) != 1:
+            raise TVMCException(
+                "Expected 1 or {} layouts but got {}".format(len(ops), len(desired_layouts))
+            )
+        desired_layouts = desired_layouts * len(ops)
+
+    def layout_helper(layout):
+        if ":" in layout:
+            data_layout, kernel_layout = layout.split(":", 1)
+        else:
+            data_layout = layout
+            kernel_layout = "default"
+        return [data_layout, kernel_layout]
+
+    desired_layouts = {op: layout_helper(desired_layouts[i]) for i, op in enumerate(ops)}

     # Convert the layout of the graph where possible.
     seq = transform.Sequential(
@@ -137,7 +159,7 @@ def convert_graph_layout(mod, desired_layout, ops=None):
     try:
         return seq(mod)
     except Exception as err:
-        raise TVMCException("Error converting layout to {0}: {1}".format(desired_layout, str(err)))
+        raise TVMCException("Error converting layouts: {}".format(str(err)))


 def apply_graph_transforms(mod, args):
@@ -159,7 +181,7 @@ def apply_graph_transforms(mod, args):
         return mod

     # AlterLayout
-    if args.get("desired_layout", False):
+    if args.get("desired_layout", None):
         mod = convert_graph_layout(
             mod, args["desired_layout"], args.get("desired_layout_ops", None)
         )
@@ -210,9 +232,11 @@ def generate_transform_args(parser):
     # AlterLayout
     parser.add_argument(
         "--desired-layout",
-        choices=["NCHW", "NHWC"],
         default=None,
-        help="Change the data layout of the whole graph.",
+        nargs="+",
+        help="Change the data/kernel layout of the graph (e.g. NCHW or NHWC:HWIO). "
+        "Accepts either a single layout, applied to every operator, "
+        "e.g. '--desired-layout NHWC:HWIO', or one layout per operator selected "
+        "with --desired-layout-ops, e.g. '--desired-layout-ops nn.conv2d nn.avg_pool2d "
+        "--desired-layout NCHW NHWC'.",
     )
     parser.add_argument(
         "--desired-layout-ops",
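To make the parsing and broadcasting above concrete, here is a minimal standalone sketch of the mapping that convert_graph_layout builds before invoking the ConvertLayout pass (parse_layout is a hypothetical stand-in for the layout_helper closure above):

# Sketch: a single layout string is replicated over the default op list
# and split into [data_layout, kernel_layout] pairs (mirrors layout_helper).
def parse_layout(layout):
    if ":" in layout:
        data_layout, kernel_layout = layout.split(":", 1)
        return [data_layout, kernel_layout]
    return [layout, "default"]

ops = ["nn.conv2d", "nn.conv2d_transpose", "qnn.conv2d"]  # default op list
layouts = ["NHWC:HWIO"] * len(ops)  # one entry is broadcast to every op
desired = {op: parse_layout(layouts[i]) for i, op in enumerate(ops)}
# desired == {"nn.conv2d": ["NHWC", "HWIO"],
#             "nn.conv2d_transpose": ["NHWC", "HWIO"],
#             "qnn.conv2d": ["NHWC", "HWIO"]}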
tests/python/driver/tvmc/test_transform.py (84 additions, 2 deletions)

@@ -15,6 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.

+import pytest
 from unittest.mock import MagicMock

 import tvm
@@ -23,6 +24,7 @@
 from tvm.relay.expr_functor import ExprMutator
 from tvm.ir.instrument import pass_instrument
 from tvm.driver.tvmc.transform import apply_graph_transforms
+from tvm.driver.tvmc.model import TVMCException


 def test_layout_transform_fold_constant(relay_conv2d):
@@ -41,7 +43,7 @@ def run_after_pass(self, _, info):

     pass_names = CollectPassNames()
     with tvm.transform.PassContext(opt_level=3, instruments=[pass_names]):
-        apply_graph_transforms(relay_conv2d, {"desired_layout": desired_layout})
+        apply_graph_transforms(relay_conv2d, {"desired_layout": [desired_layout]})

     names = pass_names.names
     assert "ConvertLayout" in names
@@ -61,7 +63,7 @@ def test_layout_transform_convert_layout_pass_args(relay_conv2d, monkeypatch):
     monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)

     with tvm.transform.PassContext(opt_level=3):
-        apply_graph_transforms(relay_conv2d, {"desired_layout": desired_layout})
+        apply_graph_transforms(relay_conv2d, {"desired_layout": [desired_layout]})

     mock_convert_layout.assert_called_once_with(
         {
@@ -72,6 +74,86 @@ def test_layout_transform_convert_layout_pass_args(relay_conv2d, monkeypatch):
     )


+def test_layout_transform_convert_kernel_layout_pass_args(relay_conv2d, monkeypatch):
+    """
+    Check that the desired layouts argument passed to ConvertLayout is as
+    expected when a non-default kernel layout is provided.
+    """
+    desired_layout = "NHWC:HWIO"
+    desired_layout_ops = ["nn.conv2d"]
+
+    mock_convert_layout = MagicMock()
+    mock_convert_layout.return_value = relay.transform.ConvertLayout({})
+    monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
+
+    with tvm.transform.PassContext(opt_level=3):
+        apply_graph_transforms(
+            relay_conv2d,
+            {"desired_layout": [desired_layout], "desired_layout_ops": desired_layout_ops},
+        )
+
+    mock_convert_layout.assert_called_once_with(
+        {
+            "nn.conv2d": ["NHWC", "HWIO"],
+        }
+    )
+
+
+def test_layout_transform_convert_layout_pass_args_multiple(relay_conv2d, monkeypatch):
+    """
+    Check that the desired layouts argument passed to ConvertLayout is as
+    expected when multiple desired layouts are provided.
+    """
+    desired_layout = ["NHWC", "NCHW"]
+    desired_layout_ops = ["nn.max_pool2d", "qnn.conv2d"]
+
+    mock_convert_layout = MagicMock()
+    mock_convert_layout.return_value = relay.transform.ConvertLayout({})
+    monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
+
+    with tvm.transform.PassContext(opt_level=3):
+        apply_graph_transforms(
+            relay_conv2d,
+            {"desired_layout": desired_layout, "desired_layout_ops": desired_layout_ops},
+        )
+
+    mock_convert_layout.assert_called_once_with(
+        {
+            "nn.max_pool2d": ["NHWC", "default"],
+            "qnn.conv2d": ["NCHW", "default"],
+        }
+    )
+
+
+@pytest.mark.parametrize(
+    "desired",
+    [
+        (["NHWC", "NCHW"], ["nn.max_pool2d"]),
+        (["NHWC", "NCHW"], None),
+    ],
+)
+def test_layout_transform_convert_layout_pass_args_multiple_invalid(
+    relay_conv2d,
+    monkeypatch,
+    desired,
+):
+    """
+    Check invalid cases when passing multiple values to the desired layouts argument.
+    """
+    desired_layout, desired_layout_ops = desired
+
+    mock_convert_layout = MagicMock()
+    mock_convert_layout.return_value = relay.transform.ConvertLayout({})
+    monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
+
+    with pytest.raises(TVMCException):
+        with tvm.transform.PassContext(opt_level=3):
+            apply_graph_transforms(
+                relay_conv2d,
+                {"desired_layout": desired_layout, "desired_layout_ops": desired_layout_ops},
+            )
+
+
 def test_layout_transform_to_mixed_precision_pass_args_mock(relay_conv2d, monkeypatch):
     """
     Check the mixed precision arguments which are expected when
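For a quick illustration of the behavior these tests pin down, the sketch below calls convert_graph_layout directly; it assumes mod is an existing Relay IRModule, e.g. one built like the relay_conv2d fixture:

from tvm.driver.tvmc.transform import convert_graph_layout
from tvm.driver.tvmc.model import TVMCException

# One layout (with an explicit kernel layout) is broadcast across the
# default operator list and converted where possible.
mod = convert_graph_layout(mod, ["NHWC:HWIO"])

# A layout count that is neither 1 nor len(ops) is rejected up front.
try:
    convert_graph_layout(mod, ["NHWC", "NCHW"], ops=["nn.max_pool2d"])
except TVMCException as err:
    print(err)  # "Expected 1 or 1 layouts but got 2"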