From 8129203138db5ad6c2abcbe8e342e9018d8914f5 Mon Sep 17 00:00:00 2001 From: Lianmin Zheng Date: Sun, 29 Nov 2020 09:07:45 -0800 Subject: [PATCH] Make auto-scheduler layout rewrite opt-in per conv2d strategy and drop the rewritten-layout attr from Conv2DWinogradAttrs --- include/tvm/relay/attrs/nn.h | 4 ---- python/tvm/relay/op/strategy/generic.py | 9 +++++++-- python/tvm/relay/op/strategy/x86.py | 2 +- src/relay/transforms/auto_scheduler_layout_rewrite.cc | 3 +-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/include/tvm/relay/attrs/nn.h b/include/tvm/relay/attrs/nn.h index 278e98097e841..67b719ef1dcdb 100644 --- a/include/tvm/relay/attrs/nn.h +++ b/include/tvm/relay/attrs/nn.h @@ -216,7 +216,6 @@ struct Conv2DWinogradAttrs : public tvm::AttrsNode { std::string data_layout; std::string kernel_layout; std::string out_layout; - std::string auto_scheduler_rewritten_layout; DataType out_dtype; TVM_DECLARE_ATTRS(Conv2DWinogradAttrs, "relay.attrs.Conv2DWinogradAttrs") { @@ -269,9 +268,6 @@ struct Conv2DWinogradAttrs : public tvm::AttrsNode { "Dimension ordering of output. Can be 'NCHW', 'NHWC', etc." "'N', 'C', 'H', 'W' stands for batch, channel, height, and width" "dimensions respectively. Default to be same as input layout."); - TVM_ATTR_FIELD(auto_scheduler_rewritten_layout) .set_default("") .describe("New kernel layout after auto-scheduler's layout rewrite."); // use 0 bits to indicate none. 
TVM_ATTR_FIELD(out_dtype) diff --git a/python/tvm/relay/op/strategy/generic.py b/python/tvm/relay/op/strategy/generic.py index e8c3f41c7155d..f746926880cfe 100644 --- a/python/tvm/relay/op/strategy/generic.py +++ b/python/tvm/relay/op/strategy/generic.py @@ -168,7 +168,11 @@ def schedule_bitpack(attrs, outs, target): # conv2d def wrap_compute_conv2d( - topi_compute, need_data_layout=False, need_out_layout=False, has_groups=False + topi_compute, + need_data_layout=False, + need_out_layout=False, + has_groups=False, + need_auto_scheduler_layout=False, ): """Wrap conv2d topi compute""" @@ -189,7 +193,8 @@ def _compute_conv2d(attrs, inputs, out_type): if need_out_layout: args.append(out_layout) args.append(out_dtype) - args.append(auto_scheduler_rewritten_layout) + if need_auto_scheduler_layout: + args.append(auto_scheduler_rewritten_layout) return [topi_compute(*args)] return _compute_conv2d diff --git a/python/tvm/relay/op/strategy/x86.py b/python/tvm/relay/op/strategy/x86.py index a705cc6963e81..f85ee70b485c7 100644 --- a/python/tvm/relay/op/strategy/x86.py +++ b/python/tvm/relay/op/strategy/x86.py @@ -118,7 +118,7 @@ def conv2d_strategy_cpu(attrs, inputs, out_type, target): elif layout == "NHWC": assert kernel_layout == "HWIO" strategy.add_implementation( - wrap_compute_conv2d(topi.nn.conv2d_nhwc), + wrap_compute_conv2d(topi.nn.conv2d_nhwc, need_auto_scheduler_layout=True), wrap_topi_schedule(topi.x86.schedule_conv2d_nhwc), name="conv2d_nhwc.x86", ) diff --git a/src/relay/transforms/auto_scheduler_layout_rewrite.cc b/src/relay/transforms/auto_scheduler_layout_rewrite.cc index 2d95f9ee4b2ca..1b79468aef59b 100644 --- a/src/relay/transforms/auto_scheduler_layout_rewrite.cc +++ b/src/relay/transforms/auto_scheduler_layout_rewrite.cc @@ -95,8 +95,7 @@ class FuncMutator : public ExprMutator { std::deque ori_layouts_queue_; std::deque new_layouts_queue_; - std::vector target_ops_{"nn.contrib_conv2d_winograd_without_weight_transform", - "nn.conv2d"}; + std::vector 
target_ops_{"nn.conv2d"}; }; Expr AutoSchedulerLayoutRewriter::VisitExpr_(const CallNode* n) {