From 41f212ca655b0742b9d9ae459bf25729b91f6998 Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Wed, 30 Dec 2020 21:11:38 +0900
Subject: [PATCH 01/13] made TShapeDataDependant array

---
 include/tvm/relay/op_attr_types.h    | 2 +-
 python/tvm/relay/op/op.py            | 2 +-
 src/relay/analysis/util.cc           | 3 ++-
 tests/cpp/relay_build_module_test.cc | 2 +-
 4 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/include/tvm/relay/op_attr_types.h b/include/tvm/relay/op_attr_types.h
index 1e9b86d9e0bc..88e37fa0057c 100644
--- a/include/tvm/relay/op_attr_types.h
+++ b/include/tvm/relay/op_attr_types.h
@@ -85,7 +85,7 @@ using TNonComputational = bool;
 /*!
  * \brief Mark the operator whether output shape is data dependant.
  */
-using TShapeDataDependant = bool;
+using TShapeDataDependant = Array<Integer>;
 
 /*!
  * \brief Computation description interface.
diff --git a/python/tvm/relay/op/op.py b/python/tvm/relay/op/op.py
index d4d20b3ebc4a..131dfcc5b3b9 100644
--- a/python/tvm/relay/op/op.py
+++ b/python/tvm/relay/op/op.py
@@ -374,7 +374,7 @@ def register_shape_func(op_name, data_dependant, shape_func=None, level=10):
     level : int
         The priority level
     """
-    get(op_name).set_attr("TShapeDataDependant", data_dependant, level)
+    get(op_name).set_attr("TShapeDataDependant", [int(data_dependant)], level)
     return tvm.ir.register_op_attr(op_name, "FShapeFunc", shape_func, level)
diff --git a/src/relay/analysis/util.cc b/src/relay/analysis/util.cc
index bcfbc83da514..afb4d108437e 100644
--- a/src/relay/analysis/util.cc
+++ b/src/relay/analysis/util.cc
@@ -490,7 +490,8 @@ bool IsDataDependant(const CallNode* call) {
     }
   }
 
-  return tshape_data_dependant[op];
+  Array<Integer> reqs = tshape_data_dependant[op];
+  return reqs[0]->value != 0;
 }
 }  // namespace relay
 }  // namespace tvm
diff --git a/tests/cpp/relay_build_module_test.cc b/tests/cpp/relay_build_module_test.cc
index 3212f9079619..17b3d3f22737 100644
--- a/tests/cpp/relay_build_module_test.cc
+++ b/tests/cpp/relay_build_module_test.cc
@@ -105,7 +105,7 @@ TEST(Relay, BuildModule) {
   }
   auto fgeneric = GenericFunc::Get("test.strategy_generic").set_default(*fs);
   (*reg)("add", "FTVMStrategy", fgeneric, 10);
-  (*reg)("add", "TShapeDataDependant", false, 10);
+  (*reg)("add", "TShapeDataDependant", {0}, 10);
   // build
   auto pfb = tvm::runtime::Registry::Get("relay.build_module._BuildModule");
   tvm::runtime::Module build_mod = (*pfb)();
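Patch 01 only changes the storage format of the attribute: TShapeDataDependant
becomes a one-element Array<Integer>, each registration wraps its bool as
[int(flag)], and IsDataDependant reads element 0 back out. A plain-Python
sketch of the equivalence (illustration only, not part of the patch):

    def is_data_dependant_old(attr_value):
        # before this patch: the attribute is a plain bool
        return attr_value

    def is_data_dependant_new(attr_value):
        # after this patch: a one-element 0/1 array, mirroring
        # `reqs[0]->value != 0` in src/relay/analysis/util.cc
        return attr_value[0] != 0

    assert is_data_dependant_old(True) == is_data_dependant_new([1])
    assert is_data_dependant_old(False) == is_data_dependant_new([0])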
From b720908a05aa1a327a094b94a372dc5fe2d7f3a6 Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Wed, 30 Dec 2020 21:51:19 +0900
Subject: [PATCH 02/13] add stub

---
 python/tvm/relay/op/op.py                  |  4 +++-
 python/tvm/relay/transform/memory_alloc.py |  4 ++++
 src/relay/backend/compile_engine.cc        | 24 +++++++++++++++++++++-
 3 files changed, 30 insertions(+), 2 deletions(-)

diff --git a/python/tvm/relay/op/op.py b/python/tvm/relay/op/op.py
index 131dfcc5b3b9..36409d375d74 100644
--- a/python/tvm/relay/op/op.py
+++ b/python/tvm/relay/op/op.py
@@ -374,7 +374,9 @@ def register_shape_func(op_name, data_dependant, shape_func=None, level=10):
     level : int
         The priority level
     """
-    get(op_name).set_attr("TShapeDataDependant", [int(data_dependant)], level)
+    if not isinstance(data_dependant, list):
+        data_dependant = [data_dependant]
+    get(op_name).set_attr("TShapeDataDependant", data_dependant, level)
     return tvm.ir.register_op_attr(op_name, "FShapeFunc", shape_func, level)
diff --git a/python/tvm/relay/transform/memory_alloc.py b/python/tvm/relay/transform/memory_alloc.py
index 66528c861788..1419fa369772 100644
--- a/python/tvm/relay/transform/memory_alloc.py
+++ b/python/tvm/relay/transform/memory_alloc.py
@@ -204,7 +204,11 @@ def emit_shape_func(self, scope, func, new_args):
         is_inputs = []
         input_pos = 0
         cpu_ctx = nd.cpu(0)
+
+        print(func)
+
         for i, (arg, state) in enumerate(zip(new_args, input_states)):
+            print(i, state)
             state = int(state)
             # Pass Shapes
             if state == 2:
diff --git a/src/relay/backend/compile_engine.cc b/src/relay/backend/compile_engine.cc
index c969c3ba7f06..dde81f25d10c 100644
--- a/src/relay/backend/compile_engine.cc
+++ b/src/relay/backend/compile_engine.cc
@@ -336,8 +336,10 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   MakeShapeFunc() {}
 
   std::pair<te::Schedule, CachedFunc> Create(const Function& prim_func) {
-    for (auto param : prim_func->params) {
+    for (size_t i = 0; i < prim_func->params.size(); ++i) {
+      auto param = prim_func->params[i];
       param_states_[param] = kNoNeed;
+      param_to_index_[param] = i;
       Array<te::Tensor> data_inputs;
       Array<te::Tensor> shape_inputs;
@@ -436,6 +438,11 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
       return {};
     } else {
       ICHECK(data_dependants_.size());
+      auto dep_spec = data_dependants_per_input_.back();
+      auto index = param_to_index_[var];
+      LOG(INFO) << dep_spec.size() << ", " << index;
+      ICHECK(dep_spec.size() > index);
+      // bool data_dependant = dep_spec[index];
       bool data_dependant = data_dependants_.back();
       if (data_dependant) {
         param_states_[var] |= kNeedInputData;
         return param_data_[var];
@@ -518,6 +525,18 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
         << "Internal error, cannot find TShapeDataDependant for " << op->name;
 
     data_dependants_.push_back(IsDataDependant(call_node));
+
+    Array<Integer> dep_spec = tshape_data_dependant[op];
+    if (dep_spec.size() == 1 && call_node->args.size() > 1) {
+      for (size_t i = 1; i < call_node->args.size(); ++i) {
+        dep_spec.push_back(dep_spec[0]);
+      }
+    } else {
+      ICHECK_EQ(dep_spec.size(), call_node->args.size());
+    }
+    data_dependants_per_input_.push_back(dep_spec);
+    LOG(INFO) << op->name << ", " << dep_spec;
+
     // Visit all inputs
     Array<te::Tensor> inputs;
     int count_tuple = 0;
@@ -550,6 +569,7 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
     // Call shape function
     auto outputs = fshape_func[op](call_node->attrs, inputs, out_ndims);
     data_dependants_.pop_back();
+    data_dependants_per_input_.pop_back();
     readable_name_stream_ << "_" << op->name;
     return outputs;
   }
@@ -593,8 +613,10 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_data_;
   /*! \brief Map from parameter to list of shape placeholder */
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_shapes_;
+  std::unordered_map<Expr, size_t, ObjectPtrHash, ObjectPtrEqual> param_to_index_;
   /*! \brief Stack of data dependencies for shape function */
   std::vector<bool> data_dependants_;
+  std::vector<Array<Integer>> data_dependants_per_input_;
   /*! \brief Scalars used in the shape function */
   Array<te::Tensor> scalars_;
 };
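Patch 02 lets register_shape_func accept either a bool or a list of bools and
adds the per-input bookkeeping (param_to_index_, data_dependants_per_input_)
that the later patches build on. The normalization it introduces, restated as
standalone Python (sketch only):

    def normalize_data_dependant(data_dependant):
        # a plain bool keeps working: it is wrapped into a one-element
        # list, which MakeShapeFunc broadcasts to every argument
        if not isinstance(data_dependant, list):
            data_dependant = [data_dependant]
        return data_dependant

    assert normalize_data_dependant(True) == [True]
    assert normalize_data_dependant([False, True]) == [False, True]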
From 8364e6f105bc8c7c760f9e82b4166bf05174caac Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Wed, 30 Dec 2020 21:55:10 +0900
Subject: [PATCH 03/13] dyn strided slice working

---
 python/tvm/relay/op/dyn/_transform.py | 22 +++++++++++-----------
 src/relay/analysis/util.cc            |  6 ++++--
 src/relay/backend/compile_engine.cc   |  4 +---
 3 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/python/tvm/relay/op/dyn/_transform.py b/python/tvm/relay/op/dyn/_transform.py
index b61d4f9655f6..c7d499573430 100644
--- a/python/tvm/relay/op/dyn/_transform.py
+++ b/python/tvm/relay/op/dyn/_transform.py
@@ -150,36 +150,36 @@ def one_hot_shape_func(attrs, inputs, _):
 
 
 @script
-def _strided_slice_shape_func_input_data(data, begin, end, strides, slice_mode):
-    ndim = len(data.shape)
+def _strided_slice_shape_func_input_data(data_shape, begin, end, strides, slice_mode):
+    ndim = len(data_shape)
     out = output_tensor((ndim,), "int64")
     for i in const_range(ndim):
         cbegin = int64(0)
-        cend = int64(data.shape[i])
+        cend = int64(data_shape[i])
         cstride = int64(1)
         if strides.shape[0] > i:
             cstride = int64(strides[i])
         if begin.shape[0] > i:
             cbegin = int64(begin[i])
             if cbegin < 0:
-                cbegin += int64(data.shape[i])
+                cbegin += int64(data_shape[i])
         if end.shape[0] <= i:
-            cend = int64(data.shape[i])
+            cend = int64(data_shape[i])
         elif slice_mode != 0:
             cstride = int64(1)
             if end[i] < 0:
-                cend = int64(data.shape[i])
+                cend = int64(data_shape[i])
             else:
                 cend = cbegin + int64(end[i])
         else:
-            if end[i] > data.shape[i]:
-                cend = int64(data.shape[i])
-            elif end[i] < -data.shape[i]:
+            if end[i] > data_shape[i]:
+                cend = int64(data_shape[i])
+            elif end[i] < -data_shape[i]:
                 cend = int64(-1)
             else:
                 cend = int64(end[i])
             if cend < 0:
-                cend += int64(data.shape[i])
+                cend += int64(data_shape[i])
         assert cstride != 0, "Strides can't be zero."
         if cstride < 0:
             slice_range = cbegin - cend
@@ -192,7 +192,7 @@ def _strided_slice_shape_func_input_data(data, begin, end, strides, slice_mode):
     return out
 
 
-@_reg.register_shape_func("dyn.strided_slice", True)
+@_reg.register_shape_func("dyn.strided_slice", [False, True, True, True])
 def strided_slice_shape_func(attrs, inputs, _):
     """
     Shape func for strided_slice
diff --git a/src/relay/analysis/util.cc b/src/relay/analysis/util.cc
index afb4d108437e..9513d93cb1da 100644
--- a/src/relay/analysis/util.cc
+++ b/src/relay/analysis/util.cc
@@ -490,8 +490,10 @@ bool IsDataDependant(const CallNode* call) {
     }
   }
 
-  Array<Integer> reqs = tshape_data_dependant[op];
-  return reqs[0]->value != 0;
+  for (auto req: tshape_data_dependant[op]) {
+    if (req->value != 0) return true;
+  }
+  return false;
 }
 }  // namespace relay
 }  // namespace tvm
diff --git a/src/relay/backend/compile_engine.cc b/src/relay/backend/compile_engine.cc
index dde81f25d10c..5293df90c01e 100644
--- a/src/relay/backend/compile_engine.cc
+++ b/src/relay/backend/compile_engine.cc
@@ -440,10 +440,8 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
       ICHECK(data_dependants_.size());
       auto dep_spec = data_dependants_per_input_.back();
       auto index = param_to_index_[var];
-      LOG(INFO) << dep_spec.size() << ", " << index;
       ICHECK(dep_spec.size() > index);
-      // bool data_dependant = dep_spec[index];
-      bool data_dependant = data_dependants_.back();
+      bool data_dependant = dep_spec[index];
       if (data_dependant) {
         param_states_[var] |= kNeedInputData;
         return param_data_[var];
       }
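With the spec [False, True, True, True], only begin, end and strides are
marked data dependent; the first input now reaches the kernel as a shape
tensor, which is why the hybrid script's data parameter becomes data_shape.
A plain-Python illustration of which tensors the shape function receives
(the shape_of() notation here is descriptive, not a TVM API):

    dep_spec = [False, True, True, True]
    args = ["data", "begin", "end", "strides"]
    shape_func_inputs = [
        # non-data-dependent inputs arrive as a 1-D tensor holding the
        # argument's shape; data-dependent inputs arrive as their values
        name if dep else "shape_of(" + name + ")"
        for name, dep in zip(args, dep_spec)
    ]
    assert shape_func_inputs == ["shape_of(data)", "begin", "end", "strides"]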
From 897d837d6a8cc4c16338f248786079796748a1c1 Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Wed, 30 Dec 2020 21:59:52 +0900
Subject: [PATCH 04/13] reshape also working

---
 python/tvm/relay/op/dyn/_transform.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/python/tvm/relay/op/dyn/_transform.py b/python/tvm/relay/op/dyn/_transform.py
index c7d499573430..a36b56214bc4 100644
--- a/python/tvm/relay/op/dyn/_transform.py
+++ b/python/tvm/relay/op/dyn/_transform.py
@@ -32,11 +32,8 @@
 
 
 @script
-def _reshape_shape_func_input_data(data, newshape, ndim):
+def _reshape_shape_func_input_data(data_shape, newshape, ndim):
     out = output_tensor((ndim,), "int64")
-    data_shape = allocate((len(data.shape),), "int64")
-    for x in const_range(len(data.shape)):
-        data_shape[x] = int64(data.shape[x])
     src_idx = 0
     dst_idx = 0
     infer_idx = -1
@@ -87,7 +84,7 @@ def _reshape_shape_func_input_data(data, newshape, ndim):
     return out
 
 
-@_reg.register_shape_func("dyn.reshape", True)
+@_reg.register_shape_func("dyn.reshape", [False, True])
 def dynamic_reshape_shape_func(attrs, inputs, out_ndims):
     return [_reshape_shape_func_input_data(*inputs, out_ndims[0])]
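dyn.reshape follows the same pattern with [False, True]: the kernel needs only
the shape of data plus the values of newshape, so the allocate-and-copy
preamble that materialized data_shape by hand can be dropped. A simplified
pure-Python version of the output-shape computation (handles positive dims and
a single -1 only, unlike the full hybrid-script kernel):

    def reshape_out_shape(data_shape, newshape):
        out = list(newshape)
        infer_idx = -1
        for i, dim in enumerate(newshape):
            if dim == -1:
                infer_idx = i
                out[i] = 1
        if infer_idx >= 0:
            total = 1
            for d in data_shape:
                total *= d
            known = 1
            for d in out:
                known *= d
            # the -1 dimension is inferred from the remaining elements
            out[infer_idx] = total // known
        return out

    assert reshape_out_shape([2, 3, 4], [6, -1]) == [6, 4]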
From 72cd60c1f2005ac843a64090fafeabc6f7f2992c Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Wed, 30 Dec 2020 22:02:08 +0900
Subject: [PATCH 05/13] remove log

---
 python/tvm/relay/transform/memory_alloc.py | 3 ---
 src/relay/backend/compile_engine.cc        | 3 +--
 2 files changed, 1 insertion(+), 5 deletions(-)

diff --git a/python/tvm/relay/transform/memory_alloc.py b/python/tvm/relay/transform/memory_alloc.py
index 1419fa369772..721078c34bc9 100644
--- a/python/tvm/relay/transform/memory_alloc.py
+++ b/python/tvm/relay/transform/memory_alloc.py
@@ -205,10 +205,7 @@ def emit_shape_func(self, scope, func, new_args):
         input_pos = 0
         cpu_ctx = nd.cpu(0)
 
-        print(func)
-
         for i, (arg, state) in enumerate(zip(new_args, input_states)):
-            print(i, state)
             state = int(state)
             # Pass Shapes
             if state == 2:
diff --git a/src/relay/backend/compile_engine.cc b/src/relay/backend/compile_engine.cc
index 5293df90c01e..ca191af2dc52 100644
--- a/src/relay/backend/compile_engine.cc
+++ b/src/relay/backend/compile_engine.cc
@@ -527,13 +527,12 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
     Array<Integer> dep_spec = tshape_data_dependant[op];
     if (dep_spec.size() == 1 && call_node->args.size() > 1) {
       for (size_t i = 1; i < call_node->args.size(); ++i) {
-	dep_spec.push_back(dep_spec[0]);
+        dep_spec.push_back(dep_spec[0]);
       }
     } else {
       ICHECK_EQ(dep_spec.size(), call_node->args.size());
     }
     data_dependants_per_input_.push_back(dep_spec);
-    LOG(INFO) << op->name << ", " << dep_spec;
 
     // Visit all inputs
     Array<te::Tensor> inputs;

From 7ca8002da6cd23bc5d654fed8b3b019cc9bbce90 Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Thu, 31 Dec 2020 06:16:53 +0900
Subject: [PATCH 06/13] works on maskrcnn

---
 src/relay/backend/compile_engine.cc | 20 ++++++++------------
 1 file changed, 8 insertions(+), 12 deletions(-)

diff --git a/src/relay/backend/compile_engine.cc b/src/relay/backend/compile_engine.cc
index ca191af2dc52..0bf86b71baa9 100644
--- a/src/relay/backend/compile_engine.cc
+++ b/src/relay/backend/compile_engine.cc
@@ -336,10 +336,8 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   MakeShapeFunc() {}
 
   std::pair<te::Schedule, CachedFunc> Create(const Function& prim_func) {
-    for (size_t i = 0; i < prim_func->params.size(); ++i) {
-      auto param = prim_func->params[i];
+    for (auto param: prim_func->params) {
       param_states_[param] = kNoNeed;
-      param_to_index_[param] = i;
       Array<te::Tensor> data_inputs;
       Array<te::Tensor> shape_inputs;
@@ -438,10 +436,7 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
       return {};
     } else {
       ICHECK(data_dependants_.size());
-      auto dep_spec = data_dependants_per_input_.back();
-      auto index = param_to_index_[var];
-      ICHECK(dep_spec.size() > index);
-      bool data_dependant = dep_spec[index];
+      auto data_dependant = data_dependants_per_input_.back();
       if (data_dependant) {
         param_states_[var] |= kNeedInputData;
         return param_data_[var];
@@ -530,20 +525,22 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
         dep_spec.push_back(dep_spec[0]);
       }
     } else {
-      ICHECK_EQ(dep_spec.size(), call_node->args.size());
+      // ICHECK_EQ(dep_spec.size(), call_node->args.size());
     }
-    data_dependants_per_input_.push_back(dep_spec);
 
     // Visit all inputs
     Array<te::Tensor> inputs;
    int count_tuple = 0;
-    for (Expr arg : call_node->args) {
+    for (size_t i = 0; i < call_node->args.size(); ++i) {
+      Expr arg = call_node->args[i];
       if (arg->checked_type().as<TupleTypeNode>()) {
         ++count_tuple;
       }
+      data_dependants_per_input_.push_back(dep_spec[i]->value != 0);
       for (te::Tensor tensor : VisitExpr(arg)) {
         inputs.push_back(tensor);
       }
+      data_dependants_per_input_.pop_back();
     }
     if (count_tuple) {
       ICHECK_EQ(call_node->args.size(), 1U) << "Only allow function with a single tuple input";
@@ -610,10 +607,9 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_data_;
   /*! \brief Map from parameter to list of shape placeholder */
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_shapes_;
-  std::unordered_map<Expr, size_t, ObjectPtrHash, ObjectPtrEqual> param_to_index_;
   /*! \brief Stack of data dependencies for shape function */
   std::vector<bool> data_dependants_;
-  std::vector<Array<Integer>> data_dependants_per_input_;
+  std::vector<bool> data_dependants_per_input_;
   /*! \brief Scalars used in the shape function */
   Array<te::Tensor> scalars_;
 };
From 48a7c7bd8a3e5a43f9463e7a9c83701f2e5e2d01 Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Tue, 5 Jan 2021 13:46:14 +0900
Subject: [PATCH 07/13] lint fix

---
 python/tvm/relay/transform/memory_alloc.py | 1 -
 src/relay/analysis/util.cc                 | 2 +-
 src/relay/backend/compile_engine.cc        | 7 +++----
 3 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/python/tvm/relay/transform/memory_alloc.py b/python/tvm/relay/transform/memory_alloc.py
index 721078c34bc9..66528c861788 100644
--- a/python/tvm/relay/transform/memory_alloc.py
+++ b/python/tvm/relay/transform/memory_alloc.py
@@ -204,7 +204,6 @@ def emit_shape_func(self, scope, func, new_args):
         is_inputs = []
         input_pos = 0
         cpu_ctx = nd.cpu(0)
-
         for i, (arg, state) in enumerate(zip(new_args, input_states)):
             state = int(state)
             # Pass Shapes
diff --git a/src/relay/analysis/util.cc b/src/relay/analysis/util.cc
index 9513d93cb1da..257fbd38b5fa 100644
--- a/src/relay/analysis/util.cc
+++ b/src/relay/analysis/util.cc
@@ -490,7 +490,7 @@ bool IsDataDependant(const CallNode* call) {
     }
   }
 
-  for (auto req: tshape_data_dependant[op]) {
+  for (auto req : tshape_data_dependant[op]) {
     if (req->value != 0) return true;
   }
   return false;
diff --git a/src/relay/backend/compile_engine.cc b/src/relay/backend/compile_engine.cc
index 0bf86b71baa9..a6fa81171fe2 100644
--- a/src/relay/backend/compile_engine.cc
+++ b/src/relay/backend/compile_engine.cc
@@ -336,7 +336,7 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   MakeShapeFunc() {}
 
   std::pair<te::Schedule, CachedFunc> Create(const Function& prim_func) {
-    for (auto param: prim_func->params) {
+    for (auto param : prim_func->params) {
       param_states_[param] = kNoNeed;
       Array<te::Tensor> data_inputs;
       Array<te::Tensor> shape_inputs;
@@ -524,8 +524,6 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
       for (size_t i = 1; i < call_node->args.size(); ++i) {
         dep_spec.push_back(dep_spec[0]);
       }
-    } else {
-      // ICHECK_EQ(dep_spec.size(), call_node->args.size());
     }
 
     // Visit all inputs
@@ -605,8 +603,9 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_data_;
   /*! \brief Map from parameter to list of shape placeholder */
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_shapes_;
-  /*! \brief Stack of data dependencies for shape function */
+  /*! \brief Stack of data dependencies for shape function, specified per op */
   std::vector<bool> data_dependants_;
+  /*! \brief Stack of data dependencies for shape function, specified per each op input */
   std::vector<bool> data_dependants_per_input_;
   /*! \brief Scalars used in the shape function */
   Array<te::Tensor> scalars_;
 };

From 6d9011d781b49ceaddda6c1c3637e76d0fc251e0 Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Tue, 5 Jan 2021 14:56:51 +0900
Subject: [PATCH 08/13] fix cpp test

---
 python/tvm/relay/op/op.py            | 2 +-
 tests/cpp/relay_build_module_test.cc | 4 +++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/python/tvm/relay/op/op.py b/python/tvm/relay/op/op.py
index 36409d375d74..58f8f47e7fa2 100644
--- a/python/tvm/relay/op/op.py
+++ b/python/tvm/relay/op/op.py
@@ -364,7 +364,7 @@ def register_shape_func(op_name, data_dependant, shape_func=None, level=10):
     op_name : str
         The name of the op.
 
-    data_dependant : bool
+    data_dependant : bool or list of bool
         Whether the shape function depends on input data.
 
     shape_func : function (attrs: Attrs, inputs: List[Tensor],
                            out_ndims: List[IndexExpr]) -> shape_tensors: List<Tensor>
 
     level : int
         The priority level
     """
diff --git a/tests/cpp/relay_build_module_test.cc b/tests/cpp/relay_build_module_test.cc
index 17b3d3f22737..00782d6c15b9 100644
--- a/tests/cpp/relay_build_module_test.cc
+++ b/tests/cpp/relay_build_module_test.cc
@@ -105,7 +105,9 @@ TEST(Relay, BuildModule) {
   }
   auto fgeneric = GenericFunc::Get("test.strategy_generic").set_default(*fs);
   (*reg)("add", "FTVMStrategy", fgeneric, 10);
-  (*reg)("add", "TShapeDataDependant", {0}, 10);
+  Array<Integer> dep;
+  dep.push_back(0);
+  (*reg)("add", "TShapeDataDependant", dep, 10);
   // build
   auto pfb = tvm::runtime::Registry::Get("relay.build_module._BuildModule");
   tvm::runtime::Module build_mod = (*pfb)();
From 1a7e72116dcac658efabfa58d2f84ec059c36a12 Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Tue, 5 Jan 2021 19:10:29 +0900
Subject: [PATCH 09/13] remove stale pop back

---
 src/relay/backend/compile_engine.cc | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/relay/backend/compile_engine.cc b/src/relay/backend/compile_engine.cc
index a6fa81171fe2..a681655a07e4 100644
--- a/src/relay/backend/compile_engine.cc
+++ b/src/relay/backend/compile_engine.cc
@@ -561,7 +561,6 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
     // Call shape function
     auto outputs = fshape_func[op](call_node->attrs, inputs, out_ndims);
     data_dependants_.pop_back();
-    data_dependants_per_input_.pop_back();
     readable_name_stream_ << "_" << op->name;
     return outputs;
   }

From 3a15c133c402e6b47c0fa2092098c6466bcb21ea Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Fri, 15 Jan 2021 04:52:01 +0900
Subject: [PATCH 10/13] add more doc

---
 python/tvm/relay/op/op.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/python/tvm/relay/op/op.py b/python/tvm/relay/op/op.py
index 58f8f47e7fa2..d6f3fe733a0a 100644
--- a/python/tvm/relay/op/op.py
+++ b/python/tvm/relay/op/op.py
@@ -365,7 +365,9 @@ def register_shape_func(op_name, data_dependant, shape_func=None, level=10):
         The name of the op.
 
     data_dependant : bool or list of bool
-        Whether the shape function depends on input data.
+        Whether the shape function depends on input data. If this is a list of bool,
+        the length of the list must be the same as the number of arguments of this op.
+        The list specifies per-input data dependence of the op.
 
     shape_func : function (attrs: Attrs, inputs: List[Tensor],
                            out_ndims: List[IndexExpr]) -> shape_tensors: List<Tensor>
 
     level : int
         The priority level
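After the doc patch, both registration forms are part of the documented
contract: a single bool, or a list whose length matches the op's argument
count. A small standalone validator expressing that contract (a sketch, not
TVM source; the length rule is quoted from the docstring above):

    def check_data_dependant_arg(data_dependant, num_op_args):
        # a bool is always valid; a list must carry one bool per argument
        if isinstance(data_dependant, bool):
            return True
        return (isinstance(data_dependant, list)
                and all(isinstance(f, bool) for f in data_dependant)
                and len(data_dependant) == num_op_args)

    assert check_data_dependant_arg(True, 2)
    assert check_data_dependant_arg([False, True], 2)   # dyn.reshape
    assert not check_data_dependant_arg([False, True], 4)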
From eafddf5ed8644b258bde3a1958301b00b00a37e1 Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Fri, 15 Jan 2021 10:37:35 +0900
Subject: [PATCH 11/13] dependant -> dependent

---
 include/tvm/relay/op_attr_types.h   |  4 +--
 python/tvm/relay/op/op.py           | 10 ++++----
 src/relay/analysis/util.cc          | 10 ++++----
 src/relay/backend/compile_engine.cc | 38 ++++++++++++++---------------
 src/relay/transforms/pass_utils.h   |  6 ++---
 5 files changed, 34 insertions(+), 34 deletions(-)

diff --git a/include/tvm/relay/op_attr_types.h b/include/tvm/relay/op_attr_types.h
index 88e37fa0057c..f916dbeb713f 100644
--- a/include/tvm/relay/op_attr_types.h
+++ b/include/tvm/relay/op_attr_types.h
@@ -83,9 +83,9 @@ using TOpIsStateful = bool;
 using TNonComputational = bool;
 
 /*!
- * \brief Mark the operator whether output shape is data dependant.
+ * \brief Mark the operator whether output shape is data dependent.
  */
-using TShapeDataDependant = Array<Integer>;
+using TShapeDataDependent = Array<Integer>;
 
 /*!
  * \brief Computation description interface.
diff --git a/python/tvm/relay/op/op.py b/python/tvm/relay/op/op.py
index d6f3fe733a0a..5882027fb1d8 100644
--- a/python/tvm/relay/op/op.py
+++ b/python/tvm/relay/op/op.py
@@ -356,7 +356,7 @@ def register_gradient(op_name, fgradient=None, level=10):
     return tvm.ir.register_op_attr(op_name, "FPrimalGradient", fgradient, level)
 
 
-def register_shape_func(op_name, data_dependant, shape_func=None, level=10):
+def register_shape_func(op_name, data_dependent, shape_func=None, level=10):
     """Register operator shape function for an op.
 
     Parameters
     ----------
     op_name : str
         The name of the op.
 
-    data_dependant : bool or list of bool
+    data_dependent : bool or list of bool
         Whether the shape function depends on input data. If this is a list of bool,
         the length of the list must be the same as the number of arguments of this op.
         The list specifies per-input data dependence of the op.
@@ -376,9 +376,9 @@ def register_shape_func(op_name, data_dependent, shape_func=None, level=10):
     level : int
         The priority level
     """
-    if not isinstance(data_dependant, list):
-        data_dependant = [data_dependant]
-    get(op_name).set_attr("TShapeDataDependant", data_dependant, level)
+    if not isinstance(data_dependent, list):
+        data_dependent = [data_dependent]
+    get(op_name).set_attr("TShapeDataDependent", data_dependent, level)
     return tvm.ir.register_op_attr(op_name, "FShapeFunc", shape_func, level)
diff --git a/src/relay/analysis/util.cc b/src/relay/analysis/util.cc
index 257fbd38b5fa..abb9e6b034c2 100644
--- a/src/relay/analysis/util.cc
+++ b/src/relay/analysis/util.cc
@@ -473,24 +473,24 @@ bool IsDynamic(const Type& ty) {
 
 TVM_REGISTER_GLOBAL("relay.ir.IsDynamic").set_body_typed(IsDynamic);
 
-bool IsDataDependant(const CallNode* call) {
-  static auto tshape_data_dependant = Op::GetAttrMap<TShapeDataDependant>("TShapeDataDependant");
+bool IsDataDependent(const CallNode* call) {
+  static auto tshape_data_dependent = Op::GetAttrMap<TShapeDataDependent>("TShapeDataDependent");
   Op op = Downcast<Op>(call->op);
-  if (!tshape_data_dependant.count(op)) {
+  if (!tshape_data_dependent.count(op)) {
     return false;
   }
 
   if (op->name == "strided_slice") {
     if (const auto* attrs = call->attrs.as<StridedSliceAttrs>()) {
       if (attrs->begin && attrs->end && attrs->strides) {
-        // not data dependant if begin, end and strides exist
+        // not data dependent if begin, end and strides exist
         return false;
       }
     }
   }
 
-  for (auto req : tshape_data_dependant[op]) {
+  for (auto req : tshape_data_dependent[op]) {
    if (req->value != 0) return true;
   }
   return false;
diff --git a/src/relay/backend/compile_engine.cc b/src/relay/backend/compile_engine.cc
index a681655a07e4..b4b13645e2b5 100644
--- a/src/relay/backend/compile_engine.cc
+++ b/src/relay/backend/compile_engine.cc
@@ -435,9 +435,9 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
       LOG(FATAL) << "Free variable " << var->name_hint();
       return {};
     } else {
-      ICHECK(data_dependants_.size());
-      auto data_dependant = data_dependants_per_input_.back();
-      if (data_dependant) {
+      ICHECK(data_dependents_.size());
+      auto data_dependent = data_dependents_per_input_.back();
+      if (data_dependent) {
         param_states_[var] |= kNeedInputData;
         return param_data_[var];
       } else {
@@ -449,12 +449,12 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
 
   Array<te::Tensor> VisitExpr_(const ConstantNode* op) final {
     using tir::make_const;
-    ICHECK(data_dependants_.size());
-    bool data_dependant = data_dependants_.back();
+    ICHECK(data_dependents_.size());
+    bool data_dependent = data_dependents_.back();
     if (!op->is_scalar()) {
       // This is a constant weight, extract the shape of the weight tensor.
       // This can not be data dependent.
-      CHECK(!data_dependant);
+      CHECK(!data_dependent);
       auto ttype = op->checked_type().as<TensorTypeNode>();
       int ndim = static_cast<int>(ttype->shape.size());
       Array<IndexExpr> out_shape{ndim};
@@ -472,7 +472,7 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
       scalars_.push_back(value);
       return {value};
     }
-    if (data_dependant) {
+    if (data_dependent) {
       void* data = op->data->data;
       DataType dtype = DataType(op->data->dtype);
       auto value = tvm::te::compute(
@@ -507,19 +507,19 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   Array<te::Tensor> VisitExpr_(const CallNode* call_node) final {
     static auto fshape_func = Op::GetAttrMap<FShapeFunc>("FShapeFunc");
-    static auto tshape_data_dependant = Op::GetAttrMap<TShapeDataDependant>("TShapeDataDependant");
+    static auto tshape_data_dependent = Op::GetAttrMap<TShapeDataDependent>("TShapeDataDependent");
     ICHECK(call_node->op.as<OpNode>()) << "Primitive function only allows call into primitive ops";
     Op op = Downcast<Op>(call_node->op);
-    ICHECK(data_dependants_.empty() || !data_dependants_.back())
+    ICHECK(data_dependents_.empty() || !data_dependents_.back())
         << "Error in op fusion: output of the shape func is fed to a "
-        << "data-dependant shape func";
+        << "data-dependent shape func";
     ICHECK_GT(fshape_func.count(op), 0) << "Internal error, cannot find ShapeFunc for " << op->name;
-    ICHECK_GT(tshape_data_dependant.count(op), 0)
-        << "Internal error, cannot find TShapeDataDependant for " << op->name;
+    ICHECK_GT(tshape_data_dependent.count(op), 0)
+        << "Internal error, cannot find TShapeDataDependent for " << op->name;
 
-    data_dependants_.push_back(IsDataDependant(call_node));
+    data_dependents_.push_back(IsDataDependent(call_node));
 
-    Array<Integer> dep_spec = tshape_data_dependant[op];
+    Array<Integer> dep_spec = tshape_data_dependent[op];
     if (dep_spec.size() == 1 && call_node->args.size() > 1) {
       for (size_t i = 1; i < call_node->args.size(); ++i) {
         dep_spec.push_back(dep_spec[0]);
       }
@@ -534,11 +534,11 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
       if (arg->checked_type().as<TupleTypeNode>()) {
         ++count_tuple;
       }
-      data_dependants_per_input_.push_back(dep_spec[i]->value != 0);
+      data_dependents_per_input_.push_back(dep_spec[i]->value != 0);
       for (te::Tensor tensor : VisitExpr(arg)) {
         inputs.push_back(tensor);
       }
-      data_dependants_per_input_.pop_back();
+      data_dependents_per_input_.pop_back();
     }
     if (count_tuple) {
       ICHECK_EQ(call_node->args.size(), 1U) << "Only allow function with a single tuple input";
@@ -560,7 +560,7 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
     }
     // Call shape function
     auto outputs = fshape_func[op](call_node->attrs, inputs, out_ndims);
-    data_dependants_.pop_back();
+    data_dependents_.pop_back();
     readable_name_stream_ << "_" << op->name;
     return outputs;
   }
@@ -605,9 +605,9 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_data_;
   /*! \brief Map from parameter to list of shape placeholder */
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_shapes_;
   /*! \brief Stack of data dependencies for shape function, specified per op */
-  std::vector<bool> data_dependants_;
+  std::vector<bool> data_dependents_;
   /*! \brief Stack of data dependencies for shape function, specified per each op input */
-  std::vector<bool> data_dependants_per_input_;
+  std::vector<bool> data_dependents_per_input_;
   /*! \brief Scalars used in the shape function */
   Array<te::Tensor> scalars_;
 };
diff --git a/src/relay/transforms/pass_utils.h b/src/relay/transforms/pass_utils.h
index a2f22cbbf106..bb2f268a23d7 100644
--- a/src/relay/transforms/pass_utils.h
+++ b/src/relay/transforms/pass_utils.h
@@ -90,11 +90,11 @@ Expr TypeSubst(const Expr& expr, const tvm::Map<TypeVar, Type>& subst_map);
 bool IsDynamic(const Type& ty);
 
 /*!
- * \brief Check if call is data dependant.
+ * \brief Check if call is data dependent.
  * \param call The call to be checked.
- * \return Whether the call is data dependant.
+ * \return Whether the call is data dependent.
 */
-bool IsDataDependant(const CallNode* call);
+bool IsDataDependent(const CallNode* call);
 
 /*!
  * \brief Make arbitrary transformation preserve the out most function.

From 94ced0d6ae1fac37b6b914942b76e6cab5759b61 Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Fri, 15 Jan 2021 10:39:11 +0900
Subject: [PATCH 12/13] remove redundant check

---
 src/relay/backend/compile_engine.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/relay/backend/compile_engine.cc b/src/relay/backend/compile_engine.cc
index b4b13645e2b5..eeb9bce77717 100644
--- a/src/relay/backend/compile_engine.cc
+++ b/src/relay/backend/compile_engine.cc
@@ -520,7 +520,7 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
     data_dependents_.push_back(IsDataDependent(call_node));
 
     Array<Integer> dep_spec = tshape_data_dependent[op];
-    if (dep_spec.size() == 1 && call_node->args.size() > 1) {
+    if (dep_spec.size() == 1) {
       for (size_t i = 1; i < call_node->args.size(); ++i) {
         dep_spec.push_back(dep_spec[0]);
      }
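Patch 12 tightens the broadcast rule: a one-element spec is now always
replicated, regardless of the argument count. In plain Python (sketch):

    def expand_dep_spec(dep_spec, num_args):
        # a one-element spec means "this flag applies to every argument";
        # mirrors the loop in MakeShapeFunc::VisitExpr_(const CallNode*)
        if len(dep_spec) == 1:
            return dep_spec * num_args
        return dep_spec

    assert expand_dep_spec([1], 3) == [1, 1, 1]
    assert expand_dep_spec([0, 1], 2) == [0, 1]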
From 6c1b318537ec81c0dabfde295d767e26e0ca0e6e Mon Sep 17 00:00:00 2001
From: Masahiro Masuda
Date: Fri, 15 Jan 2021 10:43:07 +0900
Subject: [PATCH 13/13] remove data_dependent_

---
 src/relay/backend/compile_engine.cc  | 15 ++++++---------
 src/relay/transforms/fuse_ops.cc     |  2 +-
 tests/cpp/relay_build_module_test.cc |  2 +-
 3 files changed, 8 insertions(+), 11 deletions(-)

diff --git a/src/relay/backend/compile_engine.cc b/src/relay/backend/compile_engine.cc
index eeb9bce77717..a66ae0a7e2c0 100644
--- a/src/relay/backend/compile_engine.cc
+++ b/src/relay/backend/compile_engine.cc
@@ -435,7 +435,7 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
       LOG(FATAL) << "Free variable " << var->name_hint();
       return {};
     } else {
-      ICHECK(data_dependents_.size());
+      ICHECK(data_dependents_per_input_.size());
       auto data_dependent = data_dependents_per_input_.back();
       if (data_dependent) {
         param_states_[var] |= kNeedInputData;
@@ -449,8 +449,8 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   Array<te::Tensor> VisitExpr_(const ConstantNode* op) final {
     using tir::make_const;
-    ICHECK(data_dependents_.size());
-    bool data_dependent = data_dependents_.back();
+    ICHECK(data_dependents_per_input_.size());
+    bool data_dependent = data_dependents_per_input_.back();
     if (!op->is_scalar()) {
       // This is a constant weight, extract the shape of the weight tensor.
       // This can not be data dependent.
@@ -510,17 +510,17 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
     static auto tshape_data_dependent = Op::GetAttrMap<TShapeDataDependent>("TShapeDataDependent");
     ICHECK(call_node->op.as<OpNode>()) << "Primitive function only allows call into primitive ops";
     Op op = Downcast<Op>(call_node->op);
-    ICHECK(data_dependents_.empty() || !data_dependents_.back())
+    ICHECK(data_dependents_per_input_.empty() || !data_dependents_per_input_.back())
         << "Error in op fusion: output of the shape func is fed to a "
         << "data-dependent shape func";
     ICHECK_GT(fshape_func.count(op), 0) << "Internal error, cannot find ShapeFunc for " << op->name;
     ICHECK_GT(tshape_data_dependent.count(op), 0)
         << "Internal error, cannot find TShapeDataDependent for " << op->name;
 
-    data_dependents_.push_back(IsDataDependent(call_node));
-
-    Array<Integer> dep_spec = tshape_data_dependent[op];
+    Array<Integer> dep_spec = tshape_data_dependent[op];
     if (dep_spec.size() == 1) {
+      // This is for cases when data dependence is specified per op
+      // Replicate 0 or 1 flag to all arguments
       for (size_t i = 1; i < call_node->args.size(); ++i) {
         dep_spec.push_back(dep_spec[0]);
       }
@@ -560,7 +560,6 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
     }
     // Call shape function
     auto outputs = fshape_func[op](call_node->attrs, inputs, out_ndims);
-    data_dependents_.pop_back();
     readable_name_stream_ << "_" << op->name;
     return outputs;
   }
@@ -604,8 +603,6 @@ class MakeShapeFunc : public backend::MemoizedExprTranslator<Array<te::Tensor>>
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_data_;
   /*! \brief Map from parameter to list of shape placeholder */
   std::unordered_map<Expr, Array<te::Tensor>, ObjectPtrHash, ObjectPtrEqual> param_shapes_;
-  /*! \brief Stack of data dependencies for shape function, specified per op */
-  std::vector<bool> data_dependents_;
   /*! \brief Stack of data dependencies for shape function, specified per each op input */
   std::vector<bool> data_dependents_per_input_;
   /*! \brief Scalars used in the shape function */
   Array<te::Tensor> scalars_;
 };
diff --git a/src/relay/transforms/fuse_ops.cc b/src/relay/transforms/fuse_ops.cc
index 29f3bfa0a17e..1b28980a0a2f 100644
--- a/src/relay/transforms/fuse_ops.cc
+++ b/src/relay/transforms/fuse_ops.cc
@@ -241,7 +241,7 @@ class IndexedForwardGraph::Creator : private ExprVisitor {
     OpPatternKind op_pattern = kOpaque;
     if (const OpNode* opnode = call->op.as<OpNode>()) {
       auto op = GetRef<Op>(opnode);
-      if (IsDynamic(call->checked_type()) && IsDataDependant(call)) {
+      if (IsDynamic(call->checked_type()) && IsDataDependent(call)) {
        // output of a shape func can't be fed to a data-dependent shape func
        op_pattern = kOpaque;
      } else {
diff --git a/tests/cpp/relay_build_module_test.cc b/tests/cpp/relay_build_module_test.cc
index 00782d6c15b9..a15cdcd3926b 100644
--- a/tests/cpp/relay_build_module_test.cc
+++ b/tests/cpp/relay_build_module_test.cc
@@ -107,7 +107,7 @@ TEST(Relay, BuildModule) {
   (*reg)("add", "FTVMStrategy", fgeneric, 10);
   Array<Integer> dep;
   dep.push_back(0);
-  (*reg)("add", "TShapeDataDependant", dep, 10);
+  (*reg)("add", "TShapeDataDependent", dep, 10);
   // build
   auto pfb = tvm::runtime::Registry::Get("relay.build_module._BuildModule");
   tvm::runtime::Module build_mod = (*pfb)();
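With the series complete, the per-op stack is gone and data dependence is
tracked purely per input; IsDataDependent reports an op as data dependent if
any per-input flag is set. A closing plain-Python sketch of that predicate:

    def is_data_dependent(dep_spec):
        # mirrors the final loop in src/relay/analysis/util.cc
        return any(flag != 0 for flag in dep_spec)

    assert is_data_dependent([0, 1, 1, 1])   # dyn.strided_slice
    assert not is_data_dependent([0])        # "add" as registered in the test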