From 80a88dd51f880a823f2826f4202c65b1f31960f5 Mon Sep 17 00:00:00 2001
From: Xingjian Shi
Date: Thu, 15 Oct 2020 13:32:47 -0700
Subject: [PATCH 01/18] Improve MXNet frontend: where broadcasting, N-D
 batch_dot, npx_reshape special dims

Squashed development history. Taken together, this commit:

- broadcasts the inputs of `where` to a common shape before conversion
- supports ndim > 3 in `batch_dot` by collapsing and restoring batch axes
- translates the -3 and -6 special dimensions in `_npx_reshape`
- threads an `out_shape` hint through the x86 cblas batch_matmul compute
- broadcasts `cond`/`dat` in `_npi_where_rscalar`
- extends the MXNet frontend tests for the new paths
---
 python/tvm/relay/frontend/mxnet.py          | 141 ++++++++++++++++----
 python/tvm/topi/x86/batch_matmul.py         |  12 +-
 tests/python/frontend/mxnet/test_forward.py |  22 ++-
 3 files changed, 147 insertions(+), 28 deletions(-)

diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py
index 25028f47b33c..fe217df723aa 100644
--- a/python/tvm/relay/frontend/mxnet.py
+++ b/python/tvm/relay/frontend/mxnet.py
@@ -58,6 +58,11 @@
 _activation_map = {"sigmoid": _op.sigmoid, "tanh": _op.tanh, "relu": _op.nn.relu}
 
 
+def get_tuple_shape(shape_expr):
+    """Get the tuple shape from a shape expression"""
+    return tuple([ele.value for ele in shape_expr])
+
+
 def _mx_fully_connected(inputs, attrs):
     import mxnet as mx  # pylint: disable=import-outside-toplevel
 
@@ -627,6 +632,21 @@ def _mx_expand_dims(inputs, attrs):
     return _op.expand_dims(inputs[0], axis=axis)
 
 
+def _mx_where(inputs, attrs):
+    cond, lhs, rhs = inputs
+    cond_shape = get_tuple_shape(_infer_type(cond).checked_type.shape)
+    lhs_shape = get_tuple_shape(_infer_type(lhs).checked_type.shape)
+    rhs_shape = get_tuple_shape(_infer_type(rhs).checked_type.shape)
+    out_shape = np.broadcast(np.empty(cond_shape), np.empty(lhs_shape), np.empty(rhs_shape)).shape
+    if out_shape != cond_shape:
+        cond = _op.broadcast_to(cond, out_shape)
+    if out_shape != lhs_shape:
+        lhs = _op.broadcast_to(lhs, out_shape)
+    if out_shape != rhs_shape:
+        rhs = _op.broadcast_to(rhs, out_shape)
+    return _op.where(cond, lhs, rhs)
+
+
 def _mx_pad(inputs, attrs):
     pad_mode = attrs.get_str("mode", None)
     if pad_mode is None:
@@ -790,6 +810,16 @@ def _mx_dot(inputs, attrs):
 def _mx_batch_dot(inputs, attrs):
     assert len(inputs) == 2
     a, b = inputs
+    a_shape = _infer_type(a).checked_type.shape
+    batch_shapes = None
+    if len(a_shape) > 3:
+        batch_shapes = a_shape[:-2]
+        a = _op.reverse_reshape(a, newshape=(-1, 0, 0))
+    b_shape = _infer_type(b).checked_type.shape
+    if len(b_shape) > 3:
+        if batch_shapes is None:
+            batch_shapes = b_shape[:-2]
+        b = _op.reverse_reshape(b, newshape=(-1, 0, 0))
     transpose_a = attrs.get_bool("transpose_a", False)
     transpose_b = attrs.get_bool("transpose_b", False)
     if transpose_a is True:
@@ -797,7 +827,10 @@ def _mx_batch_dot(inputs, attrs):
         raise tvm.error.OpAttributeInvalid(msg.format(transpose_a))
     if transpose_b is False:
         b = _op.transpose(b, axes=[0, 2, 1])
-    return _op.nn.batch_matmul(a, b)
+    out = _op.nn.batch_matmul(a, b)
+    if batch_shapes is not None:
+        out = _op.reverse_reshape(out, newshape=tuple(batch_shapes) + (0, 0))
+    return out
 
 
 def _mx_arange(inputs, attrs):
@@ -2312,23 +2345,76 @@ def _mx_npx_reshape(inputs, attrs):
     reverse = attrs.get_bool("reverse", False)
     shape_list = list(shape)
     new_shape_list = []
-    for num in shape_list:
-        if num > 0 or num == -1:
-            new_shape_list.append(num)
-        elif num == -2:
-            new_shape_list.append(0)
-        elif num == -4:
-            new_shape_list.append(-2)
-        elif num == -5:
-            new_shape_list.append(-3)
-        elif num == -6:
-            new_shape_list.append(-4)
-        else:
-            raise tvm.error.OpAttributeInvalid("Shape dimension %d is not supported" % num)
-    shape = tuple(new_shape_list)
-    if reverse:
-        return _op.reverse_reshape(inputs[0], newshape=shape)
-    return _op.reshape(inputs[0], newshape=shape)
+    if -3 not in shape_list:
+        for num in shape_list:
+            if num > 0 or num == -1:
+                new_shape_list.append(num)
+            elif num == -2:
+                new_shape_list.append(0)
+            elif num == -4:
+                new_shape_list.append(-2)
+            elif num == -5:
+                new_shape_list.append(-3)
+            elif num == -6:
+                new_shape_list.append(-4)
+            else:
+                raise tvm.error.OpAttributeInvalid("Shape dimension %d is not supported" % num)
+        shape = tuple(new_shape_list)
+        if reverse:
+            return _op.reverse_reshape(inputs[0], newshape=shape)
+        return _op.reshape(inputs[0], newshape=shape)
+    else:
+        old_shape = get_tuple_shape(_infer_type(inputs[0]).checked_type.shape)
+        new_shape = []
+        if reverse:
+            old_shape = old_shape[::-1]
+            shape_list = shape_list[::-1]
+        ptr = 0
+        unknown_axis = None
+        src_ptr = 0
+        while src_ptr < len(shape_list):
+            ele = shape_list[src_ptr]
+            src_ptr += 1
+            if ele > 0:
+                new_shape.append(ele)
+                ptr += 1
+            elif ele == -1:
+                new_shape.append(-1)
+                assert unknown_axis is None, "Can only have one unknown axis."
+                unknown_axis = len(new_shape)
+                ptr += 1
+            elif ele == -2:
+                new_shape.append(old_shape[ptr])
+                ptr += 1
+            elif ele == -3:
+                assert old_shape[ptr] == 1
+                ptr += 1
+            elif ele == -4:
+                new_shape += old_shape[ptr:]
+                break
+            elif ele == -5:
+                new_shape.append(old_shape[ptr] * old_shape[ptr + 1])
+                ptr += 2
+            elif ele == -6:
+                # Split axis
+                lhs = shape_list[src_ptr]
+                rhs = shape_list[src_ptr + 1]
+                src_ptr += 2
+                assert not (lhs == -1 and rhs == -1)
+                if lhs == -1:
+                    assert old_shape[ptr] % rhs == 0
+                    lhs = old_shape[ptr] // rhs
+                if rhs == -1:
+                    assert old_shape[ptr] % lhs == 0
+                    rhs = old_shape[ptr] // lhs
+                new_shape.append(lhs)
+                new_shape.append(rhs)
+                ptr += 1
+            else:
+                raise tvm.error.OpAttributeInvalid("Shape dimension %d is not supported" % ele)
+        if reverse:
+            new_shape = new_shape[::-1]
+        return _op.reshape(inputs[0], newshape=new_shape)
 
 
 def _mx_split_v2(inputs, attrs):
@@ -2346,12 +2432,21 @@ def _mx_split_v2(inputs, attrs):
 
 
 def _mx_npi_where_rscalar(inputs, attrs):
+    cond, dat = inputs
     scalar = attrs.get_float("scalar")
-    dtype = _infer_type(inputs[1]).checked_type.dtype
+    cond_shape = get_tuple_shape(_infer_type(cond).checked_type.shape)
+    dat_shape = get_tuple_shape(_infer_type(dat).checked_type.shape)
+    dtype = _infer_type(dat).checked_type.dtype
+    # Check for broadcasting
+    out_shape = np.broadcast(np.empty(cond_shape), np.empty(dat_shape)).shape
+    if out_shape != cond_shape:
+        cond = _op.broadcast_to(cond, out_shape)
+    if out_shape != dat_shape:
+        dat = _op.broadcast_to(dat, out_shape)
     scalar = _expr.const(scalar, dtype=dtype)
-    ones = _op.ones_like(inputs[1])
+    ones = _op.ones_like(dat)
     scalar = _op.multiply(ones, scalar)
-    return _op.where(inputs[0], inputs[1], scalar)
+    return _op.where(cond, dat, scalar)
 
 
 # Note: due to attribute conversion constraint
@@ -2372,7 +2467,6 @@ def _mx_npi_where_rscalar(inputs, attrs):
     "reshape_like",
     "zeros_like",
     "ones_like",
-    "where",
"cos", "cosh", "sin", @@ -2384,6 +2478,7 @@ def _mx_npi_where_rscalar(inputs, attrs): _convert_map = { "_copy": _rename(_op.copy), "relu": _rename(_op.nn.relu), + "where": _mx_where, "broadcast_add": _rename(_op.add), "broadcast_plus": _rename(_op.add), "broadcast_sub": _rename(_op.subtract), @@ -2598,6 +2693,7 @@ def _mx_npi_where_rscalar(inputs, attrs): "_npi_concatenate": _mx_npi_concatenate, "_npx_reshape": _mx_npx_reshape, "_np_copy": _rename(_op.copy), + "_npi_copy": _rename(_op.copy), "_npi_power": _rename(_op.power), "_npi_power_scalar": _binop_scalar(_op.power), "_npi_multiply": _rename(_op.multiply), @@ -2717,7 +2813,6 @@ def _from_mxnet_impl(symbol, shape_dict, dtype_info, params=None, mod=None): else: raise RuntimeError("unexpected type %s" % type(res)) node_map[nid] = res - outputs = [node_map[e[0]][e[1]] for e in jgraph["heads"]] outputs = outputs[0] if len(outputs) == 1 else _expr.Tuple(outputs) func = _function.Function(analysis.free_vars(outputs), outputs) diff --git a/python/tvm/topi/x86/batch_matmul.py b/python/tvm/topi/x86/batch_matmul.py index e3f08160509e..4e5f6efc815a 100644 --- a/python/tvm/topi/x86/batch_matmul.py +++ b/python/tvm/topi/x86/batch_matmul.py @@ -37,6 +37,9 @@ def batch_matmul(cfg, x, y, out_shape=None): 3-D with shape [batch, M, K] y : tvm.te.Tensor 3-D with shape [batch, N, K] + out_shape : tuple or None + Shape of the outputs + Returns ------- output : tvm.te.Tensor @@ -135,7 +138,7 @@ def _default_batch_matmul_config(cfg, M, N, K): @autotvm.register_topi_compute("batch_matmul_cblas.x86") -def batch_matmul_cblas(cfg, x, y): +def batch_matmul_cblas(cfg, x, y, out_shape=None): """Computes batch matrix multiplication of `x` and `y` when `x` and `y` are data in batch. @@ -147,6 +150,9 @@ def batch_matmul_cblas(cfg, x, y): 3-D with shape [batch, M, K] y : tvm.te.Tensor 3-D with shape [batch, N, K] + out_shape : tuple or None + Shape of the output + Returns ------- output : tvm.te.Tensor @@ -157,6 +163,10 @@ def batch_matmul_cblas(cfg, x, y): YB, N, YK = get_const_tuple(y.shape) assert XB == YB, "batch dimension doesn't match" assert XK == YK, "shapes of x and y is inconsistant" + if out_shape is not None: + assert out_shape[0] == XB, "got invalid output shape" + assert out_shape[1] == M, "got invalid output shape" + assert out_shape[2] == N, "got invalid output shape" cfg.add_flop(XB * M * N * XK * 2) return cblas.batch_matmul(x, y, False, True) diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 3f288763ac83..18983dce9152 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -2011,8 +2011,12 @@ def test_forward_np_copy(data_shape, dtype, target, ctx, kind): ((2, 3, 8), (-2, -2, 2, -1), False), ((8, 3, 3, 3, 4, 4), (-6, 2, -1, -4), False), ((8, 3, 3, 3, 4, 4), (-5, -4), False), + ((1, 8, 3, 3, 3, 4, 4), (-3, -5, -4), False), + ((8, 1, 3, 4), (-2, -3, -1), False), ((8, 3, 3, 3, 3, 8), (-4, -5), True), ((8, 3, 2, 4, 8), (-4, -1, 2, -6), True), + ((3, 2, 4, 8, 1, 1), (-4, -1, 2, -6, -5, -3), True), + ((2, 4, 1, 8), (-4, -3, -1, 2, -6), True), ], ) def test_forward_npx_reshape(data_shape, out_shape, dtype, target, reverse, ctx, kind): @@ -2099,16 +2103,26 @@ def test_forward_npi_tanh(data_shape, dtype, target, ctx, kind): @pytest.mark.skipif(not hasattr(mx.np, "where"), reason="mx.np.where hasn't been publish yet") -@pytest.mark.parametrize("data_shape", [(2, 2, 2), (2, 7, 2), (1, 8), (2, 2), (1, 3)]) +@pytest.mark.parametrize( + 
"data_shape,cond_shape", + [ + [(2, 2, 2), (2, 2, 2)], + [(2, 7, 2), (7, 2)], + [(2, 2), (1, 2)], + [(1, 3), (3, 3)] + ] +) @pytest.mark.parametrize("data_dtype", ["float64", "float32", "int64", "int32", "bool"]) @pytest.mark.parametrize("cond_dtype", ["float64", "float32", "int64", "int32", "bool"]) @pytest.mark.parametrize("scalar", [1.0, 2.0]) @tvm.testing.parametrize_targets @pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) -def test_forward_npi_where_rscalar(data_shape, cond_dtype, data_dtype, scalar, target, ctx, kind): +def test_forward_npi_where_rscalar( + data_shape, cond_shape, data_dtype, cond_dtype, scalar, target, ctx, kind +): if data_dtype == "bool": scalar = scalar == 0.0 - cond_np = np.random.uniform(size=data_shape).astype(cond_dtype) + cond_np = np.random.uniform(size=cond_shape).astype(cond_dtype) data_np = np.random.uniform(size=data_shape).astype(data_dtype) cond = mx.sym.var("condition") data = mx.sym.var("x") @@ -2118,7 +2132,7 @@ def test_forward_npi_where_rscalar(data_shape, cond_dtype, data_dtype, scalar, t dtypeDic["condition"] = cond_dtype dtypeDic["x"] = data_dtype mod, _ = relay.frontend.from_mxnet( - mx_sym, shape={"condition": data_shape, "x": data_shape}, dtype=dtypeDic + mx_sym, shape={"condition": cond_shape, "x": data_shape}, dtype=dtypeDic ) intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) op_res = intrp.evaluate()(cond_np, data_np) From 7ea0ee150cb49d0d4558242dba5da27940abc9c2 Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Fri, 16 Oct 2020 15:05:22 -0700 Subject: [PATCH 02/18] address comments --- python/tvm/relay/frontend/mxnet.py | 143 +++++++++----------- tests/python/frontend/mxnet/test_forward.py | 9 +- 2 files changed, 67 insertions(+), 85 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index fe217df723aa..0d932d7eef9e 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -58,11 +58,6 @@ _activation_map = {"sigmoid": _op.sigmoid, "tanh": _op.tanh, "relu": _op.nn.relu} -def get_tuple_shape(shape_expr): - """Get the tuple shape from a shape expression""" - return tuple([ele.value for ele in shape_expr]) - - def _mx_fully_connected(inputs, attrs): import mxnet as mx # pylint: disable=import-outside-toplevel @@ -634,9 +629,9 @@ def _mx_expand_dims(inputs, attrs): def _mx_where(inputs, attrs): cond, lhs, rhs = inputs - cond_shape = get_tuple_shape(_infer_type(cond).checked_type.shape) - lhs_shape = get_tuple_shape(_infer_type(lhs).checked_type.shape) - rhs_shape = get_tuple_shape(_infer_type(rhs).checked_type.shape) + cond_shape = get_const_tuple(_infer_type(cond).checked_type.shape) + lhs_shape = get_const_tuple(_infer_type(lhs).checked_type.shape) + rhs_shape = get_const_tuple(_infer_type(rhs).checked_type.shape) out_shape = np.broadcast(np.empty(cond_shape), np.empty(lhs_shape), np.empty(rhs_shape)).shape if out_shape != cond_shape: cond = _op.broadcast_to(cond, out_shape) @@ -2345,76 +2340,68 @@ def _mx_npx_reshape(inputs, attrs): reverse = attrs.get_bool("reverse", False) shape_list = list(shape) new_shape_list = [] - if -3 not in shape_list: - for num in shape_list: - if num > 0 or num == -1: - new_shape_list.append(num) - elif num == -2: - new_shape_list.append(0) - elif num == -4: - new_shape_list.append(-2) - elif num == -5: - new_shape_list.append(-3) - elif num == -6: - new_shape_list.append(-4) - else: - raise tvm.error.OpAttributeInvalid("Shape dimension %d is not supported" % num) - shape = tuple(new_shape_list) - if 
-        if reverse:
-            return _op.reverse_reshape(inputs[0], newshape=shape)
-        return _op.reshape(inputs[0], newshape=shape)
-    else:
-        old_shape = get_tuple_shape(_infer_type(inputs[0]).checked_type.shape)
-        new_shape = []
-        if reverse:
-            old_shape = old_shape[::-1]
-            shape_list = shape_list[::-1]
-        ptr = 0
-        unknown_axis = None
-        src_ptr = 0
-        while src_ptr < len(shape_list):
-            ele = shape_list[src_ptr]
-            src_ptr += 1
-            if ele > 0:
-                new_shape.append(ele)
-                ptr += 1
-            elif ele == -1:
-                new_shape.append(-1)
-                assert unknown_axis is None, "Can only have one unknown axis."
-                unknown_axis = len(new_shape)
-                ptr += 1
-            elif ele == -2:
-                new_shape.append(old_shape[ptr])
-                ptr += 1
-            elif ele == -3:
-                assert old_shape[ptr] == 1
-                ptr += 1
-            elif ele == -4:
-                new_shape += old_shape[ptr:]
-                break
-            elif ele == -5:
-                new_shape.append(old_shape[ptr] * old_shape[ptr + 1])
-                ptr += 2
-            elif ele == -6:
-                # Split axis
-                lhs = shape_list[src_ptr]
-                rhs = shape_list[src_ptr + 1]
-                src_ptr += 2
-                assert not (lhs == -1 and rhs == -1)
-                if lhs == -1:
-                    assert old_shape[ptr] % rhs == 0
-                    lhs = old_shape[ptr] // rhs
-                if rhs == -1:
-                    assert old_shape[ptr] % lhs == 0
-                    rhs = old_shape[ptr] // lhs
-                new_shape.append(lhs)
-                new_shape.append(rhs)
-                ptr += 1
-            else:
-                raise tvm.error.OpAttributeInvalid("Shape dimension %d is not supported" % ele)
-        if reverse:
-            new_shape = new_shape[::-1]
-        return _op.reshape(inputs[0], newshape=new_shape)
+    old_shape = get_const_tuple(_infer_type(inputs[0]).checked_type.shape)
+    new_shape = []
+    if reverse:
+        old_shape = old_shape[::-1]
+        shape_list = shape_list[::-1]
+    ptr = 0
+    unknown_axis = None
+    src_ptr = 0
+    while src_ptr < len(shape_list):
+        ele = shape_list[src_ptr]
+        src_ptr += 1
+        if ele > 0:
+            new_shape.append(ele)
+            ptr += 1
+        elif ele == -1:
+            new_shape.append(-1)
+            if unknown_axis is not None:
+                raise tvm.error.OpAttributeInvalid("Can only have one -1 in the input shape.")
+            unknown_axis = len(new_shape)
+            ptr += 1
+        elif ele == -2:
+            new_shape.append(old_shape[ptr])
+            ptr += 1
+        elif ele == -3:
+            if old_shape[ptr] != 1:
+                raise tvm.error.OpAttributeInvalid("Dimension of the original shape "
+                                                   "that corresponds to -3 must be 1. Received"
Received" + " {}".format(old_shape[ptr])) + ptr += 1 + elif ele == -4: + new_shape += old_shape[ptr:] + break + elif ele == -5: + new_shape.append(old_shape[ptr] * old_shape[ptr + 1]) + ptr += 2 + elif ele == -6: + # Split axis + lhs = shape_list[src_ptr] + rhs = shape_list[src_ptr + 1] + src_ptr += 2 + if lhs == -1 and rhs == -1: + raise tvm.error.OpAttributeInvalid("The lhs and rhs can not both be -1.") + if lhs == -1: + if old_shape[ptr] % rhs != 0: + raise tvm.error.OpAttributeInvalid("When splitting the axis, " + "the dimension of the split axis must " + "be divisible by the splitted values.") + lhs = old_shape[ptr] // rhs + if rhs == -1: + if old_shape[ptr] % lhs != 0: + raise tvm.error.OpAttributeInvalid("When splitting the axis, " + "the dimension of the split axis must " + "be divisible by the splitted values.") + rhs = old_shape[ptr] // lhs + new_shape.append(lhs) + new_shape.append(rhs) + ptr += 1 + else: + raise tvm.error.OpAttributeInvalid("Shape dimension %d is not supported" % ele) + if reverse: + new_shape = new_shape[::-1] + return _op.reshape(inputs[0], newshape=new_shape) def _mx_split_v2(inputs, attrs): diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 18983dce9152..a4f6721de6ca 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -2105,12 +2105,7 @@ def test_forward_npi_tanh(data_shape, dtype, target, ctx, kind): @pytest.mark.skipif(not hasattr(mx.np, "where"), reason="mx.np.where hasn't been publish yet") @pytest.mark.parametrize( "data_shape,cond_shape", - [ - [(2, 2, 2), (2, 2, 2)], - [(2, 7, 2), (7, 2)], - [(2, 2), (1, 2)], - [(1, 3), (3, 3)] - ] + [[(2, 2, 2), (2, 2, 2)], [(2, 7, 2), (7, 2)], [(2, 2), (1, 2)], [(1, 3), (3, 3)]], ) @pytest.mark.parametrize("data_dtype", ["float64", "float32", "int64", "int32", "bool"]) @pytest.mark.parametrize("cond_dtype", ["float64", "float32", "int64", "int32", "bool"]) @@ -2118,7 +2113,7 @@ def test_forward_npi_tanh(data_shape, dtype, target, ctx, kind): @tvm.testing.parametrize_targets @pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) def test_forward_npi_where_rscalar( - data_shape, cond_shape, data_dtype, cond_dtype, scalar, target, ctx, kind + data_shape, cond_shape, data_dtype, cond_dtype, scalar, target, ctx, kind ): if data_dtype == "bool": scalar = scalar == 0.0 From 92cc45412f01cb8d52bf27168f552fe2250bc617 Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Fri, 16 Oct 2020 15:18:42 -0700 Subject: [PATCH 03/18] Update mxnet.py --- python/tvm/relay/frontend/mxnet.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index 0d932d7eef9e..d832ef674b25 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -2365,9 +2365,11 @@ def _mx_npx_reshape(inputs, attrs): ptr += 1 elif ele == -3: if old_shape[ptr] != 1: - raise tvm.error.OpAttributeInvalid("Dimension of the original shape " - "that corresponds to -3 must be 1. Received" - " {}".format(old_shape[ptr])) + raise tvm.error.OpAttributeInvalid( + "Dimension of the original shape " + "that corresponds to -3 must be 1. 
Received" + " {}".format(old_shape[ptr]) + ) ptr += 1 elif ele == -4: new_shape += old_shape[ptr:] @@ -2384,15 +2386,19 @@ def _mx_npx_reshape(inputs, attrs): raise tvm.error.OpAttributeInvalid("The lhs and rhs can not both be -1.") if lhs == -1: if old_shape[ptr] % rhs != 0: - raise tvm.error.OpAttributeInvalid("When splitting the axis, " - "the dimension of the split axis must " - "be divisible by the splitted values.") + raise tvm.error.OpAttributeInvalid( + "When splitting the axis, " + "the dimension of the split axis must " + "be divisible by the splitted values." + ) lhs = old_shape[ptr] // rhs if rhs == -1: if old_shape[ptr] % lhs != 0: - raise tvm.error.OpAttributeInvalid("When splitting the axis, " - "the dimension of the split axis must " - "be divisible by the splitted values.") + raise tvm.error.OpAttributeInvalid( + "When splitting the axis, " + "the dimension of the split axis must " + "be divisible by the splitted values." + ) rhs = old_shape[ptr] // lhs new_shape.append(lhs) new_shape.append(rhs) From ecfa6c593307b92c0ed8a5d61595504fdeb84221 Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Fri, 16 Oct 2020 16:49:20 -0700 Subject: [PATCH 04/18] Update mxnet.py --- python/tvm/relay/frontend/mxnet.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index d832ef674b25..62c404887b34 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -2427,8 +2427,8 @@ def _mx_split_v2(inputs, attrs): def _mx_npi_where_rscalar(inputs, attrs): cond, dat = inputs scalar = attrs.get_float("scalar") - cond_shape = get_tuple_shape(_infer_type(cond).checked_type.shape) - dat_shape = get_tuple_shape(_infer_type(dat).checked_type.shape) + cond_shape = get_const_tuple(_infer_type(cond).checked_type.shape) + dat_shape = get_const_tuple(_infer_type(dat).checked_type.shape) dtype = _infer_type(dat).checked_type.dtype # Check for broadcasting out_shape = np.broadcast(np.empty(cond_shape), np.empty(dat_shape)).shape From e886f67a52aff56631e944b2adf12de256a0871c Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Fri, 16 Oct 2020 16:55:25 -0700 Subject: [PATCH 05/18] fix --- python/tvm/relay/frontend/mxnet.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index 62c404887b34..e4bb91bbc87d 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -627,7 +627,7 @@ def _mx_expand_dims(inputs, attrs): return _op.expand_dims(inputs[0], axis=axis) -def _mx_where(inputs, attrs): +def _mx_where(inputs, _): cond, lhs, rhs = inputs cond_shape = get_const_tuple(_infer_type(cond).checked_type.shape) lhs_shape = get_const_tuple(_infer_type(lhs).checked_type.shape) @@ -2339,7 +2339,6 @@ def _mx_npx_reshape(inputs, attrs): shape = attrs.get_int_tuple("newshape") reverse = attrs.get_bool("reverse", False) shape_list = list(shape) - new_shape_list = [] old_shape = get_const_tuple(_infer_type(inputs[0]).checked_type.shape) new_shape = [] if reverse: From b9aea389888300c0a86e76444190a75d28dd2e5f Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Fri, 16 Oct 2020 17:22:07 -0700 Subject: [PATCH 06/18] improve where test --- tests/python/frontend/mxnet/test_forward.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index a4f6721de6ca..07e0f37bd35b 100644 --- 
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -312,20 +312,24 @@ def test_forward_slice():
 
 
 @tvm.testing.uses_gpu
-def test_forward_where():
+@pytest.mark.parametrize(
+    "cond_shape,x_shape,y_shape",
+    [[(2, 2, 2), (2, 2, 2), (2, 2, 2)], [(2, 7, 2), (7, 2), (7, 1)]],
+)
+def test_forward_where(cond_shape, x_shape, y_shape):
     cond = mx.sym.var("cond")
     x = mx.sym.var("x")
     y = mx.sym.var("y")
     dshape = (2, 2)
     dtype = "float32"
     mx_sym = mx.sym.where(cond, x, y)
-    np_cond = np.array([[0, 1], [-1, 0]]).astype(dtype)
-    np_x = np.random.uniform(size=dshape).astype(dtype)
-    np_y = np.random.uniform(size=dshape).astype(dtype)
+    np_cond = np.random.randint(0, 2, cond_shape, dtype=dtype)
+    np_x = np.random.uniform(size=x_shape).astype(dtype)
+    np_y = np.random.uniform(size=y_shape).astype(dtype)
     mx_cond = mx.nd.array(np_cond)
     mx_x = mx.nd.array(np_x)
     mx_y = mx.nd.array(np_y)
-    shapes = {"cond": dshape, "x": dshape, "y": dshape}
+    shapes = {"cond": cond_shape, "x": x_shape, "y": y_shape}
     mod = mx.mod.Module(mx_sym, label_names=None, data_names=["cond", "x", "y"])
     mod.bind(data_shapes=shapes.items(), for_training=False)
     mod.init_params()

From 1f8ee96fcb3fed4127b938f7258a9bffda1160ce Mon Sep 17 00:00:00 2001
From: Xingjian Shi
Date: Fri, 16 Oct 2020 17:24:26 -0700
Subject: [PATCH 07/18] Update test_forward.py

---
 tests/python/frontend/mxnet/test_forward.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py
index 07e0f37bd35b..6161ced96fb7 100644
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -320,7 +320,6 @@ def test_forward_where(cond_shape, x_shape, y_shape):
     cond = mx.sym.var("cond")
     x = mx.sym.var("x")
     y = mx.sym.var("y")
-    dshape = (2, 2)
     dtype = "float32"
     mx_sym = mx.sym.where(cond, x, y)
     np_cond = np.random.randint(0, 2, cond_shape, dtype=dtype)

From d5b9f2e5e7fe45b2b768f9730033820aa23dffa2 Mon Sep 17 00:00:00 2001
From: Xingjian Shi
Date: Fri, 16 Oct 2020 17:27:21 -0700
Subject: [PATCH 08/18] Update test_forward.py

---
 tests/python/frontend/mxnet/test_forward.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py
index 6161ced96fb7..5edd587141c3 100644
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -322,7 +322,7 @@ def test_forward_where(cond_shape, x_shape, y_shape):
     y = mx.sym.var("y")
     dtype = "float32"
     mx_sym = mx.sym.where(cond, x, y)
-    np_cond = np.random.randint(0, 2, cond_shape, dtype=dtype)
+    np_cond = np.random.randint(0, 2, cond_shape).astype(dtype=dtype)
     np_x = np.random.uniform(size=x_shape).astype(dtype)
     np_y = np.random.uniform(size=y_shape).astype(dtype)
     mx_cond = mx.nd.array(np_cond)

From dee6c21428998403752a937a094d90a1715a81e0 Mon Sep 17 00:00:00 2001
From: Xingjian Shi
Date: Fri, 16 Oct 2020 17:29:58 -0700
Subject: [PATCH 09/18] Update test_forward.py

---
 tests/python/frontend/mxnet/test_forward.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py
index 5edd587141c3..8e7818bca4b6 100644
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -329,13 +329,9 @@ def test_forward_where(cond_shape, x_shape, y_shape):
     mx_x = mx.nd.array(np_x)
     mx_y = mx.nd.array(np_y)
{"cond": cond_shape, "x": x_shape, "y": y_shape} - mod = mx.mod.Module(mx_sym, label_names=None, data_names=["cond", "x", "y"]) - mod.bind(data_shapes=shapes.items(), for_training=False) - mod.init_params() - args, auxs = mod.get_params() mx_out = mx.nd.where(mx_cond, mx_x, mx_y).asnumpy() - mod, _ = relay.frontend.from_mxnet(mx_sym, shapes, args, auxs) + mod, _ = relay.frontend.from_mxnet(mx_sym, shapes, None, None) for target, ctx in tvm.testing.enabled_targets(): for kind in ["graph", "debug"]: intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) From 648d5b5377a94ae7db52a56dea5a865108bdb9f9 Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Fri, 16 Oct 2020 17:31:18 -0700 Subject: [PATCH 10/18] update --- python/tvm/relay/frontend/mxnet.py | 17 +---------------- tests/python/frontend/mxnet/test_forward.py | 21 +++++++++++---------- 2 files changed, 12 insertions(+), 26 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index e4bb91bbc87d..050103b58d83 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -627,21 +627,6 @@ def _mx_expand_dims(inputs, attrs): return _op.expand_dims(inputs[0], axis=axis) -def _mx_where(inputs, _): - cond, lhs, rhs = inputs - cond_shape = get_const_tuple(_infer_type(cond).checked_type.shape) - lhs_shape = get_const_tuple(_infer_type(lhs).checked_type.shape) - rhs_shape = get_const_tuple(_infer_type(rhs).checked_type.shape) - out_shape = np.broadcast(np.empty(cond_shape), np.empty(lhs_shape), np.empty(rhs_shape)).shape - if out_shape != cond_shape: - cond = _op.broadcast_to(cond, out_shape) - if out_shape != lhs_shape: - lhs = _op.broadcast_to(lhs, out_shape) - if out_shape != rhs_shape: - rhs = _op.broadcast_to(rhs, out_shape) - return _op.where(cond, lhs, rhs) - - def _mx_pad(inputs, attrs): pad_mode = attrs.get_str("mode", None) if pad_mode is None: @@ -2465,12 +2450,12 @@ def _mx_npi_where_rscalar(inputs, attrs): "sinh", "tan", "tanh", + "where" ] _convert_map = { "_copy": _rename(_op.copy), "relu": _rename(_op.nn.relu), - "where": _mx_where, "broadcast_add": _rename(_op.add), "broadcast_plus": _rename(_op.add), "broadcast_sub": _rename(_op.subtract), diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 8e7818bca4b6..a4f6721de6ca 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -312,26 +312,27 @@ def test_forward_slice(): @tvm.testing.uses_gpu -@pytest.mark.parametrize( - "cond_shape,x_shape,y_shape", - [[(2, 2, 2), (2, 2, 2), (2, 2, 2)], [(2, 7, 2), (7, 2), (7, 1)]], -) -def test_forward_where(cond_shape, x_shape, y_shape): +def test_forward_where(): cond = mx.sym.var("cond") x = mx.sym.var("x") y = mx.sym.var("y") + dshape = (2, 2) dtype = "float32" mx_sym = mx.sym.where(cond, x, y) - np_cond = np.random.randint(0, 2, cond_shape).astype(dtype=dtype) - np_x = np.random.uniform(size=x_shape).astype(dtype) - np_y = np.random.uniform(size=y_shape).astype(dtype) + np_cond = np.array([[0, 1], [-1, 0]]).astype(dtype) + np_x = np.random.uniform(size=dshape).astype(dtype) + np_y = np.random.uniform(size=dshape).astype(dtype) mx_cond = mx.nd.array(np_cond) mx_x = mx.nd.array(np_x) mx_y = mx.nd.array(np_y) - shapes = {"cond": cond_shape, "x": x_shape, "y": y_shape} + shapes = {"cond": dshape, "x": dshape, "y": dshape} + mod = mx.mod.Module(mx_sym, label_names=None, data_names=["cond", "x", "y"]) + mod.bind(data_shapes=shapes.items(), 
+    mod.init_params()
+    args, auxs = mod.get_params()
     mx_out = mx.nd.where(mx_cond, mx_x, mx_y).asnumpy()
 
-    mod, _ = relay.frontend.from_mxnet(mx_sym, shapes, None, None)
+    mod, _ = relay.frontend.from_mxnet(mx_sym, shapes, args, auxs)
     for target, ctx in tvm.testing.enabled_targets():
         for kind in ["graph", "debug"]:
             intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)

From 0c8ec3c859f09992d19f86b96ee6df587e5bfc27 Mon Sep 17 00:00:00 2001
From: Xingjian Shi
Date: Fri, 16 Oct 2020 17:51:15 -0700
Subject: [PATCH 11/18] Update mxnet.py

---
 python/tvm/relay/frontend/mxnet.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py
index 050103b58d83..c85135beafa5 100644
--- a/python/tvm/relay/frontend/mxnet.py
+++ b/python/tvm/relay/frontend/mxnet.py
@@ -2450,7 +2450,7 @@ def _mx_npi_where_rscalar(inputs, attrs):
     "sinh",
     "tan",
     "tanh",
-    "where"
+    "where",
 ]
 
 _convert_map = {

From d0bf6c22076c12b2668b5deac99c41a888c2d5ab Mon Sep 17 00:00:00 2001
From: Xingjian Shi
Date: Fri, 16 Oct 2020 18:23:42 -0700
Subject: [PATCH 12/18] Update mxnet.py

---
 python/tvm/relay/frontend/mxnet.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py
index c85135beafa5..114582d76b61 100644
--- a/python/tvm/relay/frontend/mxnet.py
+++ b/python/tvm/relay/frontend/mxnet.py
@@ -2302,7 +2302,7 @@ def _mx_npi_pad(inputs, attrs):
         raise tvm.error.OpAttributeRequired('Attribute "pad_width" not found in operator pad.')
     if None in pad_width:
         raise tvm.error.OpAttributeInvalid(
-            'Value None in attribute "pad_width" of operator Slice is not valid.'
+            'Value None in attribute "pad_width" of operator Pad is not valid.'
         )
     constant_values = attrs.get_float("constant_values", 0.0)
     padding = tuple(tuple((b, a)) for b, a in zip(pad_width[::2], pad_width[1::2]))

From 7cc95c63773083a23f1848e590f8b6e404f2646c Mon Sep 17 00:00:00 2001
From: Xingjian Shi
Date: Fri, 16 Oct 2020 18:26:45 -0700
Subject: [PATCH 13/18] Parse pad_width in _mx_npi_pad as a tuple of tuples

Squashed development history. Parse the raw "pad_width" attribute string
with json instead of get_int_tuple, and check the pad tests against
numpy's np.pad as the reference.
---
 python/tvm/relay/frontend/mxnet.py          | 18 +++++++++---------
 tests/python/frontend/mxnet/test_forward.py | 13 ++++++++-----
 2 files changed, 17 insertions(+), 14 deletions(-)

diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py
index 114582d76b61..2f66eec63ec5 100644
--- a/python/tvm/relay/frontend/mxnet.py
+++ b/python/tvm/relay/frontend/mxnet.py
@@ -2297,18 +2297,18 @@ def _mx_npi_pad(inputs, attrs):
         raise tvm.error.OpAttributeRequired('Attribute "mode" not found in operator pad.')
     if pad_mode not in ["constant", "edge", "reflect"]:
         raise tvm.error.OpAttributeInvalid("Value " + mode + ' in attribute "mode" is not valid')
-    pad_width = attrs.get_int_tuple("pad_width", None)
-    if pad_width is None:
+    # Special handling of pad_width
+    if 'pad_width' not in attrs.attrs:
         raise tvm.error.OpAttributeRequired('Attribute "pad_width" not found in operator pad.')
-    if None in pad_width:
-        raise tvm.error.OpAttributeInvalid(
-            'Value None in attribute "pad_width" of operator Pad is not valid.'
-        )
+    else:
+        # Begin to parse tuple of tuple
+        pad_width = attrs.attrs['pad_width']
+        pad_width = pad_width.replace('(', '[')
+        pad_width = pad_width.replace(')', ']')
+        pad_width = json.loads(pad_width)
     constant_values = attrs.get_float("constant_values", 0.0)
-    padding = tuple(tuple((b, a)) for b, a in zip(pad_width[::2], pad_width[1::2]))
-
     return _op.nn.pad(
-        data=inputs[0], pad_width=padding, pad_value=constant_values, pad_mode=pad_mode
+        data=inputs[0], pad_width=pad_width, pad_value=constant_values, pad_mode=pad_mode
     )
 
 
diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py
index a4f6721de6ca..936c64c5d300 100644
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -1914,7 +1914,10 @@ def verify(data_shape, axis, use_length, length):
 @pytest.mark.skipif(not hasattr(mx.sym.np, "pad"), reason="mx.sym.np.pad hasn't been publish yet")
 @pytest.mark.parametrize(
     "data_shape, pad_width",
-    [((1, 1, 3, 5), (0, 0, 0, 0, 1, 2, 3, 4)), ((1, 1, 3, 5, 7), (0, 0, 0, 0, 1, 2, 3, 4, 5, 6))],
+    [
+        ((1, 1, 3, 5), ((0, 0), (0, 0), (1, 2), (3, 4))),
+        ((1, 1, 3, 5, 7), ((0, 0), (0, 0), (1, 2), (3, 4), (5, 6)))
+    ],
 )
 @pytest.mark.parametrize("mode", ["constant", "edge", "reflect"])
 @pytest.mark.parametrize("dtype", ["float64", "float32", "int64", "int32"])
@@ -1925,19 +1928,19 @@ def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value, tar
     data_np = np.random.uniform(size=data_shape).astype(dtype)
     data = mx.sym.var("data")
     if mode == "constant":
-        ref_res = mx.ndarray.pad(
-            mx.nd.array(data_np), mode=mode, pad_width=pad_width, constant_value=constant_value
-        )
+        ref_res = np.pad(
+            data_np, mode=mode, pad_width=pad_width, constant_values=constant_value
+        )
         mx_sym = mx.sym.np.pad(
             data.as_np_ndarray(), mode=mode, pad_width=pad_width, constant_values=constant_value
         )
     else:
-        ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode, pad_width=pad_width)
+        ref_res = np.pad(data_np, mode=mode, pad_width=pad_width)
         mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width)
     mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype)
     intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
     op_res = intrp.evaluate()(data_np)
-    tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+    tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5)

From 18dbe35071b888a2208acdd39d369162781019ea Mon Sep 17 00:00:00 2001
From: Xingjian Shi
Date: Sat, 17 Oct 2020 17:38:14 -0700
Subject: [PATCH 14/18] update

---
 python/tvm/relay/frontend/mxnet.py          | 8 ++++----
 tests/python/frontend/mxnet/test_forward.py | 6 ++----
 2 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py
index 2f66eec63ec5..2305a4ad747f 100644
--- a/python/tvm/relay/frontend/mxnet.py
+++ b/python/tvm/relay/frontend/mxnet.py
@@ -2298,13 +2298,13 @@ def _mx_npi_pad(inputs, attrs):
     if pad_mode not in ["constant", "edge", "reflect"]:
         raise tvm.error.OpAttributeInvalid("Value " + mode + ' in attribute "mode" is not valid')
     # Special handling of pad_width
-    if 'pad_width' not in attrs.attrs:
+    if "pad_width" not in attrs.attrs:
         raise tvm.error.OpAttributeRequired('Attribute "pad_width" not found in operator pad.')
     else:
         # Begin to parse tuple of tuple
-        pad_width = attrs.attrs['pad_width']
-        pad_width = pad_width.replace('(', '[')
-        pad_width = pad_width.replace(')', ']')
pad_width = attrs.attrs["pad_width"] + pad_width = pad_width.replace("(", "[") + pad_width = pad_width.replace(")", "]") pad_width = json.loads(pad_width) constant_values = attrs.get_float("constant_values", 0.0) return _op.nn.pad( diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 936c64c5d300..ac38d5079aef 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -1916,7 +1916,7 @@ def verify(data_shape, axis, use_length, length): "data_shape, pad_width", [ ((1, 1, 3, 5), ((0, 0), (0, 0), (1, 2), (3, 4))), - ((1, 1, 3, 5, 7), ((0, 0), (0, 0), (1, 2), (3, 4), (5, 6))) + ((1, 1, 3, 5, 7), ((0, 0), (0, 0), (1, 2), (3, 4), (5, 6))), ], ) @pytest.mark.parametrize("mode", ["constant", "edge", "reflect"]) @@ -1928,9 +1928,7 @@ def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value, tar data_np = np.random.uniform(size=data_shape).astype(dtype) data = mx.sym.var("data") if mode == "constant": - ref_res = np.pad( - data_np, mode=mode, pad_width=pad_width, constant_values=constant_value - ) + ref_res = np.pad(data_np, mode=mode, pad_width=pad_width, constant_values=constant_value) mx_sym = mx.sym.np.pad( data.as_np_ndarray(), mode=mode, pad_width=pad_width, constant_values=constant_value ) From 44d3efb2fefc8027189348aa119405df617ca495 Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Sat, 17 Oct 2020 17:49:59 -0700 Subject: [PATCH 15/18] fix lint --- python/tvm/relay/frontend/mxnet.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index 2305a4ad747f..c1fdf6092e9d 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -2297,15 +2297,13 @@ def _mx_npi_pad(inputs, attrs): raise tvm.error.OpAttributeRequired('Attribute "mode" not found in operator pad.') if pad_mode not in ["constant", "edge", "reflect"]: raise tvm.error.OpAttributeInvalid("Value " + mode + ' in attribute "mode" is not valid') - # Special handling of pad_width if "pad_width" not in attrs.attrs: raise tvm.error.OpAttributeRequired('Attribute "pad_width" not found in operator pad.') - else: - # Begin to parse tuple of tuple - pad_width = attrs.attrs["pad_width"] - pad_width = pad_width.replace("(", "[") - pad_width = pad_width.replace(")", "]") - pad_width = json.loads(pad_width) + # Begin to parse tuple of tuple, we cannot use get_int_tuple here because it's a tuple of tuple. 
+ pad_width = attrs.attrs["pad_width"] + pad_width = pad_width.replace("(", "[") + pad_width = pad_width.replace(")", "]") + pad_width = json.loads(pad_width) constant_values = attrs.get_float("constant_values", 0.0) return _op.nn.pad( data=inputs[0], pad_width=pad_width, pad_value=constant_values, pad_mode=pad_mode From 5f1d11ab0e776bdf665827b51eefb5bf844265bc Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Sat, 17 Oct 2020 18:32:20 -0700 Subject: [PATCH 16/18] Update mxnet.py --- python/tvm/relay/frontend/mxnet.py | 1 + 1 file changed, 1 insertion(+) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index c1fdf6092e9d..4ffaa643593f 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -2677,6 +2677,7 @@ def _mx_npi_where_rscalar(inputs, attrs): "_npi_add_scalar": _binop_scalar(_op.add), "_npi_where_rscalar": _mx_npi_where_rscalar, "_npi_less": _rename(_op.less), + "_npi_less_equal": _mx_compare(_op.less_equal, _rename), "_npi_tanh": _rename(_op.tanh), "_npi_true_divide_scalar": _binop_scalar(_op.divide), } From 83c6e082c66b8d0d718e63238b15f3616afe3c68 Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Sat, 17 Oct 2020 20:38:55 -0700 Subject: [PATCH 17/18] Update test_op_level1.py --- tests/python/relay/test_op_level1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/python/relay/test_op_level1.py b/tests/python/relay/test_op_level1.py index 8c724daaa9d0..c222e6f5ecde 100644 --- a/tests/python/relay/test_op_level1.py +++ b/tests/python/relay/test_op_level1.py @@ -134,7 +134,7 @@ def check_binary_op(opfunc, ref, dtype): continue intrp = relay.create_executor("graph", ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data, y_data) - np.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=0.01) + np.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=0.01, atol=1E-3) for opfunc, ref in [ (relay.add, np.add), From 2a501c41570ede6864aeae61322f13d314168955 Mon Sep 17 00:00:00 2001 From: Xingjian Shi Date: Sat, 17 Oct 2020 20:46:20 -0700 Subject: [PATCH 18/18] fix lint --- tests/python/relay/test_op_level1.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/python/relay/test_op_level1.py b/tests/python/relay/test_op_level1.py index c222e6f5ecde..37a59c30f410 100644 --- a/tests/python/relay/test_op_level1.py +++ b/tests/python/relay/test_op_level1.py @@ -134,7 +134,7 @@ def check_binary_op(opfunc, ref, dtype): continue intrp = relay.create_executor("graph", ctx=ctx, target=target) op_res = intrp.evaluate(func)(x_data, y_data) - np.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=0.01, atol=1E-3) + np.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=0.01, atol=1e-3) for opfunc, ref in [ (relay.add, np.add),