From b04dad406bf4264175420edbd20f140c21cb9039 Mon Sep 17 00:00:00 2001
From: Ubuntu
Date: Tue, 14 Jul 2020 13:25:49 +0000
Subject: [PATCH 01/16] [RELAY][MXNET][FRONTEND] add support for numpy ops
 from MXNet

---
 python/tvm/relay/frontend/mxnet.py          | 103 ++++++++
 tests/python/frontend/mxnet/test_forward.py | 247 +++++++++++++++++++-
 2 files changed, 349 insertions(+), 1 deletion(-)

diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py
index 321b145cfe61..3f0b70b322eb 100644
--- a/python/tvm/relay/frontend/mxnet.py
+++ b/python/tvm/relay/frontend/mxnet.py
@@ -2020,6 +2020,92 @@ def impl(inputs, input_types):
     return impl
 
 
+def _mx_npi_transpose(inputs, attrs):
+    axes = attrs.get_int_tuple("axes", None)
+    # translate default case
+    axes = None if len(axes) == 0 or axes[0] == None else axes
+    return _op.transpose(inputs[0], axes=axes)
+
+
+def _mx_npi_pad(inputs, attrs):
+    pad_mode = attrs.get_str('mode', None)
+    if pad_mode is None:
+        raise tvm.error.OpAttributeRequired(
+            'Attribute "mode" not found in operator pad.')
+    if pad_mode not in ['constant', 'edge', 'reflect']:
+        raise tvm.error.OpAttributeInvalid(
+            'Value ' + pad_mode + ' in attribute "mode" is not valid')
+    pad_width = attrs.get_int_tuple('pad_width', None)
+    if pad_width is None:
+        raise tvm.error.OpAttributeRequired(
+            'Attribute "pad_width" not found in operator pad.')
+    if None in pad_width:
+        raise tvm.error.OpAttributeInvalid(
+            'Value None in attribute "pad_width" of operator pad is not valid.')
+    constant_values = attrs.get_float('constant_values', 0.0)
+    padding = tuple(tuple((b, a)) for b, a in zip(pad_width[::2], pad_width[1::2]))
+
+    return _op.nn.pad(data=inputs[0],
+                      pad_width=padding,
+                      pad_value=constant_values,
+                      pad_mode=pad_mode)
+
+
+def _mx_npi_concatenate(inputs, attrs):
+    axis = attrs.get_str("axis", "0")
+    if axis == "None":
+        return _op.reshape(_op.concatenate(tuple(inputs), axis=0), (-1,))
+    else:
+        return _op.concatenate(tuple(inputs), axis=int(axis))
+
+
+def _mx_npx_reshape(inputs, attrs):
+    shape = attrs.get_int_tuple("newshape")
+    reverse = attrs.get_bool("reverse", False)
+    shape_list = list(shape)
+    new_shape_list = []
+    for i in range(len(shape_list)):
+        if shape_list[i] > 0 or shape_list[i] == -1:
+            new_shape_list.append(shape_list[i])
+        elif shape_list[i] == -2:
+            new_shape_list.append(0)
+        elif shape_list[i] == -4:
+            new_shape_list.append(-2)
+        elif shape_list[i] == -5:
+            new_shape_list.append(-3)
+        elif shape_list[i] == -6:
+            new_shape_list.append(-4)
+        else:
+            raise tvm.error.OpAttributeInvalid('Shape dimension %d is not supported' % shape_list[i])
+    shape = tuple(new_shape_list)
+    if reverse:
+        return _op.reverse_reshape(inputs[0], newshape=shape)
+    return _op.reshape(inputs[0], newshape=shape)
+
+
+def _mx_split_v2(inputs, attrs):
+    axis = attrs.get_int("axis")
+    indices = list(attrs.get_int_tuple("indices", []))
+    # remove the leading '0' before passing the split points to relay
+    if len(indices) != 0 and indices[0] == 0:
+        indices.remove(0)
+    sections = attrs.get_int("sections", 0)
+    indices_or_sections = list(indices) if len(indices) != 0 else sections
+    res = _op.split(inputs[0], indices_or_sections=indices_or_sections, axis=axis)
+    if attrs.get_bool("squeeze_axis", False):
+        res = tuple([_op.squeeze(x, axis=[axis]) for x in res])
+    return res
+
+
+def _mx_npi_where_rscalar(inputs, attrs):
+    scalar = attrs.get_float("scalar")
+    dtype = _infer_type(inputs[1]).checked_type.dtype
+    scalar = _expr.const(scalar, dtype=dtype)
+    ones = _op.ones_like(inputs[1])
+    scalar = _op.multiply(ones, scalar)
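+    # Broadcast the scalar to a full tensor first: ones_like(inputs[1]) *
+    # scalar matches the shape and dtype of the data operand, so the
+    # where() below selects between two like-shaped tensors.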
return _op.where(inputs[0], inputs[1], scalar) + + # Note: due to attribute conversion constraint # ops in the identity set must be attribute free _identity_list = [ @@ -2173,6 +2259,7 @@ def impl(inputs, input_types): "slice_axis" : _mx_slice_axis, "SliceChannel" : _mx_split, "split" : _mx_split, + "_split_v2" : _mx_split_v2, "SwapAxis" : _mx_swap_axis, "expand_dims" : _mx_expand_dims, "Concat" : _mx_concat, @@ -2250,6 +2337,22 @@ def impl(inputs, input_types): "_contrib_quantized_pooling": _qnn_pooling, "_contrib_quantized_batch_norm" : _qnn_batch_norm, "_sg_mkldnn_fully_connected": _qnn_fully_connected, + # numpy + "_npi_transpose" : _mx_npi_transpose, + "_npi_pad" : _mx_npi_pad, + "_npi_concatenate" : _mx_npi_concatenate, + "_npx_reshape" : _mx_npx_reshape, + "_np_copy" : _rename(_op.copy), + "_npi_power" : _rename(_op.power), + "_npi_power_scalar" : _binop_scalar(_op.power), + "_npi_multiply" : _rename(_op.multiply), + "_npi_multiply_scalar" : _binop_scalar(_op.multiply), + "_npi_add" : _rename(_op.add), + "_npi_add_scalar" : _binop_scalar(_op.add), + "_npi_where_rscalar" : _mx_npi_where_rscalar, + "_npi_less" : _rename(_op.less), + "_npi_tanh" : _rename(_op.tanh), + "_npi_true_divide_scalar" : _binop_scalar(_op.divide), } # set identity list diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index ae5ed455bc7f..ba0dbe6706d8 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -27,7 +27,7 @@ from mxnet import gluon from mxnet.gluon.model_zoo import vision import model_zoo - +import random def verify_mxnet_frontend_impl(mx_symbol, data_shape=(1, 3, 224, 224), @@ -1306,6 +1306,240 @@ def verify(batch, seq_length, num_heads, head_dim): verify(3, 10, 6, 8) +def test_forward_npi_pad(): + def verify(data_shape, out_shape, mode, pad_width, constant_value=0.0): + data_np = np.random.uniform(size=data_shape).astype("float32") + data = mx.sym.var('data') + if mode == 'constant': + ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width, constant_value=constant_value) + mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width, constant_values=constant_value) + else: + ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width) + mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) + for target, ctx in ctx_list(): + for kind in ["debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + verify(data_shape=(1,1,3,5), out_shape=(1,1,6,12), mode="constant", + pad_width=(0,0,0,0,1,2,3,4)) + verify(data_shape=(1,1,3,5), out_shape=(1,1,6,12), mode="constant", + pad_width=(0,0,0,0,1,2,3,4), constant_value=3.0) + verify(data_shape=(1,1,3,5), out_shape=(1,1,6,12), mode="edge", + pad_width=(0,0,0,0,1,2,3,4)) + verify(data_shape=(1,1,3,5), out_shape=(1,1,6,12), mode="reflect", + pad_width=(0,0,0,0,1,2,3,4)) + verify(data_shape=(1,1,3,5,7), out_shape=(1,1,6,12,18), mode="constant", + pad_width=(0,0,0,0,1,2,3,4,5,6)) + verify(data_shape=(1,1,3,5,7), out_shape=(1,1,6,12,18), mode="constant", + pad_width=(0,0,0,0,1,2,3,4,5,6), constant_value=3.0) + verify(data_shape=(1,1,3,5,7), out_shape=(1,1,6,12,18), mode="edge", + pad_width=(0,0,0,0,1,2,3,4,5,6)) + verify(data_shape=(1,1,3,5,7), out_shape=(1,1,6,12,18), 
mode="reflect", + pad_width=(0,0,0,0,1,2,3,4,5,6)) + + +def test_forward_npi_transpose(): + def verify(data_shape, axes=None): + data_np = np.random.uniform(size=data_shape).astype("float32") + data = mx.sym.var('data') + ref_res = mx.np.transpose(mx.nd.array(data_np), axes=axes) + mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + verify(data_shape=(2,2,2), axes=(1,0,2)) + verify(data_shape=(2,7,2), axes=None) + + +def test_forward_npi_concatenate(): + def verify(data_shape1, data_shape2, axis=None): + data_np1 = np.random.uniform(size=data_shape1).astype("float32") + data_np2 = np.random.uniform(size=data_shape2).astype("float32") + data1 = mx.sym.var('data1') + data2 = mx.sym.var('data2') + ref_res = mx.np.concatenate([mx.np.array(data_np1), mx.np.array(data_np2)], axis=axis) + mx_sym = mx.sym.np.concatenate([data1.as_np_ndarray(), data2.as_np_ndarray()], axis=axis) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data1": data_shape1, "data2": data_shape2}) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1, data_np2) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + verify(data_shape1=(2,2),data_shape2=(2,2),axis=1) + verify(data_shape1=(2,4),data_shape2=(2,3),axis=1) + verify(data_shape1=(1,3,2),data_shape2=(1,3,5),axis=2) + verify(data_shape1=(1,3,3),data_shape2=(1,3,3),axis=1) + verify(data_shape1=(1,3),data_shape2=(1,3),axis=0) + verify(data_shape1=(1,3,4),data_shape2=(1,3,4)) + verify(data_shape1=(1,3,4),data_shape2=(1,3,4)) + + +def test_forward_np_copy(): + def verify(data_shape, out_shape=None): + data_np = np.random.uniform(size=data_shape).astype("float32") + data = mx.sym.var('data') + ref_res = mx.np.copy(mx.np.array(data_np)) + mx_sym = mx.sym.np.copy(data.as_np_ndarray()) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + verify(data_shape=(2,2,2)) + verify(data_shape=(2,2,2,1,2,3,1)) + verify(data_shape=(1,8)) + + +def test_forward_npx_reshape(): + def verify(data_shape, out_shape, reverse=False): + data_np = np.random.uniform(size=data_shape).astype("float32") + data = mx.sym.var('data') + ref_res = mx.npx.reshape(mx.np.array(data_np), newshape=out_shape, reverse=reverse) + mx_sym = mx.sym.npx.reshape(data.as_np_ndarray(), newshape=out_shape, reverse=reverse) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + verify(data_shape=(2, 3, 8), out_shape=(-2, -2, 2, -1)) + verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-6, 2, -1, -4)) + verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-5, -4)) + verify(data_shape=(8, 3, 3, 3, 3, 
8), out_shape=(-4, -5), reverse=True) + verify(data_shape=(8, 3, 2, 4, 8), out_shape=(-4, -1, 2, -6), reverse=True) + + +def test_forward_npi_binary(): + def verify(data_shape): + ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.less] + mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.less] + for i in range(len(ref_ops)): + ref_op = ref_ops[i] + mx_op = mx_ops[i] + data_np1 = np.random.uniform(size=data_shape).astype("float32") + data_np2 = np.random.uniform(size=data_shape).astype("float32") + data1 = mx.sym.var('lhs') + data2 = mx.sym.var('rhs') + ref_res = ref_op(mx.nd.array(data_np1), mx.nd.array(data_np2)) + mx_sym = mx_op(data1.as_np_ndarray(), data2.as_np_ndarray()) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape, "rhs": data_shape}) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1, data_np2) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + verify(data_shape=(2,2)) + verify(data_shape=(2,4)) + verify(data_shape=(1,3,2)) + verify(data_shape=(1,3,3)) + verify(data_shape=(1,3)) + verify(data_shape=(1,3,4)) + verify(data_shape=(1,3,4)) + + +def test_forward_npi_binary_scalar(): + def verify(data_shape, scalar): + ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.true_divide] + mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.true_divide] + for i in range(len(ref_ops)): + ref_op = ref_ops[i] + mx_op = mx_ops[i] + data_np1 = np.random.uniform(size=data_shape).astype("float32") + data1 = mx.sym.var('lhs') + ref_res = ref_op(mx.nd.array(data_np1), scalar) + mx_sym = mx_op(data1.as_np_ndarray(), scalar) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape}, dtype="float32") + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + verify(data_shape=(2,2), scalar=1.0) + verify(data_shape=(2,4), scalar=2.0) + verify(data_shape=(1,3,2), scalar=3.0) + verify(data_shape=(1,3,3), scalar=4.0) + + +def test_forward_npi_tanh(): + def verify(data_shape): + data_np1 = np.random.uniform(size=data_shape).astype("float32") + data1 = mx.sym.var('data') + ref_res = mx.np.tanh(mx.nd.array(data_np1)) + mx_sym = mx.sym.np.tanh(data1.as_np_ndarray()) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data": data_shape}, dtype="float32") + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + verify(data_shape=(2,2)) + verify(data_shape=(2,4)) + verify(data_shape=(1,3,2)) + verify(data_shape=(1,3,3)) + + +def test_forward_npi_where_rscalar(): + def verify(data_shape, scalar): + cond_np = np.random.uniform(size=data_shape).astype("bool") + data_np = np.random.uniform(size=data_shape).astype("float32") + cond = mx.sym.var('condition') + data = mx.sym.var('x') + ref_res = mx.np.where(mx.nd.array(cond_np), mx.nd.array(data_np), scalar) + mx_sym = mx.sym.np.where(cond.as_np_ndarray(), data.as_np_ndarray(), scalar) + mod, _ = relay.frontend.from_mxnet( + mx_sym, shape={"condition": data_shape, "x": data_shape}, + dtype={"condition": "bool", "x": 
"float32"}) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(cond_np, data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + verify(data_shape=(2,2), scalar=1.0) + verify(data_shape=(2,4), scalar=2.0) + verify(data_shape=(1,3,2), scalar=3.0) + verify(data_shape=(1,3,3), scalar=4.0) + + +def test_forward_split_v2(): + def verify(data_shape, axis=0, indices_or_sections=1, squeeze_axis=False): + data_np = np.random.uniform(size=data_shape).astype("float32") + data = mx.sym.var('data') + ref_res = mx.ndarray.split_v2(mx.nd.array(data_np), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis) + mx_sym = mx.sym.split_v2(data.as_nd_ndarray(), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + op_res_ = [] + for arr in op_res: + op_res_.append(arr.asnumpy().tolist()) + ref_res_ = [] + for arr in ref_res: + ref_res_.append(arr.asnumpy().tolist()) + tvm.testing.assert_allclose(op_res_, ref_res_, rtol=1e-5) + + verify((3, 2, 1), axis=1, indices_or_sections=2) + verify((3, 2, 1), axis=0, indices_or_sections=3) + verify((3, 2, 1), axis=0, indices_or_sections=3, squeeze_axis=True) + verify((3, 2, 1), axis=0, indices_or_sections=(1, 2)) + + if __name__ == '__main__': test_forward_mlp() test_forward_vgg() @@ -1379,3 +1613,14 @@ def verify(batch, seq_length, num_heads, head_dim): test_forward_arange_like() test_forward_interleaved_matmul_selfatt_qk() test_forward_interleaved_matmul_selfatt_valatt() + test_forward_npi_pad() + test_forward_npi_transpose() + test_forward_npi_concatenate() + test_forward_np_copy() + test_forward_npx_reshape() + test_forward_npi_binary() + test_forward_npi_binary_scalar() + test_forward_npi_tanh() + test_forward_npi_where_rscalar() + test_forward_split_v2() + \ No newline at end of file From 851f47a2d53979f528833995483e283e25b58430 Mon Sep 17 00:00:00 2001 From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com> Date: Tue, 14 Jul 2020 21:44:35 +0800 Subject: [PATCH 02/16] Update test_forward.py From fb1d50bf971403be14213da97ecbd806b956da48 Mon Sep 17 00:00:00 2001 From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com> Date: Tue, 14 Jul 2020 21:54:58 +0800 Subject: [PATCH 03/16] Update mxnet.py --- python/tvm/relay/frontend/mxnet.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index 0b0e546c83ce..98a410fb2b40 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -2074,7 +2074,7 @@ def impl(inputs, input_types): def _mx_npi_transpose(inputs, attrs): axes = attrs.get_int_tuple("axes", None) # translate default case - axes = None if len(axes) == 0 or axes[0] == None else axes + axes = None if len(axes) == 0 or axes[0] is None else axes return _op.transpose(inputs[0], axes=axes) @@ -2115,19 +2115,20 @@ def _mx_npx_reshape(inputs, attrs): reverse = attrs.get_bool("reverse", False) shape_list = list(shape) new_shape_list = [] - for i in range(len(shape_list)): - if shape_list[i] > 0 or shape_list[i] == -1: - new_shape_list.append(shape_list[i]) - elif shape_list[i] == -2: + for num in shape_list: + if num 
> 0 or num == -1: + new_shape_list.append(num) + elif num == -2: new_shape_list.append(0) - elif shape_list[i] == -4: + elif num == -4: new_shape_list.append(-2) - elif shape_list[i] == -5: + elif num == -5: new_shape_list.append(-3) - elif shape_list[i] == -6: + elif num == -6: new_shape_list.append(-4) else: - raise tvm.error.OpAttributeInvalid('Shape dimension %d is not supported' % shape_list[i]) + raise tvm.error.OpAttributeInvalid( + 'Shape dimension %d is not supported' % num) shape = tuple(new_shape_list) if reverse: return _op.reverse_reshape(inputs[0], newshape=shape) From 5188efa21b06679ec3eaa256d0493f92cf2837f5 Mon Sep 17 00:00:00 2001 From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com> Date: Tue, 14 Jul 2020 21:59:04 +0800 Subject: [PATCH 04/16] Update mxnet.py --- python/tvm/relay/frontend/mxnet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index 98a410fb2b40..d74a826d433a 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -2128,7 +2128,7 @@ def _mx_npx_reshape(inputs, attrs): new_shape_list.append(-4) else: raise tvm.error.OpAttributeInvalid( - 'Shape dimension %d is not supported' % num) + 'Shape dimension %d is not supported' % num) shape = tuple(new_shape_list) if reverse: return _op.reverse_reshape(inputs[0], newshape=shape) From 781dd73df308010f7c1f7ba92bfad1a7e2fb9bcf Mon Sep 17 00:00:00 2001 From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com> Date: Wed, 15 Jul 2020 16:03:06 +0800 Subject: [PATCH 05/16] Update test_forward.py --- tests/python/frontend/mxnet/test_forward.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 11c8ab91ade4..8ead794d9836 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -1374,6 +1374,9 @@ def verify(data_shape, anchor_shape, stds=[1, 1, 1, 1], clip=-1, in_format="corn def test_forward_npi_pad(): + if not hasattr(mx.sym.np, 'pad'): + pytest.skip("mx.sym.np.pad hasn't been publish yet") + def verify(data_shape, out_shape, mode, pad_width, constant_value=0.0): data_np = np.random.uniform(size=data_shape).astype("float32") data = mx.sym.var('data') @@ -1412,7 +1415,7 @@ def test_forward_npi_transpose(): def verify(data_shape, axes=None): data_np = np.random.uniform(size=data_shape).astype("float32") data = mx.sym.var('data') - ref_res = mx.np.transpose(mx.nd.array(data_np), axes=axes) + ref_res = mx.np.transpose(mx.np.array(data_np), axes=axes) mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes) mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) for target, ctx in ctx_list(): @@ -1498,7 +1501,7 @@ def verify(data_shape): data_np2 = np.random.uniform(size=data_shape).astype("float32") data1 = mx.sym.var('lhs') data2 = mx.sym.var('rhs') - ref_res = ref_op(mx.nd.array(data_np1), mx.nd.array(data_np2)) + ref_res = ref_op(mx.np.array(data_np1), mx.np.array(data_np2)) mx_sym = mx_op(data1.as_np_ndarray(), data2.as_np_ndarray()) mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape, "rhs": data_shape}) for target, ctx in ctx_list(): @@ -1525,7 +1528,7 @@ def verify(data_shape, scalar): mx_op = mx_ops[i] data_np1 = np.random.uniform(size=data_shape).astype("float32") data1 = mx.sym.var('lhs') - ref_res = ref_op(mx.nd.array(data_np1), scalar) + ref_res = 
ref_op(mx.np.array(data_np1), scalar) mx_sym = mx_op(data1.as_np_ndarray(), scalar) mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape}, dtype="float32") for target, ctx in ctx_list(): @@ -1544,7 +1547,7 @@ def test_forward_npi_tanh(): def verify(data_shape): data_np1 = np.random.uniform(size=data_shape).astype("float32") data1 = mx.sym.var('data') - ref_res = mx.np.tanh(mx.nd.array(data_np1)) + ref_res = mx.np.tanh(mx.np.array(data_np1)) mx_sym = mx.sym.np.tanh(data1.as_np_ndarray()) mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data": data_shape}, dtype="float32") for target, ctx in ctx_list(): @@ -1560,12 +1563,15 @@ def verify(data_shape): def test_forward_npi_where_rscalar(): + if not hasattr(mx.np, 'where'): + pytest.skip("mx.np.where hasn't been publish yet") + def verify(data_shape, scalar): cond_np = np.random.uniform(size=data_shape).astype("bool") data_np = np.random.uniform(size=data_shape).astype("float32") cond = mx.sym.var('condition') data = mx.sym.var('x') - ref_res = mx.np.where(mx.nd.array(cond_np), mx.nd.array(data_np), scalar) + ref_res = mx.np.where(mx.np.array(cond_np), mx.np.array(data_np), scalar) mx_sym = mx.sym.np.where(cond.as_np_ndarray(), data.as_np_ndarray(), scalar) mod, _ = relay.frontend.from_mxnet( mx_sym, shape={"condition": data_shape, "x": data_shape}, From 45fdde4d0663dcd839613da3b10b347d4be648de Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Wed, 15 Jul 2020 09:59:55 +0000 Subject: [PATCH 06/16] update and bugfix --- python/tvm/relay/frontend/mxnet.py | 1 + tests/python/frontend/mxnet/test_forward.py | 1 + 2 files changed, 2 insertions(+) diff --git a/python/tvm/relay/frontend/mxnet.py b/python/tvm/relay/frontend/mxnet.py index d74a826d433a..d070bf77df2b 100644 --- a/python/tvm/relay/frontend/mxnet.py +++ b/python/tvm/relay/frontend/mxnet.py @@ -2393,6 +2393,7 @@ def _mx_npi_where_rscalar(inputs, attrs): "_contrib_quantized_batch_norm" : _qnn_batch_norm, "_sg_mkldnn_fully_connected": _qnn_fully_connected, # numpy + "_np_transpose" : _mx_npi_transpose, "_npi_transpose" : _mx_npi_transpose, "_npi_pad" : _mx_npi_pad, "_npi_concatenate" : _mx_npi_concatenate, diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 8ead794d9836..894bf895cd44 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -28,6 +28,7 @@ from mxnet.gluon.model_zoo import vision import model_zoo import random +import pytest def verify_mxnet_frontend_impl(mx_symbol, data_shape=(1, 3, 224, 224), From e8f25bc34822b100c5a6cbce9b58b7e90423d48d Mon Sep 17 00:00:00 2001 From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com> Date: Thu, 16 Jul 2020 18:27:10 +0800 Subject: [PATCH 07/16] test for multiple dtypes --- tests/python/frontend/mxnet/test_forward.py | 292 +++++++++++--------- 1 file changed, 156 insertions(+), 136 deletions(-) diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 894bf895cd44..96728984fe02 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -1373,57 +1373,56 @@ def verify(data_shape, anchor_shape, stds=[1, 1, 1, 1], clip=-1, in_format="corn verify((1, 10, 4), (1, 10, 4), clip=1) verify((1, 10, 4), (1, 10, 4), in_format="center") - + +dtype_list = ['float64', 'float32', 'int64', 'int32', 'uint64', 'uint32', 'bool'] + def test_forward_npi_pad(): if not hasattr(mx.sym.np, 'pad'): pytest.skip("mx.sym.np.pad hasn't been publish 
yet") - def verify(data_shape, out_shape, mode, pad_width, constant_value=0.0): - data_np = np.random.uniform(size=data_shape).astype("float32") - data = mx.sym.var('data') - if mode == 'constant': - ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width, constant_value=constant_value) - mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width, constant_values=constant_value) - else: - ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width) - mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) - for target, ctx in ctx_list(): - for kind in ["debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + def verify(data_shape, mode, pad_width, constant_value=0.0): + for dtype in dtype_list: + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + # mx.np.pad only support double type + if dtype == 'bool': + continue + if mode == 'constant': + ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width, constant_value=constant_value) + mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width, constant_values=constant_value) + else: + ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width) + mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - verify(data_shape=(1,1,3,5), out_shape=(1,1,6,12), mode="constant", - pad_width=(0,0,0,0,1,2,3,4)) - verify(data_shape=(1,1,3,5), out_shape=(1,1,6,12), mode="constant", - pad_width=(0,0,0,0,1,2,3,4), constant_value=3.0) - verify(data_shape=(1,1,3,5), out_shape=(1,1,6,12), mode="edge", - pad_width=(0,0,0,0,1,2,3,4)) - verify(data_shape=(1,1,3,5), out_shape=(1,1,6,12), mode="reflect", - pad_width=(0,0,0,0,1,2,3,4)) - verify(data_shape=(1,1,3,5,7), out_shape=(1,1,6,12,18), mode="constant", - pad_width=(0,0,0,0,1,2,3,4,5,6)) - verify(data_shape=(1,1,3,5,7), out_shape=(1,1,6,12,18), mode="constant", - pad_width=(0,0,0,0,1,2,3,4,5,6), constant_value=3.0) - verify(data_shape=(1,1,3,5,7), out_shape=(1,1,6,12,18), mode="edge", - pad_width=(0,0,0,0,1,2,3,4,5,6)) - verify(data_shape=(1,1,3,5,7), out_shape=(1,1,6,12,18), mode="reflect", - pad_width=(0,0,0,0,1,2,3,4,5,6)) + verify(data_shape=(1,1,3,5), mode="constant", pad_width=(0,0,0,0,1,2,3,4)) + verify(data_shape=(1,1,3,5), mode="constant", pad_width=(0,0,0,0,1,2,3,4), constant_value=3.0) + verify(data_shape=(1,1,3,5), mode="edge", pad_width=(0,0,0,0,1,2,3,4)) + verify(data_shape=(1,1,3,5), mode="reflect", pad_width=(0,0,0,0,1,2,3,4)) + verify(data_shape=(1,1,3,5,7), mode="constant", pad_width=(0,0,0,0,1,2,3,4,5,6)) + verify(data_shape=(1,1,3,5,7), mode="constant", pad_width=(0,0,0,0,1,2,3,4,5,6), constant_value=3.0) + verify(data_shape=(1,1,3,5,7), mode="edge", pad_width=(0,0,0,0,1,2,3,4,5,6)) + verify(data_shape=(1,1,3,5,7), mode="reflect", pad_width=(0,0,0,0,1,2,3,4,5,6)) def test_forward_npi_transpose(): def verify(data_shape, axes=None): - data_np = 
np.random.uniform(size=data_shape).astype("float32") - data = mx.sym.var('data') - ref_res = mx.np.transpose(mx.np.array(data_np), axes=axes) - mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + for dtype in dtype_list: + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + ref_res = mx.np.transpose(mx.np.array(data_np), axes=axes) + mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) verify(data_shape=(2,2,2), axes=(1,0,2)) verify(data_shape=(2,7,2), axes=None) @@ -1431,18 +1430,19 @@ def verify(data_shape, axes=None): def test_forward_npi_concatenate(): def verify(data_shape1, data_shape2, axis=None): - data_np1 = np.random.uniform(size=data_shape1).astype("float32") - data_np2 = np.random.uniform(size=data_shape2).astype("float32") - data1 = mx.sym.var('data1') - data2 = mx.sym.var('data2') - ref_res = mx.np.concatenate([mx.np.array(data_np1), mx.np.array(data_np2)], axis=axis) - mx_sym = mx.sym.np.concatenate([data1.as_np_ndarray(), data2.as_np_ndarray()], axis=axis) - mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data1": data_shape1, "data2": data_shape2}) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1, data_np2) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + for dtype in dtype_list: + data_np1 = np.random.uniform(size=data_shape1).astype(dtype) + data_np2 = np.random.uniform(size=data_shape2).astype(dtype) + data1 = mx.sym.var('data1') + data2 = mx.sym.var('data2') + ref_res = mx.np.concatenate([mx.np.array(data_np1), mx.np.array(data_np2)], axis=axis) + mx_sym = mx.sym.np.concatenate([data1.as_np_ndarray(), data2.as_np_ndarray()], axis=axis) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data1": data_shape1, "data2": data_shape2}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1, data_np2) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) verify(data_shape1=(2,2),data_shape2=(2,2),axis=1) verify(data_shape1=(2,4),data_shape2=(2,3),axis=1) @@ -1455,16 +1455,17 @@ def verify(data_shape1, data_shape2, axis=None): def test_forward_np_copy(): def verify(data_shape, out_shape=None): - data_np = np.random.uniform(size=data_shape).astype("float32") - data = mx.sym.var('data') - ref_res = mx.np.copy(mx.np.array(data_np)) - mx_sym = mx.sym.np.copy(data.as_np_ndarray()) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - 
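+                    # op_res comes from the TVM executor, ref_res from MXNet;
+                    # both are converted to numpy and compared elementwise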
tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + for dtype in dtype_list: + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + ref_res = mx.np.copy(mx.np.array(data_np)) + mx_sym = mx.sym.np.copy(data.as_np_ndarray()) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) verify(data_shape=(2,2,2)) verify(data_shape=(2,2,2,1,2,3,1)) @@ -1473,16 +1474,17 @@ def verify(data_shape, out_shape=None): def test_forward_npx_reshape(): def verify(data_shape, out_shape, reverse=False): - data_np = np.random.uniform(size=data_shape).astype("float32") - data = mx.sym.var('data') - ref_res = mx.npx.reshape(mx.np.array(data_np), newshape=out_shape, reverse=reverse) - mx_sym = mx.sym.npx.reshape(data.as_np_ndarray(), newshape=out_shape, reverse=reverse) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + for dtype in dtype_list: + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + ref_res = mx.npx.reshape(mx.np.array(data_np), newshape=out_shape, reverse=reverse) + mx_sym = mx.sym.npx.reshape(data.as_np_ndarray(), newshape=out_shape, reverse=reverse) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) verify(data_shape=(2, 3, 8), out_shape=(-2, -2, 2, -1)) verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-6, 2, -1, -4)) @@ -1498,18 +1500,22 @@ def verify(data_shape): for i in range(len(ref_ops)): ref_op = ref_ops[i] mx_op = mx_ops[i] - data_np1 = np.random.uniform(size=data_shape).astype("float32") - data_np2 = np.random.uniform(size=data_shape).astype("float32") - data1 = mx.sym.var('lhs') - data2 = mx.sym.var('rhs') - ref_res = ref_op(mx.np.array(data_np1), mx.np.array(data_np2)) - mx_sym = mx_op(data1.as_np_ndarray(), data2.as_np_ndarray()) - mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape, "rhs": data_shape}) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1, data_np2) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + # mx.np.power only support float type + if ref_op == mx.np.power: + dtype_list = ['float64', 'float32'] + for dtype in dtype_list: + data_np1 = np.random.uniform(size=data_shape).astype(dtype) + data_np2 = np.random.uniform(size=data_shape).astype(dtype) + data1 = mx.sym.var('lhs') + data2 = mx.sym.var('rhs') + ref_res = ref_op(mx.np.array(data_np1), mx.np.array(data_np2)) + mx_sym = mx_op(data1.as_np_ndarray(), data2.as_np_ndarray()) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape, "rhs": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for 
kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1, data_np2) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) verify(data_shape=(2,2)) verify(data_shape=(2,4)) @@ -1527,16 +1533,20 @@ def verify(data_shape, scalar): for i in range(len(ref_ops)): ref_op = ref_ops[i] mx_op = mx_ops[i] - data_np1 = np.random.uniform(size=data_shape).astype("float32") - data1 = mx.sym.var('lhs') - ref_res = ref_op(mx.np.array(data_np1), scalar) - mx_sym = mx_op(data1.as_np_ndarray(), scalar) - mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape}, dtype="float32") - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + # mx.np.power only support float type + if ref_op == mx.np.power: + dtype_list = ['float64', 'float32'] + for dtype in dtype_list: + data_np1 = np.random.uniform(size=data_shape).astype(dtype) + data1 = mx.sym.var('lhs') + ref_res = ref_op(mx.np.array(data_np1), scalar) + mx_sym = mx_op(data1.as_np_ndarray(), scalar) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) verify(data_shape=(2,2), scalar=1.0) verify(data_shape=(2,4), scalar=2.0) @@ -1546,16 +1556,18 @@ def verify(data_shape, scalar): def test_forward_npi_tanh(): def verify(data_shape): - data_np1 = np.random.uniform(size=data_shape).astype("float32") - data1 = mx.sym.var('data') - ref_res = mx.np.tanh(mx.np.array(data_np1)) - mx_sym = mx.sym.np.tanh(data1.as_np_ndarray()) - mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data": data_shape}, dtype="float32") - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + dtype_list = ['float64', 'float32'] + for dtype in dtype_list: + data_np1 = np.random.uniform(size=data_shape).astype(dtype) + data1 = mx.sym.var('data') + ref_res = mx.np.tanh(mx.np.array(data_np1)) + mx_sym = mx.sym.np.tanh(data1.as_np_ndarray()) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) verify(data_shape=(2,2)) verify(data_shape=(2,4)) @@ -1568,20 +1580,27 @@ def test_forward_npi_where_rscalar(): pytest.skip("mx.np.where hasn't been publish yet") def verify(data_shape, scalar): - cond_np = np.random.uniform(size=data_shape).astype("bool") - data_np = np.random.uniform(size=data_shape).astype("float32") - cond = mx.sym.var('condition') - data = mx.sym.var('x') - ref_res = mx.np.where(mx.np.array(cond_np), mx.np.array(data_np), scalar) - mx_sym = mx.sym.np.where(cond.as_np_ndarray(), data.as_np_ndarray(), scalar) - mod, _ = relay.frontend.from_mxnet( - mx_sym, shape={"condition": data_shape, "x": 
data_shape}, - dtype={"condition": "bool", "x": "float32"}) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(cond_np, data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + for cond_dtype in dtype_list: + for data_dtype in dtype_list: + if data_dtype == 'bool': + scalar = scalar == 0.0 + cond_np = np.random.uniform(size=data_shape).astype(cond_dtype) + data_np = np.random.uniform(size=data_shape).astype(data_dtype) + cond = mx.sym.var('condition') + data = mx.sym.var('x') + ref_res = mx.np.where(mx.np.array(cond_np), mx.np.array(data_np), scalar) + mx_sym = mx.sym.np.where(cond.as_np_ndarray(), data.as_np_ndarray(), scalar) + dtypeDic = {} + dtypeDic["condition"] = cond_dtype + dtypeDic["x"] = data_dtype + mod, _ = relay.frontend.from_mxnet( + mx_sym, shape={"condition": data_shape, "x": data_shape}, + dtype=dtypeDic) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(cond_np, data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) verify(data_shape=(2,2), scalar=1.0) verify(data_shape=(2,4), scalar=2.0) @@ -1591,22 +1610,23 @@ def verify(data_shape, scalar): def test_forward_split_v2(): def verify(data_shape, axis=0, indices_or_sections=1, squeeze_axis=False): - data_np = np.random.uniform(size=data_shape).astype("float32") - data = mx.sym.var('data') - ref_res = mx.ndarray.split_v2(mx.nd.array(data_np), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis) - mx_sym = mx.sym.split_v2(data.as_nd_ndarray(), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - op_res_ = [] - for arr in op_res: - op_res_.append(arr.asnumpy().tolist()) - ref_res_ = [] - for arr in ref_res: - ref_res_.append(arr.asnumpy().tolist()) - tvm.testing.assert_allclose(op_res_, ref_res_, rtol=1e-5) + for dtype in dtype_list: + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + ref_res = mx.ndarray.split_v2(mx.nd.array(data_np), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis) + mx_sym = mx.sym.split_v2(data.as_nd_ndarray(), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + op_res_ = [] + for arr in op_res: + op_res_.append(arr.asnumpy().tolist()) + ref_res_ = [] + for arr in ref_res: + ref_res_.append(arr.asnumpy().tolist()) + tvm.testing.assert_allclose(op_res_, ref_res_, rtol=1e-5) verify((3, 2, 1), axis=1, indices_or_sections=2) verify((3, 2, 1), axis=0, indices_or_sections=3) From bca8e36ebcbb37ffe6ebbfeccdb4f561bf2c7ab8 Mon Sep 17 00:00:00 2001 From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com> Date: Fri, 17 Jul 2020 00:13:06 +0800 Subject: [PATCH 08/16] Update test_forward.py --- tests/python/frontend/mxnet/test_forward.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 96728984fe02..26b3089f490b 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -1374,7 +1374,7 @@ def verify(data_shape, anchor_shape, stds=[1, 1, 1, 1], clip=-1, in_format="corn verify((1, 10, 4), (1, 10, 4), in_format="center") -dtype_list = ['float64', 'float32', 'int64', 'int32', 'uint64', 'uint32', 'bool'] +dtype_list = ['float64', 'float32', 'int64', 'int32', 'bool'] def test_forward_npi_pad(): if not hasattr(mx.sym.np, 'pad'): From 04e5552d274bbe42c86c4d0ca53036dd2914b267 Mon Sep 17 00:00:00 2001 From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com> Date: Mon, 20 Jul 2020 22:56:55 +0800 Subject: [PATCH 09/16] add data type and optimize coding style --- tests/python/frontend/mxnet/test_forward.py | 419 +++++++++----------- 1 file changed, 188 insertions(+), 231 deletions(-) diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 26b3089f490b..9163fe705a68 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -1351,7 +1351,7 @@ def verify(batch, seq_length, num_heads, head_dim): verify(1, 10, 4, 16) verify(3, 10, 6, 8) - + def test_forward_box_decode(): def verify(data_shape, anchor_shape, stds=[1, 1, 1, 1], clip=-1, in_format="corner"): dtype = "float32" @@ -1374,117 +1374,97 @@ def verify(data_shape, anchor_shape, stds=[1, 1, 1, 1], clip=-1, in_format="corn verify((1, 10, 4), (1, 10, 4), in_format="center") -dtype_list = ['float64', 'float32', 'int64', 'int32', 'bool'] - -def test_forward_npi_pad(): +@pytest.mark.parametrize( + "data_shape, pad_width", + [((1,1,3,5),(0,0,0,0,1,2,3,4)), ((1,1,3,5,7),(0,0,0,0,1,2,3,4,5,6))] +) +@pytest.mark.parametrize("mode", ["constant", "edge", "reflect"]) +@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32']) +@pytest.mark.parametrize("constant_value", [0.0, 3.0]) +def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value): if not hasattr(mx.sym.np, 'pad'): pytest.skip("mx.sym.np.pad hasn't been publish yet") + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + if mode == 'constant': + ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width, constant_value=constant_value) + mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width, constant_values=constant_value) + else: + ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width) + mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - def verify(data_shape, mode, pad_width, constant_value=0.0): - for dtype in dtype_list: - data_np = np.random.uniform(size=data_shape).astype(dtype) - data = mx.sym.var('data') - # mx.np.pad only support double type - if dtype == 'bool': - continue - if mode == 'constant': - ref_res = mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width, constant_value=constant_value) - mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width, constant_values=constant_value) - else: - ref_res = 
mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width) - mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - verify(data_shape=(1,1,3,5), mode="constant", pad_width=(0,0,0,0,1,2,3,4)) - verify(data_shape=(1,1,3,5), mode="constant", pad_width=(0,0,0,0,1,2,3,4), constant_value=3.0) - verify(data_shape=(1,1,3,5), mode="edge", pad_width=(0,0,0,0,1,2,3,4)) - verify(data_shape=(1,1,3,5), mode="reflect", pad_width=(0,0,0,0,1,2,3,4)) - verify(data_shape=(1,1,3,5,7), mode="constant", pad_width=(0,0,0,0,1,2,3,4,5,6)) - verify(data_shape=(1,1,3,5,7), mode="constant", pad_width=(0,0,0,0,1,2,3,4,5,6), constant_value=3.0) - verify(data_shape=(1,1,3,5,7), mode="edge", pad_width=(0,0,0,0,1,2,3,4,5,6)) - verify(data_shape=(1,1,3,5,7), mode="reflect", pad_width=(0,0,0,0,1,2,3,4,5,6)) +@pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2)]) +@pytest.mark.parametrize("axes", [(1,0,2),None]) +@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) +def test_forward_npi_transpose(data_shape, axes, dtype): + def verify(data_shape, axes=None): + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + ref_res = mx.np.transpose(mx.np.array(data_np), axes=axes) + mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) -def test_forward_npi_transpose(): - def verify(data_shape, axes=None): - for dtype in dtype_list: - data_np = np.random.uniform(size=data_shape).astype(dtype) - data = mx.sym.var('data') - ref_res = mx.np.transpose(mx.np.array(data_np), axes=axes) - mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - - verify(data_shape=(2,2,2), axes=(1,0,2)) - verify(data_shape=(2,7,2), axes=None) - - -def test_forward_npi_concatenate(): - def verify(data_shape1, data_shape2, axis=None): - for dtype in dtype_list: - data_np1 = np.random.uniform(size=data_shape1).astype(dtype) - data_np2 = np.random.uniform(size=data_shape2).astype(dtype) - data1 = mx.sym.var('data1') - data2 = mx.sym.var('data2') - ref_res = mx.np.concatenate([mx.np.array(data_np1), mx.np.array(data_np2)], axis=axis) - mx_sym = mx.sym.np.concatenate([data1.as_np_ndarray(), data2.as_np_ndarray()], axis=axis) - mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data1": data_shape1, "data2": data_shape2}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1, data_np2) - 
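+            # inputs are bound positionally to the relay module's free
+            # variables, which follow the declaration order (data1, data2)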
tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - - verify(data_shape1=(2,2),data_shape2=(2,2),axis=1) - verify(data_shape1=(2,4),data_shape2=(2,3),axis=1) - verify(data_shape1=(1,3,2),data_shape2=(1,3,5),axis=2) - verify(data_shape1=(1,3,3),data_shape2=(1,3,3),axis=1) - verify(data_shape1=(1,3),data_shape2=(1,3),axis=0) - verify(data_shape1=(1,3,4),data_shape2=(1,3,4)) - verify(data_shape1=(1,3,4),data_shape2=(1,3,4)) - - -def test_forward_np_copy(): - def verify(data_shape, out_shape=None): - for dtype in dtype_list: - data_np = np.random.uniform(size=data_shape).astype(dtype) - data = mx.sym.var('data') - ref_res = mx.np.copy(mx.np.array(data_np)) - mx_sym = mx.sym.np.copy(data.as_np_ndarray()) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) +@pytest.mark.parametrize( + "data_shape1, data_shape2, axis", + [((2,2),(2,2),1),((2,4),(2,3),1),((1,3,2),(1,3,5),2),((1,3,3),(1,3,3),1),((1,3),(1,3),0)] +) +@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) +def test_forward_npi_concatenate(data_shape1, data_shape2, axis, dtype): + data_np1 = np.random.uniform(size=data_shape1).astype(dtype) + data_np2 = np.random.uniform(size=data_shape2).astype(dtype) + data1 = mx.sym.var('data1') + data2 = mx.sym.var('data2') + ref_res = mx.np.concatenate([mx.np.array(data_np1), mx.np.array(data_np2)], axis=axis) + mx_sym = mx.sym.np.concatenate([data1.as_np_ndarray(), data2.as_np_ndarray()], axis=axis) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data1": data_shape1, "data2": data_shape2}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1, data_np2) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - verify(data_shape=(2,2,2)) - verify(data_shape=(2,2,2,1,2,3,1)) - verify(data_shape=(1,8)) +@pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8)]) +@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) +def test_forward_np_copy(data_shape,dtype): + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + ref_res = mx.np.copy(mx.np.array(data_np)) + mx_sym = mx.sym.np.copy(data.as_np_ndarray()) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) -def test_forward_npx_reshape(): - def verify(data_shape, out_shape, reverse=False): - for dtype in dtype_list: - data_np = np.random.uniform(size=data_shape).astype(dtype) - data = mx.sym.var('data') - ref_res = mx.npx.reshape(mx.np.array(data_np), newshape=out_shape, reverse=reverse) - mx_sym = mx.sym.npx.reshape(data.as_np_ndarray(), newshape=out_shape, reverse=reverse) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, 
target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + +@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) +def test_forward_npx_reshape(dtype): + def verify(data_shape,out_shape,reverse=False): + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + ref_res = mx.npx.reshape(mx.np.array(data_np), newshape=out_shape, reverse=reverse) + mx_sym = mx.sym.npx.reshape(data.as_np_ndarray(), newshape=out_shape, reverse=reverse) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) verify(data_shape=(2, 3, 8), out_shape=(-2, -2, 2, -1)) verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-6, 2, -1, -4)) @@ -1493,140 +1473,117 @@ def verify(data_shape, out_shape, reverse=False): verify(data_shape=(8, 3, 2, 4, 8), out_shape=(-4, -1, 2, -6), reverse=True) -def test_forward_npi_binary(): - def verify(data_shape): - ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.less] - mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.less] - for i in range(len(ref_ops)): - ref_op = ref_ops[i] - mx_op = mx_ops[i] - # mx.np.power only support float type - if ref_op == mx.np.power: - dtype_list = ['float64', 'float32'] - for dtype in dtype_list: - data_np1 = np.random.uniform(size=data_shape).astype(dtype) - data_np2 = np.random.uniform(size=data_shape).astype(dtype) - data1 = mx.sym.var('lhs') - data2 = mx.sym.var('rhs') - ref_res = ref_op(mx.np.array(data_np1), mx.np.array(data_np2)) - mx_sym = mx_op(data1.as_np_ndarray(), data2.as_np_ndarray()) - mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape, "rhs": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1, data_np2) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - - verify(data_shape=(2,2)) - verify(data_shape=(2,4)) - verify(data_shape=(1,3,2)) - verify(data_shape=(1,3,3)) - verify(data_shape=(1,3)) - verify(data_shape=(1,3,4)) - verify(data_shape=(1,3,4)) - - -def test_forward_npi_binary_scalar(): - def verify(data_shape, scalar): - ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.true_divide] - mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.true_divide] - for i in range(len(ref_ops)): - ref_op = ref_ops[i] - mx_op = mx_ops[i] - # mx.np.power only support float type - if ref_op == mx.np.power: - dtype_list = ['float64', 'float32'] - for dtype in dtype_list: - data_np1 = np.random.uniform(size=data_shape).astype(dtype) - data1 = mx.sym.var('lhs') - ref_res = ref_op(mx.np.array(data_np1), scalar) - mx_sym = mx_op(data1.as_np_ndarray(), scalar) - mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - - verify(data_shape=(2,2), scalar=1.0) - verify(data_shape=(2,4), scalar=2.0) - 
verify(data_shape=(1,3,2), scalar=3.0) - verify(data_shape=(1,3,3), scalar=4.0) - - -def test_forward_npi_tanh(): - def verify(data_shape): - dtype_list = ['float64', 'float32'] - for dtype in dtype_list: - data_np1 = np.random.uniform(size=data_shape).astype(dtype) - data1 = mx.sym.var('data') - ref_res = mx.np.tanh(mx.np.array(data_np1)) - mx_sym = mx.sym.np.tanh(data1.as_np_ndarray()) - mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - - verify(data_shape=(2,2)) - verify(data_shape=(2,4)) - verify(data_shape=(1,3,2)) - verify(data_shape=(1,3,3)) +@pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)]) +@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32']) +def test_forward_npi_binary(data_shape,dtype): + ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.less] + mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.less] + for i in range(len(ref_ops)): + ref_op = ref_ops[i] + mx_op = mx_ops[i] + # mx.np.power only support float type + if ref_op == mx.np.power and dtype not in ['float64', 'float32']: + continue + data_np1 = np.random.uniform(size=data_shape).astype(dtype) + data_np2 = np.random.uniform(size=data_shape).astype(dtype) + data1 = mx.sym.var('lhs') + data2 = mx.sym.var('rhs') + ref_res = ref_op(mx.np.array(data_np1), mx.np.array(data_np2)) + mx_sym = mx_op(data1.as_np_ndarray(), data2.as_np_ndarray()) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape, "rhs": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1, data_np2) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + +@pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)]) +@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32']) +@pytest.mark.parametrize("scalar", [1.0,2.0,3.0,4.0]) +def test_forward_npi_binary_scalar(data_shape,dtype,scalar): + ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.true_divide] + mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.true_divide] + for i in range(len(ref_ops)): + ref_op = ref_ops[i] + mx_op = mx_ops[i] + # mx.np.power only support float type + if ref_op == mx.np.power and dtype not in ['float64', 'float32']: + continue + data_np1 = np.random.uniform(size=data_shape).astype(dtype) + data1 = mx.sym.var('lhs') + ref_res = ref_op(mx.np.array(data_np1), scalar) + mx_sym = mx_op(data1.as_np_ndarray(), scalar) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) -def test_forward_npi_where_rscalar(): + +@pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)]) +@pytest.mark.parametrize("dtype", ['float64', 'float32']) +def test_forward_npi_tanh(data_shape,dtype): + data_np1 = 
np.random.uniform(size=data_shape).astype(dtype) + data1 = mx.sym.var('data') + ref_res = mx.np.tanh(mx.np.array(data_np1)) + mx_sym = mx.sym.np.tanh(data1.as_np_ndarray()) + mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data": data_shape}, dtype=dtype) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + + +@pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(1,8),(2,2),(1,3)]) +@pytest.mark.parametrize("cond_dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) +@pytest.mark.parametrize("data_dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) +@pytest.mark.parametrize("scalar", [1.0,2.0]) +def test_forward_npi_where_rscalar(data_shape,cond_dtype,data_dtype,scalar): if not hasattr(mx.np, 'where'): pytest.skip("mx.np.where hasn't been publish yet") + if data_dtype == 'bool': + scalar = scalar == 0.0 + cond_np = np.random.uniform(size=data_shape).astype(cond_dtype) + data_np = np.random.uniform(size=data_shape).astype(data_dtype) + cond = mx.sym.var('condition') + data = mx.sym.var('x') + ref_res = mx.np.where(mx.np.array(cond_np), mx.np.array(data_np), scalar) + mx_sym = mx.sym.np.where(cond.as_np_ndarray(), data.as_np_ndarray(), scalar) + dtypeDic = {} + dtypeDic["condition"] = cond_dtype + dtypeDic["x"] = data_dtype + mod, _ = relay.frontend.from_mxnet( + mx_sym, shape={"condition": data_shape, "x": data_shape}, + dtype=dtypeDic) + for target, ctx in ctx_list(): + for kind in ["graph", "vm", "debug"]: + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(cond_np, data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + - def verify(data_shape, scalar): - for cond_dtype in dtype_list: - for data_dtype in dtype_list: - if data_dtype == 'bool': - scalar = scalar == 0.0 - cond_np = np.random.uniform(size=data_shape).astype(cond_dtype) - data_np = np.random.uniform(size=data_shape).astype(data_dtype) - cond = mx.sym.var('condition') - data = mx.sym.var('x') - ref_res = mx.np.where(mx.np.array(cond_np), mx.np.array(data_np), scalar) - mx_sym = mx.sym.np.where(cond.as_np_ndarray(), data.as_np_ndarray(), scalar) - dtypeDic = {} - dtypeDic["condition"] = cond_dtype - dtypeDic["x"] = data_dtype - mod, _ = relay.frontend.from_mxnet( - mx_sym, shape={"condition": data_shape, "x": data_shape}, - dtype=dtypeDic) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(cond_np, data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - - verify(data_shape=(2,2), scalar=1.0) - verify(data_shape=(2,4), scalar=2.0) - verify(data_shape=(1,3,2), scalar=3.0) - verify(data_shape=(1,3,3), scalar=4.0) - - -def test_forward_split_v2(): +@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) +def test_forward_split_v2(dtype): def verify(data_shape, axis=0, indices_or_sections=1, squeeze_axis=False): - for dtype in dtype_list: - data_np = np.random.uniform(size=data_shape).astype(dtype) - data = mx.sym.var('data') - ref_res = mx.ndarray.split_v2(mx.nd.array(data_np), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis) - mx_sym = mx.sym.split_v2(data.as_nd_ndarray(), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis) - mod, _ = 
relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype)
-            for target, ctx in ctx_list():
-                for kind in ["graph", "vm", "debug"]:
-                    intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
-                    op_res = intrp.evaluate()(data_np)
-                    op_res_ = []
-                    for arr in op_res:
-                        op_res_.append(arr.asnumpy().tolist())
-                    ref_res_ = []
-                    for arr in ref_res:
-                        ref_res_.append(arr.asnumpy().tolist())
-                    tvm.testing.assert_allclose(op_res_, ref_res_, rtol=1e-5)
+        data_np = np.random.uniform(size=data_shape).astype(dtype)
+        data = mx.sym.var('data')
+        ref_res = mx.ndarray.split_v2(mx.nd.array(data_np), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis)
+        mx_sym = mx.sym.split_v2(data.as_nd_ndarray(), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis)
+        mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype)
+        for target, ctx in ctx_list():
+            for kind in ["graph", "vm", "debug"]:
+                intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+                op_res = intrp.evaluate()(data_np)
+                op_res_ = []
+                for arr in op_res:
+                    op_res_.append(arr.asnumpy().tolist())
+                ref_res_ = []
+                for arr in ref_res:
+                    ref_res_.append(arr.asnumpy().tolist())
+                tvm.testing.assert_allclose(op_res_, ref_res_, rtol=1e-5)
 
     verify((3, 2, 1), axis=1, indices_or_sections=2)
     verify((3, 2, 1), axis=0, indices_or_sections=3)
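The hunk above finishes converting the loop-based verify() helpers into parametrized pytest tests: every axis the old nested loops walked (input shape, dtype, and in the later patches target/ctx and executor kind) becomes one stacked @pytest.mark.parametrize decorator, and pytest expands the stack into the full cross product of independent test cases. A minimal, self-contained sketch of the same pattern against plain NumPy follows; the shapes, dtypes, tolerance, and test body are illustrative assumptions, not values taken from this suite.

    import numpy as np
    import pytest

    # Stacked decorators multiply: 3 shapes x 2 dtypes = 6 separate test
    # cases, each collected, reported, and skippable on its own.
    @pytest.mark.parametrize("data_shape", [(2, 2), (1, 3), (2, 7, 2)])
    @pytest.mark.parametrize("dtype", ["float64", "float32"])
    def test_tanh_against_reference(data_shape, dtype):
        x = np.random.uniform(size=data_shape).astype(dtype)
        # tanh(x) == (exp(2x) - 1) / (exp(2x) + 1), a cheap independent check
        ref = (np.exp(2 * x) - 1) / (np.exp(2 * x) + 1)
        np.testing.assert_allclose(np.tanh(x), ref, rtol=1e-5)

The payoff over a hand-rolled loop is isolation: a failing (shape, dtype) combination no longer masks the combinations that would have run after it.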
From 84ab57edde34d906d8edbc93f70361a6c22fd57a Mon Sep 17 00:00:00 2001
From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com>
Date: Wed, 22 Jul 2020 23:23:51 +0800
Subject: [PATCH 10/16] replace pytest.skip with @pytest.mark.skipif

---
 tests/python/frontend/mxnet/test_forward.py | 18 +++---------------
 1 file changed, 3 insertions(+), 15 deletions(-)

diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py
index 9163fe705a68..7b9e6c2727d8 100644
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -1374,6 +1374,7 @@ def verify(data_shape, anchor_shape, stds=[1, 1, 1, 1], clip=-1, in_format="corn
     verify((1, 10, 4), (1, 10, 4), in_format="center")
 
 
+@pytest.mark.skipif(not hasattr(mx.sym.np, 'pad'), reason="mx.sym.np.pad hasn't been published yet")
 @pytest.mark.parametrize(
     "data_shape, pad_width",
     [((1,1,3,5),(0,0,0,0,1,2,3,4)), ((1,1,3,5,7),(0,0,0,0,1,2,3,4,5,6))]
@@ -1382,8 +1383,6 @@ def verify(data_shape, anchor_shape, stds=[1, 1, 1, 1], clip=-1, in_format="corn
 @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32'])
 @pytest.mark.parametrize("constant_value", [0.0, 3.0])
 def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value):
-    if not hasattr(mx.sym.np, 'pad'):
-        pytest.skip("mx.sym.np.pad hasn't been publish yet")
     data_np = np.random.uniform(size=data_shape).astype(dtype)
     data = mx.sym.var('data')
     if mode == 'constant':
@@ -1421,7 +1420,7 @@ def verify(data_shape, axes=None):
     "data_shape1, data_shape2, axis",
     [((2,2),(2,2),1),((2,4),(2,3),1),((1,3,2),(1,3,5),2),((1,3,3),(1,3,3),1),((1,3),(1,3),0)]
 )
-@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
+@pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32'])
 def test_forward_npi_concatenate(data_shape1, data_shape2, axis, dtype):
     data_np1 = np.random.uniform(size=data_shape1).astype(dtype)
     data_np2 = np.random.uniform(size=data_shape2).astype(dtype)
@@ -1537,13 +1536,12 @@ def test_forward_npi_tanh(data_shape,dtype):
         tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
 
 
+@pytest.mark.skipif(not hasattr(mx.np, 'where'), reason="mx.np.where hasn't been published yet")
 @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(1,8),(2,2),(1,3)])
 @pytest.mark.parametrize("cond_dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
 @pytest.mark.parametrize("data_dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
 @pytest.mark.parametrize("scalar", [1.0,2.0])
 def test_forward_npi_where_rscalar(data_shape,cond_dtype,data_dtype,scalar):
-    if not hasattr(mx.np, 'where'):
-        pytest.skip("mx.np.where hasn't been publish yet")
     if data_dtype == 'bool':
         scalar = scalar == 0.0
     cond_np = np.random.uniform(size=data_shape).astype(cond_dtype)
@@ -1667,13 +1665,3 @@ def verify(data_shape, axis=0, indices_or_sections=1, squeeze_axis=False):
     test_forward_box_decode()
     test_forward_amp_multicast()
     test_forward_amp_cast()
-    test_forward_npi_pad()
-    test_forward_npi_transpose()
-    test_forward_npi_concatenate()
-    test_forward_np_copy()
-    test_forward_npx_reshape()
-    test_forward_npi_binary()
-    test_forward_npi_binary_scalar()
-    test_forward_npi_tanh()
-    test_forward_npi_where_rscalar()
-    test_forward_split_v2()

From 915101990e3b1d13e0bd499cb36770fb03586e67 Mon Sep 17 00:00:00 2001
From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com>
Date: Thu, 23 Jul 2020 20:42:14 +0800
Subject: [PATCH 11/16] Update test_forward.py

---
 tests/python/frontend/mxnet/test_forward.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py
index 2ec5bb0263a7..1c63e9858096 100644
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -1437,8 +1437,8 @@ def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value):
 
 @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2)])
-@pytest.mark.parametrize("axes", [(1,0,2),None])
 @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
+@pytest.mark.parametrize("axes", [(1,0,2),None])
 def test_forward_npi_transpose(data_shape, axes, dtype):
     def verify(data_shape, axes=None):
         data_np = np.random.uniform(size=data_shape).astype(dtype)

From cb08135a2ff23937524cdc4340a39743b545be56 Mon Sep 17 00:00:00 2001
From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com>
Date: Wed, 29 Jul 2020 01:03:13 +0800
Subject: [PATCH 12/16] update pytest style

---
 tests/python/frontend/mxnet/test_forward.py | 261 +++++++------------
 1 file changed, 90 insertions(+), 171 deletions(-)

diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py
index 1c63e9858096..2a12d64d8389 100644
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -1419,7 +1419,9 @@ def verify(data_shape, axis, use_length, length):
 @pytest.mark.parametrize("mode", ["constant", "edge", "reflect"])
 @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32'])
 @pytest.mark.parametrize("constant_value", [0.0, 3.0])
-def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value):
+@pytest.mark.parametrize("target, ctx", ctx_list())
+@pytest.mark.parametrize("kind", ["graph", "vm", "debug"])
+def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value,target, ctx, kind):
     data_np = np.random.uniform(size=data_shape).astype(dtype)
     data = mx.sym.var('data')
     if mode == 'constant':
@@ -1429,28 +1431,25 @@ def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value):
         ref_res = 
mx.ndarray.pad(mx.nd.array(data_np), mode=mode,pad_width=pad_width) mx_sym = mx.sym.np.pad(data.as_np_ndarray(), mode=mode, pad_width=pad_width) mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2)]) @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) @pytest.mark.parametrize("axes", [(1,0,2),None]) -def test_forward_npi_transpose(data_shape, axes, dtype): - def verify(data_shape, axes=None): - data_np = np.random.uniform(size=data_shape).astype(dtype) - data = mx.sym.var('data') - ref_res = mx.np.transpose(mx.np.array(data_np), axes=axes) - mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) +@pytest.mark.parametrize("target, ctx", ctx_list()) +@pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) +def test_forward_npi_transpose(data_shape, axes, dtype,target, ctx, kind): + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + ref_res = mx.np.transpose(mx.np.array(data_np), axes=axes) + mx_sym = mx.sym.np.transpose(data.as_np_ndarray(), axes=axes) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) @pytest.mark.parametrize( @@ -1458,7 +1457,9 @@ def verify(data_shape, axes=None): [((2,2),(2,2),1),((2,4),(2,3),1),((1,3,2),(1,3,5),2),((1,3,3),(1,3,3),1),((1,3),(1,3),0)] ) @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32']) -def test_forward_npi_concatenate(data_shape1, data_shape2, axis, dtype): +@pytest.mark.parametrize("target, ctx", ctx_list()) +@pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) +def test_forward_npi_concatenate(data_shape1, data_shape2, axis, dtype,target, ctx, kind): data_np1 = np.random.uniform(size=data_shape1).astype(dtype) data_np2 = np.random.uniform(size=data_shape2).astype(dtype) data1 = mx.sym.var('data1') @@ -1466,52 +1467,51 @@ def test_forward_npi_concatenate(data_shape1, data_shape2, axis, dtype): ref_res = mx.np.concatenate([mx.np.array(data_np1), mx.np.array(data_np2)], axis=axis) mx_sym = mx.sym.np.concatenate([data1.as_np_ndarray(), data2.as_np_ndarray()], axis=axis) mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data1": data_shape1, "data2": data_shape2}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1, data_np2) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, 
target=target) + op_res = intrp.evaluate()(data_np1, data_np2) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8)]) @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) -def test_forward_np_copy(data_shape,dtype): +@pytest.mark.parametrize("target, ctx", ctx_list()) +@pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) +def test_forward_np_copy(data_shape,dtype,target, ctx, kind): data_np = np.random.uniform(size=data_shape).astype(dtype) data = mx.sym.var('data') ref_res = mx.np.copy(mx.np.array(data_np)) mx_sym = mx.sym.np.copy(data.as_np_ndarray()) mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool']) -def test_forward_npx_reshape(dtype): - def verify(data_shape,out_shape,reverse=False): - data_np = np.random.uniform(size=data_shape).astype(dtype) - data = mx.sym.var('data') - ref_res = mx.npx.reshape(mx.np.array(data_np), newshape=out_shape, reverse=reverse) - mx_sym = mx.sym.npx.reshape(data.as_np_ndarray(), newshape=out_shape, reverse=reverse) - mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) - - verify(data_shape=(2, 3, 8), out_shape=(-2, -2, 2, -1)) - verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-6, 2, -1, -4)) - verify(data_shape=(8, 3, 3, 3, 4, 4), out_shape=(-5, -4)) - verify(data_shape=(8, 3, 3, 3, 3, 8), out_shape=(-4, -5), reverse=True) - verify(data_shape=(8, 3, 2, 4, 8), out_shape=(-4, -1, 2, -6), reverse=True) +@pytest.mark.parametrize("target, ctx", ctx_list()) +@pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) +@pytest.mark.parametrize("data_shape,out_shape,reverse", + [((2, 3, 8),(-2, -2, 2, -1),False), + ((8, 3, 3, 3, 4, 4),(-6, 2, -1, -4),False), + ((8, 3, 3, 3, 4, 4),(-5, -4),False), + ((8, 3, 3, 3, 3, 8),(-4, -5),True), + ((8, 3, 2, 4, 8),(-4, -1, 2, -6),True)]) +def test_forward_npx_reshape(data_shape,out_shape,dtype,target,reverse, ctx, kind): + data_np = np.random.uniform(size=data_shape).astype(dtype) + data = mx.sym.var('data') + ref_res = mx.npx.reshape(mx.np.array(data_np), newshape=out_shape, reverse=reverse) + mx_sym = mx.sym.npx.reshape(data.as_np_ndarray(), newshape=out_shape, reverse=reverse) + mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype) + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)]) @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32']) -def test_forward_npi_binary(data_shape,dtype): 
+@pytest.mark.parametrize("target, ctx", ctx_list()) +@pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) +def test_forward_npi_binary(data_shape,dtype,target, ctx, kind): ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.less] mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.less] for i in range(len(ref_ops)): @@ -1527,17 +1527,17 @@ def test_forward_npi_binary(data_shape,dtype): ref_res = ref_op(mx.np.array(data_np1), mx.np.array(data_np2)) mx_sym = mx_op(data1.as_np_ndarray(), data2.as_np_ndarray()) mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape, "rhs": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1, data_np2) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1, data_np2) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)]) @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32']) @pytest.mark.parametrize("scalar", [1.0,2.0,3.0,4.0]) -def test_forward_npi_binary_scalar(data_shape,dtype,scalar): +@pytest.mark.parametrize("target, ctx", ctx_list()) +@pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) +def test_forward_npi_binary_scalar(data_shape,dtype,scalar,target, ctx, kind): ref_ops = [mx.np.power, mx.np.multiply, mx.np.add, mx.np.true_divide] mx_ops = [mx.sym.np.power, mx.sym.np.multiply, mx.sym.np.add, mx.sym.np.true_divide] for i in range(len(ref_ops)): @@ -1551,26 +1551,24 @@ def test_forward_npi_binary_scalar(data_shape,dtype,scalar): ref_res = ref_op(mx.np.array(data_np1), scalar) mx_sym = mx_op(data1.as_np_ndarray(), scalar) mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"lhs": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1) + tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)]) @pytest.mark.parametrize("dtype", ['float64', 'float32']) -def test_forward_npi_tanh(data_shape,dtype): +@pytest.mark.parametrize("target, ctx", ctx_list()) +@pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) +def test_forward_npi_tanh(data_shape,dtype,target, ctx, kind): data_np1 = np.random.uniform(size=data_shape).astype(dtype) data1 = mx.sym.var('data') ref_res = mx.np.tanh(mx.np.array(data_np1)) mx_sym = mx.sym.np.tanh(data1.as_np_ndarray()) mod, _ = relay.frontend.from_mxnet(mx_sym, shape={"data": data_shape}, dtype=dtype) - for target, ctx in ctx_list(): - for kind in ["graph", "vm", "debug"]: - intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) - op_res = intrp.evaluate()(data_np1) - tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5) + intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) + op_res = intrp.evaluate()(data_np1) + tvm.testing.assert_allclose(op_res.asnumpy(), 
ref_res.asnumpy(), rtol=1e-5)
 
 
 @pytest.mark.skipif(not hasattr(mx.np, 'where'), reason="mx.np.where hasn't been published yet")
@@ -1578,7 +1576,9 @@ def test_forward_npi_tanh(data_shape,dtype):
 @pytest.mark.parametrize("cond_dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
 @pytest.mark.parametrize("data_dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
 @pytest.mark.parametrize("scalar", [1.0,2.0])
-def test_forward_npi_where_rscalar(data_shape,cond_dtype,data_dtype,scalar):
+@pytest.mark.parametrize("target, ctx", ctx_list())
+@pytest.mark.parametrize("kind", ["graph", "vm", "debug"])
+def test_forward_npi_where_rscalar(data_shape,cond_dtype,data_dtype,scalar,target, ctx, kind):
     if data_dtype == 'bool':
         scalar = scalar == 0.0
     cond_np = np.random.uniform(size=data_shape).astype(cond_dtype)
@@ -1593,113 +1593,32 @@ def test_forward_npi_where_rscalar(data_shape,cond_dtype,data_dtype,scalar):
     mod, _ = relay.frontend.from_mxnet(
         mx_sym, shape={"condition": data_shape, "x": data_shape},
         dtype=dtypeDic)
-    for target, ctx in ctx_list():
-        for kind in ["graph", "vm", "debug"]:
-            intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
-            op_res = intrp.evaluate()(cond_np, data_np)
-            tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
+    intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
+    op_res = intrp.evaluate()(cond_np, data_np)
+    tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
 
 
 @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
-def test_forward_split_v2(dtype):
-    def verify(data_shape, axis=0, indices_or_sections=1, squeeze_axis=False):
-        data_np = np.random.uniform(size=data_shape).astype(dtype)
-        data = mx.sym.var('data')
-        ref_res = mx.ndarray.split_v2(mx.nd.array(data_np), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis)
-        mx_sym = mx.sym.split_v2(data.as_nd_ndarray(), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis)
-        mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype)
-        for target, ctx in ctx_list():
-            for kind in ["graph", "vm", "debug"]:
-                intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
-                op_res = intrp.evaluate()(data_np)
-                op_res_ = []
-                for arr in op_res:
-                    op_res_.append(arr.asnumpy().tolist())
-                ref_res_ = []
-                for arr in ref_res:
-                    ref_res_.append(arr.asnumpy().tolist())
-                tvm.testing.assert_allclose(op_res_, ref_res_, rtol=1e-5)
-
-    verify((3, 2, 1), axis=1, indices_or_sections=2)
-    verify((3, 2, 1), axis=0, indices_or_sections=3)
-    verify((3, 2, 1), axis=0, indices_or_sections=3, squeeze_axis=True)
-    verify((3, 2, 1), axis=0, indices_or_sections=(1, 2))
+@pytest.mark.parametrize("target, ctx", ctx_list())
+@pytest.mark.parametrize("kind", ["graph", "vm", "debug"])
+@pytest.mark.parametrize("data_shape, axis, indices_or_sections, squeeze_axis",
+    [((3,2,1),1,2,False),((3,2,1),0,3,False),((3,2,1),0,3,True),((3,2,1),0,(1,2),False)])
+def test_forward_split_v2(data_shape, axis, dtype, indices_or_sections, squeeze_axis, target, ctx, kind):
+    data_np = np.random.uniform(size=data_shape).astype(dtype)
+    data = mx.sym.var('data')
+    ref_res = mx.ndarray.split_v2(mx.nd.array(data_np), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis)
+    mx_sym = mx.sym.split_v2(data.as_nd_ndarray(), indices_or_sections, axis=axis, squeeze_axis=squeeze_axis)
+    mod, _ = relay.frontend.from_mxnet(mx_sym, {"data": data_shape}, dtype=dtype)
+    intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target) 
+ op_res = intrp.evaluate()(data_np) + op_res_ = [] + for arr in op_res: + op_res_.append(arr.asnumpy().tolist()) + ref_res_ = [] + for arr in ref_res: + ref_res_.append(arr.asnumpy().tolist()) + tvm.testing.assert_allclose(op_res_, ref_res_, rtol=1e-5) if __name__ == '__main__': - test_forward_mlp() - test_forward_vgg() - test_forward_resnet() - test_forward_leaky_relu() - test_forward_elu() - test_forward_rrelu() - test_forward_prelu() - test_forward_gelu() - test_forward_softrelu() - test_forward_softmin() - test_forward_fc_flatten() - test_forward_clip() - test_forward_split() - test_forward_split_squeeze() - test_forward_expand_dims() - test_forward_pad() - test_forward_slice() - test_forward_pooling() - test_forward_pooling3d() - test_forward_adaptive_pooling() - test_forward_lrn() - test_forward_ones() - test_forward_zeros() - test_forward_ones_like() - test_forward_zeros_like() - test_forward_argmax() - test_forward_argmin() - test_forward_where() - test_forward_arange() - test_forward_broadcast_ops() - test_forward_broadcast_to() - test_forward_logical_not() - test_forward_elemwise_ops() - test_forward_unary_ops() - test_forward_scalar_ops() - test_forward_slice_like() - test_forward_slice_axis() - test_forward_sequence_reverse() - test_forward_l2_normalize() - test_forward_shape_array() - test_forward_squeeze() - test_forward_broadcast_axis() - test_forward_full() - test_forward_embedding() - test_forward_smooth_l1() - test_forward_take() - test_forward_gather_nd() - test_forward_bilinear_resize() - test_forward_rnn_layer() - test_forward_Crop() - test_forward_argsort() - test_forward_topk() - test_forward_sequence_mask() - test_forward_contrib_div_sqrt_dim() - test_forward_batch_norm() - test_forward_instance_norm() - test_forward_layer_norm() - test_forward_one_hot() - test_forward_depth_to_space() - test_forward_space_to_depth() - test_forward_convolution() - test_forward_deconvolution() - test_forward_cond() - test_forward_make_loss() - test_forward_unravel_index() - test_forward_swap_axis() - test_forward_correlation() - test_forward_grid_generator() - test_forward_bilinear_sampler() - test_forward_arange_like() - test_forward_interleaved_matmul_selfatt_qk() - test_forward_interleaved_matmul_selfatt_valatt() - test_forward_box_decode() - test_forward_amp_multicast() - test_forward_amp_cast() - test_forward_softmax() + pytest.main(['test_forward.py']) From bbb9c33a57808c12832f9c426db1a1b96de78379 Mon Sep 17 00:00:00 2001 From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com> Date: Thu, 30 Jul 2020 22:20:34 +0800 Subject: [PATCH 13/16] Update test_forward.py --- tests/python/frontend/mxnet/test_forward.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py index 2a12d64d8389..b596a6e2ab84 100644 --- a/tests/python/frontend/mxnet/test_forward.py +++ b/tests/python/frontend/mxnet/test_forward.py @@ -1534,8 +1534,8 @@ def test_forward_npi_binary(data_shape,dtype,target, ctx, kind): @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(2,2,2,1,2,3,1),(1,8),(2,2),(1,3)]) @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32']) -@pytest.mark.parametrize("scalar", [1.0,2.0,3.0,4.0]) @pytest.mark.parametrize("target, ctx", ctx_list()) +@pytest.mark.parametrize("scalar", [1.0,2.0,3.0,4.0]) @pytest.mark.parametrize("kind", ["graph", "vm", "debug"]) def test_forward_npi_binary_scalar(data_shape,dtype,scalar,target, ctx, kind): ref_ops = [mx.np.power, 
mx.np.multiply, mx.np.add, mx.np.true_divide]

From 7fee8c075fb5434d39d8dc98d6c8729b6147e605 Mon Sep 17 00:00:00 2001
From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com>
Date: Mon, 3 Aug 2020 19:35:35 +0800
Subject: [PATCH 14/16] Update test_forward.py

---
 tests/python/frontend/mxnet/test_forward.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py
index b596a6e2ab84..11211ce0d942 100644
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -1573,8 +1573,8 @@ def test_forward_npi_tanh(data_shape,dtype,target, ctx, kind):
 
 @pytest.mark.skipif(not hasattr(mx.np, 'where'), reason="mx.np.where hasn't been published yet")
 @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2),(1,8),(2,2),(1,3)])
-@pytest.mark.parametrize("cond_dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
 @pytest.mark.parametrize("data_dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
+@pytest.mark.parametrize("cond_dtype", ['float64', 'float32', 'int64', 'int32', 'bool'])
 @pytest.mark.parametrize("scalar", [1.0,2.0])
 @pytest.mark.parametrize("target, ctx", ctx_list())
 @pytest.mark.parametrize("kind", ["graph", "vm", "debug"])

From 45e3e5503a926d079e6d3348f47478b2fd658f13 Mon Sep 17 00:00:00 2001
From: sandyhu533 <64646082+sandyhu533@users.noreply.github.com>
Date: Thu, 20 Aug 2020 18:56:26 +0800
Subject: [PATCH 16/16] Update test_forward.py

---
 tests/python/frontend/mxnet/test_forward.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/python/frontend/mxnet/test_forward.py b/tests/python/frontend/mxnet/test_forward.py
index 11211ce0d942..594ffe72faf0 100644
--- a/tests/python/frontend/mxnet/test_forward.py
+++ b/tests/python/frontend/mxnet/test_forward.py
@@ -1435,7 +1435,8 @@ def test_forward_npi_pad(data_shape, pad_width, mode, dtype, constant_value,targ
     op_res = intrp.evaluate()(data_np)
     tvm.testing.assert_allclose(op_res.asnumpy(), ref_res.asnumpy(), rtol=1e-5)
 
-
+
+@pytest.mark.skipif(not hasattr(mx.sym.np, 'pad'), reason="test will abort with MXNet 1.x; skip for now")
 @pytest.mark.parametrize("data_shape", [(2,2,2),(2,7,2)])
 @pytest.mark.parametrize("dtype", ['float64', 'float32', 'int64', 'int32', 
'bool']) @pytest.mark.parametrize("axes", [(1,0,2),None])
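The skipif guards in [PATCH 10/16] and [PATCH 16/16] share one scheme: probe for the numpy-compatible API once, at collection time, instead of calling pytest.skip() inside the test body. A minimal sketch of that guard, assuming only that mxnet and pytest import cleanly; the test body is an illustrative placeholder, not code from the suite.

    import mxnet as mx
    import pytest

    # Evaluated at collection time: on MXNet builds without mx.np.where the
    # test is reported as skipped, with the reason attached to the report.
    @pytest.mark.skipif(not hasattr(mx.np, 'where'),
                        reason="mx.np.where hasn't been published yet")
    def test_np_where_scalar_rhs():
        cond = mx.np.array([1.0, 0.0, 1.0])
        x = mx.np.array([10.0, 20.0, 30.0])
        res = mx.np.where(cond, x, 2.0)  # scalar broadcast on the false branch
        assert res.shape == x.shape

Relative to an in-body pytest.skip(), the marker keeps the skip decision out of the test logic, and skipped cases still appear in the collected totals together with their reason string.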