diff --git a/python/oneflow/test/modules/test_consistent_adaptive_pool.py b/python/oneflow/test/modules/test_consistent_adaptive_pool.py
index 88f58934bc8..89f90a2d675 100644
--- a/python/oneflow/test/modules/test_consistent_adaptive_pool.py
+++ b/python/oneflow/test/modules/test_consistent_adaptive_pool.py
@@ -65,12 +65,12 @@ class TestAdaptiveAvgPool(flow.unittest.TestCase):
     def test_adaptive_avgpool(test_case):
         for placement in all_placement():
             ndim = 3
-            for sbp in all_sbp(placement, max_dim=ndim):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_adaptive_avgpoolnd(test_case, ndim, 1, placement, sbp)
                 _test_adaptive_avgpoolnd_functional(test_case, ndim, 1, placement, sbp)
 
             ndim = 4
-            for sbp in all_sbp(placement, max_dim=ndim):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_adaptive_avgpoolnd(test_case, ndim, 2, placement, sbp)
                 _test_adaptive_avgpoolnd_functional(test_case, ndim, 2, placement, sbp)
 
@@ -81,7 +81,7 @@ def test_adaptive_avgpool(test_case):
             ):
                 continue
             ndim = 5
-            for sbp in all_sbp(placement, max_dim=ndim):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_adaptive_avgpoolnd(test_case, ndim, 3, placement, sbp)
                 _test_adaptive_avgpoolnd_functional(test_case, ndim, 3, placement, sbp)
 
diff --git a/python/oneflow/test/modules/test_consistent_rnn_cell.py b/python/oneflow/test/modules/test_consistent_rnn_cell.py
index 8ab9a42454d..41fdf87ed17 100644
--- a/python/oneflow/test/modules/test_consistent_rnn_cell.py
+++ b/python/oneflow/test/modules/test_consistent_rnn_cell.py
@@ -22,7 +22,7 @@
 from oneflow.test_utils.automated_test_util import *
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_lstm_cell(test_case, placement, sbp):
     batch_size = random(2, 3) * 8
     time_steps = random(2, 3) * 8
@@ -68,7 +68,7 @@ def _test_lstm_cell(test_case, placement, sbp):
     return res[0]
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_rnn_relu_cell(test_case, placement, sbp):
     batch_size = random(2, 3) * 8
     time_steps = random(2, 3) * 8
@@ -112,7 +112,7 @@ def _test_rnn_relu_cell(test_case, placement, sbp):
     return hx
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_rnn_tanh_cell(test_case, placement, sbp):
     batch_size = random(2, 3) * 8
     time_steps = random(2, 3) * 8
@@ -156,7 +156,7 @@ def _test_rnn_tanh_cell(test_case, placement, sbp):
     return hx
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_gru_cell(test_case, placement, sbp):
    batch_size = random(2, 3) * 8
    time_steps = random(2, 3) * 8
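The four hunks above in test_consistent_rnn_cell.py only lower the @autotest trial count from n=2 to n=1. A minimal sketch of what that parameter costs, using a hypothetical stand-in decorator (the real @autotest from oneflow.test_utils.automated_test_util also builds paired torch/oneflow modules and compares their outputs; here only the trial count is modeled): n is the number of randomized comparison runs per call, so halving it halves the work for every placement/SBP pair the caller sweeps.

import functools

def autotest_sketch(n=1, check_graph=False):
    # Hypothetical stand-in for @autotest: model only the trial count.
    def wrap(fn):
        @functools.wraps(fn)
        def run(*args, **kwargs):
            for _ in range(n):  # each trial redraws random shapes and inputs
                fn(*args, **kwargs)
        return run
    return wrap

@autotest_sketch(n=1)  # was n=2 above: one randomized trial instead of two
def _trial():
    pass

_trial()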
diff --git a/python/oneflow/test/modules/test_consistent_slice.py b/python/oneflow/test/modules/test_consistent_slice.py
index 55ea1752165..0a7422d3f63 100644
--- a/python/oneflow/test/modules/test_consistent_slice.py
+++ b/python/oneflow/test/modules/test_consistent_slice.py
@@ -99,7 +99,11 @@ def _test_slice_with_bool(test_case, placement, sbp):
     test_case.assertTrue(np.array_equal(y.numpy(), x_numpy[0:1:1]))
 
 
-def _test_slice_with_grad(test_case, placement, sbp):
+@autotest(
+    n=2, auto_backward=False, check_graph=False,
+)
+def _test_slice_with_grad(test_case, placement):
+    sbp = random_sbp(placement, max_dim=2).value()
     x = random_tensor(2, 8, 16, requires_grad=True).oneflow
     x_numpy = x.detach().cpu().numpy()
 
@@ -157,7 +161,11 @@ def test_slice(test_case):
             _test_negative_index(test_case, placement, sbp)
             _test_slice_ellipsis_type(test_case, placement, sbp)
             _test_slice_with_bool(test_case, placement, sbp)
-            _test_slice_with_grad(test_case, placement, sbp)
+
+    @globaltest
+    def test_graph_slice(test_case):
+        for placement in all_placement():
+            _test_slice_with_grad(test_case, placement)
 
 
 if __name__ == "__main__":
diff --git a/python/oneflow/test/modules/test_consistent_slice_update.py b/python/oneflow/test/modules/test_consistent_slice_update.py
index 0c09f38f3eb..e1acb85b0f1 100644
--- a/python/oneflow/test/modules/test_consistent_slice_update.py
+++ b/python/oneflow/test/modules/test_consistent_slice_update.py
@@ -119,12 +119,13 @@ class TestGlobalSliceUpdate(flow.unittest.TestCase):
     @globaltest
     def test_slice_update(test_case):
         for placement in all_placement():
-            for sbp in all_sbp(placement, max_dim=2):
-                # TODO(wyg): It will be infer all broadcast sbp when 1n1d,
-                # slice_update will get error when doing inplace operator.
-                # Remove this judgement after refactor sbp infer method in Operator class.
-                if placement.ranks.size == 1:
-                    continue
+            # TODO(wyg): It will be infer all broadcast sbp when 1n1d,
+            # slice_update will get error when doing inplace operator.
+            # Remove this judgement after refactor sbp infer method in Operator class.
+            if placement.ranks.size == 1:
+                continue
+            for _ in range(2):
+                sbp = random_sbp(placement, max_dim=2).value()
                 _test_slice_update(test_case, placement, sbp)
                 _test_graph_slice_update(test_case, placement, sbp)
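The test_consistent_slice.py and test_consistent_slice_update.py hunks above swap exhaustive all_sbp enumeration for a fixed number of random_sbp draws. A minimal sketch of that trade, assuming only that all_sbp yields every valid signature up to max_dim while random_sbp draws one of them (the sketch names mirror the real helpers but are placeholders, not the OneFlow implementations):

import random

def all_sbp_sketch(placement, max_dim):
    # Assumed pool: split along each of the first max_dim tensor dims,
    # plus broadcast and partial-sum.
    return ["S(%d)" % i for i in range(max_dim)] + ["B", "P"]

def random_sbp_sketch(placement, max_dim):
    # Assumption: draw one signature uniformly from the same pool.
    return random.choice(all_sbp_sketch(placement, max_dim))

placement = "2x1 ranks"  # placeholder; the tests pass a real flow.placement
print(len(all_sbp_sketch(placement, max_dim=2)))   # enumeration: 4 trials here
for _ in range(2):                                 # sampling: always 2 trials
    print(random_sbp_sketch(placement, max_dim=2))

Sampling keeps the per-placement cost constant no matter how many signatures exist, at the price of covering a random subset of them on each CI run.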
diff --git a/python/oneflow/test/modules/test_consistent_var.py b/python/oneflow/test/modules/test_consistent_var.py
index faf9f7e2427..5bd3f2a8a8f 100644
--- a/python/oneflow/test/modules/test_consistent_var.py
+++ b/python/oneflow/test/modules/test_consistent_var.py
@@ -25,28 +25,20 @@
 @autotest(n=1, check_graph=False)
 def _test_flow_global_var_all_dim_with_random_data(test_case, placement, sbp):
     x = random_tensor(
-        ndim=4,
-        dim0=random(1, 3).to(int) * 8,
-        dim1=random(1, 3).to(int) * 8,
-        dim2=random(1, 3).to(int) * 8,
-        dim3=random(1, 3).to(int) * 8,
+        ndim=2, dim0=random(1, 3).to(int) * 8, dim1=random(1, 3).to(int) * 8,
     ).to_global(placement, sbp)
     y = torch.var(x)
     return y
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_flow_global_var_one_dim_with_random_data(test_case, placement, sbp):
     x = random_tensor(
-        ndim=4,
-        dim0=random(1, 3).to(int) * 8,
-        dim1=random(1, 3).to(int) * 8,
-        dim2=random(1, 3).to(int) * 8,
-        dim3=random(1, 3).to(int) * 8,
+        ndim=2, dim0=random(1, 3).to(int) * 8, dim1=random(1, 3).to(int) * 8,
     ).to_global(placement, sbp)
     y = torch.var(
         x,
-        dim=random(low=0, high=4).to(int),
+        dim=random(low=0, high=2).to(int),
         unbiased=random().to(bool),
         keepdim=random().to(bool),
     )
@@ -55,10 +47,10 @@ def _test_flow_global_var_one_dim_with_random_data(test_case, placement, sbp):
 
 @autotest(n=1, auto_backward=True, check_graph=False)
 def _test_flow_var_0_size_data_with_random_data(test_case, placement, sbp):
-    x = random_tensor(4, 8, 16, 0, 8).to_global(placement, sbp)
+    x = random_tensor(3, 8, 0, 8).to_global(placement, sbp)
     y = torch.var(
         x,
-        dim=random(low=0, high=4).to(int),
+        dim=random(low=0, high=3).to(int),
         unbiased=random().to(bool),
         keepdim=random().to(bool),
     )
@@ -69,7 +61,7 @@ class TestVar(flow.unittest.TestCase):
     @globaltest
     def test_flow_global_var_all_dim_with_random_data(test_case):
         for placement in all_placement():
-            for sbp in all_sbp(placement, max_dim=4):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_flow_global_var_all_dim_with_random_data(
                     test_case, placement, sbp
                 )
@@ -77,7 +69,7 @@ def test_flow_global_var_all_dim_with_random_data(test_case):
     @globaltest
     def test_flow_global_var_one_dim_with_random_data(test_case):
         for placement in all_placement():
-            for sbp in all_sbp(placement, max_dim=4):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_flow_global_var_one_dim_with_random_data(
                     test_case, placement, sbp
                 )
@@ -85,7 +77,7 @@ def test_flow_global_var_one_dim_with_random_data(test_case):
     @globaltest
     def test_flow_var_0_size_data_with_random_data(test_case):
         for placement in all_placement():
-            for sbp in all_sbp(placement, max_dim=4, valid_split_axis=[0, 1, 3]):
+            for sbp in all_sbp(placement, max_dim=2, valid_split_axis=[0]):
                 _test_flow_var_0_size_data_with_random_data(test_case, placement, sbp)
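The test_consistent_var.py changes shrink the tensors from 4-D to 2-D and cap the SBP sweep at max_dim=2. A back-of-envelope model of why the cap matters, assuming each hierarchy axis of a placement can choose a split along any of max_dim tensor dims, broadcast, or partial-sum (the exact candidate set in OneFlow differs; the growth rate is the point):

def sweep_size(hierarchy_axes, max_dim):
    # Assumed model: per hierarchy axis, max_dim split choices plus
    # broadcast and partial-sum, combined multiplicatively across axes.
    per_axis = max_dim + 2
    return per_axis ** hierarchy_axes

for max_dim in (4, 2):
    print(max_dim, sweep_size(2, max_dim))  # 2-D hierarchy: 36 vs. 16 combos

Under this model, a 2-D rank hierarchy drops from 36 to 16 enumerated combinations, before the smaller tensors reduce the cost of each trial further.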