speedup global test (#8468)
* speedup global test

* Refine slice ops test (#8471)

* refine consistent_slice test from 112s -> 30s on 4 devices

* test(SliceUpdate): refine test from 119s -> 28s on 4 devices

* delete useless code

* auto format by CI

Co-authored-by: Yinggang Wang <wyg19970408@gmail.com>
Co-authored-by: wyg1997 <wangyinggang@foxmail.com>
Co-authored-by: oneflow-ci-bot <ci-bot@oneflow.org>
4 people authored Jun 23, 2022
1 parent 64e6e4d commit 8238431
Showing 5 changed files with 33 additions and 32 deletions.
python/oneflow/test/modules/test_consistent_adaptive_pool.py (3 additions, 3 deletions)

@@ -65,12 +65,12 @@ class TestAdaptiveAvgPool(flow.unittest.TestCase):
     def test_adaptive_avgpool(test_case):
         for placement in all_placement():
             ndim = 3
-            for sbp in all_sbp(placement, max_dim=ndim):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_adaptive_avgpoolnd(test_case, ndim, 1, placement, sbp)
                 _test_adaptive_avgpoolnd_functional(test_case, ndim, 1, placement, sbp)
 
             ndim = 4
-            for sbp in all_sbp(placement, max_dim=ndim):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_adaptive_avgpoolnd(test_case, ndim, 2, placement, sbp)
                 _test_adaptive_avgpoolnd_functional(test_case, ndim, 2, placement, sbp)

@@ -81,7 +81,7 @@ def test_adaptive_avgpool(test_case):
             ):
                 continue
             ndim = 5
-            for sbp in all_sbp(placement, max_dim=ndim):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_adaptive_avgpoolnd(test_case, ndim, 3, placement, sbp)
                 _test_adaptive_avgpoolnd_functional(test_case, ndim, 3, placement, sbp)
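Note on the change above: all_sbp(placement, max_dim=k) enumerates SBP signatures whose split axes lie below k, so capping max_dim at 2 instead of ndim shrinks the combination space regardless of the tensor's rank. A back-of-the-envelope sketch in plain Python (not OneFlow's actual enumeration; the per-axis choice count is an assumption):

# Assumed model: each axis of a 2D rank mesh can take split(0..max_dim-1),
# broadcast, or partial_sum, giving (max_dim + 2) ** 2 combinations.
def approx_sbp_combinations(max_dim, mesh_ndim=2):
    choices = max_dim + 2
    return choices ** mesh_ndim

print(approx_sbp_combinations(3))  # old max_dim=ndim=3 -> ~25 combinations
print(approx_sbp_combinations(2))  # new max_dim=2      -> ~16 combinations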
python/oneflow/test/modules/test_consistent_rnn_cell.py (4 additions, 4 deletions)

@@ -22,7 +22,7 @@
 from oneflow.test_utils.automated_test_util import *
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_lstm_cell(test_case, placement, sbp):
     batch_size = random(2, 3) * 8
     time_steps = random(2, 3) * 8

@@ -68,7 +68,7 @@ def _test_lstm_cell(test_case, placement, sbp):
     return res[0]
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_rnn_relu_cell(test_case, placement, sbp):
     batch_size = random(2, 3) * 8
     time_steps = random(2, 3) * 8

@@ -112,7 +112,7 @@ def _test_rnn_relu_cell(test_case, placement, sbp):
     return hx
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_rnn_tanh_cell(test_case, placement, sbp):
     batch_size = random(2, 3) * 8
     time_steps = random(2, 3) * 8

@@ -156,7 +156,7 @@ def _test_rnn_tanh_cell(test_case, placement, sbp):
     return hx
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_gru_cell(test_case, placement, sbp):
     batch_size = random(2, 3) * 8
     time_steps = random(2, 3) * 8
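The only change in this file is dropping the autotest repetition count from n=2 to n=1, which roughly halves the file's runtime. A hypothetical, stripped-down stand-in for the decorator's n knob (the real OneFlow decorator also records ops and compares results against PyTorch, none of which is reproduced here):

import functools

def autotest(n=1, check_graph=False):
    # Toy stand-in: rerun the test body n times; each run would redraw
    # random shapes and inputs in the real framework.
    def deco(fn):
        @functools.wraps(fn)
        def runner(*args, **kwargs):
            out = None
            for _ in range(n):
                out = fn(*args, **kwargs)
            return out
        return runner
    return deco

@autotest(n=1, check_graph=False)
def _demo():
    return "ran once"

print(_demo())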
python/oneflow/test/modules/test_consistent_slice.py (10 additions, 2 deletions)

@@ -99,7 +99,11 @@ def _test_slice_with_bool(test_case, placement, sbp):
     test_case.assertTrue(np.array_equal(y.numpy(), x_numpy[0:1:1]))
 
 
-def _test_slice_with_grad(test_case, placement, sbp):
+@autotest(
+    n=2, auto_backward=False, check_graph=False,
+)
+def _test_slice_with_grad(test_case, placement):
+    sbp = random_sbp(placement, max_dim=2).value()
     x = random_tensor(2, 8, 16, requires_grad=True).oneflow
     x_numpy = x.detach().cpu().numpy()
 

@@ -157,7 +161,11 @@ def test_slice(test_case):
             _test_negative_index(test_case, placement, sbp)
             _test_slice_ellipsis_type(test_case, placement, sbp)
             _test_slice_with_bool(test_case, placement, sbp)
-            _test_slice_with_grad(test_case, placement, sbp)
+
+    @globaltest
+    def test_graph_slice(test_case):
+        for placement in all_placement():
+            _test_slice_with_grad(test_case, placement)
 
 
 if __name__ == "__main__":
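The refactor above stops enumerating every SBP layout in the caller and instead lets the decorated test draw one layout per run via random_sbp(placement, max_dim=2).value(). A toy sketch of the enumerate-to-sample trade-off (all_sbp_stub and random_sbp_stub are stand-ins, not OneFlow's test utilities):

import random

def all_sbp_stub(max_dim):
    # Toy layout universe: one split per dim, plus broadcast and partial_sum.
    return [f"split({i})" for i in range(max_dim)] + ["broadcast", "partial_sum"]

def random_sbp_stub(max_dim):
    return random.choice(all_sbp_stub(max_dim))

placements = ["mesh_a", "mesh_b"]  # toy placements
exhaustive = [(p, s) for p in placements for s in all_sbp_stub(2)]
sampled = [(p, random_sbp_stub(2)) for p in placements for _ in range(2)]
print(len(exhaustive), "->", len(sampled))  # 8 test bodies -> 4 test bodies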
python/oneflow/test/modules/test_consistent_slice_update.py (7 additions, 6 deletions)

@@ -119,12 +119,13 @@ class TestGlobalSliceUpdate(flow.unittest.TestCase):
     @globaltest
     def test_slice_update(test_case):
         for placement in all_placement():
-            for sbp in all_sbp(placement, max_dim=2):
-                # TODO(wyg): It will be infer all broadcast sbp when 1n1d,
-                #   slice_update will get error when doing inplace operator.
-                #   Remove this judgement after refactor sbp infer method in Operator class.
-                if placement.ranks.size == 1:
-                    continue
+            # TODO(wyg): It will be infer all broadcast sbp when 1n1d,
+            #   slice_update will get error when doing inplace operator.
+            #   Remove this judgement after refactor sbp infer method in Operator class.
+            if placement.ranks.size == 1:
+                continue
+            for _ in range(2):
+                sbp = random_sbp(placement, max_dim=2).value()
                 _test_slice_update(test_case, placement, sbp)
                 _test_graph_slice_update(test_case, placement, sbp)
 
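Two things happen above: the single-rank skip is hoisted out of the inner loop (it depends only on the placement), and the exhaustive all_sbp loop becomes two random_sbp samples. Minimal shape of the hoist, with SimpleNamespace standing in for oneflow.placement:

import random
from types import SimpleNamespace

placements = [SimpleNamespace(ranks=SimpleNamespace(size=n)) for n in (1, 4)]

for placement in placements:
    if placement.ranks.size == 1:  # checked once per placement, not per sbp
        continue
    for _ in range(2):  # two sampled layouts instead of the full enumeration
        sbp = random.choice(["split(0)", "split(1)", "broadcast"])
        print(placement.ranks.size, sbp)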
python/oneflow/test/modules/test_consistent_var.py (9 additions, 17 deletions)

@@ -25,28 +25,20 @@
 @autotest(n=1, check_graph=False)
 def _test_flow_global_var_all_dim_with_random_data(test_case, placement, sbp):
     x = random_tensor(
-        ndim=4,
-        dim0=random(1, 3).to(int) * 8,
-        dim1=random(1, 3).to(int) * 8,
-        dim2=random(1, 3).to(int) * 8,
-        dim3=random(1, 3).to(int) * 8,
+        ndim=2, dim0=random(1, 3).to(int) * 8, dim1=random(1, 3).to(int) * 8,
     ).to_global(placement, sbp)
     y = torch.var(x)
     return y
 
 
-@autotest(n=2, check_graph=False)
+@autotest(n=1, check_graph=False)
 def _test_flow_global_var_one_dim_with_random_data(test_case, placement, sbp):
     x = random_tensor(
-        ndim=4,
-        dim0=random(1, 3).to(int) * 8,
-        dim1=random(1, 3).to(int) * 8,
-        dim2=random(1, 3).to(int) * 8,
-        dim3=random(1, 3).to(int) * 8,
+        ndim=2, dim0=random(1, 3).to(int) * 8, dim1=random(1, 3).to(int) * 8,
     ).to_global(placement, sbp)
     y = torch.var(
         x,
-        dim=random(low=0, high=4).to(int),
+        dim=random(low=0, high=2).to(int),
         unbiased=random().to(bool),
         keepdim=random().to(bool),
     )

@@ -55,10 +47,10 @@ def _test_flow_global_var_one_dim_with_random_data(test_case, placement, sbp):
 
 @autotest(n=1, auto_backward=True, check_graph=False)
 def _test_flow_var_0_size_data_with_random_data(test_case, placement, sbp):
-    x = random_tensor(4, 8, 16, 0, 8).to_global(placement, sbp)
+    x = random_tensor(3, 8, 0, 8).to_global(placement, sbp)
     y = torch.var(
         x,
-        dim=random(low=0, high=4).to(int),
+        dim=random(low=0, high=3).to(int),
         unbiased=random().to(bool),
         keepdim=random().to(bool),
     )

@@ -69,23 +61,23 @@ class TestVar(flow.unittest.TestCase):
     @globaltest
     def test_flow_global_var_all_dim_with_random_data(test_case):
         for placement in all_placement():
-            for sbp in all_sbp(placement, max_dim=4):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_flow_global_var_all_dim_with_random_data(
                     test_case, placement, sbp
                 )
 
     @globaltest
     def test_flow_global_var_one_dim_with_random_data(test_case):
         for placement in all_placement():
-            for sbp in all_sbp(placement, max_dim=4):
+            for sbp in all_sbp(placement, max_dim=2):
                 _test_flow_global_var_one_dim_with_random_data(
                     test_case, placement, sbp
                 )
 
     @globaltest
     def test_flow_var_0_size_data_with_random_data(test_case):
         for placement in all_placement():
-            for sbp in all_sbp(placement, max_dim=4, valid_split_axis=[0, 1, 3]):
+            for sbp in all_sbp(placement, max_dim=2, valid_split_axis=[0]):
                 _test_flow_var_0_size_data_with_random_data(test_case, placement, sbp)
 
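Shrinking the test tensors from ndim=4 to ndim=2 (and to ndim=3 for the 0-size case) cuts the data volume far more than the per-dim change alone suggests. Rough worst-case element counts, assuming random(1, 3).to(int) * 8 yields at most 16 per dim (24 if the upper bound is inclusive):

worst_dim = 16
print(worst_dim ** 4)  # old worst case: 65536 elements per test tensor
print(worst_dim ** 2)  # new worst case: 256 elements per test tensor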
