Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
[v1.x] Nightly Large Tensor test cherrypicks (#19194) (#19215)
Browse files Browse the repository at this point in the history
* fixing batch_norm and layer_norm for large tensors (#17805)

Co-authored-by: Rohit Kumar Srivastava <srivastava.141@buckeyemail.osu.edu>

* Fix nightly large_vector test failure caused by incorrect with_seed path (#18178)

* add back the missing environment function

Co-authored-by: Rohit Kumar Srivastava <srivastava.141@osu.edu>
Co-authored-by: Rohit Kumar Srivastava <srivastava.141@buckeyemail.osu.edu>

Co-authored-by: Rohit Kumar Srivastava <srivastava.141@osu.edu>
Co-authored-by: Rohit Kumar Srivastava <srivastava.141@buckeyemail.osu.edu>
  • Loading branch information
3 people committed Sep 24, 2020
1 parent 975aa6e commit 7c9046a
Show file tree
Hide file tree
Showing 5 changed files with 4 additions and 27 deletions.
2 changes: 1 addition & 1 deletion src/operator/nn/batch_norm.cc
Original file line number Diff line number Diff line change
Expand Up @@ -374,7 +374,7 @@ static bool BatchNormShape(const nnvm::NodeAttrs& attrs,
: param.axis);
CHECK_LT(channelAxis, dshape.ndim()) << "Channel axis out of range: " << param.axis;

const int channelCount = dshape[channelAxis];
const index_t channelCount = dshape[channelAxis];

in_shape->at(batchnorm::kGamma) = mxnet::TShape(Shape1(channelCount));
in_shape->at(batchnorm::kBeta) = mxnet::TShape(Shape1(channelCount));
Expand Down
2 changes: 1 addition & 1 deletion src/operator/nn/layer_norm.cc
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ static bool LayerNormShape(const nnvm::NodeAttrs& attrs,
CHECK(axis >= 0 && axis < dshape.ndim())
<< "Channel axis out of range: axis=" << param.axis;

const int channelCount = dshape[axis];
const index_t channelCount = dshape[axis];

SHAPE_ASSIGN_CHECK(*in_shape,
layernorm::kGamma,
Expand Down
3 changes: 1 addition & 2 deletions tests/nightly/test_large_array.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,8 @@

from mxnet.test_utils import rand_ndarray, assert_almost_equal, rand_coord_2d, default_context, check_symbolic_forward, create_2d_tensor, get_identity_mat, get_identity_mat_batch
from mxnet import gluon, nd
from common import with_seed, with_post_test_cleanup, assertRaises
from common import with_seed, assertRaises
from mxnet.base import MXNetError
from nose.tools import with_setup
import unittest

# dimension constants
Expand Down
3 changes: 1 addition & 2 deletions tests/nightly/test_large_vector.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,8 @@

from mxnet.test_utils import rand_ndarray, assert_almost_equal, rand_coord_2d, create_vector
from mxnet import gluon, nd
from tests.python.unittest.common import with_seed, assertRaises
from common import with_seed, assertRaises
from mxnet.base import MXNetError
from nose.tools import with_setup
import unittest

# dimension constants
Expand Down
21 changes: 0 additions & 21 deletions tests/python/unittest/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -314,26 +314,6 @@ def teardown():
"""
mx.nd.waitall()


def with_post_test_cleanup():
"""
Helper function that cleans up memory by releasing it from memory pool
Required especially by large tensor tests that have memory footprints in GBs.
"""
def test_helper(orig_test):
@make_decorator(orig_test)
def test_new(*args, **kwargs):
logger = default_logger()
try:
orig_test(*args, **kwargs)
except:
logger.info(test_msg)
raise
finally:
mx.nd.waitall()
mx.cpu().empty_cache()


def with_environment(*args_):
"""
Helper function that takes a dictionary of environment variables and their
Expand All @@ -349,7 +329,6 @@ def test_new(*args, **kwargs):
return test_new
return test_helper


def run_in_spawned_process(func, env, *args):
"""
Helper function to run a test in its own process.
Expand Down

0 comments on commit 7c9046a

Please sign in to comment.