diff --git a/ci/docker/runtime_functions.sh b/ci/docker/runtime_functions.sh
index aab49f28a427..b53db3f980f1 100755
--- a/ci/docker/runtime_functions.sh
+++ b/ci/docker/runtime_functions.sh
@@ -1481,8 +1481,9 @@ nightly_test_installation() {
 nightly_test_imagenet_inference() {
     set -ex
     echo $PWD
-    cp /work/mxnet/build/cpp-package/example/imagenet_inference .
-    /work/mxnet/cpp-package/example/inference/unit_test_imagenet_inference.sh
+    cp /work/mxnet/build/cpp-package/example/imagenet_inference /work/mxnet/cpp-package/example/inference/
+    cd /work/mxnet/cpp-package/example/inference/
+    ./unit_test_imagenet_inference.sh
 }
 
 #Runs a simple MNIST training example
diff --git a/tests/nightly/JenkinsfileForBinaries b/tests/nightly/JenkinsfileForBinaries
index a66159d0075b..af87b2c35658 100755
--- a/tests/nightly/JenkinsfileForBinaries
+++ b/tests/nightly/JenkinsfileForBinaries
@@ -20,7 +20,7 @@
 mx_lib = 'lib/libmxnet.so, lib/libmxnet.a, lib/libtvm_runtime.so, lib/libtvmop.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a'
 mx_cmake_lib = 'build/libmxnet.so, build/libmxnet.a, build/3rdparty/tvm/libtvm_runtime.so, build/libtvmop.so, build/3rdparty/dmlc-core/libdmlc.a, build/tests/mxnet_unit_tests, build/3rdparty/openmp/runtime/src/libomp.so'
-mx_lib_cpp_example = 'lib/libmxnet.so, lib/libmxnet.a, lib/libtvm_runtime.so, lib/libtvmop.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a, build/cpp-package/example/imagenet_inference'
+mx_lib_cpp_example_mkl = 'lib/libmxnet.so, lib/libmxnet.a, lib/libtvm_runtime.so, lib/libtvmop.so, 3rdparty/dmlc-core/libdmlc.a, 3rdparty/tvm/nnvm/lib/libnnvm.a, build/cpp-package/example/imagenet_inference, lib/libmkldnn.so.0, lib/libmklml_intel.so'
 
 node('utility') { // Loading the utilities requires a node context unfortunately
@@ -34,10 +34,10 @@ core_logic: {
   stage('Build') {
     parallel 'GPU: CUDA10.1+cuDNN7': {
       node(NODE_LINUX_CPU) {
-        ws('workspace/build-gpu') {
+        ws('workspace/build-mkldnn-gpu') {
           utils.init_git()
-          utils.docker_run('ubuntu_build_cuda', 'build_ubuntu_gpu_cuda101_cudnn7', false)
-          utils.pack_lib('gpu', mx_lib_cpp_example)
+          utils.docker_run('ubuntu_build_cuda', 'build_ubuntu_gpu_mkldnn', false)
+          utils.pack_lib('gpu', mx_lib_cpp_example_mkl)
         }
       }
     }/*,
@@ -73,7 +73,7 @@ core_logic: {
     'ImageNet Inference: GPU': {
       node(NODE_LINUX_GPU) {
         ws('workspace/nt-ImageInferenceTest') {
-          utils.unpack_and_init('gpu', mx_lib_cpp_example)
+          utils.unpack_and_init('gpu', mx_lib_cpp_example_mkl)
           utils.docker_run('ubuntu_nightly_gpu', 'nightly_test_imagenet_inference', true)
         }
       }