
Merge pull request #81 from GATEOverflow/mlperf-inference
Merge from go
arjunsuresh authored Jun 20, 2024
2 parents 1d5df2c + 7cf9819 commit 5d29bbb
Showing 5 changed files with 95 additions and 7 deletions.
76 changes: 75 additions & 1 deletion script/app-mlperf-inference-intel/_cm.yaml
@@ -97,7 +97,7 @@ deps:
names:
- resnet50-model
- ml-model
-   tags: get,ml-model,resnet50,_fp32,_onnx,_from-tf
+   tags: get,ml-model,resnet50,_fp32,_pytorch

- enable_if_env:
CM_MODEL:
@@ -310,6 +310,80 @@ variations:
- pip-package
- optimum

resnet50,pytorch:
adr:
conda-package:
tags: _name.resnet50-pt
deps:
- tags: get,conda,_name.resnet50-pt
- tags: get,python,_conda.resnet50-pt
adr:
conda-python:
version: "3.9"
- names:
- conda-package
- mkl
tags: get,generic,conda-package,_package.mkl,_source.conda-forge
enable_if_env:
CM_MLPERF_INFERENCE_CODE_VERSION:
- v4.0
- names:
- conda-package
- mkl-include
tags: get,generic,conda-package,_package.mkl-include,_source.intel
- names:
- conda-package
- llvm-openmp
tags: get,generic,conda-package,_package.llvm-openmp,_source.conda-forge
- names:
- conda-package
- ncurses
tags: get,generic,conda-package,_package.ncurses,_source.conda-forge
- names:
- conda-package
- ncurses
tags: get,generic,conda-package,_package.ncurses,_source.conda-forge
- tags: get,generic-sys-util,_numactl
- tags: get,generic,conda-package,_package.jemalloc,_source.conda-forge
names:
- conda-package
- jemalloc
- tags: install,ipex,from.src,_for-intel-mlperf-inference-v3.1-resnet50
names:
- ipex-from-src
- tags: get,generic,conda-package,_package.ninja
names:
- conda-package
- ninja
- tags: get,mlcommons,inference,src
names:
- inference-src
- tags: get,mlcommons,inference,loadgen,_custom-python
names:
- inference-loadgen
env:
CM_PYTHON_BIN_WITH_PATH: "<<<CM_CONDA_BIN_PATH>>>/python3"
- tags: get,generic-python-lib,_custom-python,_package.torch,_url.git+https://github.com/pytorch/pytorch.git@927dc662386af052018212c7d01309a506fc94cd
enable_if_env:
CM_MLPERF_INFERENCE_CODE_VERSION:
- v3.1
env:
CM_PYTHON_BIN_WITH_PATH: "<<<CM_CONDA_BIN_PATH>>>/python3"
"+ CXXFLAGS":
- "-Wno-nonnull"
- "-Wno-maybe-uninitialized"
- "-Wno-uninitialized"
- "-Wno-free-nonheap-object"
- tags: get,generic-python-lib,_custom-python,_package.torch
env:
CM_GENERIC_PYTHON_PIP_EXTRA_INDEX_URL: https://download.pytorch.org/whl/cpu
enable_if_env:
CM_MLPERF_INFERENCE_CODE_VERSION:
- 'v4.0'
- tags: install,intel-neural-speed,_for-intel-mlperf-inference-v4.0-gptj,_branch.mlperf-v4-0
enable_if_env:
CM_MLPERF_INFERENCE_CODE_VERSION:
- 'v4.0'
gptj_,pytorch:
adr:
conda-package:
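The two _package.torch dependencies in the resnet50,pytorch variation above are gated on CM_MLPERF_INFERENCE_CODE_VERSION: the v3.1 path builds PyTorch from the pinned upstream commit (hence the extra CXXFLAGS), while the v4.0 path pulls a CPU-only wheel through the extra index URL. A rough sketch of the pip calls they resolve to follows; the explicit commands and the direct use of CM_PYTHON_BIN_WITH_PATH are illustrative assumptions, not something this commit runs verbatim.

    # Sketch only: approximate pip equivalents of the two torch deps above
    # (CM_PYTHON_BIN_WITH_PATH is assumed to point at the conda Python that CM sets up)

    # v3.1: build torch from the pinned commit (a full from-source build)
    "${CM_PYTHON_BIN_WITH_PATH}" -m pip install \
        "git+https://github.com/pytorch/pytorch.git@927dc662386af052018212c7d01309a506fc94cd"

    # v4.0: install the CPU-only wheel from the PyTorch index
    "${CM_PYTHON_BIN_WITH_PATH}" -m pip install torch \
        --extra-index-url https://download.pytorch.org/whl/cpu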
12 changes: 12 additions & 0 deletions script/app-mlperf-inference-mlcommons-python/_cm.yaml
@@ -612,6 +612,9 @@ variations:
tags: _NCHW
ml-model:
tags: raw,_onnx
numpy:
version_max: "1.26.4"
version_max_usable: "1.26.4"
env:
CM_MLPERF_BACKEND: onnxruntime

@@ -633,6 +636,9 @@ variations:
tags: _NCHW
ml-model:
tags: raw,_pytorch
numpy:
version_max: "1.26.4"
version_max_usable: "1.26.4"
env:
CM_MLPERF_BACKEND: pytorch
CM_MLPERF_BACKEND_VERSION: <<<CM_TORCH_VERSION>>>
@@ -980,6 +986,8 @@ variations:
- tags: get,generic-python-lib,_tqdm
- tags: get,generic-python-lib,_onnx
- tags: get,generic-python-lib,_numpy
names:
- numpy
- tags: get,generic-python-lib,_package.torchrec
- tags: get,generic-python-lib,_package.pyre-extensions
- tags: get,generic-python-lib,_package.torchsnapshot
@@ -1008,6 +1016,8 @@ variations:
deps:
- tags: get,generic-python-lib,_opencv-python
- tags: get,generic-python-lib,_numpy
names:
- numpy
- tags: get,generic-python-lib,_pycocotools

env:
@@ -1024,6 +1034,8 @@ variations:
deps:
- tags: get,generic-python-lib,_opencv-python
- tags: get,generic-python-lib,_numpy
names:
- numpy
- tags: get,generic-python-lib,_pycocotools
prehook_deps:
- tags: get,generic-python-lib,_protobuf
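The version_max / version_max_usable entries added here cap NumPy at 1.26.4 for the onnxruntime and pytorch backends, and the new names: numpy labels are what let that cap attach to the generic _numpy dependency (presumably to stay off NumPy 2.x). Inside the Python environment CM manages, the constraint amounts to roughly the following sketch, with CM_PYTHON_BIN_WITH_PATH assumed to be set as in the other scripts:

    # Sketch: what the numpy version_max pin effectively resolves to
    "${CM_PYTHON_BIN_WITH_PATH}" -m pip install "numpy<=1.26.4"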
3 changes: 3 additions & 0 deletions script/app-mlperf-inference-nvidia/_cm.yaml
@@ -416,6 +416,9 @@ variations:
- tags: get,generic-python-lib,_package.nibabel
- tags: get,generic-python-lib,_pandas
version_max: "1.5.3"
- tags: get,generic-python-lib,_package.onnx
version: 1.13.1
- tags: get,generic-python-lib,_onnx-graphsurgeon

3d-unet-99:
group: model
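The NVIDIA 3d-unet preprocessing path now pins onnx to 1.13.1 next to the existing pandas cap. A plain-pip illustration of the pinned pair is below; it is an illustration only, since CM installs these through its generic-python-lib script rather than a single command:

    # Illustration only: the version pins carried by the deps above
    python3 -m pip install "onnx==1.13.1" "pandas<=1.5.3"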
3 changes: 0 additions & 3 deletions script/app-mlperf-inference/_cm.yaml
@@ -1225,9 +1225,6 @@ variations:


invalid_variation_combinations:
-   -
-     - resnet50
-     - pytorch
    -
      - retinanet
      - tf
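Removing the resnet50 / pytorch pair from invalid_variation_combinations is what lets the top-level wrapper accept that pairing at all; before this change the run would have been rejected before reaching the Intel implementation added above. A hypothetical invocation this unblocks is sketched below (the flags follow the usual run-mlperf wrapper and are assumptions, not taken from this commit):

    # Hypothetical: resnet50 with the pytorch backend is no longer rejected up front
    cm run script --tags=run-mlperf,inference \
        --model=resnet50 --backend=pytorch --implementation=intel \
        --device=cpu --execution_mode=test --quiet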
8 changes: 5 additions & 3 deletions script/get-mlperf-inference-loadgen/run.sh
@@ -44,12 +44,14 @@ PYTHON_MINOR_VERSION=${PYTHON_SHORT_VERSION#*.}
MLPERF_INFERENCE_PYTHON_SITE_BASE=${INSTALL_DIR}"/python"

cd "${CM_MLPERF_INFERENCE_SOURCE}/loadgen"
-   CFLAGS="-std=c++14 -O3" ${CM_PYTHON_BIN_WITH_PATH} setup.py bdist_wheel
-   ${CM_PYTHON_BIN_WITH_PATH} -m pip install --force-reinstall `ls dist/mlperf_loadgen-*cp3${PYTHON_MINOR_VERSION}*.whl` --target="${MLPERF_INFERENCE_PYTHON_SITE_BASE}"
+   #CFLAGS="-std=c++14 -O3" ${CM_PYTHON_BIN_WITH_PATH} setup.py bdist_wheel
+   #${CM_PYTHON_BIN_WITH_PATH} -m pip install --force-reinstall `ls dist/mlperf_loadgen-*cp3${PYTHON_MINOR_VERSION}*.whl` --target="${MLPERF_INFERENCE_PYTHON_SITE_BASE}"
+   ${CM_PYTHON_BIN_WITH_PATH} -m pip install .

if [ "${?}" != "0" ]; then exit 1; fi

# Clean the built wheel
-   find . -name 'mlperf_loadgen*.whl' | xargs rm
+   find . -name 'mlcommons_loadgen*.whl' | xargs rm

echo "******************************************************"
echo "Loadgen is built and installed to ${INSTALL_DIR} ..."
